author     Charles Reiss <woggle@apache.org>    2013-07-10 17:59:43 -0700
committer  Charles Reiss <woggle@apache.org>    2013-07-12 12:58:25 -0700
commit     531a7e55745ad76d17d41f80dbdea0072acce079
tree       0671b6ffa1d506b17f9f0875219c718dff6cd32d
parent     cd7259b4b8d8abbff6db963fd8f84d4bd0b3737b
Pass executor env vars (e.g. SPARK_CLASSPATH) to compute-classpath.
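The change below gives Utils.executeAndGetOutput an optional extraEnvironment parameter and has the worker's ExecutorRunner forward the application's environment map when it invokes the compute-classpath script, so variables such as SPARK_CLASSPATH set for the executor are visible there. As a minimal standalone sketch of the underlying technique only (not the patched Spark code itself), injecting extra variables into a child process via java.lang.ProcessBuilder looks roughly like this:

    import scala.collection.Map
    import scala.io.Source

    object EnvExample {
      // Sketch: run a command with extra environment variables and return its stdout.
      def runWithEnv(command: Seq[String],
                     extraEnvironment: Map[String, String] = Map.empty): String = {
        val builder = new ProcessBuilder(command: _*)  // Java varargs, as in Utils
        val environment = builder.environment()        // mutable view of the child's env
        for ((key, value) <- extraEnvironment) {
          environment.put(key, value)
        }
        val process = builder.start()
        val output = Source.fromInputStream(process.getInputStream).mkString
        process.waitFor()
        output
      }
    }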
Diffstat (limited to 'core/src/main')
-rw-r--r--  core/src/main/scala/spark/Utils.scala                          12
-rw-r--r--  core/src/main/scala/spark/deploy/worker/ExecutorRunner.scala    4
2 files changed, 12 insertions, 4 deletions
diff --git a/core/src/main/scala/spark/Utils.scala b/core/src/main/scala/spark/Utils.scala
index 64547bbdcd..d2bf151cbf 100644
--- a/core/src/main/scala/spark/Utils.scala
+++ b/core/src/main/scala/spark/Utils.scala
@@ -6,6 +6,7 @@ import java.util.{Locale, Random, UUID}
 import java.util.concurrent.{ConcurrentHashMap, Executors, ThreadFactory, ThreadPoolExecutor}
 import java.util.regex.Pattern
 
+import scala.collection.Map
 import scala.collection.mutable.{ArrayBuffer, HashMap}
 import scala.collection.JavaConversions._
 import scala.io.Source
@@ -545,10 +546,15 @@ private object Utils extends Logging {
   /**
    * Execute a command and get its output, throwing an exception if it yields a code other than 0.
    */
-  def executeAndGetOutput(command: Seq[String], workingDir: File = new File(".")): String = {
-    val process = new ProcessBuilder(command: _*)
+  def executeAndGetOutput(command: Seq[String], workingDir: File = new File("."),
+                          extraEnvironment: Map[String, String] = Map.empty): String = {
+    val builder = new ProcessBuilder(command: _*)
       .directory(workingDir)
-      .start()
+    val environment = builder.environment()
+    for ((key, value) <- extraEnvironment) {
+      environment.put(key, value)
+    }
+    val process = builder.start()
     new Thread("read stderr for " + command(0)) {
       override def run() {
         for (line <- Source.fromInputStream(process.getErrorStream).getLines) {
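Because the new parameter defaults to Map.empty, existing callers of executeAndGetOutput are unaffected; only call sites that need extra variables pass them explicitly. A hypothetical invocation (the script path and classpath value here are made up for illustration):

    val classPath = Utils.executeAndGetOutput(
      Seq("/opt/spark/bin/compute-classpath.sh"),
      extraEnvironment = Map("SPARK_CLASSPATH" -> "/opt/extra-jars/*"))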
diff --git a/core/src/main/scala/spark/deploy/worker/ExecutorRunner.scala b/core/src/main/scala/spark/deploy/worker/ExecutorRunner.scala
index d7f58b2cb1..5d3d54c65e 100644
--- a/core/src/main/scala/spark/deploy/worker/ExecutorRunner.scala
+++ b/core/src/main/scala/spark/deploy/worker/ExecutorRunner.scala
@@ -98,7 +98,9 @@ private[spark] class ExecutorRunner(
 
     // Figure out our classpath with the external compute-classpath script
     val ext = if (System.getProperty("os.name").startsWith("Windows")) ".cmd" else ".sh"
-    val classPath = Utils.executeAndGetOutput(Seq(sparkHome + "/bin/compute-classpath" + ext))
+    val classPath = Utils.executeAndGetOutput(
+        Seq(sparkHome + "/bin/compute-classpath" + ext),
+        extraEnvironment=appDesc.command.environment)
 
     Seq("-cp", classPath) ++ libraryOpts ++ userOpts ++ memoryOpts
   }