diff options
author | Matei Zaharia <matei@eecs.berkeley.edu> | 2013-06-25 15:47:40 -0400 |
---|---|---|
committer | Matei Zaharia <matei@eecs.berkeley.edu> | 2013-06-25 15:47:40 -0400 |
commit | 7e0191c6eabe7258f28a8b723702c3a089d1f4b3 (patch) | |
tree | ca91077e408a5d879da953fc674825ef6342ee84 /core/src | |
parent | f5e32ed13a7820fdc38f56dd9a19c160800fd652 (diff) | |
parent | f39f2b7636f52568a556987c8b7f7393299b0351 (diff) | |
download | spark-7e0191c6eabe7258f28a8b723702c3a089d1f4b3.tar.gz spark-7e0191c6eabe7258f28a8b723702c3a089d1f4b3.tar.bz2 spark-7e0191c6eabe7258f28a8b723702c3a089d1f4b3.zip |
Merge remote-tracking branch 'cgrothaus/SPARK-698'
Conflicts:
run
Diffstat (limited to 'core/src')
-rw-r--r-- | core/src/main/scala/spark/deploy/worker/ExecutorRunner.scala | 33 |
1 file changed, 29 insertions, 4 deletions
diff --git a/core/src/main/scala/spark/deploy/worker/ExecutorRunner.scala b/core/src/main/scala/spark/deploy/worker/ExecutorRunner.scala index 04a774658e..4d31657d9e 100644 --- a/core/src/main/scala/spark/deploy/worker/ExecutorRunner.scala +++ b/core/src/main/scala/spark/deploy/worker/ExecutorRunner.scala @@ -77,9 +77,35 @@ private[spark] class ExecutorRunner( def buildCommandSeq(): Seq[String] = { val command = appDesc.command - val script = if (System.getProperty("os.name").startsWith("Windows")) "run.cmd" else "run" - val runScript = new File(sparkHome, script).getCanonicalPath - Seq(runScript, command.mainClass) ++ (command.arguments ++ Seq(appId)).map(substituteVariables) + val runner = if (System.getenv("JAVA_HOME") == null) { + "java" + } else { + System.getenv("JAVA_HOME") + "/bin/java" + } + // SPARK-698: do not call the run.cmd script, as process.destroy() + // fails to kill a process tree on Windows + Seq(runner) ++ buildJavaOpts() ++ Seq(command.mainClass) ++ + command.arguments.map(substituteVariables) + } + + /* + * Attention: this must always be aligned with the environment variables in the run scripts and the + * way the JAVA_OPTS are assembled there. + */ + def buildJavaOpts(): Seq[String] = { + val _javaLibPath = if (System.getenv("SPARK_LIBRARY_PATH") == null) { + "" + } else { + "-Djava.library.path=" + System.getenv("SPARK_LIBRARY_PATH") + } + + Seq("-cp", + System.getenv("CLASSPATH"), + System.getenv("SPARK_JAVA_OPTS"), + _javaLibPath, + "-Xms" + memory.toString + "M", + "-Xmx" + memory.toString + "M") + .filter(_ != null) } /** Spawn a thread that will redirect a given stream to a file */ @@ -115,7 +141,6 @@ private[spark] class ExecutorRunner( for ((key, value) <- appDesc.command.environment) { env.put(key, value) } - env.put("SPARK_MEM", memory.toString + "m") // In case we are running this from within the Spark Shell, avoid creating a "scala" // parent process for the executor command env.put("SPARK_LAUNCH_WITH_SCALA", "0") |