aboutsummaryrefslogtreecommitdiff
path: root/core/src
diff options
context:
space:
mode:
authorChristoph Grothaus <cgrothaus@zeb.de>2013-02-15 14:11:34 +0100
committerChristoph Grothaus <cgrothaus@zeb.de>2013-02-20 21:42:11 +0100
commit85a35c68401e171df0b72b172a689d8c4e412199 (patch)
treeff21c944fee0f8254b74f22e07d36970ba4e1a33 /core/src
parent05bc02e80be78d83937bf57f726946e297d0dd08 (diff)
downloadspark-85a35c68401e171df0b72b172a689d8c4e412199.tar.gz
spark-85a35c68401e171df0b72b172a689d8c4e412199.tar.bz2
spark-85a35c68401e171df0b72b172a689d8c4e412199.zip
Fix SPARK-698. From ExecutorRunner, launch java directly instead of via the run scripts.
Diffstat (limited to 'core/src')
-rw-r--r--core/src/main/scala/spark/deploy/worker/ExecutorRunner.scala | 43
1 file changed, 39 insertions, 4 deletions
diff --git a/core/src/main/scala/spark/deploy/worker/ExecutorRunner.scala b/core/src/main/scala/spark/deploy/worker/ExecutorRunner.scala
index de11771c8e..214c44fc88 100644
--- a/core/src/main/scala/spark/deploy/worker/ExecutorRunner.scala
+++ b/core/src/main/scala/spark/deploy/worker/ExecutorRunner.scala
@@ -75,9 +75,45 @@ private[spark] class ExecutorRunner(
def buildCommandSeq(): Seq[String] = {
val command = appDesc.command
- val script = if (System.getProperty("os.name").startsWith("Windows")) "run.cmd" else "run"
- val runScript = new File(sparkHome, script).getCanonicalPath
- Seq(runScript, command.mainClass) ++ (command.arguments ++ Seq(appId)).map(substituteVariables)
+ val runner = if (getEnvOrEmpty("JAVA_HOME") == "") {
+ "java"
+ } else {
+ getEnvOrEmpty("JAVA_HOME") + "/bin/java"
+ }
+ // SPARK-698: do not call the run.cmd script, as process.destroy()
+ // fails to kill a process tree on Windows
+ Seq(runner) ++ buildJavaOpts() ++ Seq(command.mainClass) ++
+ command.arguments.map(substituteVariables)
+ }
+
+ /*
+ * Attention: this must always be aligned with the environment variables in the run scripts and the
+ * way the JAVA_OPTS are assembled there.
+ */
+ def buildJavaOpts(): Seq[String] = {
+ val _javaLibPath = if (getEnvOrEmpty("SPARK_LIBRARY_PATH") == "") {
+ ""
+ } else {
+ "-Djava.library.path=" + getEnvOrEmpty("SPARK_LIBRARY_PATH")
+ }
+
+ Seq("-cp",
+ getEnvOrEmpty("CLASSPATH"),
+ // SPARK_JAVA_OPTS is overwritten with SPARK_DAEMON_JAVA_OPTS for running the worker
+ getEnvOrEmpty("SPARK_NONDAEMON_JAVA_OPTS"),
+ _javaLibPath,
+ "-Xms" + memory.toString + "M",
+ "-Xmx" + memory.toString + "M")
+ .filter(_ != "")
+ }
+
+ def getEnvOrEmpty(key: String): String = {
+ val result = System.getenv(key)
+ if (result == null) {
+ ""
+ } else {
+ result
+ }
}
/** Spawn a thread that will redirect a given stream to a file */
@@ -113,7 +149,6 @@ private[spark] class ExecutorRunner(
for ((key, value) <- appDesc.command.environment) {
env.put(key, value)
}
- env.put("SPARK_MEM", memory.toString + "m")
// In case we are running this from within the Spark Shell, avoid creating a "scala"
// parent process for the executor command
env.put("SPARK_LAUNCH_WITH_SCALA", "0")