author | Christoph Grothaus <cgrothaus@zeb.de> | 2013-02-15 14:11:34 +0100
committer | Christoph Grothaus <cgrothaus@zeb.de> | 2013-02-20 21:42:11 +0100
commit | 85a35c68401e171df0b72b172a689d8c4e412199 (patch)
tree | ff21c944fee0f8254b74f22e07d36970ba4e1a33
parent | 05bc02e80be78d83937bf57f726946e297d0dd08 (diff)
Fix SPARK-698. From ExecutorRunner, launch java directly instead of via the run scripts.
-rw-r--r-- | core/src/main/scala/spark/deploy/worker/ExecutorRunner.scala | 43
-rwxr-xr-x | run | 3
-rw-r--r-- | run2.cmd | 3
3 files changed, 45 insertions, 4 deletions
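The motivation is stated in the diff below: process.destroy() fails to kill a process tree on Windows, so an executor started through run.cmd survives when the worker tries to destroy it. The following is a minimal, illustrative sketch of the direct-launch idea only, not Spark code; the main class example.Main and the classpath fallback are hypothetical.

import java.io.File

// Minimal sketch (not Spark code): build the java command ourselves so that
// Process.destroy() hits the executor JVM itself, not a wrapper shell spawned
// by run/run.cmd. "example.Main" is a hypothetical main class.
object DirectLaunchSketch {
  def main(args: Array[String]): Unit = {
    val javaHome = sys.env.getOrElse("JAVA_HOME", "")
    val java = if (javaHome.isEmpty) "java" else javaHome + "/bin/java"
    val command = Seq(java, "-cp", sys.env.getOrElse("CLASSPATH", "."), "example.Main")
    val builder = new ProcessBuilder(command: _*)
    builder.directory(new File("."))
    val process = builder.start()
    // `process` is the executor JVM itself (no intermediate script),
    // so destroy() terminates it on Windows as well.
    process.destroy()
  }
}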
diff --git a/core/src/main/scala/spark/deploy/worker/ExecutorRunner.scala b/core/src/main/scala/spark/deploy/worker/ExecutorRunner.scala
index de11771c8e..214c44fc88 100644
--- a/core/src/main/scala/spark/deploy/worker/ExecutorRunner.scala
+++ b/core/src/main/scala/spark/deploy/worker/ExecutorRunner.scala
@@ -75,9 +75,45 @@ private[spark] class ExecutorRunner(
 
   def buildCommandSeq(): Seq[String] = {
     val command = appDesc.command
-    val script = if (System.getProperty("os.name").startsWith("Windows")) "run.cmd" else "run"
-    val runScript = new File(sparkHome, script).getCanonicalPath
-    Seq(runScript, command.mainClass) ++ (command.arguments ++ Seq(appId)).map(substituteVariables)
+    val runner = if (getEnvOrEmpty("JAVA_HOME") == "") {
+      "java"
+    } else {
+      getEnvOrEmpty("JAVA_HOME") + "/bin/java"
+    }
+    // SPARK-698: do not call the run.cmd script, as process.destroy()
+    // fails to kill a process tree on Windows
+    Seq(runner) ++ buildJavaOpts() ++ Seq(command.mainClass) ++
+      command.arguments.map(substituteVariables)
+  }
+
+  /*
+   * Attention: this must always be aligned with the environment variables in the run scripts and the
+   * way the JAVA_OPTS are assembled there.
+   */
+  def buildJavaOpts(): Seq[String] = {
+    val _javaLibPath = if (getEnvOrEmpty("SPARK_LIBRARY_PATH") == "") {
+      ""
+    } else {
+      "-Djava.library.path=" + getEnvOrEmpty("SPARK_LIBRARY_PATH")
+    }
+
+    Seq("-cp",
+      getEnvOrEmpty("CLASSPATH"),
+      // SPARK_JAVA_OPTS is overwritten with SPARK_DAEMON_JAVA_OPTS for running the worker
+      getEnvOrEmpty("SPARK_NONDAEMON_JAVA_OPTS"),
+      _javaLibPath,
+      "-Xms" + memory.toString + "M",
+      "-Xmx" + memory.toString + "M")
+      .filter(_ != "")
+  }
+
+  def getEnvOrEmpty(key: String): String = {
+    val result = System.getenv(key)
+    if (result == null) {
+      ""
+    } else {
+      result
+    }
   }
 
   /** Spawn a thread that will redirect a given stream to a file */
@@ -113,7 +149,6 @@ private[spark] class ExecutorRunner(
     for ((key, value) <- appDesc.command.environment) {
       env.put(key, value)
     }
-    env.put("SPARK_MEM", memory.toString + "m")
     // In case we are running this from within the Spark Shell, avoid creating a "scala"
     // parent process for the executor command
     env.put("SPARK_LAUNCH_WITH_SCALA", "0")
diff --git a/run b/run
--- a/run
+++ b/run
@@ -22,6 +22,8 @@ fi
 # values for that; it doesn't need a lot
 if [ "$1" = "spark.deploy.master.Master" -o "$1" = "spark.deploy.worker.Worker" ]; then
   SPARK_MEM=${SPARK_DAEMON_MEMORY:-512m}
+  # Backup current SPARK_JAVA_OPTS for use in ExecutorRunner.scala
+  SPARK_NONDAEMON_JAVA_OPTS=$SPARK_JAVA_OPTS
   SPARK_JAVA_OPTS=$SPARK_DAEMON_JAVA_OPTS   # Empty by default
 fi
 
@@ -70,6 +72,7 @@ if [ -e $FWDIR/conf/java-opts ] ; then
   JAVA_OPTS+=" `cat $FWDIR/conf/java-opts`"
 fi
 export JAVA_OPTS
+# Attention: when changing the way the JAVA_OPTS are assembled, the change must be reflected in ExecutorRunner.scala!
 
 CORE_DIR="$FWDIR/core"
 REPL_DIR="$FWDIR/repl"
diff --git a/run2.cmd b/run2.cmd
--- a/run2.cmd
+++ b/run2.cmd
@@ -22,6 +22,8 @@ if "%1"=="spark.deploy.master.Master" set RUNNING_DAEMON=1
 if "%1"=="spark.deploy.worker.Worker" set RUNNING_DAEMON=1
 if "x%SPARK_DAEMON_MEMORY%" == "x" set SPARK_DAEMON_MEMORY=512m
 if "%RUNNING_DAEMON%"=="1" set SPARK_MEM=%SPARK_DAEMON_MEMORY%
+rem Backup current SPARK_JAVA_OPTS for use in ExecutorRunner.scala
+if "%RUNNING_DAEMON%"=="1" set SPARK_NONDAEMON_JAVA_OPTS=%SPARK_JAVA_OPTS%
 if "%RUNNING_DAEMON%"=="1" set SPARK_JAVA_OPTS=%SPARK_DAEMON_JAVA_OPTS%
 
 rem Check that SCALA_HOME has been specified
@@ -42,6 +44,7 @@ rem Set JAVA_OPTS to be able to load native libraries and to set heap size
 set JAVA_OPTS=%SPARK_JAVA_OPTS% -Djava.library.path=%SPARK_LIBRARY_PATH% -Xms%SPARK_MEM% -Xmx%SPARK_MEM%
 rem Load extra JAVA_OPTS from conf/java-opts, if it exists
 if exist "%FWDIR%conf\java-opts.cmd" call "%FWDIR%conf\java-opts.cmd"
+rem Attention: when changing the way the JAVA_OPTS are assembled, the change must be reflected in ExecutorRunner.scala!
 
 set CORE_DIR=%FWDIR%core
 set REPL_DIR=%FWDIR%repl
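To see how the patched pieces fit together, here is a self-contained sketch that mirrors buildCommandSeq() and buildJavaOpts() against a hypothetical environment; the paths, the -verbose:gc option, the main class and the appId value are invented for illustration. It prints the java command line the worker would hand to the ProcessBuilder.

// Self-contained sketch of the new command assembly; environment values and the
// main class are hypothetical, the shape follows the patched ExecutorRunner.
object BuildCommandSketch {
  // Stand-in for System.getenv with example values.
  val env = Map(
    "JAVA_HOME" -> "/usr/lib/jvm/java-7",
    "CLASSPATH" -> "/opt/spark/core/target/classes",
    "SPARK_NONDAEMON_JAVA_OPTS" -> "-verbose:gc")

  def getEnvOrEmpty(key: String): String = env.getOrElse(key, "")

  // Mirrors buildJavaOpts(): empty entries are filtered out, so an unset
  // SPARK_LIBRARY_PATH or SPARK_NONDAEMON_JAVA_OPTS simply disappears.
  def buildJavaOpts(memory: Int): Seq[String] = {
    val javaLibPath =
      if (getEnvOrEmpty("SPARK_LIBRARY_PATH") == "") ""
      else "-Djava.library.path=" + getEnvOrEmpty("SPARK_LIBRARY_PATH")
    Seq("-cp",
      getEnvOrEmpty("CLASSPATH"),
      getEnvOrEmpty("SPARK_NONDAEMON_JAVA_OPTS"),
      javaLibPath,
      "-Xms" + memory + "M",
      "-Xmx" + memory + "M").filter(_ != "")
  }

  def main(args: Array[String]): Unit = {
    val runner =
      if (getEnvOrEmpty("JAVA_HOME") == "") "java"
      else getEnvOrEmpty("JAVA_HOME") + "/bin/java"
    val command = Seq(runner) ++ buildJavaOpts(512) ++
      Seq("example.ExecutorMain", "app-20130215-0001")
    println(command.mkString(" "))
    // /usr/lib/jvm/java-7/bin/java -cp /opt/spark/core/target/classes -verbose:gc -Xms512M -Xmx512M example.ExecutorMain app-20130215-0001
  }
}

Note that each environment variable becomes a single ProcessBuilder argument, so the sketch deliberately uses one option in SPARK_NONDAEMON_JAVA_OPTS; a value containing several space-separated options would be passed to the JVM as one token.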