author    Josh Rosen <joshrosen@apache.org>    2014-09-29 23:36:10 -0700
committer Andrew Or <andrewor14@gmail.com>     2014-09-29 23:36:40 -0700
commit    48be6576c33367741727521864ba1dbdca288d9d (patch)
tree      20289458ae06ec8a6089dca08c17bbf065d8175c /core/src/main
parent    85dd5139e1fcd23efcc987b2bb7590caa53ee6bf (diff)
[SPARK-3734] DriverRunner should not read SPARK_HOME from submitter's environment
When using spark-submit in `cluster` mode to submit a job to a Spark
Standalone cluster, if the JAVA_HOME environment variable was set on the
submitting machine then DriverRunner would attempt to use the submitter's
JAVA_HOME to launch the driver process (instead of the worker's JAVA_HOME),
causing the driver to fail unless the submitter and worker had the same
Java location. This commit fixes this by reading JAVA_HOME from sys.env
instead of command.environment.

Author: Josh Rosen <joshrosen@apache.org>

Closes #2586 from JoshRosen/SPARK-3734 and squashes the following commits:

e9513d9 [Josh Rosen] [SPARK-3734] DriverRunner should not read SPARK_HOME from submitter's environment.

(cherry picked from commit b167a8c7e75d9e816784bd655bce1feb6c447210)
Signed-off-by: Andrew Or <andrewor14@gmail.com>
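For illustration, the gist of the change is which environment the worker
consults when resolving the JVM launcher. The sketch below is not Spark
code: the Command case class is reduced to its environment field, and the
two helper names are hypothetical.

    // Simplified sketch; Spark's real Command class carries more fields.
    case class Command(environment: Map[String, String])

    object RunnerResolution {
      // Before the fix: JAVA_HOME forwarded from the submitter (via
      // command.environment) shadowed the worker's own setting.
      def runnerBefore(command: Command): String =
        command.environment.get("JAVA_HOME")
          .orElse(sys.env.get("JAVA_HOME"))
          .map(_ + "/bin/java")
          .getOrElse("java")

      // After the fix: only the worker's local environment is read, so
      // the driver always launches with the worker's Java installation.
      def runnerAfter(): String =
        sys.env.get("JAVA_HOME").map(_ + "/bin/java").getOrElse("java")
    }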
Diffstat (limited to 'core/src/main')
 core/src/main/scala/org/apache/spark/deploy/worker/CommandUtils.scala | 5 +----
 1 file changed, 1 insertion(+), 4 deletions(-)
diff --git a/core/src/main/scala/org/apache/spark/deploy/worker/CommandUtils.scala b/core/src/main/scala/org/apache/spark/deploy/worker/CommandUtils.scala
index 687e492a0d..e4eadf0619 100644
--- a/core/src/main/scala/org/apache/spark/deploy/worker/CommandUtils.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/worker/CommandUtils.scala
@@ -30,7 +30,7 @@ import org.apache.spark.util.Utils
 private[spark]
 object CommandUtils extends Logging {
   def buildCommandSeq(command: Command, memory: Int, sparkHome: String): Seq[String] = {
-    val runner = getEnv("JAVA_HOME", command).map(_ + "/bin/java").getOrElse("java")
+    val runner = sys.env.get("JAVA_HOME").map(_ + "/bin/java").getOrElse("java")
 
     // SPARK-698: do not call the run.cmd script, as process.destroy()
     // fails to kill a process tree on Windows
@@ -38,9 +38,6 @@ object CommandUtils extends Logging {
     command.arguments
   }
 
-  private def getEnv(key: String, command: Command): Option[String] =
-    command.environment.get(key).orElse(Option(System.getenv(key)))
-
   /**
    * Attention: this must always be aligned with the environment variables in the run scripts and
    * the way the JAVA_OPTS are assembled there.
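As a quick standalone check of the fixed expression's fallback behavior
(not part of the patch):

    // Resolves the launcher the same way the patched buildCommandSeq does.
    object JavaHomeCheck extends App {
      val runner = sys.env.get("JAVA_HOME").map(_ + "/bin/java").getOrElse("java")
      // JAVA_HOME=/usr/lib/jvm/java-7 -> "/usr/lib/jvm/java-7/bin/java"
      // JAVA_HOME unset               -> "java" (resolved via PATH)
      println(s"Driver JVM launcher: $runner")
    }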