aboutsummaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorTimothy Chen <tnachen@gmail.com>2015-12-16 10:54:15 -0800
committerAndrew Or <andrew@databricks.com>2015-12-16 10:54:15 -0800
commitad8c1f0b840284d05da737fb2cc5ebf8848f4490 (patch)
treee4d4647927e77ec94a2b19f4164e8f746a9f7117
parent26d70bd2b42617ff731b6e9e6d77933b38597ebe (diff)
downloadspark-ad8c1f0b840284d05da737fb2cc5ebf8848f4490.tar.gz
spark-ad8c1f0b840284d05da737fb2cc5ebf8848f4490.tar.bz2
spark-ad8c1f0b840284d05da737fb2cc5ebf8848f4490.zip
[SPARK-12345][MESOS] Filter SPARK_HOME when submitting Spark jobs with Mesos cluster mode.
SPARK_HOME is now causing problems with Mesos cluster mode, since the spark-submit script has recently been changed to take precedence when running spark-class scripts by looking in SPARK_HOME if it's defined. We should skip passing SPARK_HOME from the Spark client in cluster mode with Mesos, since Mesos shouldn't use this configuration but should use spark.executor.home instead. Author: Timothy Chen <tnachen@gmail.com> Closes #10332 from tnachen/scheduler_ui.
-rw-r--r--core/src/main/scala/org/apache/spark/deploy/rest/mesos/MesosRestServer.scala7
-rw-r--r--core/src/main/scala/org/apache/spark/scheduler/cluster/mesos/MesosSchedulerUtils.scala2
2 files changed, 7 insertions, 2 deletions
diff --git a/core/src/main/scala/org/apache/spark/deploy/rest/mesos/MesosRestServer.scala b/core/src/main/scala/org/apache/spark/deploy/rest/mesos/MesosRestServer.scala
index 868cc35d06..24510db2bd 100644
--- a/core/src/main/scala/org/apache/spark/deploy/rest/mesos/MesosRestServer.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/rest/mesos/MesosRestServer.scala
@@ -94,7 +94,12 @@ private[mesos] class MesosSubmitRequestServlet(
val driverMemory = sparkProperties.get("spark.driver.memory")
val driverCores = sparkProperties.get("spark.driver.cores")
val appArgs = request.appArgs
- val environmentVariables = request.environmentVariables
+ // We don't want to pass down SPARK_HOME when launching Spark apps
+ // with Mesos cluster mode since it's populated by default on the client and it will
+ // cause spark-submit script to look for files in SPARK_HOME instead.
+ // We only need the ability to specify where to find spark-submit script
+ // which users can set via the spark.executor.home or spark.home configurations.
+ val environmentVariables = request.environmentVariables.filter(!_.equals("SPARK_HOME"))
val name = request.sparkProperties.get("spark.app.name").getOrElse(mainClass)
// Construct driver description
diff --git a/core/src/main/scala/org/apache/spark/scheduler/cluster/mesos/MesosSchedulerUtils.scala b/core/src/main/scala/org/apache/spark/scheduler/cluster/mesos/MesosSchedulerUtils.scala
index 721861fbbc..573355ba58 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/cluster/mesos/MesosSchedulerUtils.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/cluster/mesos/MesosSchedulerUtils.scala
@@ -34,7 +34,7 @@ import org.apache.spark.util.Utils
/**
* Shared trait for implementing a Mesos Scheduler. This holds common state and helper
- * methods and Mesos scheduler will use.
+ * methods the Mesos scheduler will use.
*/
private[mesos] trait MesosSchedulerUtils extends Logging {
// Lock used to wait for scheduler to be registered