diff options
author | Michael Armbrust <michael@databricks.com> | 2014-05-16 20:25:10 -0700 |
---|---|---|
committer | Patrick Wendell <pwendell@gmail.com> | 2014-05-16 20:25:10 -0700 |
commit | a80a6a139e729ee3f81ec4f0028e084d2d9f7e82 (patch) | |
tree | 23c395a5904260c543a53118e9c80011a2c255c2 /core/src | |
parent | fed6303f29250bd5e656dbdd731b38938c933a61 (diff) | |
download | spark-a80a6a139e729ee3f81ec4f0028e084d2d9f7e82.tar.gz spark-a80a6a139e729ee3f81ec4f0028e084d2d9f7e82.tar.bz2 spark-a80a6a139e729ee3f81ec4f0028e084d2d9f7e82.zip |
SPARK-1864 Look in spark conf instead of system properties when propagating configuration to executors.
Author: Michael Armbrust <michael@databricks.com>
Closes #808 from marmbrus/confClasspath and squashes the following commits:
4c31d57 [Michael Armbrust] Look in spark conf instead of system properties when propagating configuration to executors.
Diffstat (limited to 'core/src')
-rw-r--r-- | core/src/main/scala/org/apache/spark/scheduler/cluster/SparkDeploySchedulerBackend.scala | 9 |
1 file changed, 5 insertions(+), 4 deletions(-)
diff --git a/core/src/main/scala/org/apache/spark/scheduler/cluster/SparkDeploySchedulerBackend.scala b/core/src/main/scala/org/apache/spark/scheduler/cluster/SparkDeploySchedulerBackend.scala
index 933f6e0571..9768670855 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/cluster/SparkDeploySchedulerBackend.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/cluster/SparkDeploySchedulerBackend.scala
@@ -46,12 +46,13 @@ private[spark] class SparkDeploySchedulerBackend(
       CoarseGrainedSchedulerBackend.ACTOR_NAME)
     val args = Seq(driverUrl, "{{EXECUTOR_ID}}", "{{HOSTNAME}}", "{{CORES}}", "{{WORKER_URL}}")
     val extraJavaOpts = sc.conf.getOption("spark.executor.extraJavaOptions")
-    val classPathEntries = sys.props.get("spark.executor.extraClassPath").toSeq.flatMap { cp =>
-      cp.split(java.io.File.pathSeparator)
-    }
-    val libraryPathEntries = sys.props.get("spark.executor.extraLibraryPath").toSeq.flatMap { cp =>
+    val classPathEntries = sc.conf.getOption("spark.executor.extraClassPath").toSeq.flatMap { cp =>
       cp.split(java.io.File.pathSeparator)
     }
+    val libraryPathEntries =
+      sc.conf.getOption("spark.executor.extraLibraryPath").toSeq.flatMap { cp =>
+        cp.split(java.io.File.pathSeparator)
+      }
     val command = Command(
       "org.apache.spark.executor.CoarseGrainedExecutorBackend", args, sc.executorEnvs,