aboutsummaryrefslogtreecommitdiff
path: root/core
diff options
context:
space:
mode:
authorAndrew Or <andrewor14@gmail.com>2014-08-27 14:46:56 -0700
committerPatrick Wendell <pwendell@gmail.com>2014-08-27 14:46:56 -0700
commit63a053ab140d7bf605e8c5b7fb5a7bd52aca29b2 (patch)
treea93d686e47d1c7f1c9eb323960fa6d4e80043c73 /core
parent7faf755ae4f0cf510048e432340260a6e609066d (diff)
downloadspark-63a053ab140d7bf605e8c5b7fb5a7bd52aca29b2.tar.gz
spark-63a053ab140d7bf605e8c5b7fb5a7bd52aca29b2.tar.bz2
spark-63a053ab140d7bf605e8c5b7fb5a7bd52aca29b2.zip
[SPARK-3243] Don't use stale spark-driver.* system properties
If we set both `spark.driver.extraClassPath` and `--driver-class-path`, then the latter correctly overrides the former. However, the value of the system property `spark.driver.extraClassPath` still uses the former, which is actually not added to the class path. This may cause some confusion... Of course, this also affects other options (e.g. java options, library path, memory...). Author: Andrew Or <andrewor14@gmail.com> Closes #2154 from andrewor14/driver-submit-configs-fix and squashes the following commits: 17ec6fc [Andrew Or] Fix tests 0140836 [Andrew Or] Don't forget spark.driver.memory e39d20f [Andrew Or] Also set spark.driver.extra* configs in client mode
Diffstat (limited to 'core')
-rw-r--r--core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala19
1 file changed, 9 insertions, 10 deletions
diff --git a/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala b/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
index 550ee72538..0fdb5ae3c2 100644
--- a/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
@@ -173,6 +173,14 @@ object SparkSubmit {
OptionAssigner(args.master, ALL_CLUSTER_MGRS, ALL_DEPLOY_MODES, sysProp = "spark.master"),
OptionAssigner(args.name, ALL_CLUSTER_MGRS, ALL_DEPLOY_MODES, sysProp = "spark.app.name"),
OptionAssigner(args.jars, ALL_CLUSTER_MGRS, ALL_DEPLOY_MODES, sysProp = "spark.jars"),
+ OptionAssigner(args.driverMemory, ALL_CLUSTER_MGRS, CLIENT,
+ sysProp = "spark.driver.memory"),
+ OptionAssigner(args.driverExtraClassPath, ALL_CLUSTER_MGRS, ALL_DEPLOY_MODES,
+ sysProp = "spark.driver.extraClassPath"),
+ OptionAssigner(args.driverExtraJavaOptions, ALL_CLUSTER_MGRS, ALL_DEPLOY_MODES,
+ sysProp = "spark.driver.extraJavaOptions"),
+ OptionAssigner(args.driverExtraLibraryPath, ALL_CLUSTER_MGRS, ALL_DEPLOY_MODES,
+ sysProp = "spark.driver.extraLibraryPath"),
// Standalone cluster only
OptionAssigner(args.driverMemory, STANDALONE, CLUSTER, clOption = "--memory"),
@@ -202,16 +210,7 @@ object SparkSubmit {
OptionAssigner(args.totalExecutorCores, STANDALONE | MESOS, ALL_DEPLOY_MODES,
sysProp = "spark.cores.max"),
OptionAssigner(args.files, LOCAL | STANDALONE | MESOS, ALL_DEPLOY_MODES,
- sysProp = "spark.files"),
-
- // Only process driver specific options for cluster mode here,
- // because they have already been processed in bash for client mode
- OptionAssigner(args.driverExtraClassPath, STANDALONE | YARN, CLUSTER,
- sysProp = "spark.driver.extraClassPath"),
- OptionAssigner(args.driverExtraJavaOptions, STANDALONE | YARN, CLUSTER,
- sysProp = "spark.driver.extraJavaOptions"),
- OptionAssigner(args.driverExtraLibraryPath, STANDALONE | YARN, CLUSTER,
- sysProp = "spark.driver.extraLibraryPath")
+ sysProp = "spark.files")
)
// In client mode, launch the application main class directly