author     Jeff Zhang <zjffdu@apache.org>    2016-05-02 11:03:37 -0700
committer  Marcelo Vanzin <vanzin@cloudera.com>    2016-05-02 11:03:37 -0700
commit     0a3026990bd0cbad53f0001da793349201104958 (patch)
tree       69218a1601e0b1e3b3e52cad9ef4e79ddd27132a /core
parent     0513c3ac93e0a25d6eedbafe6c0561e71c92880a (diff)
download   spark-0a3026990bd0cbad53f0001da793349201104958.tar.gz
           spark-0a3026990bd0cbad53f0001da793349201104958.tar.bz2
           spark-0a3026990bd0cbad53f0001da793349201104958.zip
[SPARK-14845][SPARK_SUBMIT][YARN] spark.files in properties file is n…
## What changes were proposed in this pull request?

Initialize SparkSubmitArguments#files from the spark-submit arguments first, then fall back to the properties file, so that the sys property spark.yarn.dist.files is set correctly by the assigner below:

```
OptionAssigner(args.files, YARN, ALL_DEPLOY_MODES, sysProp = "spark.yarn.dist.files"),
```

## How was this patch tested?

Manual test. A file defined in the properties file is now also distributed to the driver in yarn-cluster mode.

Author: Jeff Zhang <zjffdu@apache.org>

Closes #12656 from zjffdu/SPARK-14845.
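The one-line fix reuses the fallback pattern that SparkSubmitArguments already applies to name and jars: a value given on the command line wins, otherwise the value from the properties file is used, otherwise the field stays null. A minimal standalone sketch of that merge logic (the mergeArg helper and the sample properties map are illustrative, not Spark's actual API):

```
// A minimal sketch of the CLI-then-properties-file fallback pattern.
// `mergeArg` and the sample map are illustrative, not Spark's real API.
object FallbackSketch {
  def mergeArg(cliValue: String, props: Map[String, String], key: String): String =
    Option(cliValue).orElse(props.get(key)).orNull

  def main(args: Array[String]): Unit = {
    val sparkProperties = Map("spark.files" -> "hdfs:///tmp/conf.json")

    // No --files on the command line: the properties-file value is used.
    println(mergeArg(null, sparkProperties, "spark.files"))        // hdfs:///tmp/conf.json
    // --files given on the command line: it takes precedence.
    println(mergeArg("local.txt", sparkProperties, "spark.files")) // local.txt
  }
}
```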
Diffstat (limited to 'core')
-rw-r--r--  core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala  1
1 file changed, 1 insertion, 0 deletions
diff --git a/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala b/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala
index 78da1b70c5..206c130c76 100644
--- a/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala
@@ -173,6 +173,7 @@ private[deploy] class SparkSubmitArguments(args: Seq[String], env: Map[String, S
       .orNull
     name = Option(name).orElse(sparkProperties.get("spark.app.name")).orNull
     jars = Option(jars).orElse(sparkProperties.get("spark.jars")).orNull
+    files = Option(files).orElse(sparkProperties.get("spark.files")).orNull
     ivyRepoPath = sparkProperties.get("spark.jars.ivy").orNull
     packages = Option(packages).orElse(sparkProperties.get("spark.jars.packages")).orNull
     packagesExclusions = Option(packagesExclusions)
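
To see why the one-line merge matters downstream, here is a reduced illustration of the OptionAssigner mechanism quoted in the description (the case class and loop below are a simplified sketch, not SparkSubmit's actual implementation): assigners only propagate non-null argument fields into system properties, so a files value that lived only in the properties file was silently dropped before this commit.

```
// Reduced illustration (not SparkSubmit's real code): each assigner copies
// a non-null argument value into a system property.
object AssignerSketch {
  case class OptionAssigner(value: String, sysProp: String)

  def main(args: Array[String]): Unit = {
    // Before SPARK-14845: null whenever --files was absent, even if
    // spark.files was set in the properties file.
    val files: String = null

    val assigners = Seq(OptionAssigner(files, "spark.yarn.dist.files"))
    val sysProps = scala.collection.mutable.Map[String, String]()

    // Null values never fire, so spark.yarn.dist.files stays unset.
    for (a <- assigners if a.value != null) {
      sysProps(a.sysProp) = a.value
    }
    println(sysProps.getOrElse("spark.yarn.dist.files", "<not set>"))
  }
}
```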