Diffstat (limited to 'core')
-rw-r--r--  core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala | 22 ++++++++++++++++------
1 file changed, 16 insertions(+), 6 deletions(-)
diff --git a/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala b/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala
index faa8780288..c896842943 100644
--- a/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala
@@ -77,12 +77,8 @@ private[deploy] class SparkSubmitArguments(args: Seq[String], env: Map[String, S
     if (verbose) SparkSubmit.printStream.println(s"Using properties file: $propertiesFile")
     Option(propertiesFile).foreach { filename =>
       Utils.getPropertiesFromFile(filename).foreach { case (k, v) =>
-        if (k.startsWith("spark.")) {
-          defaultProperties(k) = v
-          if (verbose) SparkSubmit.printStream.println(s"Adding default property: $k=$v")
-        } else {
-          SparkSubmit.printWarning(s"Ignoring non-spark config property: $k=$v")
-        }
+        defaultProperties(k) = v
+        if (verbose) SparkSubmit.printStream.println(s"Adding default property: $k=$v")
       }
     }
     defaultProperties
@@ -97,6 +93,8 @@ private[deploy] class SparkSubmitArguments(args: Seq[String], env: Map[String, S
   }
   // Populate `sparkProperties` map from properties file
   mergeDefaultSparkProperties()
+  // Remove keys that don't start with "spark." from `sparkProperties`.
+  ignoreNonSparkProperties()
   // Use `sparkProperties` map along with env vars to fill in any missing parameters
   loadEnvironmentArguments()
 
@@ -118,6 +116,18 @@ private[deploy] class SparkSubmitArguments(args: Seq[String], env: Map[String, S
   }
 
   /**
+   * Remove keys that don't start with "spark." from `sparkProperties`.
+   */
+  private def ignoreNonSparkProperties(): Unit = {
+    sparkProperties.foreach { case (k, v) =>
+      if (!k.startsWith("spark.")) {
+        sparkProperties -= k
+        SparkSubmit.printWarning(s"Ignoring non-spark config property: $k=$v")
+      }
+    }
+  }
+
+  /**
    * Load arguments from environment variables, Spark properties etc.
    */
   private def loadEnvironmentArguments(): Unit = {
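
The new ignoreNonSparkProperties() helper walks sparkProperties after the defaults file has been merged in and drops every key that does not start with "spark.", printing a warning for each one. Below is a minimal, self-contained sketch of that filtering behaviour; the object name and the sample properties are illustrative only, and unlike the patch it iterates over a snapshot of the entries (toList) so that removing keys cannot interfere with iteration over the mutable map.

// Standalone sketch of the "ignore non-spark properties" filtering.
// Object name and sample entries are hypothetical, not part of Spark.
import scala.collection.mutable

object IgnoreNonSparkPropertiesSketch {
  def main(args: Array[String]): Unit = {
    val sparkProperties = mutable.HashMap(
      "spark.master" -> "local[4]",
      "spark.app.name" -> "demo",
      "java.io.tmpdir" -> "/tmp")         // not a "spark." key, so it is dropped

    // Snapshot the entries first so removing keys does not mutate the map
    // while it is being iterated.
    sparkProperties.toList.foreach { case (k, v) =>
      if (!k.startsWith("spark.")) {
        sparkProperties -= k
        Console.err.println(s"Warning: Ignoring non-spark config property: $k=$v")
      }
    }

    println(sparkProperties)              // only the spark.* entries remain
  }
}

The net effect of the patch is that values from the defaults file and from --conf are merged first and filtered once afterwards, instead of filtering only the defaults file while it is being loaded.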