author    Cheolsoo Park <cheolsoop@netflix.com>    2015-04-23 20:10:55 -0400
committer Sean Owen <sowen@cloudera.com>    2015-04-23 20:10:55 -0400
commit    336f7f5373e5f6960ecd9967d3703c8507e329ec (patch)
tree      6a663eee5053cea47707bfcdfccb5681eaf19bc6 /core
parent    73db132bf503341c7a5cf9409351c282a8464175 (diff)
[SPARK-7037] [CORE] Inconsistent behavior for non-spark config properties in spark-shell and spark-submit
When specifying non-spark properties (i.e. names that don't start with "spark.") on the command line and in the config file, spark-submit and spark-shell behave differently, causing confusion for users. Here is the summary:

* spark-submit
  * --conf k=v => silently ignored
  * spark-defaults.conf => applied
* spark-shell
  * --conf k=v => warning message shown, property ignored
  * spark-defaults.conf => warning message shown, property ignored

I assume that ignoring non-spark properties is intentional. If so, they should always be ignored with a warning message in all cases.

Author: Cheolsoo Park <cheolsoop@netflix.com>

Closes #5617 from piaozhexiu/SPARK-7037 and squashes the following commits:

8957950 [Cheolsoo Park] Add ignoreNonSparkProperties method
fedd01c [Cheolsoo Park] Ignore non-spark properties with a warning message in all cases
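To make the unified behavior concrete, here is a minimal, self-contained Scala sketch of the filtering this patch introduces; the map contents and the warn helper are illustrative assumptions, not code from the patch:

    import scala.collection.mutable

    object NonSparkPropertyFilterSketch {
      // Illustrative stand-in for SparkSubmit.printWarning.
      def warn(msg: String): Unit = Console.err.println(s"Warning: $msg")

      def main(args: Array[String]): Unit = {
        // After this patch, properties from --conf and spark-defaults.conf
        // both land in one map unfiltered...
        val sparkProperties = mutable.HashMap(
          "spark.master" -> "local[2]",               // kept
          "hive.metastore.uris" -> "thrift://h:9083"  // removed with a warning
        )
        // ...and are filtered in a single pass, so both sources now warn
        // identically. The sketch snapshots the entries with toList so the
        // map is not mutated while it is being iterated.
        for ((k, v) <- sparkProperties.toList if !k.startsWith("spark.")) {
          sparkProperties -= k
          warn(s"Ignoring non-spark config property: $k=$v")
        }
        println(sparkProperties) // Map(spark.master -> local[2])
      }
    }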
Diffstat (limited to 'core')
-rw-r--r--  core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala | 22
1 file changed, 16 insertions(+), 6 deletions(-)
diff --git a/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala b/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala
index faa8780288..c896842943 100644
--- a/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala
@@ -77,12 +77,8 @@ private[deploy] class SparkSubmitArguments(args: Seq[String], env: Map[String, S
if (verbose) SparkSubmit.printStream.println(s"Using properties file: $propertiesFile")
Option(propertiesFile).foreach { filename =>
Utils.getPropertiesFromFile(filename).foreach { case (k, v) =>
- if (k.startsWith("spark.")) {
- defaultProperties(k) = v
- if (verbose) SparkSubmit.printStream.println(s"Adding default property: $k=$v")
- } else {
- SparkSubmit.printWarning(s"Ignoring non-spark config property: $k=$v")
- }
+ defaultProperties(k) = v
+ if (verbose) SparkSubmit.printStream.println(s"Adding default property: $k=$v")
}
}
defaultProperties
@@ -97,6 +93,8 @@ private[deploy] class SparkSubmitArguments(args: Seq[String], env: Map[String, S
}
// Populate `sparkProperties` map from properties file
mergeDefaultSparkProperties()
+ // Remove keys that don't start with "spark." from `sparkProperties`.
+ ignoreNonSparkProperties()
// Use `sparkProperties` map along with env vars to fill in any missing parameters
loadEnvironmentArguments()
@@ -118,6 +116,18 @@ private[deploy] class SparkSubmitArguments(args: Seq[String], env: Map[String, S
}
/**
+ * Remove keys that don't start with "spark." from `sparkProperties`.
+ */
+ private def ignoreNonSparkProperties(): Unit = {
+ sparkProperties.foreach { case (k, v) =>
+ if (!k.startsWith("spark.")) {
+ sparkProperties -= k
+ SparkSubmit.printWarning(s"Ignoring non-spark config property: $k=$v")
+ }
+ }
+ }
+
+ /**
* Load arguments from environment variables, Spark properties etc.
*/
private def loadEnvironmentArguments(): Unit = {
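
For completeness, a hedged end-to-end sketch of the resulting pipeline: java.util.Properties stands in for Utils.getPropertiesFromFile and the file contents are hypothetical. Every key is merged unconditionally, as the patched mergeDefaultSparkProperties now does, and non-spark keys are dropped in one place afterwards:

    import java.io.StringReader
    import java.util.Properties
    import scala.collection.mutable

    object DefaultsPipelineSketch {
      def main(args: Array[String]): Unit = {
        // Hypothetical spark-defaults.conf contents.
        val fileContents =
          "spark.executor.memory=2g\nmapreduce.job.queuename=etl\n"

        val props = new Properties()
        props.load(new StringReader(fileContents))

        // Merge step: every key is copied, with no filtering at read time.
        val sparkProperties = mutable.HashMap.empty[String, String]
        val names = props.stringPropertyNames().iterator()
        while (names.hasNext) {
          val k = names.next()
          sparkProperties(k) = props.getProperty(k)
        }

        // Single filtering point, shared by --conf and spark-defaults.conf.
        for ((k, v) <- sparkProperties.toList if !k.startsWith("spark.")) {
          sparkProperties -= k
          Console.err.println(s"Warning: Ignoring non-spark config property: $k=$v")
        }
        println(sparkProperties) // Map(spark.executor.memory -> 2g)
      }
    }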