-rw-r--r--  core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala                    5
-rw-r--r--  yarn/src/main/scala/org/apache/spark/scheduler/cluster/YarnClientSchedulerBackend.scala  17
2 files changed, 20 insertions, 2 deletions
diff --git a/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala b/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala
index 607b4df73d..f174bc1af5 100644
--- a/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala
@@ -214,7 +214,10 @@ private[spark] class SparkSubmitArguments(args: Seq[String], env: Map[String, St
""".stripMargin
}
- /** Fill in values by parsing user options. */
+ /**
+ * Fill in values by parsing user options.
+ * NOTE: Any changes here must be reflected in YarnClientSchedulerBackend.
+ */
private def parseOpts(opts: Seq[String]): Unit = {
val EQ_SEPARATED_OPT="""(--[^=]+)=(.+)""".r
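
For context, the EQ_SEPARATED_OPT regex visible in the surrounding code is what lets parseOpts accept --key=value style arguments by splitting them into separate key and value tokens before option matching. A minimal standalone sketch of that splitting step (the object and helper names here are illustrative, not part of this patch):

    object EqSeparatedOptSketch {
      // Same pattern as in SparkSubmitArguments: captures "--key" and "value" from "--key=value"
      val EQ_SEPARATED_OPT = """(--[^=]+)=(.+)""".r

      // Expand any "--key=value" token into two tokens; leave everything else untouched
      def splitEqSeparated(args: Seq[String]): Seq[String] = args.flatMap {
        case EQ_SEPARATED_OPT(key, value) => Seq(key, value)
        case other => Seq(other)
      }

      def main(args: Array[String]): Unit = {
        // Prints: List(--driver-memory, 4g, --verbose)
        println(splitEqSeparated(Seq("--driver-memory=4g", "--verbose")))
      }
    }
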
diff --git a/yarn/src/main/scala/org/apache/spark/scheduler/cluster/YarnClientSchedulerBackend.scala b/yarn/src/main/scala/org/apache/spark/scheduler/cluster/YarnClientSchedulerBackend.scala
index 2923e6729c..09597bd0e6 100644
--- a/yarn/src/main/scala/org/apache/spark/scheduler/cluster/YarnClientSchedulerBackend.scala
+++ b/yarn/src/main/scala/org/apache/spark/scheduler/cluster/YarnClientSchedulerBackend.scala
@@ -65,7 +65,8 @@ private[spark] class YarnClientSchedulerBackend(
*/
private def getExtraClientArguments: Seq[String] = {
val extraArgs = new ArrayBuffer[String]
- val optionTuples = // List of (target Client argument, environment variable, Spark property)
+ // List of (target Client argument, environment variable, Spark property)
+ val optionTuples =
List(
("--driver-memory", "SPARK_MASTER_MEMORY", "spark.master.memory"),
("--driver-memory", "SPARK_DRIVER_MEMORY", "spark.driver.memory"),
@@ -78,11 +79,25 @@ private[spark] class YarnClientSchedulerBackend(
("--queue", "SPARK_YARN_QUEUE", "spark.yarn.queue"),
("--name", "SPARK_YARN_APP_NAME", "spark.app.name")
)
+ // Warn against the following deprecated environment variables: env var -> suggestion
+ val deprecatedEnvVars = Map(
+ "SPARK_MASTER_MEMORY" -> "SPARK_DRIVER_MEMORY or --driver-memory through spark-submit",
+ "SPARK_WORKER_INSTANCES" -> "SPARK_WORKER_INSTANCES or --num-executors through spark-submit",
+ "SPARK_WORKER_MEMORY" -> "SPARK_EXECUTOR_MEMORY or --executor-memory through spark-submit",
+ "SPARK_WORKER_CORES" -> "SPARK_EXECUTOR_CORES or --executor-cores through spark-submit")
+ // Do the same for deprecated properties: property -> suggestion
+ val deprecatedProps = Map("spark.master.memory" -> "--driver-memory through spark-submit")
optionTuples.foreach { case (optionName, envVar, sparkProp) =>
if (System.getenv(envVar) != null) {
extraArgs += (optionName, System.getenv(envVar))
+ if (deprecatedEnvVars.contains(envVar)) {
+ logWarning(s"NOTE: $envVar is deprecated. Use ${deprecatedEnvVars(envVar)} instead.")
+ }
} else if (sc.getConf.contains(sparkProp)) {
extraArgs += (optionName, sc.getConf.get(sparkProp))
+ if (deprecatedProps.contains(sparkProp)) {
+ logWarning(s"NOTE: $sparkProp is deprecated. Use ${deprecatedProps(sparkProp)} instead.")
+ }
}
}
extraArgs
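
Taken together, the logic added above applies a simple precedence rule per (client argument, environment variable, Spark property) tuple: the environment variable wins if it is set, otherwise the Spark property is used, and whichever source was consulted triggers a warning if its name appears in the corresponding deprecation map. A standalone sketch of that pattern, assuming a plain Map in place of SparkConf and println in place of logWarning (the object name and the reduced option list are illustrative, not the full backend code):

    import scala.collection.mutable.ArrayBuffer

    object DeprecatedOptionSketch {
      def main(args: Array[String]): Unit = {
        // Pretend SparkConf contents; the real backend reads sc.getConf
        val conf = Map("spark.master.memory" -> "2g")

        // (target Client argument, environment variable, Spark property)
        val optionTuples = List(
          ("--driver-memory", "SPARK_MASTER_MEMORY", "spark.master.memory"),
          ("--driver-memory", "SPARK_DRIVER_MEMORY", "spark.driver.memory"))

        // Deprecated names and the replacement to suggest in the warning
        val deprecatedEnvVars = Map(
          "SPARK_MASTER_MEMORY" -> "SPARK_DRIVER_MEMORY or --driver-memory through spark-submit")
        val deprecatedProps = Map(
          "spark.master.memory" -> "--driver-memory through spark-submit")

        val extraArgs = new ArrayBuffer[String]
        optionTuples.foreach { case (optionName, envVar, sparkProp) =>
          sys.env.get(envVar) match {
            case Some(value) =>
              // Environment variable takes precedence over the Spark property
              extraArgs ++= Seq(optionName, value)
              deprecatedEnvVars.get(envVar).foreach { alt =>
                println(s"NOTE: $envVar is deprecated. Use $alt instead.")
              }
            case None =>
              conf.get(sparkProp).foreach { value =>
                extraArgs ++= Seq(optionName, value)
                deprecatedProps.get(sparkProp).foreach { alt =>
                  println(s"NOTE: $sparkProp is deprecated. Use $alt instead.")
                }
              }
          }
        }
        // If neither environment variable is set, this prints the spark.master.memory
        // warning and leaves extraArgs = ArrayBuffer(--driver-memory, 2g)
        println(extraArgs)
      }
    }
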