author     Yong Tang <yong.tang.github@outlook.com>   2017-03-10 13:33:58 -0800
committer  Marcelo Vanzin <vanzin@cloudera.com>       2017-03-10 13:34:01 -0800
commit     8f0490e22b4c7f1fdf381c70c5894d46b7f7e6fb (patch)
tree       69fd6b9c92caa17b58e05b847dff8a26ae7052f2 /core/src
parent     dd9049e0492cc70b629518fee9b3d1632374c612 (diff)
[SPARK-17979][SPARK-14453] Remove deprecated SPARK_YARN_USER_ENV and SPARK_JAVA_OPTS
This fix removes deprecated support for config `SPARK_YARN_USER_ENV`, as mentioned in SPARK-17979. This fix also removes deprecated support for the following:
```
SPARK_YARN_USER_ENV
SPARK_JAVA_OPTS
SPARK_CLASSPATH
SPARK_WORKER_INSTANCES
```
Related JIRAs:
[SPARK-14453]: https://issues.apache.org/jira/browse/SPARK-14453
[SPARK-12344]: https://issues.apache.org/jira/browse/SPARK-12344
[SPARK-15781]: https://issues.apache.org/jira/browse/SPARK-15781

Existing tests should pass.

Author: Yong Tang <yong.tang.github@outlook.com>

Closes #17212 from yongtang/SPARK-17979.
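For anyone migrating off the removed variables, here is a minimal sketch (not part of this commit; the option values are placeholders chosen for illustration) of the configuration keys that replace them, set programmatically on a SparkConf. The same keys can also be set in conf/spark-defaults.conf or passed with spark-submit --conf.
```scala
import org.apache.spark.SparkConf

// Illustrative replacements for the removed environment variables.
// All values below are placeholders, not recommendations.
val conf = new SparkConf()
  .setAppName("migration-example")
  // instead of SPARK_JAVA_OPTS (use --driver-java-options for the driver side)
  .set("spark.executor.extraJavaOptions", "-XX:+UseG1GC")
  // instead of SPARK_CLASSPATH (use --driver-class-path for the driver side)
  .set("spark.executor.extraClassPath", "/opt/extra/libs/*")
  // instead of SPARK_WORKER_INSTANCES (or spark-submit --num-executors)
  .set("spark.executor.instances", "4")
  // instead of SPARK_YARN_USER_ENV for executor-side environment variables;
  // spark.yarn.appMasterEnv.* covers the YARN application master
  .setExecutorEnv("EXAMPLE_VAR", "example-value")
```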
Diffstat (limited to 'core/src')
-rw-r--r--  core/src/main/scala/org/apache/spark/SparkConf.scala                       65
-rw-r--r--  core/src/main/scala/org/apache/spark/deploy/FaultToleranceTest.scala        3
-rw-r--r--  core/src/main/scala/org/apache/spark/launcher/WorkerCommandBuilder.scala    1
3 files changed, 1 insertion(+), 68 deletions(-)
diff --git a/core/src/main/scala/org/apache/spark/SparkConf.scala b/core/src/main/scala/org/apache/spark/SparkConf.scala
index fe912e639b..2a2ce0504d 100644
--- a/core/src/main/scala/org/apache/spark/SparkConf.scala
+++ b/core/src/main/scala/org/apache/spark/SparkConf.scala
@@ -518,71 +518,6 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging with Seria
}
}
- // Check for legacy configs
- sys.env.get("SPARK_JAVA_OPTS").foreach { value =>
- val warning =
- s"""
- |SPARK_JAVA_OPTS was detected (set to '$value').
- |This is deprecated in Spark 1.0+.
- |
- |Please instead use:
- | - ./spark-submit with conf/spark-defaults.conf to set defaults for an application
- | - ./spark-submit with --driver-java-options to set -X options for a driver
- | - spark.executor.extraJavaOptions to set -X options for executors
- | - SPARK_DAEMON_JAVA_OPTS to set java options for standalone daemons (master or worker)
- """.stripMargin
- logWarning(warning)
-
- for (key <- Seq(executorOptsKey, driverOptsKey)) {
- if (getOption(key).isDefined) {
- throw new SparkException(s"Found both $key and SPARK_JAVA_OPTS. Use only the former.")
- } else {
- logWarning(s"Setting '$key' to '$value' as a work-around.")
- set(key, value)
- }
- }
- }
-
- sys.env.get("SPARK_CLASSPATH").foreach { value =>
- val warning =
- s"""
- |SPARK_CLASSPATH was detected (set to '$value').
- |This is deprecated in Spark 1.0+.
- |
- |Please instead use:
- | - ./spark-submit with --driver-class-path to augment the driver classpath
- | - spark.executor.extraClassPath to augment the executor classpath
- """.stripMargin
- logWarning(warning)
-
- for (key <- Seq(executorClasspathKey, driverClassPathKey)) {
- if (getOption(key).isDefined) {
- throw new SparkException(s"Found both $key and SPARK_CLASSPATH. Use only the former.")
- } else {
- logWarning(s"Setting '$key' to '$value' as a work-around.")
- set(key, value)
- }
- }
- }
-
- if (!contains(sparkExecutorInstances)) {
- sys.env.get("SPARK_WORKER_INSTANCES").foreach { value =>
- val warning =
- s"""
- |SPARK_WORKER_INSTANCES was detected (set to '$value').
- |This is deprecated in Spark 1.0+.
- |
- |Please instead use:
- | - ./spark-submit with --num-executors to specify the number of executors
- | - Or set SPARK_EXECUTOR_INSTANCES
- | - spark.executor.instances to configure the number of instances in the spark config.
- """.stripMargin
- logWarning(warning)
-
- set("spark.executor.instances", value)
- }
- }
-
if (contains("spark.master") && get("spark.master").startsWith("yarn-")) {
val warning = s"spark.master ${get("spark.master")} is deprecated in Spark 2.0+, please " +
"instead use \"yarn\" with specified deploy mode."
diff --git a/core/src/main/scala/org/apache/spark/deploy/FaultToleranceTest.scala b/core/src/main/scala/org/apache/spark/deploy/FaultToleranceTest.scala
index 320af5cf97..c6307da61c 100644
--- a/core/src/main/scala/org/apache/spark/deploy/FaultToleranceTest.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/FaultToleranceTest.scala
@@ -43,8 +43,7 @@ import org.apache.spark.util.{ThreadUtils, Utils}
* Execute using
* ./bin/spark-class org.apache.spark.deploy.FaultToleranceTest
*
- * Make sure that the environment includes the following properties in SPARK_DAEMON_JAVA_OPTS
- * *and* SPARK_JAVA_OPTS:
+ * Make sure that the environment includes the following properties in SPARK_DAEMON_JAVA_OPTS:
* - spark.deploy.recoveryMode=ZOOKEEPER
* - spark.deploy.zookeeper.url=172.17.42.1:2181
* Note that 172.17.42.1 is the default docker ip for the host and 2181 is the default ZK port.
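As an aside (not part of the diff), the recovery properties the test expects can be illustrated as plain system properties; in a real deployment they reach the standalone daemons as -D flags inside SPARK_DAEMON_JAVA_OPTS.
```scala
// Sketch of the recovery settings FaultToleranceTest expects the daemons to see.
// In practice they are supplied via SPARK_DAEMON_JAVA_OPTS, e.g.
//   SPARK_DAEMON_JAVA_OPTS="-Dspark.deploy.recoveryMode=ZOOKEEPER -Dspark.deploy.zookeeper.url=172.17.42.1:2181"
// 172.17.42.1:2181 is the default Docker host IP and ZooKeeper port from the comment above.
sys.props("spark.deploy.recoveryMode") = "ZOOKEEPER"
sys.props("spark.deploy.zookeeper.url") = "172.17.42.1:2181"
```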
diff --git a/core/src/main/scala/org/apache/spark/launcher/WorkerCommandBuilder.scala b/core/src/main/scala/org/apache/spark/launcher/WorkerCommandBuilder.scala
index 3fd812e9fc..4216b26273 100644
--- a/core/src/main/scala/org/apache/spark/launcher/WorkerCommandBuilder.scala
+++ b/core/src/main/scala/org/apache/spark/launcher/WorkerCommandBuilder.scala
@@ -39,7 +39,6 @@ private[spark] class WorkerCommandBuilder(sparkHome: String, memoryMb: Int, comm
val cmd = buildJavaCommand(command.classPathEntries.mkString(File.pathSeparator))
cmd.add(s"-Xmx${memoryMb}M")
command.javaOpts.foreach(cmd.add)
- addOptionString(cmd, getenv("SPARK_JAVA_OPTS"))
cmd
}
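With the removed line gone, the worker command no longer picks up anything from the SPARK_JAVA_OPTS environment variable; beyond -Xmx, the only JVM flags appended are those already carried in command.javaOpts. A hedged sketch of where such flags now originate on the user side (the flag value is a placeholder):
```scala
import org.apache.spark.SparkConf

// Extra executor JVM flags now travel through Spark configuration, which is
// what ultimately populates command.javaOpts, rather than SPARK_JAVA_OPTS.
val conf = new SparkConf()
  .set("spark.executor.extraJavaOptions", "-verbose:gc")
```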