aboutsummaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorPatrick Wendell <pwendell@gmail.com>2014-05-16 22:58:47 -0700
committerPatrick Wendell <pwendell@gmail.com>2014-05-16 22:58:47 -0700
commit442808a7482b81c8de887c901b424683da62022e (patch)
treeb4d8322fbe48be562ba27500a55370c3e3bbda5a
parentcf6cbe9f76c3b322a968c836d039fc5b70d4ce43 (diff)
downloadspark-442808a7482b81c8de887c901b424683da62022e.tar.gz
spark-442808a7482b81c8de887c901b424683da62022e.tar.bz2
spark-442808a7482b81c8de887c901b424683da62022e.zip
Make deprecation warning less severe
Just a small change. I think it's good not to scare people who are using the old options. Author: Patrick Wendell <pwendell@gmail.com> Closes #810 from pwendell/warnings and squashes the following commits: cb8a311 [Patrick Wendell] Make deprecation warning less severe
-rw-r--r--core/src/main/scala/org/apache/spark/SparkConf.scala12
1 file changed, 6 insertions, 6 deletions
diff --git a/core/src/main/scala/org/apache/spark/SparkConf.scala b/core/src/main/scala/org/apache/spark/SparkConf.scala
index 800616622d..8ce4b91cae 100644
--- a/core/src/main/scala/org/apache/spark/SparkConf.scala
+++ b/core/src/main/scala/org/apache/spark/SparkConf.scala
@@ -238,10 +238,10 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging {
// Check for legacy configs
sys.env.get("SPARK_JAVA_OPTS").foreach { value =>
- val error =
+ val warning =
s"""
|SPARK_JAVA_OPTS was detected (set to '$value').
- |This has undefined behavior when running on a cluster and is deprecated in Spark 1.0+.
+ |This is deprecated in Spark 1.0+.
|
|Please instead use:
| - ./spark-submit with conf/spark-defaults.conf to set defaults for an application
@@ -249,7 +249,7 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging {
| - spark.executor.extraJavaOptions to set -X options for executors
| - SPARK_DAEMON_JAVA_OPTS to set java options for standalone daemons (master or worker)
""".stripMargin
- logError(error)
+ logWarning(warning)
for (key <- Seq(executorOptsKey, driverOptsKey)) {
if (getOption(key).isDefined) {
@@ -262,16 +262,16 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging {
}
sys.env.get("SPARK_CLASSPATH").foreach { value =>
- val error =
+ val warning =
s"""
|SPARK_CLASSPATH was detected (set to '$value').
- | This has undefined behavior when running on a cluster and is deprecated in Spark 1.0+.
+ |This is deprecated in Spark 1.0+.
|
|Please instead use:
| - ./spark-submit with --driver-class-path to augment the driver classpath
| - spark.executor.extraClassPath to augment the executor classpath
""".stripMargin
- logError(error)
+ logWarning(warning)
for (key <- Seq(executorClasspathKey, driverClassPathKey)) {
if (getOption(key).isDefined) {