author    Andrew Or <andrew@databricks.com>          2015-03-02 16:36:42 -0800
committer Patrick Wendell <patrick@databricks.com>   2015-03-02 16:36:42 -0800
commit    258d154c9f1afdd52dce19f03d81683ee34effac
tree      c5a94b159950d22902d90d0c5adcc1a014ad73bf
parent    6776cb33ea691f7843b956b3e80979282967e826
[SPARK-6048] SparkConf should not translate deprecated configs on set
There are multiple issues with translating on set outlined in the JIRA. This PR reverts the translation logic added to `SparkConf`. In the future, after the 1.3.0 release, we will figure out a way to reorganize the internal structure more elegantly. For now, let's preserve the existing semantics of `SparkConf`, since it's a public interface. Unfortunately this means duplicating some code for now, but this is all internal and we can always clean it up later.

Author: Andrew Or <andrew@databricks.com>

Closes #4799 from andrewor14/conf-set-translate and squashes the following commits:

11c525b [Andrew Or] Move warning to driver
10e77b5 [Andrew Or] Add documentation for deprecation precedence
a369cb1 [Andrew Or] Merge branch 'master' of github.com:apache/spark into conf-set-translate
c26a9e3 [Andrew Or] Revert all translate logic in SparkConf
fef6c9c [Andrew Or] Restore deprecation logic for spark.executor.userClassPathFirst
94b4dfa [Andrew Or] Translate on get, not set
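The semantics this change preserves can be seen end to end in a small program. The sketch below is illustrative only, not part of the patch, and assumes the post-patch behavior: `SparkConf.set` stores keys verbatim, and any fallback from a deprecated key to its replacement happens at the reading call site.

import org.apache.spark.SparkConf

object DeprecatedKeySemantics extends App {
  // Assumed post-patch behavior: keys are stored exactly as given.
  val conf = new SparkConf(false)
    .set("spark.files.userClassPathFirst", "true") // deprecated name, stored verbatim

  assert(conf.contains("spark.files.userClassPathFirst"))
  assert(!conf.contains("spark.executor.userClassPathFirst")) // no translation on set

  // Reading call sites fall back to the deprecated key explicitly:
  val userClassPathFirst = conf.getBoolean("spark.executor.userClassPathFirst",
    conf.getBoolean("spark.files.userClassPathFirst", false))
  assert(userClassPathFirst)
}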
 core/src/main/scala/org/apache/spark/SparkConf.scala          | 15
 core/src/main/scala/org/apache/spark/executor/Executor.scala  | 13
 core/src/test/scala/org/apache/spark/SparkConfSuite.scala     | 12
 docs/configuration.md                                         |  4
 yarn/src/main/scala/org/apache/spark/deploy/yarn/Client.scala |  3
 5 files changed, 25 insertions(+), 22 deletions(-)
diff --git a/core/src/main/scala/org/apache/spark/SparkConf.scala b/core/src/main/scala/org/apache/spark/SparkConf.scala
index 61b34d524a..2ca19f53d2 100644
--- a/core/src/main/scala/org/apache/spark/SparkConf.scala
+++ b/core/src/main/scala/org/apache/spark/SparkConf.scala
@@ -68,7 +68,7 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging {
     if (value == null) {
       throw new NullPointerException("null value for " + key)
     }
-    settings.put(translateConfKey(key, warn = true), value)
+    settings.put(key, value)
     this
   }
@@ -140,7 +140,7 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging {
   /** Set a parameter if it isn't already configured */
   def setIfMissing(key: String, value: String): SparkConf = {
-    settings.putIfAbsent(translateConfKey(key, warn = true), value)
+    settings.putIfAbsent(key, value)
     this
   }
@@ -176,7 +176,7 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging {
   /** Get a parameter as an Option */
   def getOption(key: String): Option[String] = {
-    Option(settings.get(translateConfKey(key)))
+    Option(settings.get(key))
   }
 
   /** Get all parameters as a list of pairs */
@@ -229,7 +229,7 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging {
   def getAppId: String = get("spark.app.id")
 
   /** Does the configuration contain a given parameter? */
-  def contains(key: String): Boolean = settings.containsKey(translateConfKey(key))
+  def contains(key: String): Boolean = settings.containsKey(key)
 
   /** Copy this object */
   override def clone: SparkConf = {
@@ -343,6 +343,13 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging {
         }
       }
     }
+
+    // Warn against the use of deprecated configs
+    deprecatedConfigs.values.foreach { dc =>
+      if (contains(dc.oldName)) {
+        dc.warn()
+      }
+    }
   }
 
   /**
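For reference, the warning pass added to `validateSettings` above follows a simple pattern: iterate over the known deprecated keys and warn for each one that is present. The standalone sketch below mirrors that pattern; `DeprecatedConfig` here is a simplified stand-in for Spark's private helper, and its field names and message are assumptions for illustration.

// DeprecatedConfig is a simplified stand-in; field names are illustrative.
case class DeprecatedConfig(oldName: String, newName: String, version: String) {
  def warn(): Unit =
    println(s"WARN: $oldName is deprecated as of Spark $version; use $newName instead.")
}

object DeprecationCheck extends App {
  val deprecatedConfigs: Map[String, DeprecatedConfig] = Map(
    "spark.files.userClassPathFirst" -> DeprecatedConfig(
      "spark.files.userClassPathFirst", "spark.executor.userClassPathFirst", "1.3.0"))

  // Mirror of the patch's loop: warn for each deprecated key that is present.
  def validate(settings: Map[String, String]): Unit =
    deprecatedConfigs.values.foreach { dc =>
      if (settings.contains(dc.oldName)) dc.warn()
    }

  validate(Map("spark.files.userClassPathFirst" -> "true")) // prints the warning
}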
diff --git a/core/src/main/scala/org/apache/spark/executor/Executor.scala b/core/src/main/scala/org/apache/spark/executor/Executor.scala
index b684fb7049..bed0a08d4d 100644
--- a/core/src/main/scala/org/apache/spark/executor/Executor.scala
+++ b/core/src/main/scala/org/apache/spark/executor/Executor.scala
@@ -92,6 +92,12 @@ private[spark] class Executor(
   private val executorActor = env.actorSystem.actorOf(
     Props(new ExecutorActor(executorId)), "ExecutorActor")
 
+  // Whether to load classes in user jars before those in Spark jars
+  private val userClassPathFirst: Boolean = {
+    conf.getBoolean("spark.executor.userClassPathFirst",
+      conf.getBoolean("spark.files.userClassPathFirst", false))
+  }
+
   // Create our ClassLoader
   // do this after SparkEnv creation so can access the SecurityManager
   private val urlClassLoader = createClassLoader()
@@ -309,7 +315,7 @@ private[spark] class Executor(
     val urls = userClassPath.toArray ++ currentJars.keySet.map { uri =>
       new File(uri.split("/").last).toURI.toURL
     }
-    if (conf.getBoolean("spark.executor.userClassPathFirst", false)) {
+    if (userClassPathFirst) {
       new ChildFirstURLClassLoader(urls, currentLoader)
     } else {
       new MutableURLClassLoader(urls, currentLoader)
@@ -324,14 +330,13 @@ private[spark] class Executor(
     val classUri = conf.get("spark.repl.class.uri", null)
     if (classUri != null) {
       logInfo("Using REPL class URI: " + classUri)
-      val userClassPathFirst: java.lang.Boolean =
-        conf.getBoolean("spark.executor.userClassPathFirst", false)
       try {
+        val _userClassPathFirst: java.lang.Boolean = userClassPathFirst
         val klass = Class.forName("org.apache.spark.repl.ExecutorClassLoader")
           .asInstanceOf[Class[_ <: ClassLoader]]
         val constructor = klass.getConstructor(classOf[SparkConf], classOf[String],
           classOf[ClassLoader], classOf[Boolean])
-        constructor.newInstance(conf, classUri, parent, userClassPathFirst)
+        constructor.newInstance(conf, classUri, parent, _userClassPathFirst)
       } catch {
         case _: ClassNotFoundException =>
           logError("Could not find org.apache.spark.repl.ExecutorClassLoader on classpath!")
diff --git a/core/src/test/scala/org/apache/spark/SparkConfSuite.scala b/core/src/test/scala/org/apache/spark/SparkConfSuite.scala
index ea6b73bc68..e08210ae60 100644
--- a/core/src/test/scala/org/apache/spark/SparkConfSuite.scala
+++ b/core/src/test/scala/org/apache/spark/SparkConfSuite.scala
@@ -197,18 +197,6 @@ class SparkConfSuite extends FunSuite with LocalSparkContext with ResetSystemPro
     serializer.newInstance().serialize(new StringBuffer())
   }
 
-  test("deprecated config keys") {
-    val conf = new SparkConf()
-      .set("spark.files.userClassPathFirst", "true")
-      .set("spark.yarn.user.classpath.first", "true")
-    assert(conf.contains("spark.files.userClassPathFirst"))
-    assert(conf.contains("spark.executor.userClassPathFirst"))
-    assert(conf.contains("spark.yarn.user.classpath.first"))
-    assert(conf.getBoolean("spark.files.userClassPathFirst", false))
-    assert(conf.getBoolean("spark.executor.userClassPathFirst", false))
-    assert(conf.getBoolean("spark.yarn.user.classpath.first", false))
-  }
-
 }
 
 class Class1 {}
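The removed test asserted the translate-on-set behavior that this patch reverts. Under the restored semantics, a key set under its deprecated name stays under that name. A hypothetical replacement test, not part of this patch, could assert exactly that:

import org.apache.spark.SparkConf
import org.scalatest.FunSuite

class NoTranslateOnSetSuite extends FunSuite {
  test("deprecated keys are stored verbatim") {
    val conf = new SparkConf(false).set("spark.files.userClassPathFirst", "true")
    assert(conf.contains("spark.files.userClassPathFirst"))
    assert(!conf.contains("spark.executor.userClassPathFirst"))
    // The value is still honored by readers that fall back to the old name:
    assert(conf.getBoolean("spark.executor.userClassPathFirst",
      conf.getBoolean("spark.files.userClassPathFirst", false)))
  }
}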
diff --git a/docs/configuration.md b/docs/configuration.md
index c11787b17e..ae90fe1f8f 100644
--- a/docs/configuration.md
+++ b/docs/configuration.md
@@ -70,7 +70,9 @@ each line consists of a key and a value separated by whitespace. For example:
 Any values specified as flags or in the properties file will be passed on to the application
 and merged with those specified through SparkConf. Properties set directly on the SparkConf
 take highest precedence, then flags passed to `spark-submit` or `spark-shell`, then options
-in the `spark-defaults.conf` file.
+in the `spark-defaults.conf` file. A few configuration keys have been renamed since earlier
+versions of Spark; in such cases, the older key names are still accepted, but take lower
+precedence than any instance of the newer key.
 
 ## Viewing Spark Properties
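The precedence rule described in the new documentation text can be demonstrated directly, assuming a reader that falls back to the older name the way the executor does:

import org.apache.spark.SparkConf

object RenamedKeyPrecedence extends App {
  def effectiveUserClassPathFirst(conf: SparkConf): Boolean =
    conf.getBoolean("spark.executor.userClassPathFirst",
      conf.getBoolean("spark.files.userClassPathFirst", false))

  // Only the older name is set: it is still accepted.
  val oldOnly = new SparkConf(false).set("spark.files.userClassPathFirst", "true")
  assert(effectiveUserClassPathFirst(oldOnly))

  // Both names are set: the newer key takes precedence.
  val both = new SparkConf(false)
    .set("spark.files.userClassPathFirst", "true")
    .set("spark.executor.userClassPathFirst", "false")
  assert(!effectiveUserClassPathFirst(both))
}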
diff --git a/yarn/src/main/scala/org/apache/spark/deploy/yarn/Client.scala b/yarn/src/main/scala/org/apache/spark/deploy/yarn/Client.scala
index 46d9df9348..61f8fc3f5a 100644
--- a/yarn/src/main/scala/org/apache/spark/deploy/yarn/Client.scala
+++ b/yarn/src/main/scala/org/apache/spark/deploy/yarn/Client.scala
@@ -955,7 +955,8 @@ object Client extends Logging {
     if (isDriver) {
       conf.getBoolean("spark.driver.userClassPathFirst", false)
     } else {
-      conf.getBoolean("spark.executor.userClassPathFirst", false)
+      conf.getBoolean("spark.executor.userClassPathFirst",
+        conf.getBoolean("spark.files.userClassPathFirst", false))
     }
   }
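Extracted into a standalone helper for illustration, the driver/executor split above behaves as follows. This is a sketch under the assumption that only these three keys are consulted; the real method lives in `Client`'s companion object.

import org.apache.spark.SparkConf

object UserClassPathFirstCheck extends App {
  // Standalone copy of the logic above, for illustration only.
  def isUserClassPathFirst(conf: SparkConf, isDriver: Boolean): Boolean =
    if (isDriver) {
      conf.getBoolean("spark.driver.userClassPathFirst", false)
    } else {
      conf.getBoolean("spark.executor.userClassPathFirst",
        conf.getBoolean("spark.files.userClassPathFirst", false))
    }

  val conf = new SparkConf(false).set("spark.files.userClassPathFirst", "true")
  assert(!isUserClassPathFirst(conf, isDriver = true))  // driver path ignores the old key
  assert(isUserClassPathFirst(conf, isDriver = false))  // executor path falls back to it
}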