about summary refs log tree commit diff
path: root/core/src
diff options
context:
space:
mode:
author	Patrick Wendell <pwendell@gmail.com>	2014-01-18 16:17:34 -0800
committer	Patrick Wendell <pwendell@gmail.com>	2014-01-18 16:20:00 -0800
commit	bf5699543bf69fc850dbc2676caac97fa27818da (patch)
tree	6a67ad6a1977c164b0f22d4206bb2248e073df18 /core/src
parent	aa981e4e97a11dbd5a4d012bfbdb395982968372 (diff)
download	spark-bf5699543bf69fc850dbc2676caac97fa27818da.tar.gz
spark-bf5699543bf69fc850dbc2676caac97fa27818da.tar.bz2
spark-bf5699543bf69fc850dbc2676caac97fa27818da.zip
Merge pull request #462 from mateiz/conf-file-fix
Remove Typesafe Config usage and conf files to fix nested property names With Typesafe Config we had the subtle problem of no longer allowing nested property names, which are used for a few of our properties: http://apache-spark-developers-list.1001551.n3.nabble.com/Config-properties-broken-in-master-td208.html This PR is for branch 0.9 but should be added into master too. (cherry picked from commit 34e911ce9a9f91f3259189861779032069257852) Signed-off-by: Patrick Wendell <pwendell@gmail.com>
Diffstat (limited to 'core/src')
-rw-r--r--	core/src/main/scala/org/apache/spark/SparkConf.scala	18
-rw-r--r--	core/src/test/resources/spark.conf	8
-rw-r--r--	core/src/test/scala/org/apache/spark/SparkConfSuite.scala	47
3 files changed, 35 insertions, 38 deletions
diff --git a/core/src/main/scala/org/apache/spark/SparkConf.scala b/core/src/main/scala/org/apache/spark/SparkConf.scala
index 369c6ce78f..951bfd79d0 100644
--- a/core/src/main/scala/org/apache/spark/SparkConf.scala
+++ b/core/src/main/scala/org/apache/spark/SparkConf.scala
@@ -20,19 +20,17 @@ package org.apache.spark
import scala.collection.JavaConverters._
import scala.collection.mutable.HashMap
-import com.typesafe.config.ConfigFactory
import java.io.{ObjectInputStream, ObjectOutputStream, IOException}
/**
* Configuration for a Spark application. Used to set various Spark parameters as key-value pairs.
*
* Most of the time, you would create a SparkConf object with `new SparkConf()`, which will load
- * values from both the `spark.*` Java system properties and any `spark.conf` on your application's
- * classpath (if it has one). In this case, system properties take priority over `spark.conf`, and
- * any parameters you set directly on the `SparkConf` object take priority over both of those.
+ * values from any `spark.*` Java system properties set in your application as well. In this case,
+ * parameters you set directly on the `SparkConf` object take priority over system properties.
*
* For unit tests, you can also call `new SparkConf(false)` to skip loading external settings and
- * get the same configuration no matter what is on the classpath.
+ * get the same configuration no matter what the system properties are.
*
* All setter methods in this class support chaining. For example, you can write
* `new SparkConf().setMaster("local").setAppName("My app")`.
@@ -40,7 +38,7 @@ import java.io.{ObjectInputStream, ObjectOutputStream, IOException}
* Note that once a SparkConf object is passed to Spark, it is cloned and can no longer be modified
* by the user. Spark does not support modifying the configuration at runtime.
*
- * @param loadDefaults whether to load values from the system properties and classpath
+ * @param loadDefaults whether to also load values from Java system properties
*/
class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging {
@@ -50,11 +48,9 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging {
private val settings = new HashMap[String, String]()
if (loadDefaults) {
- ConfigFactory.invalidateCaches()
- val typesafeConfig = ConfigFactory.systemProperties()
- .withFallback(ConfigFactory.parseResources("spark.conf"))
- for (e <- typesafeConfig.entrySet().asScala if e.getKey.startsWith("spark.")) {
- settings(e.getKey) = e.getValue.unwrapped.toString
+ // Load any spark.* system properties
+ for ((k, v) <- System.getProperties.asScala if k.startsWith("spark.")) {
+ settings(k) = v
}
}
diff --git a/core/src/test/resources/spark.conf b/core/src/test/resources/spark.conf
deleted file mode 100644
index aa4e751235..0000000000
--- a/core/src/test/resources/spark.conf
+++ /dev/null
@@ -1,8 +0,0 @@
-# A simple spark.conf file used only in our unit tests
-
-spark.test.intTestProperty = 1
-
-spark.test {
- stringTestProperty = "hi"
- listTestProperty = ["a", "b"]
-}
diff --git a/core/src/test/scala/org/apache/spark/SparkConfSuite.scala b/core/src/test/scala/org/apache/spark/SparkConfSuite.scala
index fa49974db4..87e9012622 100644
--- a/core/src/test/scala/org/apache/spark/SparkConfSuite.scala
+++ b/core/src/test/scala/org/apache/spark/SparkConfSuite.scala
@@ -20,35 +20,23 @@ package org.apache.spark
import org.scalatest.FunSuite
class SparkConfSuite extends FunSuite with LocalSparkContext {
- // This test uses the spark.conf in core/src/test/resources, which has a few test properties
- test("loading from spark.conf") {
- val conf = new SparkConf()
- assert(conf.get("spark.test.intTestProperty") === "1")
- assert(conf.get("spark.test.stringTestProperty") === "hi")
- // NOTE: we don't use list properties yet, but when we do, we'll have to deal with this syntax
- assert(conf.get("spark.test.listTestProperty") === "[a, b]")
- }
-
- // This test uses the spark.conf in core/src/test/resources, which has a few test properties
- test("system properties override spark.conf") {
+ test("loading from system properties") {
try {
- System.setProperty("spark.test.intTestProperty", "2")
+ System.setProperty("spark.test.testProperty", "2")
val conf = new SparkConf()
- assert(conf.get("spark.test.intTestProperty") === "2")
- assert(conf.get("spark.test.stringTestProperty") === "hi")
+ assert(conf.get("spark.test.testProperty") === "2")
} finally {
- System.clearProperty("spark.test.intTestProperty")
+ System.clearProperty("spark.test.testProperty")
}
}
test("initializing without loading defaults") {
try {
- System.setProperty("spark.test.intTestProperty", "2")
+ System.setProperty("spark.test.testProperty", "2")
val conf = new SparkConf(false)
- assert(!conf.contains("spark.test.intTestProperty"))
- assert(!conf.contains("spark.test.stringTestProperty"))
+ assert(!conf.contains("spark.test.testProperty"))
} finally {
- System.clearProperty("spark.test.intTestProperty")
+ System.clearProperty("spark.test.testProperty")
}
}
@@ -124,4 +112,25 @@ class SparkConfSuite extends FunSuite with LocalSparkContext {
assert(sc.master === "local[2]")
assert(sc.appName === "My other app")
}
+
+ test("nested property names") {
+ // This wasn't supported by some external conf parsing libraries
+ try {
+ System.setProperty("spark.test.a", "a")
+ System.setProperty("spark.test.a.b", "a.b")
+ System.setProperty("spark.test.a.b.c", "a.b.c")
+ val conf = new SparkConf()
+ assert(conf.get("spark.test.a") === "a")
+ assert(conf.get("spark.test.a.b") === "a.b")
+ assert(conf.get("spark.test.a.b.c") === "a.b.c")
+ conf.set("spark.test.a.b", "A.B")
+ assert(conf.get("spark.test.a") === "a")
+ assert(conf.get("spark.test.a.b") === "A.B")
+ assert(conf.get("spark.test.a.b.c") === "a.b.c")
+ } finally {
+ System.clearProperty("spark.test.a")
+ System.clearProperty("spark.test.a.b")
+ System.clearProperty("spark.test.a.b.c")
+ }
+ }
}