author    Marcelo Vanzin <vanzin@cloudera.com>  2016-08-15 11:09:54 -0700
committer Marcelo Vanzin <vanzin@cloudera.com>  2016-08-15 11:09:54 -0700
commit    5da6c4b24f512b63cd4e6ba7dd8968066a9396f5 (patch)
tree      7bf18c71a8c55eef6539bc484a73a2e20d97d4e1 /core/src/test/scala/org/apache
parent    564fe614c11deb657e0ac9e6b75e65370c48b7fe (diff)
[SPARK-16671][CORE][SQL] Consolidate code to do variable substitution.
Both core and sql have slightly different code that does variable substitution of config values. This change refactors that code and encapsulates the logic of reading config values and expanding variables in a new helper class, which can be configured so that both core and sql can use it without losing existing functionality; this also allows for easier testing and makes it easier to add more features in the future.

Tested with existing and new unit tests, and by running spark-shell with some configs referencing variables and making sure it behaved as expected.

Author: Marcelo Vanzin <vanzin@cloudera.com>

Closes #14468 from vanzin/SPARK-16671.
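For illustration, based on the new ConfigReaderSuite added below, the consolidated helper is exercised roughly like this (a sketch derived from the tests in this commit, not code from the change itself):

    import scala.collection.JavaConverters._
    import org.apache.spark.internal.config.{ConfigReader, MapProvider}

    // Back the reader with a plain map of config values.
    val reader = new ConfigReader(Map("key1" -> "value1").asJava)
    // Bind an env provider so "${env:...}" references resolve.
    reader.bindEnv(new MapProvider(Map("ENV1" -> "env1").asJava))

    reader.substitute("${key1}")     // "value1"
    reader.substitute("${env:ENV1}") // "env1"
    reader.substitute("${key3}")     // unresolved references stay as-is: "${key3}"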
Diffstat (limited to 'core/src/test/scala/org/apache')
-rw-r--r--  core/src/test/scala/org/apache/spark/internal/config/ConfigEntrySuite.scala   | 78
-rw-r--r--  core/src/test/scala/org/apache/spark/internal/config/ConfigReaderSuite.scala  | 62
2 files changed, 93 insertions(+), 47 deletions(-)
diff --git a/core/src/test/scala/org/apache/spark/internal/config/ConfigEntrySuite.scala b/core/src/test/scala/org/apache/spark/internal/config/ConfigEntrySuite.scala
index ebdb69f31e..91a96bdda6 100644
--- a/core/src/test/scala/org/apache/spark/internal/config/ConfigEntrySuite.scala
+++ b/core/src/test/scala/org/apache/spark/internal/config/ConfigEntrySuite.scala
@@ -24,6 +24,7 @@ import scala.collection.mutable.HashMap
import org.apache.spark.{SparkConf, SparkFunSuite}
import org.apache.spark.network.util.ByteUnit
+import org.apache.spark.util.SparkConfWithEnv
class ConfigEntrySuite extends SparkFunSuite {
@@ -161,25 +162,9 @@ class ConfigEntrySuite extends SparkFunSuite {
assert(conf.get(stringConf) === null)
}
- test("variable expansion") {
+ test("variable expansion of spark config entries") {
val env = Map("ENV1" -> "env1")
- val conf = HashMap("spark.value1" -> "value1", "spark.value2" -> "value2")
-
- def getenv(key: String): String = env.getOrElse(key, null)
-
- def expand(value: String): String = ConfigEntry.expand(value, conf.asJava, getenv, Set())
-
- assert(expand("${spark.value1}") === "value1")
- assert(expand("spark.value1 is: ${spark.value1}") === "spark.value1 is: value1")
- assert(expand("${spark.value1} ${spark.value2}") === "value1 value2")
- assert(expand("${spark.value3}") === "${spark.value3}")
-
- // Make sure anything that is not in the "spark." namespace is ignored.
- conf("notspark.key") = "value"
- assert(expand("${notspark.key}") === "${notspark.key}")
-
- assert(expand("${env:ENV1}") === "env1")
- assert(expand("${system:user.name}") === sys.props("user.name"))
+ val conf = new SparkConfWithEnv(env)
val stringConf = ConfigBuilder(testKey("stringForExpansion"))
.stringConf
@@ -193,45 +178,44 @@ class ConfigEntrySuite extends SparkFunSuite {
val fallbackConf = ConfigBuilder(testKey("fallbackForExpansion"))
.fallbackConf(intConf)
- assert(expand("${" + stringConf.key + "}") === "string1")
- assert(expand("${" + optionalConf.key + "}") === "${" + optionalConf.key + "}")
- assert(expand("${" + intConf.key + "}") === "42")
- assert(expand("${" + fallbackConf.key + "}") === "42")
-
- conf(optionalConf.key) = "string2"
- assert(expand("${" + optionalConf.key + "}") === "string2")
+ val refConf = ConfigBuilder(testKey("configReferenceTest"))
+ .stringConf
+ .createWithDefault(null)
- conf(fallbackConf.key) = "84"
- assert(expand("${" + fallbackConf.key + "}") === "84")
+ def ref(entry: ConfigEntry[_]): String = "${" + entry.key + "}"
- assert(expand("${spark.value1") === "${spark.value1")
+ def testEntryRef(entry: ConfigEntry[_], expected: String): Unit = {
+ conf.set(refConf, ref(entry))
+ assert(conf.get(refConf) === expected)
+ }
- // Unknown prefixes.
- assert(expand("${unknown:value}") === "${unknown:value}")
+ testEntryRef(stringConf, "string1")
+ testEntryRef(intConf, "42")
+ testEntryRef(fallbackConf, "42")
- // Chained references.
- val conf1 = ConfigBuilder(testKey("conf1"))
- .stringConf
- .createWithDefault("value1")
- val conf2 = ConfigBuilder(testKey("conf2"))
- .stringConf
- .createWithDefault("value2")
+ testEntryRef(optionalConf, ref(optionalConf))
- conf(conf2.key) = "${" + conf1.key + "}"
- assert(expand("${" + conf2.key + "}") === conf1.defaultValueString)
+ conf.set(optionalConf, ref(stringConf))
+ testEntryRef(optionalConf, "string1")
- // Circular references.
- conf(conf1.key) = "${" + conf2.key + "}"
- val e = intercept[IllegalArgumentException] {
- expand("${" + conf2.key + "}")
- }
- assert(e.getMessage().contains("Circular"))
+ conf.set(optionalConf, ref(fallbackConf))
+ testEntryRef(optionalConf, "42")
// Default string values with variable references.
val parameterizedStringConf = ConfigBuilder(testKey("stringWithParams"))
.stringConf
- .createWithDefault("${spark.value1}")
- assert(parameterizedStringConf.readFrom(conf.asJava, getenv) === conf("spark.value1"))
+ .createWithDefault(ref(stringConf))
+ assert(conf.get(parameterizedStringConf) === conf.get(stringConf))
+
+ // Make sure SparkConf's env override works.
+ conf.set(refConf, "${env:ENV1}")
+ assert(conf.get(refConf) === env("ENV1"))
+
+ // Conf with null default value is not expanded.
+ val nullConf = ConfigBuilder(testKey("nullString"))
+ .stringConf
+ .createWithDefault(null)
+ testEntryRef(nullConf, ref(nullConf))
}
}
diff --git a/core/src/test/scala/org/apache/spark/internal/config/ConfigReaderSuite.scala b/core/src/test/scala/org/apache/spark/internal/config/ConfigReaderSuite.scala
new file mode 100644
index 0000000000..be57cc34e4
--- /dev/null
+++ b/core/src/test/scala/org/apache/spark/internal/config/ConfigReaderSuite.scala
@@ -0,0 +1,62 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.internal.config
+
+import scala.collection.JavaConverters._
+
+import org.apache.spark.SparkFunSuite
+
+class ConfigReaderSuite extends SparkFunSuite {
+
+ test("variable expansion") {
+ val env = Map("ENV1" -> "env1")
+ val conf = Map("key1" -> "value1", "key2" -> "value2")
+
+ val reader = new ConfigReader(conf.asJava)
+ reader.bindEnv(new MapProvider(env.asJava))
+
+ assert(reader.substitute(null) === null)
+ assert(reader.substitute("${key1}") === "value1")
+ assert(reader.substitute("key1 is: ${key1}") === "key1 is: value1")
+ assert(reader.substitute("${key1} ${key2}") === "value1 value2")
+ assert(reader.substitute("${key3}") === "${key3}")
+ assert(reader.substitute("${env:ENV1}") === "env1")
+ assert(reader.substitute("${system:user.name}") === sys.props("user.name"))
+ assert(reader.substitute("${key1") === "${key1")
+
+ // Unknown prefixes.
+ assert(reader.substitute("${unknown:value}") === "${unknown:value}")
+ }
+
+ test("circular references") {
+ val conf = Map("key1" -> "${key2}", "key2" -> "${key1}")
+ val reader = new ConfigReader(conf.asJava)
+ val e = intercept[IllegalArgumentException] {
+ reader.substitute("${key1}")
+ }
+ assert(e.getMessage().contains("Circular"))
+ }
+
+ test("spark conf provider filters config keys") {
+ val conf = Map("nonspark.key" -> "value", "spark.key" -> "value")
+ val reader = new ConfigReader(new SparkConfigProvider(conf.asJava))
+ assert(reader.get("nonspark.key") === None)
+ assert(reader.get("spark.key") === Some("value"))
+ }
+
+}