author     Marcelo Vanzin <vanzin@cloudera.com>  2016-07-20 18:24:35 -0700
committer  Marcelo Vanzin <vanzin@cloudera.com>  2016-07-20 18:24:35 -0700
commit     75a06aa256aa256c112555609a93c1e1dbb1cb4b (patch)
tree       1cc8d46270741d5650a5441b957f79940280b30f /core/src/test
parent     e651900bd562cc29a3eb13e92a5147979e347f61 (diff)
[SPARK-16272][CORE] Allow config values to reference conf, env, system props.
This allows configuration to be more flexible, for example, when the cluster does not have a homogeneous configuration (e.g. packages are installed on different paths on different nodes). By allowing one to reference the environment from the conf, it becomes possible to work around those differences in certain cases.

As part of the implementation, ConfigEntry now keeps track of all "known" configs (i.e. those created through the use of ConfigBuilder), since that list is used by the resolution code. This duplicates some code in SQLConf, which could potentially be merged with this now. It will also make it simpler to implement some missing features, such as filtering which configs show up in the UI or in event logs - those are not part of this change.

Another change is in the way ConfigEntry reads config data: it now takes a string map and a function that reads env variables, so that it can be called from both SparkConf and SQLConf. This makes both places follow the same read path, instead of having to replicate certain logic in SQLConf. There are still a couple of methods in SQLConf that peek into fields of ConfigEntry directly, though.

Tested via unit tests, and by using the new variable expansion functionality in a shell session with a custom spark.sql.hive.metastore.jars value.

Author: Marcelo Vanzin <vanzin@cloudera.com>

Closes #14022 from vanzin/SPARK-16272.
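For reference, a minimal sketch of the expansion syntax introduced here, as exercised by the tests below. The key names (spark.example.*) and the environment variable EXTRA_CLASSPATH are hypothetical, and values are only expanded when read through a ConfigEntry, not via plain string lookups:

    import org.apache.spark.SparkConf

    val conf = new SparkConf()
      // Reference another config value in the "spark." namespace.
      .set("spark.example.tag", "${spark.app.name}-run")
      // Reference an environment variable (hypothetical name).
      .set("spark.example.path", "${env:EXTRA_CLASSPATH}/jars")
      // Reference a JVM system property.
      .set("spark.example.owner", "${system:user.name}")

References that cannot be resolved, references outside the "spark." namespace, and unknown prefixes are left untouched; circular references fail with an IllegalArgumentException (all covered by the tests below).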
Diffstat (limited to 'core/src/test')
-rw-r--r--  core/src/test/scala/org/apache/spark/internal/config/ConfigEntrySuite.scala | 112
1 file changed, 97 insertions(+), 15 deletions(-)
diff --git a/core/src/test/scala/org/apache/spark/internal/config/ConfigEntrySuite.scala b/core/src/test/scala/org/apache/spark/internal/config/ConfigEntrySuite.scala
index 337fd7e85e..ebdb69f31e 100644
--- a/core/src/test/scala/org/apache/spark/internal/config/ConfigEntrySuite.scala
+++ b/core/src/test/scala/org/apache/spark/internal/config/ConfigEntrySuite.scala
@@ -19,14 +19,21 @@ package org.apache.spark.internal.config
import java.util.concurrent.TimeUnit
+import scala.collection.JavaConverters._
+import scala.collection.mutable.HashMap
+
import org.apache.spark.{SparkConf, SparkFunSuite}
import org.apache.spark.network.util.ByteUnit
class ConfigEntrySuite extends SparkFunSuite {
+ private val PREFIX = "spark.ConfigEntrySuite"
+
+ private def testKey(name: String): String = s"$PREFIX.$name"
+
test("conf entry: int") {
val conf = new SparkConf()
- val iConf = ConfigBuilder("spark.int").intConf.createWithDefault(1)
+ val iConf = ConfigBuilder(testKey("int")).intConf.createWithDefault(1)
assert(conf.get(iConf) === 1)
conf.set(iConf, 2)
assert(conf.get(iConf) === 2)
@@ -34,21 +41,21 @@ class ConfigEntrySuite extends SparkFunSuite {
test("conf entry: long") {
val conf = new SparkConf()
- val lConf = ConfigBuilder("spark.long").longConf.createWithDefault(0L)
+ val lConf = ConfigBuilder(testKey("long")).longConf.createWithDefault(0L)
conf.set(lConf, 1234L)
assert(conf.get(lConf) === 1234L)
}
test("conf entry: double") {
val conf = new SparkConf()
- val dConf = ConfigBuilder("spark.double").doubleConf.createWithDefault(0.0)
+ val dConf = ConfigBuilder(testKey("double")).doubleConf.createWithDefault(0.0)
conf.set(dConf, 20.0)
assert(conf.get(dConf) === 20.0)
}
test("conf entry: boolean") {
val conf = new SparkConf()
- val bConf = ConfigBuilder("spark.boolean").booleanConf.createWithDefault(false)
+ val bConf = ConfigBuilder(testKey("boolean")).booleanConf.createWithDefault(false)
assert(!conf.get(bConf))
conf.set(bConf, true)
assert(conf.get(bConf))
@@ -56,7 +63,7 @@ class ConfigEntrySuite extends SparkFunSuite {
test("conf entry: optional") {
val conf = new SparkConf()
- val optionalConf = ConfigBuilder("spark.optional").intConf.createOptional
+ val optionalConf = ConfigBuilder(testKey("optional")).intConf.createOptional
assert(conf.get(optionalConf) === None)
conf.set(optionalConf, 1)
assert(conf.get(optionalConf) === Some(1))
@@ -64,8 +71,8 @@ class ConfigEntrySuite extends SparkFunSuite {
test("conf entry: fallback") {
val conf = new SparkConf()
- val parentConf = ConfigBuilder("spark.int").intConf.createWithDefault(1)
- val confWithFallback = ConfigBuilder("spark.fallback").fallbackConf(parentConf)
+ val parentConf = ConfigBuilder(testKey("parent")).intConf.createWithDefault(1)
+ val confWithFallback = ConfigBuilder(testKey("fallback")).fallbackConf(parentConf)
assert(conf.get(confWithFallback) === 1)
conf.set(confWithFallback, 2)
assert(conf.get(parentConf) === 1)
@@ -74,7 +81,8 @@ class ConfigEntrySuite extends SparkFunSuite {
test("conf entry: time") {
val conf = new SparkConf()
- val time = ConfigBuilder("spark.time").timeConf(TimeUnit.SECONDS).createWithDefaultString("1h")
+ val time = ConfigBuilder(testKey("time")).timeConf(TimeUnit.SECONDS)
+ .createWithDefaultString("1h")
assert(conf.get(time) === 3600L)
conf.set(time.key, "1m")
assert(conf.get(time) === 60L)
@@ -82,7 +90,8 @@ class ConfigEntrySuite extends SparkFunSuite {
test("conf entry: bytes") {
val conf = new SparkConf()
- val bytes = ConfigBuilder("spark.bytes").bytesConf(ByteUnit.KiB).createWithDefaultString("1m")
+ val bytes = ConfigBuilder(testKey("bytes")).bytesConf(ByteUnit.KiB)
+ .createWithDefaultString("1m")
assert(conf.get(bytes) === 1024L)
conf.set(bytes.key, "1k")
assert(conf.get(bytes) === 1L)
@@ -90,7 +99,7 @@ class ConfigEntrySuite extends SparkFunSuite {
test("conf entry: string seq") {
val conf = new SparkConf()
- val seq = ConfigBuilder("spark.seq").stringConf.toSequence.createWithDefault(Seq())
+ val seq = ConfigBuilder(testKey("seq")).stringConf.toSequence.createWithDefault(Seq())
conf.set(seq.key, "1,,2, 3 , , 4")
assert(conf.get(seq) === Seq("1", "2", "3", "4"))
conf.set(seq, Seq("1", "2"))
@@ -99,7 +108,7 @@ class ConfigEntrySuite extends SparkFunSuite {
test("conf entry: int seq") {
val conf = new SparkConf()
- val seq = ConfigBuilder("spark.seq").intConf.toSequence.createWithDefault(Seq())
+ val seq = ConfigBuilder(testKey("intSeq")).intConf.toSequence.createWithDefault(Seq())
conf.set(seq.key, "1,,2, 3 , , 4")
assert(conf.get(seq) === Seq(1, 2, 3, 4))
conf.set(seq, Seq(1, 2))
@@ -108,7 +117,7 @@ class ConfigEntrySuite extends SparkFunSuite {
test("conf entry: transformation") {
val conf = new SparkConf()
- val transformationConf = ConfigBuilder("spark.transformation")
+ val transformationConf = ConfigBuilder(testKey("transformation"))
.stringConf
.transform(_.toLowerCase())
.createWithDefault("FOO")
@@ -120,7 +129,7 @@ class ConfigEntrySuite extends SparkFunSuite {
test("conf entry: valid values check") {
val conf = new SparkConf()
- val enum = ConfigBuilder("spark.enum")
+ val enum = ConfigBuilder(testKey("enum"))
.stringConf
.checkValues(Set("a", "b", "c"))
.createWithDefault("a")
@@ -138,7 +147,7 @@ class ConfigEntrySuite extends SparkFunSuite {
test("conf entry: conversion error") {
val conf = new SparkConf()
- val conversionTest = ConfigBuilder("spark.conversionTest").doubleConf.createOptional
+ val conversionTest = ConfigBuilder(testKey("conversionTest")).doubleConf.createOptional
conf.set(conversionTest.key, "abc")
val conversionError = intercept[IllegalArgumentException] {
conf.get(conversionTest)
@@ -148,8 +157,81 @@ class ConfigEntrySuite extends SparkFunSuite {
test("default value handling is null-safe") {
val conf = new SparkConf()
- val stringConf = ConfigBuilder("spark.string").stringConf.createWithDefault(null)
+ val stringConf = ConfigBuilder(testKey("string")).stringConf.createWithDefault(null)
assert(conf.get(stringConf) === null)
}
+ test("variable expansion") {
+ val env = Map("ENV1" -> "env1")
+ val conf = HashMap("spark.value1" -> "value1", "spark.value2" -> "value2")
+
+ def getenv(key: String): String = env.getOrElse(key, null)
+
+ def expand(value: String): String = ConfigEntry.expand(value, conf.asJava, getenv, Set())
+
+ assert(expand("${spark.value1}") === "value1")
+ assert(expand("spark.value1 is: ${spark.value1}") === "spark.value1 is: value1")
+ assert(expand("${spark.value1} ${spark.value2}") === "value1 value2")
+ assert(expand("${spark.value3}") === "${spark.value3}")
+
+ // Make sure anything that is not in the "spark." namespace is ignored.
+ conf("notspark.key") = "value"
+ assert(expand("${notspark.key}") === "${notspark.key}")
+
+ assert(expand("${env:ENV1}") === "env1")
+ assert(expand("${system:user.name}") === sys.props("user.name"))
+
+ val stringConf = ConfigBuilder(testKey("stringForExpansion"))
+ .stringConf
+ .createWithDefault("string1")
+ val optionalConf = ConfigBuilder(testKey("optionForExpansion"))
+ .stringConf
+ .createOptional
+ val intConf = ConfigBuilder(testKey("intForExpansion"))
+ .intConf
+ .createWithDefault(42)
+ val fallbackConf = ConfigBuilder(testKey("fallbackForExpansion"))
+ .fallbackConf(intConf)
+
+ assert(expand("${" + stringConf.key + "}") === "string1")
+ assert(expand("${" + optionalConf.key + "}") === "${" + optionalConf.key + "}")
+ assert(expand("${" + intConf.key + "}") === "42")
+ assert(expand("${" + fallbackConf.key + "}") === "42")
+
+ conf(optionalConf.key) = "string2"
+ assert(expand("${" + optionalConf.key + "}") === "string2")
+
+ conf(fallbackConf.key) = "84"
+ assert(expand("${" + fallbackConf.key + "}") === "84")
+
+ assert(expand("${spark.value1") === "${spark.value1")
+
+ // Unknown prefixes.
+ assert(expand("${unknown:value}") === "${unknown:value}")
+
+ // Chained references.
+ val conf1 = ConfigBuilder(testKey("conf1"))
+ .stringConf
+ .createWithDefault("value1")
+ val conf2 = ConfigBuilder(testKey("conf2"))
+ .stringConf
+ .createWithDefault("value2")
+
+ conf(conf2.key) = "${" + conf1.key + "}"
+ assert(expand("${" + conf2.key + "}") === conf1.defaultValueString)
+
+ // Circular references.
+ conf(conf1.key) = "${" + conf2.key + "}"
+ val e = intercept[IllegalArgumentException] {
+ expand("${" + conf2.key + "}")
+ }
+ assert(e.getMessage().contains("Circular"))
+
+ // Default string values with variable references.
+ val parameterizedStringConf = ConfigBuilder(testKey("stringWithParams"))
+ .stringConf
+ .createWithDefault("${spark.value1}")
+ assert(parameterizedStringConf.readFrom(conf.asJava, getenv) === conf("spark.value1"))
+ }
+
}
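
As a usage sketch (assuming a Spark checkout of this era), the suite above could be run on its own through sbt, e.g.:

    ./build/sbt "core/testOnly org.apache.spark.internal.config.ConfigEntrySuite"

(older sbt versions spell the task test-only; "core" is the sbt project for the core module.)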