aboutsummaryrefslogtreecommitdiff
path: root/core/src
diff options
context:
space:
mode:
authorDilip Biswal <dbiswal@us.ibm.com>2017-04-06 08:33:14 +0800
committerWenchen Fan <wenchen@databricks.com>2017-04-06 08:33:14 +0800
commit9d68c67235481fa33983afb766916b791ca8212a (patch)
tree32fcbf67bc836f44b5e1c7770511280c904beb54 /core/src
parent9543fc0e08a21680961689ea772441c49fcd52ee (diff)
downloadspark-9d68c67235481fa33983afb766916b791ca8212a.tar.gz
spark-9d68c67235481fa33983afb766916b791ca8212a.tar.bz2
spark-9d68c67235481fa33983afb766916b791ca8212a.zip
[SPARK-20204][SQL][FOLLOWUP] SQLConf should react to change in default timezone settings
## What changes were proposed in this pull request? Make sure SESSION_LOCAL_TIMEZONE reflects the change in the JVM's default timezone setting. Currently several timezone-related tests fail as the change to the default timezone is not picked up by SQLConf. ## How was this patch tested? Added a unit test in ConfigEntrySuite. Author: Dilip Biswal <dbiswal@us.ibm.com> Closes #17537 from dilipbiswal/timezone_debug.
Diffstat (limited to 'core/src')
-rw-r--r--core/src/main/scala/org/apache/spark/internal/config/ConfigBuilder.scala8
-rw-r--r--core/src/main/scala/org/apache/spark/internal/config/ConfigEntry.scala17
-rw-r--r--core/src/test/scala/org/apache/spark/internal/config/ConfigEntrySuite.scala9
3 files changed, 34 insertions, 0 deletions
diff --git a/core/src/main/scala/org/apache/spark/internal/config/ConfigBuilder.scala b/core/src/main/scala/org/apache/spark/internal/config/ConfigBuilder.scala
index b9921138cc..e5d60a7ef0 100644
--- a/core/src/main/scala/org/apache/spark/internal/config/ConfigBuilder.scala
+++ b/core/src/main/scala/org/apache/spark/internal/config/ConfigBuilder.scala
@@ -147,6 +147,14 @@ private[spark] class TypedConfigBuilder[T](
}
}
+ /** Creates a [[ConfigEntry]] with a function to determine the default value */
+ def createWithDefaultFunction(defaultFunc: () => T): ConfigEntry[T] = {
+ val entry = new ConfigEntryWithDefaultFunction[T](parent.key, defaultFunc, converter,
+ stringConverter, parent._doc, parent._public)
+ parent._onCreate.foreach(_ (entry))
+ entry
+ }
+
/**
* Creates a [[ConfigEntry]] that has a default value. The default value is provided as a
* [[String]] and must be a valid value for the entry.
diff --git a/core/src/main/scala/org/apache/spark/internal/config/ConfigEntry.scala b/core/src/main/scala/org/apache/spark/internal/config/ConfigEntry.scala
index 4f3e42bb3c..e86712e84d 100644
--- a/core/src/main/scala/org/apache/spark/internal/config/ConfigEntry.scala
+++ b/core/src/main/scala/org/apache/spark/internal/config/ConfigEntry.scala
@@ -78,7 +78,24 @@ private class ConfigEntryWithDefault[T] (
def readFrom(reader: ConfigReader): T = {
reader.get(key).map(valueConverter).getOrElse(_defaultValue)
}
+}
+
+private class ConfigEntryWithDefaultFunction[T] (
+ key: String,
+ _defaultFunction: () => T,
+ valueConverter: String => T,
+ stringConverter: T => String,
+ doc: String,
+ isPublic: Boolean)
+ extends ConfigEntry(key, valueConverter, stringConverter, doc, isPublic) {
+
+ override def defaultValue: Option[T] = Some(_defaultFunction())
+ override def defaultValueString: String = stringConverter(_defaultFunction())
+
+ def readFrom(reader: ConfigReader): T = {
+ reader.get(key).map(valueConverter).getOrElse(_defaultFunction())
+ }
}
private class ConfigEntryWithDefaultString[T] (
diff --git a/core/src/test/scala/org/apache/spark/internal/config/ConfigEntrySuite.scala b/core/src/test/scala/org/apache/spark/internal/config/ConfigEntrySuite.scala
index 3ff7e84d73..e2ba0d2a53 100644
--- a/core/src/test/scala/org/apache/spark/internal/config/ConfigEntrySuite.scala
+++ b/core/src/test/scala/org/apache/spark/internal/config/ConfigEntrySuite.scala
@@ -251,4 +251,13 @@ class ConfigEntrySuite extends SparkFunSuite {
.createWithDefault(null)
testEntryRef(nullConf, ref(nullConf))
}
+
+ test("conf entry : default function") {
+ var data = 0
+ val conf = new SparkConf()
+ val iConf = ConfigBuilder(testKey("intval")).intConf.createWithDefaultFunction(() => data)
+ assert(conf.get(iConf) === 0)
+ data = 2
+ assert(conf.get(iConf) === 2)
+ }
}