author	Liwei Lin <lwlin7@gmail.com>	2017-02-09 00:48:47 -0500
committer	gatorsmile <gatorsmile@gmail.com>	2017-02-09 00:48:47 -0500
commit	9d9d67c7957f7cbbdbe889bdbc073568b2bfbb16 (patch)
tree	1f919b0c976d20e0493923b4aa1869f8125fb51a /core/src/test/scala/org
parent	50a991264c16e4c4126e88668ef4fbd048c782b8 (diff)
[SPARK-19265][SQL][FOLLOW-UP] Configurable `tableRelationCache` maximum size
## What changes were proposed in this pull request?

SPARK-19265 made the table relation cache general; this follow-up makes the maximum size of `tableRelationCache` configurable. To support sanity checks on configuration values, this patch also adds a `checkValue()` method to `TypedConfigBuilder`.

## How was this patch tested?

New test case: `test("conf entry: checkValue()")`

Author: Liwei Lin <lwlin7@gmail.com>

Closes #16736 from lw-lin/conf.
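For context, here is a minimal sketch of how the new `checkValue()` hook can guard a size-style setting. The config key, doc string, and default below are illustrative assumptions, not the actual entry introduced by this follow-up, and `ConfigBuilder` is `private[spark]`, so real usage would live inside Spark's own packages.

```scala
import org.apache.spark.internal.config.ConfigBuilder

object ExampleConfigs {
  // Hypothetical entry: the validator rejects negative sizes both when the
  // entry is defined with a bad default (createWithDefault throws) and when a
  // bad value is read back from SparkConf, as exercised by the new test below.
  val TABLE_RELATION_CACHE_SIZE = ConfigBuilder("spark.example.tableRelationCacheSize")
    .doc("Maximum number of entries kept in the table relation cache.")
    .intConf
    .checkValue(size => size >= 0, "The cache size must be non-negative")
    .createWithDefault(1000)
}
```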
Diffstat (limited to 'core/src/test/scala/org')
-rw-r--r--	core/src/test/scala/org/apache/spark/internal/config/ConfigEntrySuite.scala | 22
1 file changed, 22 insertions(+), 0 deletions(-)
diff --git a/core/src/test/scala/org/apache/spark/internal/config/ConfigEntrySuite.scala b/core/src/test/scala/org/apache/spark/internal/config/ConfigEntrySuite.scala
index 91a96bdda6..71eed46488 100644
--- a/core/src/test/scala/org/apache/spark/internal/config/ConfigEntrySuite.scala
+++ b/core/src/test/scala/org/apache/spark/internal/config/ConfigEntrySuite.scala
@@ -128,6 +128,28 @@ class ConfigEntrySuite extends SparkFunSuite {
assert(conf.get(transformationConf) === "bar")
}
+ test("conf entry: checkValue()") {
+ def createEntry(default: Int): ConfigEntry[Int] =
+ ConfigBuilder(testKey("checkValue"))
+ .intConf
+ .checkValue(value => value >= 0, "value must be non-negative")
+ .createWithDefault(default)
+
+ val conf = new SparkConf()
+
+ val entry = createEntry(10)
+ conf.set(entry, -1)
+ val e1 = intercept[IllegalArgumentException] {
+ conf.get(entry)
+ }
+ assert(e1.getMessage == "value must be non-negative")
+
+ val e2 = intercept[IllegalArgumentException] {
+ createEntry(-1)
+ }
+ assert(e2.getMessage == "value must be non-negative")
+ }
+
test("conf entry: valid values check") {
val conf = new SparkConf()
val enum = ConfigBuilder(testKey("enum"))