diff options
author | Liwei Lin <lwlin7@gmail.com> | 2017-02-09 00:48:47 -0500 |
---|---|---|
committer | gatorsmile <gatorsmile@gmail.com> | 2017-02-09 00:48:47 -0500 |
commit | 9d9d67c7957f7cbbdbe889bdbc073568b2bfbb16 (patch) | |
tree | 1f919b0c976d20e0493923b4aa1869f8125fb51a /core | |
parent | 50a991264c16e4c4126e88668ef4fbd048c782b8 (diff) | |
download | spark-9d9d67c7957f7cbbdbe889bdbc073568b2bfbb16.tar.gz spark-9d9d67c7957f7cbbdbe889bdbc073568b2bfbb16.tar.bz2 spark-9d9d67c7957f7cbbdbe889bdbc073568b2bfbb16.zip |
[SPARK-19265][SQL][FOLLOW-UP] Configurable `tableRelationCache` maximum size
## What changes were proposed in this pull request?
SPARK-19265 had made the table relation cache general; this follow-up aims to make `tableRelationCache`'s maximum size configurable.
In order to do a sanity check, this patch also adds a `checkValue()` method to `TypedConfigBuilder`.
## How was this patch tested?
new test case: `test("conf entry: checkValue()")`
Author: Liwei Lin <lwlin7@gmail.com>
Closes #16736 from lw-lin/conf.
Diffstat (limited to 'core')
-rw-r--r-- | core/src/main/scala/org/apache/spark/internal/config/ConfigBuilder.scala | 8 | ||||
-rw-r--r-- | core/src/test/scala/org/apache/spark/internal/config/ConfigEntrySuite.scala | 22 |
2 files changed, 30 insertions, 0 deletions
diff --git a/core/src/main/scala/org/apache/spark/internal/config/ConfigBuilder.scala b/core/src/main/scala/org/apache/spark/internal/config/ConfigBuilder.scala index 0f5c8a9e02..a177e66645 100644 --- a/core/src/main/scala/org/apache/spark/internal/config/ConfigBuilder.scala +++ b/core/src/main/scala/org/apache/spark/internal/config/ConfigBuilder.scala @@ -90,6 +90,14 @@ private[spark] class TypedConfigBuilder[T]( new TypedConfigBuilder(parent, s => fn(converter(s)), stringConverter) } + /** Checks if the user-provided value for the config matches the validator. */ + def checkValue(validator: T => Boolean, errorMsg: String): TypedConfigBuilder[T] = { + transform { v => + if (!validator(v)) throw new IllegalArgumentException(errorMsg) + v + } + } + /** Check that user-provided values for the config match a pre-defined set. */ def checkValues(validValues: Set[T]): TypedConfigBuilder[T] = { transform { v => diff --git a/core/src/test/scala/org/apache/spark/internal/config/ConfigEntrySuite.scala b/core/src/test/scala/org/apache/spark/internal/config/ConfigEntrySuite.scala index 91a96bdda6..71eed46488 100644 --- a/core/src/test/scala/org/apache/spark/internal/config/ConfigEntrySuite.scala +++ b/core/src/test/scala/org/apache/spark/internal/config/ConfigEntrySuite.scala @@ -128,6 +128,28 @@ class ConfigEntrySuite extends SparkFunSuite { assert(conf.get(transformationConf) === "bar") } + test("conf entry: checkValue()") { + def createEntry(default: Int): ConfigEntry[Int] = + ConfigBuilder(testKey("checkValue")) + .intConf + .checkValue(value => value >= 0, "value must be non-negative") + .createWithDefault(default) + + val conf = new SparkConf() + + val entry = createEntry(10) + conf.set(entry, -1) + val e1 = intercept[IllegalArgumentException] { + conf.get(entry) + } + assert(e1.getMessage == "value must be non-negative") + + val e2 = intercept[IllegalArgumentException] { + createEntry(-1) + } + assert(e2.getMessage == "value must be non-negative") + } + test("conf 
entry: valid values check") { val conf = new SparkConf() val enum = ConfigBuilder(testKey("enum")) |