author     Marcelo Vanzin <vanzin@cloudera.com>  2016-03-07 14:13:44 -0800
committer  Marcelo Vanzin <vanzin@cloudera.com>  2016-03-07 14:13:44 -0800
commit     e1fb857992074164dcaa02498c5a9604fac6f57e (patch)
tree       5f2a9de0230df4ebd0ca7317c879472eb8d3fbbc /core/src/test
parent     e9e67b39abb23a88d8be2d0fea5b5fd93184a25b (diff)
[SPARK-529][CORE][YARN] Add type-safe config keys to SparkConf.
This is, in a way, the groundwork needed to enable SPARK-529 (which was closed as Won't Fix, but I think is still valuable). In fact, Spark SQL created something for that purpose, and this change basically factors out that code and inserts it into SparkConf, with some extra bells and whistles.

To showcase the usage of this pattern, I modified the YARN backend to use the new config keys (defined in the new `config` package object under `o.a.s.deploy.yarn`). Most of the changes are mechanical, although the logic had to be slightly modified in a handful of places.

Author: Marcelo Vanzin <vanzin@cloudera.com>

Closes #10205 from vanzin/conf-opts.
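Before the diff itself, a minimal sketch of the pattern being added, pieced together from the entry points the test suite below exercises (ConfigBuilder, the typed *Conf methods, withDefault/withDefaultString, and the typed conf.get/conf.set overloads). The package, object, and key names here (spark.hypothetical.*) are invented for illustration; since the builder is Spark-internal at this commit, code like this would live inside Spark itself:

    package org.apache.spark.hypothetical  // invented package, placed under
                                           // o.a.s so Spark-private APIs resolve

    import java.util.concurrent.TimeUnit

    import org.apache.spark.internal.config.ConfigBuilder

    object HypotheticalConfig {
      // Each entry ties a key string to a type and a default in one definition.
      val MAX_RETRIES = ConfigBuilder("spark.hypothetical.maxRetries")
        .intConf
        .withDefault(3)

      // Time entries parse human-readable strings ("30s", "1h") into the unit.
      val STARTUP_TIMEOUT = ConfigBuilder("spark.hypothetical.startupTimeout")
        .timeConf(TimeUnit.SECONDS)
        .withDefaultString("30s")
    }

    // Reads and writes then go through the typed entry, not raw strings:
    //   val conf = new SparkConf()
    //   conf.get(HypotheticalConfig.MAX_RETRIES)      // Int = 3 when unset
    //   conf.set(HypotheticalConfig.MAX_RETRIES, 5)
    //   conf.get(HypotheticalConfig.STARTUP_TIMEOUT)  // Long = 30 (seconds)

The payoff is that the key name, value type, default, and any parsing or validation are declared once, so callers can no longer misspell a key or misparse a value at each read site.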
Diffstat (limited to 'core/src/test')
-rw-r--r--  core/src/test/scala/org/apache/spark/internal/config/ConfigEntrySuite.scala | 155
1 file changed, 155 insertions(+), 0 deletions(-)
diff --git a/core/src/test/scala/org/apache/spark/internal/config/ConfigEntrySuite.scala b/core/src/test/scala/org/apache/spark/internal/config/ConfigEntrySuite.scala
new file mode 100644
index 0000000000..0644148eae
--- /dev/null
+++ b/core/src/test/scala/org/apache/spark/internal/config/ConfigEntrySuite.scala
@@ -0,0 +1,155 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.internal.config
+
+import java.util.concurrent.TimeUnit
+
+import org.apache.spark.{SparkConf, SparkFunSuite}
+import org.apache.spark.network.util.ByteUnit
+
+class ConfigEntrySuite extends SparkFunSuite {
+
+ test("conf entry: int") {
+ val conf = new SparkConf()
+ val iConf = ConfigBuilder("spark.int").intConf.withDefault(1)
+ assert(conf.get(iConf) === 1)
+ conf.set(iConf, 2)
+ assert(conf.get(iConf) === 2)
+ }
+
+ test("conf entry: long") {
+ val conf = new SparkConf()
+ val lConf = ConfigBuilder("spark.long").longConf.withDefault(0L)
+ conf.set(lConf, 1234L)
+ assert(conf.get(lConf) === 1234L)
+ }
+
+ test("conf entry: double") {
+ val conf = new SparkConf()
+ val dConf = ConfigBuilder("spark.double").doubleConf.withDefault(0.0)
+ conf.set(dConf, 20.0)
+ assert(conf.get(dConf) === 20.0)
+ }
+
+ test("conf entry: boolean") {
+ val conf = new SparkConf()
+ val bConf = ConfigBuilder("spark.boolean").booleanConf.withDefault(false)
+ assert(!conf.get(bConf))
+ conf.set(bConf, true)
+ assert(conf.get(bConf))
+ }
+
+ test("conf entry: optional") {
+ val conf = new SparkConf()
+ val optionalConf = ConfigBuilder("spark.optional").intConf.optional
+ assert(conf.get(optionalConf) === None)
+ conf.set(optionalConf, 1)
+ assert(conf.get(optionalConf) === Some(1))
+ }
+
+ test("conf entry: fallback") {
+ val conf = new SparkConf()
+ val parentConf = ConfigBuilder("spark.int").intConf.withDefault(1)
+ val confWithFallback = ConfigBuilder("spark.fallback").fallbackConf(parentConf)
+ assert(conf.get(confWithFallback) === 1)
+ conf.set(confWithFallback, 2)
+ assert(conf.get(parentConf) === 1)
+ assert(conf.get(confWithFallback) === 2)
+ }
+
+ test("conf entry: time") {
+ val conf = new SparkConf()
+ val time = ConfigBuilder("spark.time").timeConf(TimeUnit.SECONDS).withDefaultString("1h")
+ assert(conf.get(time) === 3600L)
+ conf.set(time.key, "1m")
+ assert(conf.get(time) === 60L)
+ }
+
+ test("conf entry: bytes") {
+ val conf = new SparkConf()
+ val bytes = ConfigBuilder("spark.bytes").bytesConf(ByteUnit.KiB).withDefaultString("1m")
+ assert(conf.get(bytes) === 1024L)
+ conf.set(bytes.key, "1k")
+ assert(conf.get(bytes) === 1L)
+ }
+
+ test("conf entry: string seq") {
+ val conf = new SparkConf()
+ val seq = ConfigBuilder("spark.seq").stringConf.toSequence.withDefault(Seq())
+ conf.set(seq.key, "1,,2, 3 , , 4")
+ assert(conf.get(seq) === Seq("1", "2", "3", "4"))
+ conf.set(seq, Seq("1", "2"))
+ assert(conf.get(seq) === Seq("1", "2"))
+ }
+
+ test("conf entry: int seq") {
+ val conf = new SparkConf()
+ val seq = ConfigBuilder("spark.seq").intConf.toSequence.withDefault(Seq())
+ conf.set(seq.key, "1,,2, 3 , , 4")
+ assert(conf.get(seq) === Seq(1, 2, 3, 4))
+ conf.set(seq, Seq(1, 2))
+ assert(conf.get(seq) === Seq(1, 2))
+ }
+
+ test("conf entry: transformation") {
+ val conf = new SparkConf()
+ val transformationConf = ConfigBuilder("spark.transformation")
+ .stringConf
+ .transform(_.toLowerCase())
+ .withDefault("FOO")
+
+ assert(conf.get(transformationConf) === "foo")
+ conf.set(transformationConf, "BAR")
+ assert(conf.get(transformationConf) === "bar")
+ }
+
+ test("conf entry: valid values check") {
+ val conf = new SparkConf()
+ val enum = ConfigBuilder("spark.enum")
+ .stringConf
+ .checkValues(Set("a", "b", "c"))
+ .withDefault("a")
+ assert(conf.get(enum) === "a")
+
+ conf.set(enum, "b")
+ assert(conf.get(enum) === "b")
+
+ conf.set(enum, "d")
+ val enumError = intercept[IllegalArgumentException] {
+ conf.get(enum)
+ }
+ assert(enumError.getMessage === s"The value of ${enum.key} should be one of a, b, c, but was d")
+ }
+
+ test("conf entry: conversion error") {
+ val conf = new SparkConf()
+ val conversionTest = ConfigBuilder("spark.conversionTest").doubleConf.optional
+ conf.set(conversionTest.key, "abc")
+ val conversionError = intercept[IllegalArgumentException] {
+ conf.get(conversionTest)
+ }
+ assert(conversionError.getMessage === s"${conversionTest.key} should be double, but was abc")
+ }
+
+ test("default value handling is null-safe") {
+ val conf = new SparkConf()
+ val stringConf = ConfigBuilder("spark.string").stringConf.withDefault(null)
+ assert(conf.get(stringConf) === null)
+ }
+
+}
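The diffstat above is limited to core/src/test, so the YARN side of the change is not shown here. Going only by the commit message's description, the shape it takes would be a `config` package object under `o.a.s.deploy.yarn` that groups the typed keys for that module. A sketch of that shape, with entries that are plausible examples rather than the commit's actual definitions:

    package org.apache.spark.deploy.yarn

    import org.apache.spark.internal.config.ConfigBuilder
    import org.apache.spark.network.util.ByteUnit

    // Illustrative sketch of the package object the commit message describes;
    // the entries below are invented examples, not the real YARN keys.
    package object config {

      private[spark] val AM_MEMORY = ConfigBuilder("spark.yarn.am.memory")
        .bytesConf(ByteUnit.MiB)
        .withDefaultString("512m")

      private[spark] val QUEUE = ConfigBuilder("spark.yarn.queue")
        .stringConf
        .withDefault("default")

    }

Call sites in the backend can then replace string lookups such as conf.get("spark.yarn.am.memory", "512m") with conf.get(AM_MEMORY), receiving an already-parsed Long in MiB and a single authoritative default, which is presumably what makes most of the mechanical changes the message mentions possible.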