about summary refs log tree commit diff
path: root/sql/core/src/test
diff options
context:
space:
mode:
author    Reynold Xin <rxin@databricks.com>    2017-02-07 18:55:19 +0100
committer Reynold Xin <rxin@databricks.com>    2017-02-07 18:55:19 +0100
commitb7277e03d1038e2a19495c0ef7707e2d77937ccf (patch)
tree2472f54a41aaa8fd47e27ae1641b57c1f0477141 /sql/core/src/test
parent7a7ce272fe9a703f58b0180a9d2001ecb5c4b8db (diff)
downloadspark-b7277e03d1038e2a19495c0ef7707e2d77937ccf.tar.gz
spark-b7277e03d1038e2a19495c0ef7707e2d77937ccf.tar.bz2
spark-b7277e03d1038e2a19495c0ef7707e2d77937ccf.zip
[SPARK-19495][SQL] Make SQLConf slightly more extensible
## What changes were proposed in this pull request?

This pull request makes SQLConf slightly more extensible by removing the visibility limitations on the build* functions.

## How was this patch tested?

N/A - there are no logic changes and everything should be covered by existing unit tests.

Author: Reynold Xin <rxin@databricks.com>

Closes #16835 from rxin/SPARK-19495.
Diffstat (limited to 'sql/core/src/test')
-rw-r--r--  sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala           |  2
-rw-r--r--  sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfEntrySuite.scala | 20
2 files changed, 11 insertions, 11 deletions
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
index a77f920598..9c95b12795 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
@@ -999,7 +999,7 @@ class SQLQuerySuite extends QueryTest with SharedSQLContext {
test("SPARK-19218 `SET -v` should not fail with null value configuration") {
import SQLConf._
- val confEntry = SQLConfigBuilder("spark.test").doc("doc").stringConf.createWithDefault(null)
+ val confEntry = buildConf("spark.test").doc("doc").stringConf.createWithDefault(null)
try {
val result = sql("SET -v").collect()
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfEntrySuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfEntrySuite.scala
index 95bfd05c1f..6c12f0ff7d 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfEntrySuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfEntrySuite.scala
@@ -26,7 +26,7 @@ class SQLConfEntrySuite extends SparkFunSuite {
test("intConf") {
val key = "spark.sql.SQLConfEntrySuite.int"
- val confEntry = SQLConfigBuilder(key).intConf.createWithDefault(1)
+ val confEntry = buildConf(key).intConf.createWithDefault(1)
assert(conf.getConf(confEntry, 5) === 5)
conf.setConf(confEntry, 10)
@@ -45,7 +45,7 @@ class SQLConfEntrySuite extends SparkFunSuite {
test("longConf") {
val key = "spark.sql.SQLConfEntrySuite.long"
- val confEntry = SQLConfigBuilder(key).longConf.createWithDefault(1L)
+ val confEntry = buildConf(key).longConf.createWithDefault(1L)
assert(conf.getConf(confEntry, 5L) === 5L)
conf.setConf(confEntry, 10L)
@@ -64,7 +64,7 @@ class SQLConfEntrySuite extends SparkFunSuite {
test("booleanConf") {
val key = "spark.sql.SQLConfEntrySuite.boolean"
- val confEntry = SQLConfigBuilder(key).booleanConf.createWithDefault(true)
+ val confEntry = buildConf(key).booleanConf.createWithDefault(true)
assert(conf.getConf(confEntry, false) === false)
conf.setConf(confEntry, true)
@@ -83,7 +83,7 @@ class SQLConfEntrySuite extends SparkFunSuite {
test("doubleConf") {
val key = "spark.sql.SQLConfEntrySuite.double"
- val confEntry = SQLConfigBuilder(key).doubleConf.createWithDefault(1d)
+ val confEntry = buildConf(key).doubleConf.createWithDefault(1d)
assert(conf.getConf(confEntry, 5.0) === 5.0)
conf.setConf(confEntry, 10.0)
@@ -102,7 +102,7 @@ class SQLConfEntrySuite extends SparkFunSuite {
test("stringConf") {
val key = "spark.sql.SQLConfEntrySuite.string"
- val confEntry = SQLConfigBuilder(key).stringConf.createWithDefault(null)
+ val confEntry = buildConf(key).stringConf.createWithDefault(null)
assert(conf.getConf(confEntry, "abc") === "abc")
conf.setConf(confEntry, "abcd")
@@ -116,7 +116,7 @@ class SQLConfEntrySuite extends SparkFunSuite {
test("enumConf") {
val key = "spark.sql.SQLConfEntrySuite.enum"
- val confEntry = SQLConfigBuilder(key)
+ val confEntry = buildConf(key)
.stringConf
.checkValues(Set("a", "b", "c"))
.createWithDefault("a")
@@ -138,7 +138,7 @@ class SQLConfEntrySuite extends SparkFunSuite {
test("stringSeqConf") {
val key = "spark.sql.SQLConfEntrySuite.stringSeq"
- val confEntry = SQLConfigBuilder(key)
+ val confEntry = buildConf(key)
.stringConf
.toSequence
.createWithDefault(Nil)
@@ -155,7 +155,7 @@ class SQLConfEntrySuite extends SparkFunSuite {
test("optionalConf") {
val key = "spark.sql.SQLConfEntrySuite.optional"
- val confEntry = SQLConfigBuilder(key)
+ val confEntry = buildConf(key)
.stringConf
.createOptional
@@ -166,9 +166,9 @@ class SQLConfEntrySuite extends SparkFunSuite {
test("duplicate entry") {
val key = "spark.sql.SQLConfEntrySuite.duplicate"
- SQLConfigBuilder(key).stringConf.createOptional
+ buildConf(key).stringConf.createOptional
intercept[IllegalArgumentException] {
- SQLConfigBuilder(key).stringConf.createOptional
+ buildConf(key).stringConf.createOptional
}
}
}