-rw-r--r--  sql/core/src/main/scala/org/apache/spark/sql/SQLConf.scala       |  2
-rw-r--r--  sql/core/src/test/scala/org/apache/spark/sql/JoinSuite.scala     | 40
-rw-r--r--  sql/core/src/test/scala/org/apache/spark/sql/SQLConfSuite.scala  | 64
-rw-r--r--  sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala | 68
4 files changed, 91 insertions(+), 83 deletions(-)
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/SQLConf.scala b/sql/core/src/main/scala/org/apache/spark/sql/SQLConf.scala
index 2fe7f94663..3b5abab969 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/SQLConf.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/SQLConf.scala
@@ -50,7 +50,7 @@ trait SQLConf {
/** ********************** SQLConf functionality methods ************ */
@transient
- private val settings = java.util.Collections.synchronizedMap(
+ protected[sql] val settings = java.util.Collections.synchronizedMap(
new java.util.HashMap[String, String]())
def set(props: Properties): Unit = {
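Note on the change above: widening `settings` from `private` to `protected[sql]` is the crux of the patch, because it lets the test suites below use the map itself as a lock. `Collections.synchronizedMap` only makes individual `get`/`put` calls atomic; a test that clears, sets, and then asserts is a compound sequence, so concurrently running suites need to hold one common monitor for the whole body. A minimal, self-contained sketch of the pattern (the object and method names here are illustrative, not part of the patch):

  import java.util.Collections

  object ConfLockSketch {
    // Mirrors SQLConf.settings: each map call is thread-safe on its own,
    // but multi-step test sequences still race without external locking.
    val settings = Collections.synchronizedMap(
      new java.util.HashMap[String, String]())

    // synchronizedMap's wrappers lock on the returned map itself, so holding
    // that same monitor here also excludes plain get/put callers meanwhile.
    def atomically[T](body: => T): T = settings.synchronized { body }
  }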
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/JoinSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/JoinSuite.scala
index 3d7d5eedbe..054b14f8f7 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/JoinSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/JoinSuite.scala
@@ -39,25 +39,27 @@ class JoinSuite extends QueryTest {
test("plans broadcast hash join, given hints") {
def mkTest(buildSide: BuildSide, leftTable: String, rightTable: String) = {
- TestSQLContext.set("spark.sql.join.broadcastTables",
- s"${if (buildSide == BuildRight) rightTable else leftTable}")
- val rdd = sql(s"""SELECT * FROM $leftTable JOIN $rightTable ON key = a""")
- // Using `sparkPlan` because for relevant patterns in HashJoin to be
- // matched, other strategies need to be applied.
- val physical = rdd.queryExecution.sparkPlan
- val bhj = physical.collect { case j: BroadcastHashJoin if j.buildSide == buildSide => j }
-
- assert(bhj.size === 1, "planner does not pick up hint to generate broadcast hash join")
- checkAnswer(
- rdd,
- Seq(
- (1, "1", 1, 1),
- (1, "1", 1, 2),
- (2, "2", 2, 1),
- (2, "2", 2, 2),
- (3, "3", 3, 1),
- (3, "3", 3, 2)
- ))
+ TestSQLContext.settings.synchronized {
+ TestSQLContext.set("spark.sql.join.broadcastTables",
+ s"${if (buildSide == BuildRight) rightTable else leftTable}")
+ val rdd = sql(s"""SELECT * FROM $leftTable JOIN $rightTable ON key = a""")
+ // Using `sparkPlan` because for relevant patterns in HashJoin to be
+ // matched, other strategies need to be applied.
+ val physical = rdd.queryExecution.sparkPlan
+ val bhj = physical.collect { case j: BroadcastHashJoin if j.buildSide == buildSide => j }
+
+ assert(bhj.size === 1, "planner does not pick up hint to generate broadcast hash join")
+ checkAnswer(
+ rdd,
+ Seq(
+ (1, "1", 1, 1),
+ (1, "1", 1, 2),
+ (2, "2", 2, 1),
+ (2, "2", 2, 2),
+ (3, "3", 3, 1),
+ (3, "3", 3, 2)
+ ))
+ }
}
mkTest(BuildRight, "testData", "testData2")
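The test above is representative of the pattern applied throughout: set a config value, plan a query, and assert on the physical plan, all inside one `settings.synchronized` block so that a parallel suite cannot overwrite `spark.sql.join.broadcastTables` between the `set` call and planning. Distilled into a hypothetical helper (not part of this patch), using only calls that appear in the diff:

  // Plans `query` under a temporary broadcast hint and counts the
  // BroadcastHashJoin operators, holding the settings lock throughout.
  def broadcastJoinCount(query: String, hint: String): Int =
    TestSQLContext.settings.synchronized {
      TestSQLContext.set("spark.sql.join.broadcastTables", hint)
      sql(query).queryExecution.sparkPlan.collect {
        case j: BroadcastHashJoin => j
      }.size
    }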
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/SQLConfSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/SQLConfSuite.scala
index 08293f7f0c..93792f698c 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/SQLConfSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/SQLConfSuite.scala
@@ -28,46 +28,50 @@ class SQLConfSuite extends QueryTest {
val testVal = "test.val.0"
test("programmatic ways of basic setting and getting") {
- clear()
- assert(getOption(testKey).isEmpty)
- assert(getAll.toSet === Set())
+ TestSQLContext.settings.synchronized {
+ clear()
+ assert(getOption(testKey).isEmpty)
+ assert(getAll.toSet === Set())
- set(testKey, testVal)
- assert(get(testKey) == testVal)
- assert(get(testKey, testVal + "_") == testVal)
- assert(getOption(testKey) == Some(testVal))
- assert(contains(testKey))
+ set(testKey, testVal)
+ assert(get(testKey) == testVal)
+ assert(get(testKey, testVal + "_") == testVal)
+ assert(getOption(testKey) == Some(testVal))
+ assert(contains(testKey))
- // Tests SQLConf as accessed from a SQLContext is mutable after
- // the latter is initialized, unlike SparkConf inside a SparkContext.
- assert(TestSQLContext.get(testKey) == testVal)
- assert(TestSQLContext.get(testKey, testVal + "_") == testVal)
- assert(TestSQLContext.getOption(testKey) == Some(testVal))
- assert(TestSQLContext.contains(testKey))
+ // Tests SQLConf as accessed from a SQLContext is mutable after
+ // the latter is initialized, unlike SparkConf inside a SparkContext.
+ assert(TestSQLContext.get(testKey) == testVal)
+ assert(TestSQLContext.get(testKey, testVal + "_") == testVal)
+ assert(TestSQLContext.getOption(testKey) == Some(testVal))
+ assert(TestSQLContext.contains(testKey))
- clear()
+ clear()
+ }
}
test("parse SQL set commands") {
- clear()
- sql(s"set $testKey=$testVal")
- assert(get(testKey, testVal + "_") == testVal)
- assert(TestSQLContext.get(testKey, testVal + "_") == testVal)
+ TestSQLContext.settings.synchronized {
+ clear()
+ sql(s"set $testKey=$testVal")
+ assert(get(testKey, testVal + "_") == testVal)
+ assert(TestSQLContext.get(testKey, testVal + "_") == testVal)
- sql("set mapred.reduce.tasks=20")
- assert(get("mapred.reduce.tasks", "0") == "20")
- sql("set mapred.reduce.tasks = 40")
- assert(get("mapred.reduce.tasks", "0") == "40")
+ sql("set mapred.reduce.tasks=20")
+ assert(get("mapred.reduce.tasks", "0") == "20")
+ sql("set mapred.reduce.tasks = 40")
+ assert(get("mapred.reduce.tasks", "0") == "40")
- val key = "spark.sql.key"
- val vs = "val0,val_1,val2.3,my_table"
- sql(s"set $key=$vs")
- assert(get(key, "0") == vs)
+ val key = "spark.sql.key"
+ val vs = "val0,val_1,val2.3,my_table"
+ sql(s"set $key=$vs")
+ assert(get(key, "0") == vs)
- sql(s"set $key=")
- assert(get(key, "0") == "")
+ sql(s"set $key=")
+ assert(get(key, "0") == "")
- clear()
+ clear()
+ }
}
}
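One caveat the synchronized blocks above do not address: each test ends with `clear()`, which wipes every setting rather than restoring prior state. A save-and-restore variant, sketched here as a hypothetical utility (later Spark versions ship a similar `withSQLConf` test helper), would look like:

  def withConf[T](key: String, value: String)(body: => T): T =
    TestSQLContext.settings.synchronized {
      val previous = TestSQLContext.getOption(key)  // remember the prior value
      TestSQLContext.set(key, value)
      try body
      finally previous match {
        case Some(v) => TestSQLContext.set(key, v)
        case None    => TestSQLContext.settings.remove(key)
      }
    }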
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
index bf7fafe952..2c1cb18670 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
@@ -372,38 +372,40 @@ class SQLQuerySuite extends QueryTest {
}
test("SET commands semantics using sql()") {
- clear()
- val testKey = "test.key.0"
- val testVal = "test.val.0"
- val nonexistentKey = "nonexistent"
-
- // "set" itself returns all config variables currently specified in SQLConf.
- assert(sql("SET").collect().size == 0)
-
- // "set key=val"
- sql(s"SET $testKey=$testVal")
- checkAnswer(
- sql("SET"),
- Seq(Seq(testKey, testVal))
- )
-
- sql(s"SET ${testKey + testKey}=${testVal + testVal}")
- checkAnswer(
- sql("set"),
- Seq(
- Seq(testKey, testVal),
- Seq(testKey + testKey, testVal + testVal))
- )
-
- // "set key"
- checkAnswer(
- sql(s"SET $testKey"),
- Seq(Seq(testKey, testVal))
- )
- checkAnswer(
- sql(s"SET $nonexistentKey"),
- Seq(Seq(nonexistentKey, "<undefined>"))
- )
- clear()
+ TestSQLContext.settings.synchronized {
+ clear()
+ val testKey = "test.key.0"
+ val testVal = "test.val.0"
+ val nonexistentKey = "nonexistent"
+
+ // "set" itself returns all config variables currently specified in SQLConf.
+ assert(sql("SET").collect().size == 0)
+
+ // "set key=val"
+ sql(s"SET $testKey=$testVal")
+ checkAnswer(
+ sql("SET"),
+ Seq(Seq(testKey, testVal))
+ )
+
+ sql(s"SET ${testKey + testKey}=${testVal + testVal}")
+ checkAnswer(
+ sql("set"),
+ Seq(
+ Seq(testKey, testVal),
+ Seq(testKey + testKey, testVal + testVal))
+ )
+
+ // "set key"
+ checkAnswer(
+ sql(s"SET $testKey"),
+ Seq(Seq(testKey, testVal))
+ )
+ checkAnswer(
+ sql(s"SET $nonexistentKey"),
+ Seq(Seq(nonexistentKey, "<undefined>"))
+ )
+ clear()
+ }
}
}
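For reference, the three SET shapes this suite exercises are: bare `SET` (list all current settings), `SET key=val` (assign, where the value may be empty), and `SET key` (look up, printing `<undefined>` for a missing key). A rough sketch of that dispatch, with an illustrative regex rather than Spark's actual parser:

  def parseSet(cmd: String): Option[(String, Option[String])] = {
    val SetKV = """(?i)set\s+([^=\s]+)\s*=\s*(.*)""".r
    val SetK  = """(?i)set\s+(\S+)""".r
    cmd.trim match {
      case SetKV(k, v) => Some((k, Some(v))) // "set key=val"; v may be ""
      case SetK(k)     => Some((k, None))    // "set key": look up current value
      case _           => None               // bare "set": list all settings
    }
  }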