about summary refs log tree commit diff
path: root/sql
diff options
context:
space:
mode:
author Patrick Wendell <pwendell@gmail.com> 2014-07-09 19:36:38 -0700
committer Patrick Wendell <pwendell@gmail.com> 2014-07-09 19:36:38 -0700
commit dd22bc2d570c54ad9853234d7a3f61720d606f39 (patch)
tree 8584756d0f0b6cef1e8de2dcbbe04bcf2272b8c5 /sql
parent 2e0a037dff2ef3eee45f6d3e2d8eddfdc3edcd5d (diff)
download spark-dd22bc2d570c54ad9853234d7a3f61720d606f39.tar.gz
spark-dd22bc2d570c54ad9853234d7a3f61720d606f39.tar.bz2
spark-dd22bc2d570c54ad9853234d7a3f61720d606f39.zip
Revert "[HOTFIX] Synchronize on SQLContext.settings in tests."
This reverts commit d4c30cd9918e18dde2a52909e36eaef6eb5996ab.
Diffstat (limited to 'sql')
-rw-r--r-- sql/core/src/main/scala/org/apache/spark/sql/SQLConf.scala      |  2
-rw-r--r-- sql/core/src/test/scala/org/apache/spark/sql/JoinSuite.scala    | 40
-rw-r--r-- sql/core/src/test/scala/org/apache/spark/sql/SQLConfSuite.scala | 64
-rw-r--r-- sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala | 68
4 files changed, 83 insertions(+), 91 deletions(-)
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/SQLConf.scala b/sql/core/src/main/scala/org/apache/spark/sql/SQLConf.scala
index b6fb46a3ac..2b787e14f3 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/SQLConf.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/SQLConf.scala
@@ -52,7 +52,7 @@ trait SQLConf {
/** ********************** SQLConf functionality methods ************ */
@transient
- protected[sql] val settings = java.util.Collections.synchronizedMap(
+ private val settings = java.util.Collections.synchronizedMap(
new java.util.HashMap[String, String]())
def set(props: Properties): Unit = {
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/JoinSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/JoinSuite.scala
index 054b14f8f7..3d7d5eedbe 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/JoinSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/JoinSuite.scala
@@ -39,27 +39,25 @@ class JoinSuite extends QueryTest {
test("plans broadcast hash join, given hints") {
def mkTest(buildSide: BuildSide, leftTable: String, rightTable: String) = {
- TestSQLContext.settings.synchronized {
- TestSQLContext.set("spark.sql.join.broadcastTables",
- s"${if (buildSide == BuildRight) rightTable else leftTable}")
- val rdd = sql( s"""SELECT * FROM $leftTable JOIN $rightTable ON key = a""")
- // Using `sparkPlan` because for relevant patterns in HashJoin to be
- // matched, other strategies need to be applied.
- val physical = rdd.queryExecution.sparkPlan
- val bhj = physical.collect { case j: BroadcastHashJoin if j.buildSide == buildSide => j}
-
- assert(bhj.size === 1, "planner does not pick up hint to generate broadcast hash join")
- checkAnswer(
- rdd,
- Seq(
- (1, "1", 1, 1),
- (1, "1", 1, 2),
- (2, "2", 2, 1),
- (2, "2", 2, 2),
- (3, "3", 3, 1),
- (3, "3", 3, 2)
- ))
- }
+ TestSQLContext.set("spark.sql.join.broadcastTables",
+ s"${if (buildSide == BuildRight) rightTable else leftTable}")
+ val rdd = sql(s"""SELECT * FROM $leftTable JOIN $rightTable ON key = a""")
+ // Using `sparkPlan` because for relevant patterns in HashJoin to be
+ // matched, other strategies need to be applied.
+ val physical = rdd.queryExecution.sparkPlan
+ val bhj = physical.collect { case j: BroadcastHashJoin if j.buildSide == buildSide => j }
+
+ assert(bhj.size === 1, "planner does not pick up hint to generate broadcast hash join")
+ checkAnswer(
+ rdd,
+ Seq(
+ (1, "1", 1, 1),
+ (1, "1", 1, 2),
+ (2, "2", 2, 1),
+ (2, "2", 2, 2),
+ (3, "3", 3, 1),
+ (3, "3", 3, 2)
+ ))
}
mkTest(BuildRight, "testData", "testData2")
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/SQLConfSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/SQLConfSuite.scala
index 93792f698c..08293f7f0c 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/SQLConfSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/SQLConfSuite.scala
@@ -28,50 +28,46 @@ class SQLConfSuite extends QueryTest {
val testVal = "test.val.0"
test("programmatic ways of basic setting and getting") {
- TestSQLContext.settings.synchronized {
- clear()
- assert(getOption(testKey).isEmpty)
- assert(getAll.toSet === Set())
+ clear()
+ assert(getOption(testKey).isEmpty)
+ assert(getAll.toSet === Set())
- set(testKey, testVal)
- assert(get(testKey) == testVal)
- assert(get(testKey, testVal + "_") == testVal)
- assert(getOption(testKey) == Some(testVal))
- assert(contains(testKey))
+ set(testKey, testVal)
+ assert(get(testKey) == testVal)
+ assert(get(testKey, testVal + "_") == testVal)
+ assert(getOption(testKey) == Some(testVal))
+ assert(contains(testKey))
- // Tests SQLConf as accessed from a SQLContext is mutable after
- // the latter is initialized, unlike SparkConf inside a SparkContext.
- assert(TestSQLContext.get(testKey) == testVal)
- assert(TestSQLContext.get(testKey, testVal + "_") == testVal)
- assert(TestSQLContext.getOption(testKey) == Some(testVal))
- assert(TestSQLContext.contains(testKey))
+ // Tests SQLConf as accessed from a SQLContext is mutable after
+ // the latter is initialized, unlike SparkConf inside a SparkContext.
+ assert(TestSQLContext.get(testKey) == testVal)
+ assert(TestSQLContext.get(testKey, testVal + "_") == testVal)
+ assert(TestSQLContext.getOption(testKey) == Some(testVal))
+ assert(TestSQLContext.contains(testKey))
- clear()
- }
+ clear()
}
test("parse SQL set commands") {
- TestSQLContext.settings.synchronized {
- clear()
- sql(s"set $testKey=$testVal")
- assert(get(testKey, testVal + "_") == testVal)
- assert(TestSQLContext.get(testKey, testVal + "_") == testVal)
+ clear()
+ sql(s"set $testKey=$testVal")
+ assert(get(testKey, testVal + "_") == testVal)
+ assert(TestSQLContext.get(testKey, testVal + "_") == testVal)
- sql("set mapred.reduce.tasks=20")
- assert(get("mapred.reduce.tasks", "0") == "20")
- sql("set mapred.reduce.tasks = 40")
- assert(get("mapred.reduce.tasks", "0") == "40")
+ sql("set mapred.reduce.tasks=20")
+ assert(get("mapred.reduce.tasks", "0") == "20")
+ sql("set mapred.reduce.tasks = 40")
+ assert(get("mapred.reduce.tasks", "0") == "40")
- val key = "spark.sql.key"
- val vs = "val0,val_1,val2.3,my_table"
- sql(s"set $key=$vs")
- assert(get(key, "0") == vs)
+ val key = "spark.sql.key"
+ val vs = "val0,val_1,val2.3,my_table"
+ sql(s"set $key=$vs")
+ assert(get(key, "0") == vs)
- sql(s"set $key=")
- assert(get(key, "0") == "")
+ sql(s"set $key=")
+ assert(get(key, "0") == "")
- clear()
- }
+ clear()
}
}
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
index fa1f32f8a4..0743cfe8cf 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
@@ -397,40 +397,38 @@ class SQLQuerySuite extends QueryTest {
}
test("SET commands semantics using sql()") {
- TestSQLContext.settings.synchronized {
- clear()
- val testKey = "test.key.0"
- val testVal = "test.val.0"
- val nonexistentKey = "nonexistent"
-
- // "set" itself returns all config variables currently specified in SQLConf.
- assert(sql("SET").collect().size == 0)
-
- // "set key=val"
- sql(s"SET $testKey=$testVal")
- checkAnswer(
- sql("SET"),
- Seq(Seq(testKey, testVal))
- )
-
- sql(s"SET ${testKey + testKey}=${testVal + testVal}")
- checkAnswer(
- sql("set"),
- Seq(
- Seq(testKey, testVal),
- Seq(testKey + testKey, testVal + testVal))
- )
-
- // "set key"
- checkAnswer(
- sql(s"SET $testKey"),
- Seq(Seq(testKey, testVal))
- )
- checkAnswer(
- sql(s"SET $nonexistentKey"),
- Seq(Seq(nonexistentKey, "<undefined>"))
- )
- clear()
- }
+ clear()
+ val testKey = "test.key.0"
+ val testVal = "test.val.0"
+ val nonexistentKey = "nonexistent"
+
+ // "set" itself returns all config variables currently specified in SQLConf.
+ assert(sql("SET").collect().size == 0)
+
+ // "set key=val"
+ sql(s"SET $testKey=$testVal")
+ checkAnswer(
+ sql("SET"),
+ Seq(Seq(testKey, testVal))
+ )
+
+ sql(s"SET ${testKey + testKey}=${testVal + testVal}")
+ checkAnswer(
+ sql("set"),
+ Seq(
+ Seq(testKey, testVal),
+ Seq(testKey + testKey, testVal + testVal))
+ )
+
+ // "set key"
+ checkAnswer(
+ sql(s"SET $testKey"),
+ Seq(Seq(testKey, testVal))
+ )
+ checkAnswer(
+ sql(s"SET $nonexistentKey"),
+ Seq(Seq(nonexistentKey, "<undefined>"))
+ )
+ clear()
}
}