 sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala          |  5 +++++
 sql/core/src/main/scala/org/apache/spark/sql/test/TestSQLContext.scala |  6 +++++-
 sql/core/src/test/scala/org/apache/spark/sql/SQLConfSuite.scala        | 11 ++++++++++-
 3 files changed, 20 insertions(+), 2 deletions(-)
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala b/sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala
index b245e1a863..a42bedbe6c 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala
@@ -75,6 +75,11 @@ class SQLContext(@transient val sparkContext: SparkContext)
protected[sql] def executePlan(plan: LogicalPlan): this.QueryExecution =
new this.QueryExecution { val logical = plan }
+ sparkContext.getConf.getAll.foreach {
+ case (key, value) if key.startsWith("spark.sql") => setConf(key, value)
+ case _ =>
+ }
+
/**
* :: DeveloperApi ::
* Allows catalyst LogicalPlans to be executed as a SchemaRDD. Note that the LogicalPlan
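The hunk above copies every "spark.sql"-prefixed entry from the SparkContext's SparkConf into the SQLContext's own SQLConf at construction time, so SQL settings can be supplied once on the SparkConf. A minimal sketch of the resulting behavior; the object name is hypothetical, and spark.sql.shuffle.partitions is used only as a convenient "spark.sql"-prefixed key:

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.SQLContext

object ConfPropagationSketch {
  def main(args: Array[String]): Unit = {
    // Any "spark.sql"-prefixed key set on the SparkConf before the
    // SQLContext is constructed...
    val conf = new SparkConf()
      .setMaster("local[2]")
      .setAppName("ConfPropagationSketch")
      .set("spark.sql.shuffle.partitions", "4")

    val sc = new SparkContext(conf)
    val sqlContext = new SQLContext(sc)

    // ...is visible through getConf without an explicit setConf call.
    // Prints "4" rather than the fallback "200".
    println(sqlContext.getConf("spark.sql.shuffle.partitions", "200"))

    sc.stop()
  }
}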
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/test/TestSQLContext.scala b/sql/core/src/main/scala/org/apache/spark/sql/test/TestSQLContext.scala
index 265b67737c..6bb81c76ed 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/test/TestSQLContext.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/test/TestSQLContext.scala
@@ -22,7 +22,11 @@ import org.apache.spark.sql.{SQLConf, SQLContext}
/** A SQLContext that can be used for local testing. */
object TestSQLContext
- extends SQLContext(new SparkContext("local[2]", "TestSQLContext", new SparkConf())) {
+ extends SQLContext(
+ new SparkContext(
+ "local[2]",
+ "TestSQLContext",
+ new SparkConf().set("spark.sql.testkey", "true"))) {
/** Fewer partitions to speed up testing. */
override private[spark] def numShufflePartitions: Int =
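The spark.sql.testkey sentinel seeded above is not a real Spark setting; it exists only so SQLConfSuite can observe that propagation happened. A sketch of the prefix filter's effect, with hypothetical object and key names; keys outside the "spark.sql" prefix should stay out of SQLConf:

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.SQLContext

object PrefixFilterSketch {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
      .setMaster("local[2]")
      .setAppName("PrefixFilterSketch")
      .set("spark.sql.testkey", "true") // propagated: matches the prefix
      .set("spark.core.key", "true")    // hypothetical key; not propagated

    val sc = new SparkContext(conf)
    val sqlContext = new SQLContext(sc)

    // The sentinel arrives in SQLConf; the non-prefixed key does not.
    assert(sqlContext.getConf("spark.sql.testkey", "false") == "true")
    assert(sqlContext.getConf("spark.core.key", "unset") == "unset")

    sc.stop()
  }
}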
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/SQLConfSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/SQLConfSuite.scala
index 584f71b3c1..60701f0e15 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/SQLConfSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/SQLConfSuite.scala
@@ -17,16 +17,25 @@
package org.apache.spark.sql
+import org.scalatest.FunSuiteLike
+
import org.apache.spark.sql.test._
/* Implicits */
import TestSQLContext._
-class SQLConfSuite extends QueryTest {
+class SQLConfSuite extends QueryTest with FunSuiteLike {
val testKey = "test.key.0"
val testVal = "test.val.0"
+ test("propagate from spark conf") {
+ // We create a new context here to avoid order dependence with other tests that might call
+ // clear().
+ val newContext = new SQLContext(TestSQLContext.sparkContext)
+ assert(newContext.getConf("spark.sql.testkey", "false") == "true")
+ }
+
test("programmatic ways of basic setting and getting") {
clear()
assert(getAllConfs.size === 0)
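For orientation, a small sketch of the programmatic set/get surface the truncated test exercises, using only methods that appear in this diff (clear, setConf, getConf, getAllConfs) and a hypothetical runner object:

import org.apache.spark.sql.test.TestSQLContext._

object SQLConfApiSketch {
  def main(args: Array[String]): Unit = {
    clear()                              // start from an empty SQLConf
    assert(getAllConfs.isEmpty)

    setConf("test.key.0", "test.val.0")  // programmatic set...
    assert(getConf("test.key.0", "") == "test.val.0") // ...and get
    assert(getConf("absent.key", "fallback") == "fallback")
  }
}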