author     Michael Armbrust <michael@databricks.com>    2014-09-23 12:27:12 -0700
committer  Michael Armbrust <michael@databricks.com>    2014-09-23 12:27:12 -0700
commit     a08153f8a3e7bad81bae330ec4152651da5e7804
tree       2efed9d7450e5b6a775b9c5c0c2165af779a277a /sql
parent     1c62f97e94de96ca3dc6daf778f008176e92888a
[SPARK-3646][SQL] Copy SQL configuration from SparkConf when a SQLContext is created.
This will allow us to take advantage of things like the spark.defaults file.

Author: Michael Armbrust <michael@databricks.com>

Closes #2493 from marmbrus/copySparkConf and squashes the following commits:

0bd1377 [Michael Armbrust] Copy SQL configuration from SparkConf when a SQLContext is created.
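With this change, any spark.sql.* entry already present on the SparkConf is visible through a newly constructed SQLContext. A minimal sketch of the resulting usage, assuming Spark 1.1-era APIs; the master, app name, and the choice of spark.sql.shuffle.partitions as the propagated key are illustrative, and the key could equally come from a defaults file:

    import org.apache.spark.{SparkConf, SparkContext}
    import org.apache.spark.sql.SQLContext

    object ConfPropagationExample extends App {
      // Hypothetical driver program: the spark.sql.* key is set
      // programmatically here purely for illustration.
      val conf = new SparkConf()
        .setMaster("local[2]")
        .setAppName("ConfPropagationExample")
        .set("spark.sql.shuffle.partitions", "10")

      val sc = new SparkContext(conf)
      val sqlContext = new SQLContext(sc)

      // The spark.sql.* entry was copied into the SQLContext's SQLConf at
      // construction time, so no explicit setConf call is needed.
      assert(sqlContext.getConf("spark.sql.shuffle.partitions") == "10")

      sc.stop()
    }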
Diffstat (limited to 'sql')
-rw-r--r--  sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala           |  5 +++++
-rw-r--r--  sql/core/src/main/scala/org/apache/spark/sql/test/TestSQLContext.scala  |  6 +++++-
-rw-r--r--  sql/core/src/test/scala/org/apache/spark/sql/SQLConfSuite.scala         | 11 ++++++++++-
3 files changed, 20 insertions(+), 2 deletions(-)
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala b/sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala
index b245e1a863..a42bedbe6c 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala
@@ -75,6 +75,11 @@ class SQLContext(@transient val sparkContext: SparkContext)
   protected[sql] def executePlan(plan: LogicalPlan): this.QueryExecution =
     new this.QueryExecution { val logical = plan }
 
+  sparkContext.getConf.getAll.foreach {
+    case (key, value) if key.startsWith("spark.sql") => setConf(key, value)
+    case _ =>
+  }
+
   /**
    * :: DeveloperApi ::
    * Allows catalyst LogicalPlans to be executed as a SchemaRDD. Note that the LogicalPlan
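The block added above walks every (key, value) pair from the SparkConf and copies only those whose key starts with "spark.sql" into the SQLConf; all other keys fall through the wildcard case untouched. A standalone sketch of that prefix-filter idiom, where the mutable settings map is a stand-in (an assumption) for SQLConf's internal store:

    import scala.collection.mutable

    object PrefixCopySketch extends App {
      // Stand-in for SQLConf's internal key/value store.
      val settings = mutable.Map.empty[String, String]
      def setConf(key: String, value: String): Unit = settings(key) = value

      // Stand-in for sparkContext.getConf.getAll: SQL and non-SQL keys mixed.
      val allConf: Array[(String, String)] = Array(
        "spark.master" -> "local[2]",
        "spark.sql.shuffle.partitions" -> "10")

      allConf.foreach {
        case (key, value) if key.startsWith("spark.sql") => setConf(key, value)
        case _ => // non-SQL keys remain SparkConf's concern
      }

      assert(settings == mutable.Map("spark.sql.shuffle.partitions" -> "10"))
    }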
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/test/TestSQLContext.scala b/sql/core/src/main/scala/org/apache/spark/sql/test/TestSQLContext.scala
index 265b67737c..6bb81c76ed 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/test/TestSQLContext.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/test/TestSQLContext.scala
@@ -22,7 +22,11 @@ import org.apache.spark.sql.{SQLConf, SQLContext}
 
 /** A SQLContext that can be used for local testing. */
 object TestSQLContext
-  extends SQLContext(new SparkContext("local[2]", "TestSQLContext", new SparkConf())) {
+  extends SQLContext(
+    new SparkContext(
+      "local[2]",
+      "TestSQLContext",
+      new SparkConf().set("spark.sql.testkey", "true"))) {
 
   /** Fewer partitions to speed up testing. */
   override private[spark] def numShufflePartitions: Int =
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/SQLConfSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/SQLConfSuite.scala
index 584f71b3c1..60701f0e15 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/SQLConfSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/SQLConfSuite.scala
@@ -17,16 +17,25 @@
 
 package org.apache.spark.sql
 
+import org.scalatest.FunSuiteLike
+
 import org.apache.spark.sql.test._
 
 /* Implicits */
 import TestSQLContext._
 
-class SQLConfSuite extends QueryTest {
+class SQLConfSuite extends QueryTest with FunSuiteLike {
   val testKey = "test.key.0"
   val testVal = "test.val.0"
 
+  test("propagate from spark conf") {
+    // We create a new context here to avoid order dependence with other tests that might call
+    // clear().
+    val newContext = new SQLContext(TestSQLContext.sparkContext)
+    assert(newContext.getConf("spark.sql.testkey", "false") == "true")
+  }
+
   test("programmatic ways of basic setting and getting") {
     clear()
     assert(getAllConfs.size === 0)
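A subtlety the new test hints at: the copy is one-way and happens once, at construction. A setConf on one SQLContext does not write back to the SparkConf, so a sibling context created later on the same SparkContext will not see it. A hedged REPL-style sketch; the key spark.sql.example is made up:

    import org.apache.spark.sql.SQLContext
    import org.apache.spark.sql.test.TestSQLContext

    val a = new SQLContext(TestSQLContext.sparkContext)
    a.setConf("spark.sql.example", "x")  // lives only in a's own SQLConf

    val b = new SQLContext(TestSQLContext.sparkContext)
    // b copies spark.sql.* keys from the SparkConf, not from sibling contexts.
    assert(b.getConf("spark.sql.example", "unset") == "unset")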