about summary refs log tree commit diff
path: root/sql/core/src/main
diff options
context:
space:
mode:
authorMichael Armbrust <michael@databricks.com>2014-09-23 12:27:12 -0700
committerMichael Armbrust <michael@databricks.com>2014-09-23 12:27:12 -0700
commita08153f8a3e7bad81bae330ec4152651da5e7804 (patch)
tree2efed9d7450e5b6a775b9c5c0c2165af779a277a /sql/core/src/main
parent1c62f97e94de96ca3dc6daf778f008176e92888a (diff)
downloadspark-a08153f8a3e7bad81bae330ec4152651da5e7804.tar.gz
spark-a08153f8a3e7bad81bae330ec4152651da5e7804.tar.bz2
spark-a08153f8a3e7bad81bae330ec4152651da5e7804.zip
[SPARK-3646][SQL] Copy SQL configuration from SparkConf when a SQLContext is created.
This will allow us to take advantage of things like the spark.defaults file.

Author: Michael Armbrust <michael@databricks.com>

Closes #2493 from marmbrus/copySparkConf and squashes the following commits:

0bd1377 [Michael Armbrust] Copy SQL configuration from SparkConf when a SQLContext is created.
Diffstat (limited to 'sql/core/src/main')
-rw-r--r-- sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala | 5
-rw-r--r-- sql/core/src/main/scala/org/apache/spark/sql/test/TestSQLContext.scala | 6
2 files changed, 10 insertions, 1 deletion
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala b/sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala
index b245e1a863..a42bedbe6c 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala
@@ -75,6 +75,11 @@ class SQLContext(@transient val sparkContext: SparkContext)
protected[sql] def executePlan(plan: LogicalPlan): this.QueryExecution =
new this.QueryExecution { val logical = plan }
+ sparkContext.getConf.getAll.foreach {
+ case (key, value) if key.startsWith("spark.sql") => setConf(key, value)
+ case _ =>
+ }
+
/**
* :: DeveloperApi ::
* Allows catalyst LogicalPlans to be executed as a SchemaRDD. Note that the LogicalPlan
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/test/TestSQLContext.scala b/sql/core/src/main/scala/org/apache/spark/sql/test/TestSQLContext.scala
index 265b67737c..6bb81c76ed 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/test/TestSQLContext.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/test/TestSQLContext.scala
@@ -22,7 +22,11 @@ import org.apache.spark.sql.{SQLConf, SQLContext}
/** A SQLContext that can be used for local testing. */
object TestSQLContext
- extends SQLContext(new SparkContext("local[2]", "TestSQLContext", new SparkConf())) {
+ extends SQLContext(
+ new SparkContext(
+ "local[2]",
+ "TestSQLContext",
+ new SparkConf().set("spark.sql.testkey", "true"))) {
/** Fewer partitions to speed up testing. */
override private[spark] def numShufflePartitions: Int =