author     Yin Huai <yhuai@databricks.com>  2015-06-02 00:16:56 -0700
committer  Yin Huai <yhuai@databricks.com>  2015-06-02 00:16:56 -0700
commit     7b7f7b6c6fd903e2ecfc886d29eaa9df58adcfc3 (patch)
tree       3724fda47c27c881c82905d0d6955944ddd703af /sql
parent     bcb47ad7718b843fbd25cd1e228a7b7e6e5b8686 (diff)
[SPARK-8020] [SQL] Spark SQL conf in spark-defaults.conf makes metadataHive get constructed too early
https://issues.apache.org/jira/browse/SPARK-8020

Author: Yin Huai <yhuai@databricks.com>

Closes #6571 from yhuai/SPARK-8020-1 and squashes the following commits:

0398f5b [Yin Huai] First populate the SQLConf and then construct executionHive and metadataHive.
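For context, the trigger for SPARK-8020 is a spark-defaults.conf carrying Hive metastore settings. The two keys named in the code comments below must both be visible in SQLConf before metadataHive is constructed, because the metastore client is built from them together. A sketch of such entries (the values are illustrative, not taken from the commit):

spark.sql.hive.metastore.version  0.13.1
spark.sql.hive.metastore.jars     maven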
Diffstat (limited to 'sql')
-rw-r--r--  sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala | 25
1 file changed, 22 insertions(+), 3 deletions(-)
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala b/sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala
index 7384b24c50..91e6385dec 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala
@@ -182,9 +182,28 @@ class SQLContext(@transient val sparkContext: SparkContext)
conf.dialect
}
-  sparkContext.getConf.getAll.foreach {
-    case (key, value) if key.startsWith("spark.sql") => setConf(key, value)
-    case _ =>
+  {
+    // We extract Spark SQL settings from SparkContext's conf and put them into
+    // Spark SQL's conf.
+    // We populate the SQLConf (conf) first, so that anything that reads these settings
+    // during its own construction sees the correct values.
+    // For example, metadataHive in HiveContext may need both spark.sql.hive.metastore.version
+    // and spark.sql.hive.metastore.jars to be constructed correctly.
+    val properties = new Properties
+    sparkContext.getConf.getAll.foreach {
+      case (key, value) if key.startsWith("spark.sql") => properties.setProperty(key, value)
+      case _ =>
+    }
+    // We put these settings into conf directly, rather than calling setConf, because setConf
+    // may have side effects: in HiveContext it can trigger construction of executionHive and
+    // metadataHive. If setConf were called here, the constructed metadataHive could pick up
+    // wrong settings, or its construction could fail outright.
+    conf.setConf(properties)
+    // Once SQLConf has been populated, we call setConf to populate other confs in the
+    // subclass (e.g. hiveconf in HiveContext).
+    properties.foreach {
+      case (key, value) => setConf(key, value)
+    }
}
@transient
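To see the effect of the ordering fix from the outside, here is a minimal sketch against the Spark 1.4-era API (SparkConf, SparkContext, SQLContext). The object name, app name, and the spark.sql.shuffle.partitions value are arbitrary choices for illustration; the SparkConf entries stand in for lines in spark-defaults.conf.

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.SQLContext

object Spark8020Demo {
  def main(args: Array[String]): Unit = {
    // spark.sql.* entries here stand in for lines in spark-defaults.conf.
    val sparkConf = new SparkConf()
      .setMaster("local[2]")
      .setAppName("spark-8020-demo")
      .set("spark.sql.shuffle.partitions", "4")

    val sc = new SparkContext(sparkConf)
    // With this patch, the spark.sql.* keys are copied into SQLConf before any
    // subclass side effects run, so values constructed from SQLConf see them.
    val sqlContext = new SQLContext(sc)
    println(sqlContext.getConf("spark.sql.shuffle.partitions")) // prints 4
    sc.stop()
  }
}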