aboutsummaryrefslogtreecommitdiff
path: root/sql
diff options
context:
space:
mode:
authorReynold Xin <rxin@databricks.com>2015-06-01 21:35:55 -0700
committerReynold Xin <rxin@databricks.com>2015-06-01 21:35:55 -0700
commit75dda33f3e037d550c4ab55d438661070804c717 (patch)
treee6a91fa1347d5050a4d4f6a9bbb502813fdd4708 /sql
parent91f6be87bc5cff41ca7a9cca9fdcc4678a4e7086 (diff)
downloadspark-75dda33f3e037d550c4ab55d438661070804c717.tar.gz
spark-75dda33f3e037d550c4ab55d438661070804c717.tar.bz2
spark-75dda33f3e037d550c4ab55d438661070804c717.zip
Revert "[SPARK-8020] Spark SQL in spark-defaults.conf make metadataHive get constructed too early"
This reverts commit 91f6be87bc5cff41ca7a9cca9fdcc4678a4e7086.
Diffstat (limited to 'sql')
-rw-r--r--sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala25
-rw-r--r--sql/hive/src/test/scala/org/apache/spark/sql/hive/client/VersionsSuite.scala45
2 files changed, 4 insertions(+), 66 deletions(-)
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala b/sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala
index 91e6385dec..7384b24c50 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala
@@ -182,28 +182,9 @@ class SQLContext(@transient val sparkContext: SparkContext)
conf.dialect
}
- {
- // We extract spark sql settings from SparkContext's conf and put them to
- // Spark SQL's conf.
- // First, we populate the SQLConf (conf). So, we can make sure that other values using
- // those settings in their construction can get the correct settings.
- // For example, metadataHive in HiveContext may need both spark.sql.hive.metastore.version
- // and spark.sql.hive.metastore.jars to get correctly constructed.
- val properties = new Properties
- sparkContext.getConf.getAll.foreach {
- case (key, value) if key.startsWith("spark.sql") => properties.setProperty(key, value)
- case _ =>
- }
- // We directly put those settings to conf to avoid of calling setConf, which may have
- // side-effects. For example, in HiveContext, setConf may cause executionHive and metadataHive
- // get constructed. If we call setConf directly, the constructed metadataHive may have
- // wrong settings, or the construction may fail.
- conf.setConf(properties)
- // After we have populated SQLConf, we call setConf to populate other confs in the subclass
- // (e.g. hiveconf in HiveContext).
- properties.foreach {
- case (key, value) => setConf(key, value)
- }
+ sparkContext.getConf.getAll.foreach {
+ case (key, value) if key.startsWith("spark.sql") => setConf(key, value)
+ case _ =>
}
@transient
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/client/VersionsSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/client/VersionsSuite.scala
index deceb67d2b..7eb4842726 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/client/VersionsSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/client/VersionsSuite.scala
@@ -17,8 +17,7 @@
package org.apache.spark.sql.hive.client
-import org.apache.spark.sql.hive.HiveContext
-import org.apache.spark.{Logging, SparkConf, SparkContext, SparkFunSuite}
+import org.apache.spark.{Logging, SparkFunSuite}
import org.apache.spark.sql.catalyst.util.quietly
import org.apache.spark.util.Utils
@@ -38,48 +37,6 @@ class VersionsSuite extends SparkFunSuite with Logging {
"hive.metastore.warehouse.dir" -> warehousePath.toString)
}
- test("SPARK-8020: successfully create a HiveContext with metastore settings in Spark conf.") {
- val sparkConf =
- new SparkConf() {
- // We do not really clone it. We need to keep the custom getAll.
- override def clone: SparkConf = this
-
- override def getAll: Array[(String, String)] = {
- val allSettings = super.getAll
- val metastoreVersion = get("spark.sql.hive.metastore.version")
- val metastoreJars = get("spark.sql.hive.metastore.jars")
-
- val others = allSettings.filterNot { case (key, _) =>
- key == "spark.sql.hive.metastore.version" || key == "spark.sql.hive.metastore.jars"
- }
-
- // Put metastore.version to the first one. It is needed to trigger the exception
- // caused by SPARK-8020. Other problems triggered by SPARK-8020
- // (e.g. using Hive 0.13.1's metastore client to connect to a 0.12 metastore)
- // are not easy to test.
- Array(
- ("spark.sql.hive.metastore.version" -> metastoreVersion),
- ("spark.sql.hive.metastore.jars" -> metastoreJars)) ++ others
- }
- }
- sparkConf
- .set("spark.sql.hive.metastore.version", "12")
- .set("spark.sql.hive.metastore.jars", "maven")
-
- val hiveContext = new HiveContext(
- new SparkContext(
- "local[2]",
- "TestSQLContextInVersionsSuite",
- sparkConf)) {
-
- protected override def configure(): Map[String, String] = buildConf
-
- }
-
- // Make sure all metastore related lazy vals got created.
- hiveContext.tables()
- }
-
test("success sanity check") {
val badClient = IsolatedClientLoader.forVersion("13", buildConf()).client
val db = new HiveDatabase("default", "")