author    Reynold Xin <rxin@databricks.com>    2015-06-01 21:35:55 -0700
committer Reynold Xin <rxin@databricks.com>    2015-06-01 21:35:55 -0700
commit 75dda33f3e037d550c4ab55d438661070804c717 (patch)
tree   e6a91fa1347d5050a4d4f6a9bbb502813fdd4708 /sql/hive
parent 91f6be87bc5cff41ca7a9cca9fdcc4678a4e7086 (diff)
Revert "[SPARK-8020] Spark SQL in spark-defaults.conf make metadataHive get constructed too early"
This reverts commit 91f6be87bc5cff41ca7a9cca9fdcc4678a4e7086.
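
For context, a minimal sketch (not part of this commit) of the configuration path the reverted change targeted: Hive metastore options supplied through SparkConf, as they would be when read from spark-defaults.conf. The property keys and the "12"/"maven" values mirror the removed test below; the master and app name are illustrative only.

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.hive.HiveContext

// Illustrative sketch: metastore settings placed on SparkConf before the
// SparkContext/HiveContext exist, mirroring entries in spark-defaults.conf.
val sparkConf = new SparkConf()
  .setMaster("local[2]")                          // illustrative master
  .setAppName("MetastoreConfSketch")              // illustrative app name
  .set("spark.sql.hive.metastore.version", "12")  // values used by the removed test
  .set("spark.sql.hive.metastore.jars", "maven")

// SPARK-8020 concerned metadataHive being constructed too early when such keys
// arrive via spark-defaults.conf; the removed test below exercised that path.
val hiveContext = new HiveContext(new SparkContext(sparkConf))
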
Diffstat (limited to 'sql/hive')
-rw-r--r--  sql/hive/src/test/scala/org/apache/spark/sql/hive/client/VersionsSuite.scala | 45
1 file changed, 1 insertion(+), 44 deletions(-)
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/client/VersionsSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/client/VersionsSuite.scala
index deceb67d2b..7eb4842726 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/client/VersionsSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/client/VersionsSuite.scala
@@ -17,8 +17,7 @@
package org.apache.spark.sql.hive.client
-import org.apache.spark.sql.hive.HiveContext
-import org.apache.spark.{Logging, SparkConf, SparkContext, SparkFunSuite}
+import org.apache.spark.{Logging, SparkFunSuite}
import org.apache.spark.sql.catalyst.util.quietly
import org.apache.spark.util.Utils
@@ -38,48 +37,6 @@ class VersionsSuite extends SparkFunSuite with Logging {
"hive.metastore.warehouse.dir" -> warehousePath.toString)
}
- test("SPARK-8020: successfully create a HiveContext with metastore settings in Spark conf.") {
- val sparkConf =
- new SparkConf() {
- // We are not really cloning it. We need to keep the custom getAll.
- override def clone: SparkConf = this
-
- override def getAll: Array[(String, String)] = {
- val allSettings = super.getAll
- val metastoreVersion = get("spark.sql.hive.metastore.version")
- val metastoreJars = get("spark.sql.hive.metastore.jars")
-
- val others = allSettings.filterNot { case (key, _) =>
- key == "spark.sql.hive.metastore.version" || key == "spark.sql.hive.metastore.jars"
- }
-
- // Put metastore.version first; it is needed to trigger the exception
- // caused by SPARK-8020. Other problems triggered by SPARK-8020
- // (e.g. using Hive 0.13.1's metastore client to connect to a 0.12 metastore)
- // are not easy to test.
- Array(
- ("spark.sql.hive.metastore.version" -> metastoreVersion),
- ("spark.sql.hive.metastore.jars" -> metastoreJars)) ++ others
- }
- }
- sparkConf
- .set("spark.sql.hive.metastore.version", "12")
- .set("spark.sql.hive.metastore.jars", "maven")
-
- val hiveContext = new HiveContext(
- new SparkContext(
- "local[2]",
- "TestSQLContextInVersionsSuite",
- sparkConf)) {
-
- protected override def configure(): Map[String, String] = buildConf
-
- }
-
- // Make sure all metastore related lazy vals got created.
- hiveContext.tables()
- }
-
test("success sanity check") {
val badClient = IsolatedClientLoader.forVersion("13", buildConf()).client
val db = new HiveDatabase("default", "")