path: root/sql
author    Sandeep Singh <sandeep@techaddict.me>    2016-05-05 14:35:15 -0700
committer Andrew Or <andrew@databricks.com>        2016-05-05 14:35:15 -0700
commit    ed6f3f8a5f3a6bf7c53e13c2798de398c9a526a6 (patch)
tree      cac99af0f11f39aae44ab2dc10ee5d08a4cb25a6 /sql
parent    8cba57a75cf9e29b54d97366a039a97a2f305d5d (diff)
[SPARK-15072][SQL][REPL][EXAMPLES] Remove SparkSession.withHiveSupport
## What changes were proposed in this pull request?

Remove the `withHiveSupport` method of `SparkSession`; use `enableHiveSupport` instead.

## How was this patch tested?

Ran tests locally.

Author: Sandeep Singh <sandeep@techaddict.me>

Closes #12851 from techaddict/SPARK-15072.
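For reference, a minimal sketch of the replacement call site (the object name and app name below are hypothetical; it assumes the Hive classes are on the classpath, since `enableHiveSupport()` fails otherwise, much like the removed helper did):

```scala
import org.apache.spark.sql.SparkSession

// Hypothetical example object showing the builder-based replacement for the
// removed SparkSession.withHiveSupport(sc) helper.
object HiveSessionExample {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder
      .appName("HiveSessionExample")  // hypothetical app name
      .enableHiveSupport()            // errors if Hive classes are not found
      .getOrCreate()

    // Hive-backed catalog operations work as before.
    spark.sql("SHOW TABLES").show()

    spark.stop()
  }
}
```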
Diffstat (limited to 'sql')
-rw-r--r--  sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala                              13
-rw-r--r--  sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLEnv.scala  10
-rw-r--r--  sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveSparkSubmitSuite.scala                   7
3 files changed, 11 insertions, 19 deletions
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala b/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala
index aa7c335c53..9ed3756628 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala
@@ -816,17 +816,4 @@ object SparkSession {
}
}
- /**
- * Create a new [[SparkSession]] with a catalog backed by Hive.
- */
- def withHiveSupport(sc: SparkContext): SparkSession = {
- if (hiveClassesArePresent) {
- sc.conf.set(CATALOG_IMPLEMENTATION.key, "hive")
- new SparkSession(sc)
- } else {
- throw new IllegalArgumentException(
- "Unable to instantiate SparkSession with Hive support because Hive classes are not found.")
- }
- }
-
}
diff --git a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLEnv.scala b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLEnv.scala
index 665a44e51a..8de223f444 100644
--- a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLEnv.scala
+++ b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLEnv.scala
@@ -54,13 +54,15 @@ private[hive] object SparkSQLEnv extends Logging {
"spark.kryo.referenceTracking",
maybeKryoReferenceTracking.getOrElse("false"))
- sparkContext = new SparkContext(sparkConf)
- sqlContext = SparkSession.withHiveSupport(sparkContext).wrapped
- val sessionState = sqlContext.sessionState.asInstanceOf[HiveSessionState]
+ val sparkSession = SparkSession.builder.config(sparkConf).enableHiveSupport().getOrCreate()
+ sparkContext = sparkSession.sparkContext
+ sqlContext = sparkSession.wrapped
+
+ val sessionState = sparkSession.sessionState.asInstanceOf[HiveSessionState]
sessionState.metadataHive.setOut(new PrintStream(System.out, true, "UTF-8"))
sessionState.metadataHive.setInfo(new PrintStream(System.err, true, "UTF-8"))
sessionState.metadataHive.setError(new PrintStream(System.err, true, "UTF-8"))
- sqlContext.setConf("spark.sql.hive.version", HiveUtils.hiveExecutionVersion)
+ sparkSession.conf.set("spark.sql.hive.version", HiveUtils.hiveExecutionVersion)
}
}
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveSparkSubmitSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveSparkSubmitSuite.scala
index 77a6a94a67..a320011799 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveSparkSubmitSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveSparkSubmitSuite.scala
@@ -290,8 +290,11 @@ object SetWarehouseLocationTest extends Logging {
conf.set("spark.sql.warehouse.dir", warehouseLocation.toString)
conf.set("hive.metastore.warehouse.dir", hiveWarehouseLocation.toString)
- val sc = new SparkContext(conf)
- val sparkSession = SparkSession.withHiveSupport(sc)
+ val sparkSession = SparkSession.builder
+ .config(conf)
+ .enableHiveSupport()
+ .getOrCreate()
+
val catalog = sparkSession.sessionState.catalog
sparkSession.sql("drop table if exists testLocation")