 examples/src/main/scala/org/apache/spark/examples/sql/hive/HiveFromSpark.scala                | 14 +++++++++-----
 sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala                               | 13 -------------
 sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLEnv.scala | 10 ++++++----
 sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveSparkSubmitSuite.scala                  |  7 +++++--
 4 files changed, 20 insertions(+), 24 deletions(-)
diff --git a/examples/src/main/scala/org/apache/spark/examples/sql/hive/HiveFromSpark.scala b/examples/src/main/scala/org/apache/spark/examples/sql/hive/HiveFromSpark.scala
index ff33091621..a15cf5ded0 100644
--- a/examples/src/main/scala/org/apache/spark/examples/sql/hive/HiveFromSpark.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/sql/hive/HiveFromSpark.scala
@@ -36,15 +36,19 @@ object HiveFromSpark {
 
   def main(args: Array[String]) {
     val sparkConf = new SparkConf().setAppName("HiveFromSpark")
-    val sc = new SparkContext(sparkConf)
 
     // A hive context adds support for finding tables in the MetaStore and writing queries
     // using HiveQL. Users who do not have an existing Hive deployment can still create a
     // HiveContext. When not configured by the hive-site.xml, the context automatically
     // creates metastore_db and warehouse in the current directory.
-    val sparkSession = SparkSession.withHiveSupport(sc)
-    import sparkSession.implicits._
-    import sparkSession.sql
+    val spark = SparkSession.builder
+      .config(sparkConf)
+      .enableHiveSupport()
+      .getOrCreate()
+    val sc = spark.sparkContext
+
+    import spark.implicits._
+    import spark.sql
 
     sql("CREATE TABLE IF NOT EXISTS src (key INT, value STRING)")
     sql(s"LOAD DATA LOCAL INPATH '${kv1File.getAbsolutePath}' INTO TABLE src")
@@ -74,7 +78,7 @@ object HiveFromSpark {
     println("Result of SELECT *:")
     sql("SELECT * FROM records r JOIN src s ON r.key = s.key").collect().foreach(println)
 
-    sc.stop()
+    spark.stop()
   }
 }
 // scalastyle:on println
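
Taken on its own, the new example boils down to a single entry point: build the session, let it own the SparkContext, and stop the session at the end. A minimal standalone sketch of that pattern (the app name and query below are illustrative, not part of the patch):

    import org.apache.spark.sql.SparkSession

    object HiveExampleSketch {
      def main(args: Array[String]): Unit = {
        // getOrCreate() creates the underlying SparkContext if none exists;
        // enableHiveSupport() requires the Hive classes on the classpath.
        val spark = SparkSession.builder
          .appName("HiveExampleSketch")
          .enableHiveSupport()
          .getOrCreate()

        import spark.sql
        sql("SHOW TABLES").collect().foreach(println)

        // Stopping the session also stops the SparkContext it owns.
        spark.stop()
      }
    }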
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala b/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala
index aa7c335c53..9ed3756628 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala
@@ -816,17 +816,4 @@ object SparkSession {
     }
   }
 
-  /**
-   * Create a new [[SparkSession]] with a catalog backed by Hive.
-   */
-  def withHiveSupport(sc: SparkContext): SparkSession = {
-    if (hiveClassesArePresent) {
-      sc.conf.set(CATALOG_IMPLEMENTATION.key, "hive")
-      new SparkSession(sc)
-    } else {
-      throw new IllegalArgumentException(
-        "Unable to instantiate SparkSession with Hive support because Hive classes are not found.")
-    }
-  }
-
 }
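
For anyone still calling the deleted helper: it checked hiveClassesArePresent, set the catalog-implementation flag on the existing SparkContext's conf, and wrapped the context. Builder.enableHiveSupport() performs the same classpath check and throws the same IllegalArgumentException, so the migration is mechanical. A sketch of the before/after, assuming "spark.sql.catalogImplementation" is the key that CATALOG_IMPLEMENTATION resolves to:

    import org.apache.spark.SparkConf
    import org.apache.spark.sql.SparkSession

    val conf = new SparkConf().setAppName("migration-sketch")

    // Before (removed): SparkSession.withHiveSupport(new SparkContext(conf))
    // effectively did conf.set("spark.sql.catalogImplementation", "hive").
    // After: the builder records the same setting, and enableHiveSupport()
    // fails early if the Hive classes are not on the classpath.
    val spark = SparkSession.builder
      .config(conf)
      .enableHiveSupport()
      .getOrCreate()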
diff --git a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLEnv.scala b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLEnv.scala
index 665a44e51a..8de223f444 100644
--- a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLEnv.scala
+++ b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLEnv.scala
@@ -54,13 +54,15 @@ private[hive] object SparkSQLEnv extends Logging {
         "spark.kryo.referenceTracking",
         maybeKryoReferenceTracking.getOrElse("false"))
 
-      sparkContext = new SparkContext(sparkConf)
-      sqlContext = SparkSession.withHiveSupport(sparkContext).wrapped
-      val sessionState = sqlContext.sessionState.asInstanceOf[HiveSessionState]
+      val sparkSession = SparkSession.builder.config(sparkConf).enableHiveSupport().getOrCreate()
+      sparkContext = sparkSession.sparkContext
+      sqlContext = sparkSession.wrapped
+
+      val sessionState = sparkSession.sessionState.asInstanceOf[HiveSessionState]
       sessionState.metadataHive.setOut(new PrintStream(System.out, true, "UTF-8"))
       sessionState.metadataHive.setInfo(new PrintStream(System.err, true, "UTF-8"))
       sessionState.metadataHive.setError(new PrintStream(System.err, true, "UTF-8"))
 
-      sqlContext.setConf("spark.sql.hive.version", HiveUtils.hiveExecutionVersion)
+      sparkSession.conf.set("spark.sql.hive.version", HiveUtils.hiveExecutionVersion)
     }
   }
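
The final hunk is the only behavioral subtlety here: sqlContext.setConf and sparkSession.conf.set both write to the session's runtime SQL conf, so the "spark.sql.hive.version" value reported to JDBC clients is unchanged. A quick sketch of the round trip, assuming an existing session named spark with Hive support (the version string is a stand-in for HiveUtils.hiveExecutionVersion):

    // Assumes `spark` is a running SparkSession with Hive support enabled.
    spark.conf.set("spark.sql.hive.version", "1.2.1") // stand-in value
    assert(spark.conf.get("spark.sql.hive.version") == "1.2.1")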
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveSparkSubmitSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveSparkSubmitSuite.scala
index 77a6a94a67..a320011799 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveSparkSubmitSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveSparkSubmitSuite.scala
@@ -290,8 +290,11 @@ object SetWarehouseLocationTest extends Logging {
     conf.set("spark.sql.warehouse.dir", warehouseLocation.toString)
     conf.set("hive.metastore.warehouse.dir", hiveWarehouseLocation.toString)
 
-    val sc = new SparkContext(conf)
-    val sparkSession = SparkSession.withHiveSupport(sc)
+    val sparkSession = SparkSession.builder
+      .config(conf)
+      .enableHiveSupport()
+      .getOrCreate()
+
     val catalog = sparkSession.sessionState.catalog
 
     sparkSession.sql("drop table if exists testLocation")
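
The test follows the same recipe, with one caveat the builder introduces: getOrCreate() may hand back a session that already exists, in which case the conf values set here would not take effect, so drivers like this rely on running in a fresh JVM (as spark-submit tests do). A sketch of the driver's shape, with temp-style paths standing in for the test's generated directories:

    import org.apache.spark.SparkConf
    import org.apache.spark.sql.SparkSession

    object WarehouseLocationSketch {
      def main(args: Array[String]): Unit = {
        // Stand-in paths; the real test derives these from temporary directories.
        val conf = new SparkConf()
        conf.set("spark.sql.warehouse.dir", "/tmp/spark-warehouse-sketch")
        conf.set("hive.metastore.warehouse.dir", "/tmp/hive-warehouse-sketch")

        val sparkSession = SparkSession.builder
          .config(conf)
          .enableHiveSupport()
          .getOrCreate()

        // New managed tables should now land under spark.sql.warehouse.dir.
        sparkSession.sql("DROP TABLE IF EXISTS testLocation")
        sparkSession.sql("CREATE TABLE testLocation (key INT)")
        sparkSession.stop()
      }
    }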