author     Sandeep Singh <sandeep@techaddict.me>      2016-05-05 14:35:15 -0700
committer  Andrew Or <andrew@databricks.com>          2016-05-05 14:35:15 -0700
commit     ed6f3f8a5f3a6bf7c53e13c2798de398c9a526a6 (patch)
tree       cac99af0f11f39aae44ab2dc10ee5d08a4cb25a6 /sql/hive-thriftserver/src/main/scala
parent     8cba57a75cf9e29b54d97366a039a97a2f305d5d (diff)
[SPARK-15072][SQL][REPL][EXAMPLES] Remove SparkSession.withHiveSupport
## What changes were proposed in this pull request?

Remove the `withHiveSupport` method of `SparkSession`; use `enableHiveSupport` on the session builder instead.

## How was this patch tested?

Ran tests locally.

Author: Sandeep Singh <sandeep@techaddict.me>

Closes #12851 from techaddict/SPARK-15072.
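For quick orientation, here is a minimal before/after sketch of the API change in this patch; the standalone `SparkConf` setup and the app name are illustrative and not part of the commit:

```scala
import org.apache.spark.SparkConf
import org.apache.spark.sql.SparkSession

// Illustrative conf only; SparkSQLEnv assembles its own SparkConf.
val sparkConf = new SparkConf().setAppName("SparkSQLEnv-example")

// Before this patch (helper now removed):
//   sparkContext = new SparkContext(sparkConf)
//   sqlContext   = SparkSession.withHiveSupport(sparkContext).wrapped

// After this patch: build a Hive-enabled SparkSession and derive the
// SparkContext (and the wrapped SQLContext) from it.
val sparkSession = SparkSession.builder
  .config(sparkConf)
  .enableHiveSupport()
  .getOrCreate()

val sparkContext = sparkSession.sparkContext
```

The builder-based form avoids constructing a `SparkContext` by hand and reuses an existing session if one is already running, which is why the diff below replaces three separate assignments with a single `getOrCreate()` call.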
Diffstat (limited to 'sql/hive-thriftserver/src/main/scala')
-rw-r--r--  sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLEnv.scala  10
1 file changed, 6 insertions(+), 4 deletions(-)
diff --git a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLEnv.scala b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLEnv.scala
index 665a44e51a..8de223f444 100644
--- a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLEnv.scala
+++ b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLEnv.scala
@@ -54,13 +54,15 @@ private[hive] object SparkSQLEnv extends Logging {
"spark.kryo.referenceTracking",
maybeKryoReferenceTracking.getOrElse("false"))
- sparkContext = new SparkContext(sparkConf)
- sqlContext = SparkSession.withHiveSupport(sparkContext).wrapped
- val sessionState = sqlContext.sessionState.asInstanceOf[HiveSessionState]
+ val sparkSession = SparkSession.builder.config(sparkConf).enableHiveSupport().getOrCreate()
+ sparkContext = sparkSession.sparkContext
+ sqlContext = sparkSession.wrapped
+
+ val sessionState = sparkSession.sessionState.asInstanceOf[HiveSessionState]
sessionState.metadataHive.setOut(new PrintStream(System.out, true, "UTF-8"))
sessionState.metadataHive.setInfo(new PrintStream(System.err, true, "UTF-8"))
sessionState.metadataHive.setError(new PrintStream(System.err, true, "UTF-8"))
- sqlContext.setConf("spark.sql.hive.version", HiveUtils.hiveExecutionVersion)
+ sparkSession.conf.set("spark.sql.hive.version", HiveUtils.hiveExecutionVersion)
}
}
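As a small usage note, the value set on the last changed line above can be read back through the same session's runtime conf; a minimal sketch assuming the Hive-enabled `sparkSession` from the earlier example (the read-back itself is illustrative, not part of the patch):

```scala
// Read back the Hive version string that SparkSQLEnv.init() stores in the session conf.
val hiveVersion: String = sparkSession.conf.get("spark.sql.hive.version")
println(s"Hive execution version: $hiveVersion")
```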