about summary refs log tree commit diff
path: root/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLEnv.scala
diff options
context:
space:
mode:
Diffstat (limited to 'sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLEnv.scala')
-rw-r--r-- sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLEnv.scala | 23
1 file changed, 12 insertions(+), 11 deletions(-)
diff --git a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLEnv.scala b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLEnv.scala
index a44b0d3e8e..268ba2f0bc 100644
--- a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLEnv.scala
+++ b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLEnv.scala
@@ -23,18 +23,19 @@ import scala.collection.JavaConverters._
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.internal.Logging
-import org.apache.spark.sql.hive.{HiveContext, HiveUtils}
+import org.apache.spark.sql.{SparkSession, SQLContext}
+import org.apache.spark.sql.hive.{HiveSessionState, HiveUtils}
import org.apache.spark.util.Utils
/** A singleton object for the master program. The slaves should not access this. */
private[hive] object SparkSQLEnv extends Logging {
logDebug("Initializing SparkSQLEnv")
- var hiveContext: HiveContext = _
+ var sqlContext: SQLContext = _
var sparkContext: SparkContext = _
def init() {
- if (hiveContext == null) {
+ if (sqlContext == null) {
val sparkConf = new SparkConf(loadDefaults = true)
val maybeSerializer = sparkConf.getOption("spark.serializer")
val maybeKryoReferenceTracking = sparkConf.getOption("spark.kryo.referenceTracking")
@@ -54,16 +55,16 @@ private[hive] object SparkSQLEnv extends Logging {
maybeKryoReferenceTracking.getOrElse("false"))
sparkContext = new SparkContext(sparkConf)
- hiveContext = new HiveContext(sparkContext)
+ sqlContext = SparkSession.withHiveSupport(sparkContext).wrapped
+ val sessionState = sqlContext.sessionState.asInstanceOf[HiveSessionState]
+ sessionState.metadataHive.setOut(new PrintStream(System.out, true, "UTF-8"))
+ sessionState.metadataHive.setInfo(new PrintStream(System.err, true, "UTF-8"))
+ sessionState.metadataHive.setError(new PrintStream(System.err, true, "UTF-8"))
- hiveContext.sessionState.metadataHive.setOut(new PrintStream(System.out, true, "UTF-8"))
- hiveContext.sessionState.metadataHive.setInfo(new PrintStream(System.err, true, "UTF-8"))
- hiveContext.sessionState.metadataHive.setError(new PrintStream(System.err, true, "UTF-8"))
-
- hiveContext.setConf("spark.sql.hive.version", HiveUtils.hiveExecutionVersion)
+ sqlContext.setConf("spark.sql.hive.version", HiveUtils.hiveExecutionVersion)
if (log.isDebugEnabled) {
- hiveContext.sessionState.hiveconf.getAllProperties.asScala.toSeq.sorted
+ sessionState.hiveconf.getAllProperties.asScala.toSeq.sorted
.foreach { case (k, v) => logDebug(s"HiveConf var: $k=$v") }
}
}
@@ -76,7 +77,7 @@ private[hive] object SparkSQLEnv extends Logging {
if (SparkSQLEnv.sparkContext != null) {
sparkContext.stop()
sparkContext = null
- hiveContext = null
+ sqlContext = null
}
}
}