author    Herman van Hovell <hvanhovell@databricks.com>    2017-03-28 23:14:31 +0800
committer Wenchen Fan <wenchen@databricks.com>    2017-03-28 23:14:31 +0800
commit    f82461fc1197f6055d9cf972d82260b178e10a7c (patch)
tree      36bb1f58ce3080b1b2d86cd8c2b99148d07cbf0c /sql/hive-thriftserver
parent    4fcc214d9eb5e98b2eed3e28cc23b0c511cd9007 (diff)
[SPARK-20126][SQL] Remove HiveSessionState
## What changes were proposed in this pull request?

Commit https://github.com/apache/spark/commit/ea361165e1ddce4d8aa0242ae3e878d7b39f1de2 moved most of the logic from the SessionState classes into an accompanying builder, which makes `HiveSessionState` redundant. This PR removes it.

## How was this patch tested?

Existing tests.

Author: Herman van Hovell <hvanhovell@databricks.com>

Closes #17457 from hvanhovell/SPARK-20126.
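For background on the builder refactor this commit builds on: construction logic that used to live in `SessionState` subclasses now lives in a builder hierarchy (the real classes are `BaseSessionStateBuilder` and `HiveSessionStateBuilder`), so Hive support only needs its own builder, not its own state class. A simplified, hypothetical sketch of the idea; the members below are illustrative assumptions, not Spark's actual API:

```scala
// Hypothetical sketch: backend-specific wiring lives in the builder,
// so every backend can share the same plain SessionState class.
class SessionState(val catalogName: String, val conf: Map[String, String])

abstract class SessionStateBuilder {
  protected def catalogName: String = "in-memory" // overridden per backend
  protected def conf: Map[String, String] = Map.empty
  final def build(): SessionState = new SessionState(catalogName, conf)
}

class HiveSessionStateBuilder extends SessionStateBuilder {
  // Hive-specific behavior becomes an override here instead of a
  // SessionState subclass, which is what lets this PR delete HiveSessionState.
  override protected def catalogName: String = "hive"
}
```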
Diffstat (limited to 'sql/hive-thriftserver')
-rw-r--r--  sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLEnv.scala | 12
-rw-r--r--  sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/server/SparkSQLOperationManager.scala | 6
2 files changed, 10 insertions(+), 8 deletions(-)
diff --git a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLEnv.scala b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLEnv.scala
index c0b299411e..01c4eb131a 100644
--- a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLEnv.scala
+++ b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLEnv.scala
@@ -22,7 +22,7 @@ import java.io.PrintStream
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.internal.Logging
import org.apache.spark.sql.{SparkSession, SQLContext}
-import org.apache.spark.sql.hive.{HiveSessionState, HiveUtils}
+import org.apache.spark.sql.hive.{HiveExternalCatalog, HiveUtils}
import org.apache.spark.util.Utils
/** A singleton object for the master program. The slaves should not access this. */
@@ -49,10 +49,12 @@ private[hive] object SparkSQLEnv extends Logging {
sparkContext = sparkSession.sparkContext
sqlContext = sparkSession.sqlContext
- val sessionState = sparkSession.sessionState.asInstanceOf[HiveSessionState]
- sessionState.metadataHive.setOut(new PrintStream(System.out, true, "UTF-8"))
- sessionState.metadataHive.setInfo(new PrintStream(System.err, true, "UTF-8"))
- sessionState.metadataHive.setError(new PrintStream(System.err, true, "UTF-8"))
+ val metadataHive = sparkSession
+ .sharedState.externalCatalog.asInstanceOf[HiveExternalCatalog]
+ .client.newSession()
+ metadataHive.setOut(new PrintStream(System.out, true, "UTF-8"))
+ metadataHive.setInfo(new PrintStream(System.err, true, "UTF-8"))
+ metadataHive.setError(new PrintStream(System.err, true, "UTF-8"))
sparkSession.conf.set("spark.sql.hive.version", HiveUtils.hiveExecutionVersion)
}
}
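The hunk above is the heart of the change: the CLI no longer reaches the Hive metadata client through a `HiveSessionState` field but through the shared `HiveExternalCatalog`. A minimal sketch of that access path, assuming Hive support is enabled (so the external catalog cast holds) and code placed in the thriftserver package, since several of these members are package-private; the object and method names are hypothetical:

```scala
package org.apache.spark.sql.hive.thriftserver

import java.io.PrintStream

import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.hive.HiveExternalCatalog

object MetadataHiveAccess {
  // Mirrors the replacement shown in the diff: obtain a fresh Hive client
  // session from the shared external catalog and point its output streams
  // at this process's stdout/stderr.
  def redirectHiveOutput(sparkSession: SparkSession): Unit = {
    val metadataHive = sparkSession
      .sharedState.externalCatalog.asInstanceOf[HiveExternalCatalog]
      .client.newSession()
    metadataHive.setOut(new PrintStream(System.out, true, "UTF-8"))
    metadataHive.setInfo(new PrintStream(System.err, true, "UTF-8"))
    metadataHive.setError(new PrintStream(System.err, true, "UTF-8"))
  }
}
```

Calling `client.newSession()` rather than reusing the shared client presumably keeps the stream redirection local to this session's Hive client state instead of affecting every other user of the catalog's client.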
diff --git a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/server/SparkSQLOperationManager.scala b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/server/SparkSQLOperationManager.scala
index 49ab664009..a0e5012633 100644
--- a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/server/SparkSQLOperationManager.scala
+++ b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/server/SparkSQLOperationManager.scala
@@ -26,7 +26,7 @@ import org.apache.hive.service.cli.session.HiveSession
import org.apache.spark.internal.Logging
import org.apache.spark.sql.SQLContext
-import org.apache.spark.sql.hive.HiveSessionState
+import org.apache.spark.sql.hive.HiveUtils
import org.apache.spark.sql.hive.thriftserver.{ReflectionUtils, SparkExecuteStatementOperation}
/**
@@ -49,8 +49,8 @@ private[thriftserver] class SparkSQLOperationManager()
val sqlContext = sessionToContexts.get(parentSession.getSessionHandle)
require(sqlContext != null, s"Session handle: ${parentSession.getSessionHandle} has not been" +
s" initialized or had already closed.")
- val sessionState = sqlContext.sessionState.asInstanceOf[HiveSessionState]
- val runInBackground = async && sessionState.hiveThriftServerAsync
+ val conf = sqlContext.sessionState.conf
+ val runInBackground = async && conf.getConf(HiveUtils.HIVE_THRIFT_SERVER_ASYNC)
val operation = new SparkExecuteStatementOperation(parentSession, statement, confOverlay,
runInBackground)(sqlContext, sessionToActivePool)
handleToOperation.put(operation.getHandle, operation)
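The second hunk applies the same pattern: the async flag that used to be a convenience field on `HiveSessionState` (`hiveThriftServerAsync`) is now read as a typed config entry from the generic session state's `SQLConf`. A minimal sketch; the object and method names are hypothetical, and the code must sit under `org.apache.spark.sql` because `sessionState` is package-private:

```scala
package org.apache.spark.sql.hive.thriftserver

import org.apache.spark.sql.SQLContext
import org.apache.spark.sql.hive.HiveUtils

object AsyncFlag {
  // Mirrors the new call site: a typed ConfigEntry lookup through SQLConf
  // replaces the field on the removed HiveSessionState class.
  def runInBackground(sqlContext: SQLContext, async: Boolean): Boolean =
    async && sqlContext.sessionState.conf.getConf(HiveUtils.HIVE_THRIFT_SERVER_ASYNC)
}
```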