author    Reynold Xin <rxin@databricks.com>  2016-04-22 01:31:13 -0700
committer Reynold Xin <rxin@databricks.com>  2016-04-22 01:31:13 -0700
commit    284b15d2fbff7c0c3ffe8737838071d366ea5742 (patch)
tree      cad712bbf0674f8b44895f2af61098d7f81b57c3 /sql/hive-thriftserver
parent    80127935df06a829b734cafc2447aa1f3df40288 (diff)
[SPARK-14826][SQL] Remove HiveQueryExecution
## What changes were proposed in this pull request?

This patch removes HiveQueryExecution. As part of this, I consolidated all the describe commands into DescribeTableCommand.

## How was this patch tested?

Should be covered by existing tests.

Author: Reynold Xin <rxin@databricks.com>

Closes #12588 from rxin/SPARK-14826.
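Restated as a sketch (the identifiers are taken from the diff below; the surrounding code is illustrative only): the driver no longer needs to cast the query execution to a Hive-specific subclass, because the generic QueryExecution can now render Hive-style string results itself.

```scala
// Before (sketch): a cast to the Hive-only subclass was required to
// render results as Hive-formatted strings.
//   val execution = context.executePlan(context.sql(command).logicalPlan)
//     .asInstanceOf[HiveQueryExecution]
//   hiveResponse = execution.stringResult()

// After (sketch): the generic QueryExecution exposes the same capability.
val execution = context.executePlan(context.sql(command).logicalPlan)
val hiveResponse: Seq[String] = execution.hiveResultString()
```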
Diffstat (limited to 'sql/hive-thriftserver')
-rw-r--r--  sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLDriver.scala | 10 +++++-----
1 file changed, 5 insertions(+), 5 deletions(-)
diff --git a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLDriver.scala b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLDriver.scala
index 7e8eada5ad..f730952507 100644
--- a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLDriver.scala
+++ b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLDriver.scala
@@ -28,7 +28,8 @@ import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse
 
 import org.apache.spark.internal.Logging
 import org.apache.spark.sql.AnalysisException
-import org.apache.spark.sql.hive.{HiveContext, HiveMetastoreTypes, HiveQueryExecution}
+import org.apache.spark.sql.execution.QueryExecution
+import org.apache.spark.sql.hive.{HiveContext, HiveMetastoreTypes}
 
 private[hive] class SparkSQLDriver(
     val context: HiveContext = SparkSQLEnv.hiveContext)
@@ -41,7 +42,7 @@ private[hive] class SparkSQLDriver(
   override def init(): Unit = {
   }
 
-  private def getResultSetSchema(query: HiveQueryExecution): Schema = {
+  private def getResultSetSchema(query: QueryExecution): Schema = {
     val analyzed = query.analyzed
     logDebug(s"Result Schema: ${analyzed.output}")
     if (analyzed.output.isEmpty) {
@@ -59,9 +60,8 @@ private[hive] class SparkSQLDriver(
     // TODO unify the error code
     try {
       context.sparkContext.setJobDescription(command)
-      val execution =
-        context.executePlan(context.sql(command).logicalPlan).asInstanceOf[HiveQueryExecution]
-      hiveResponse = execution.stringResult()
+      val execution = context.executePlan(context.sql(command).logicalPlan)
+      hiveResponse = execution.hiveResultString()
       tableSchema = getResultSetSchema(execution)
       new CommandProcessorResponse(0)
     } catch {
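For context, a hedged usage sketch of the driver path this diff touches. The `SparkSQLDriver` name and the `CommandProcessorResponse(0)` success code come from the diff above; the `run` signature follows the standard Hive `Driver` contract, and the setup around it is assumed for illustration:

```scala
// Hypothetical usage sketch; assumes SparkSQLEnv has already set up the
// HiveContext that SparkSQLDriver defaults to.
val driver = new SparkSQLDriver()
driver.init()

// DESCRIBE now goes through the consolidated DescribeTableCommand.
val response = driver.run("DESCRIBE my_table") // my_table is a placeholder
if (response.getResponseCode == 0) {
  // On success the driver has populated hiveResponse and tableSchema via
  // QueryExecution.hiveResultString() and getResultSetSchema above.
  println("query succeeded")
}
```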