From 284b15d2fbff7c0c3ffe8737838071d366ea5742 Mon Sep 17 00:00:00 2001
From: Reynold Xin
Date: Fri, 22 Apr 2016 01:31:13 -0700
Subject: [SPARK-14826][SQL] Remove HiveQueryExecution

## What changes were proposed in this pull request?

This patch removes HiveQueryExecution. As part of this, I consolidated all the describe commands into DescribeTableCommand.

## How was this patch tested?

Should be covered by existing tests.

Author: Reynold Xin

Closes #12588 from rxin/SPARK-14826.
---
 .../apache/spark/sql/hive/thriftserver/SparkSQLDriver.scala | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)

diff --git a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLDriver.scala b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLDriver.scala
index 7e8eada5ad..f730952507 100644
--- a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLDriver.scala
+++ b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLDriver.scala
@@ -28,7 +28,8 @@ import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse
 
 import org.apache.spark.internal.Logging
 import org.apache.spark.sql.AnalysisException
-import org.apache.spark.sql.hive.{HiveContext, HiveMetastoreTypes, HiveQueryExecution}
+import org.apache.spark.sql.execution.QueryExecution
+import org.apache.spark.sql.hive.{HiveContext, HiveMetastoreTypes}
 
 private[hive] class SparkSQLDriver(
     val context: HiveContext = SparkSQLEnv.hiveContext)
@@ -41,7 +42,7 @@ private[hive] class SparkSQLDriver(
   override def init(): Unit = {
   }
 
-  private def getResultSetSchema(query: HiveQueryExecution): Schema = {
+  private def getResultSetSchema(query: QueryExecution): Schema = {
     val analyzed = query.analyzed
     logDebug(s"Result Schema: ${analyzed.output}")
     if (analyzed.output.isEmpty) {
@@ -59,9 +60,8 @@ private[hive] class SparkSQLDriver(
     // TODO unify the error code
     try {
       context.sparkContext.setJobDescription(command)
-      val execution =
-        context.executePlan(context.sql(command).logicalPlan).asInstanceOf[HiveQueryExecution]
-      hiveResponse = execution.stringResult()
+      val execution = context.executePlan(context.sql(command).logicalPlan)
+      hiveResponse = execution.hiveResultString()
       tableSchema = getResultSetSchema(execution)
       new CommandProcessorResponse(0)
     } catch {
-- 
cgit v1.2.3
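
For context, a minimal sketch (not part of the patch) of the code path `SparkSQLDriver.run` follows after this change: `executePlan` now returns a plain `QueryExecution`, and `hiveResultString()` replaces the removed `HiveQueryExecution.stringResult()`, so the thrift server no longer needs a Hive-specific `QueryExecution` subclass or a cast. The object and method names below are illustrative, and the real driver's error handling and schema extraction are omitted.

```scala
package org.apache.spark.sql.hive.thriftserver

// Illustrative sketch only (not from the patch); assumes it sits alongside
// SparkSQLDriver so that executePlan and logicalPlan are visible from within
// the org.apache.spark.sql package tree.
import org.apache.spark.sql.execution.QueryExecution
import org.apache.spark.sql.hive.HiveContext

private[hive] object HiveResultSketch {
  // Runs a SQL command and returns Hive-formatted result strings, mirroring
  // what SparkSQLDriver.run does after this patch.
  def runSqlAsHive(context: HiveContext, command: String): Seq[String] = {
    // executePlan returns the generic QueryExecution for the command's
    // logical plan; no cast to the removed HiveQueryExecution is needed.
    val execution: QueryExecution = context.executePlan(context.sql(command).logicalPlan)
    // hiveResultString() renders the result rows in Hive's textual output
    // format, replacing the old HiveQueryExecution.stringResult().
    execution.hiveResultString()
  }
}
```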