diff options
author: Reynold Xin <rxin@databricks.com> | 2016-04-23 15:41:17 -0700
committer: Yin Huai <yhuai@databricks.com> | 2016-04-23 15:41:17 -0700
commit: 162e12b085692d67fd3b2bd1254152cb56db0722 (patch)
tree: de03deef50f6f7988b6871ed46f6d97396bc631d /sql/hive-thriftserver
parent: e3c1366bbcf712f8d7a91640eb11e67a4419e4be (diff)
download: spark-162e12b085692d67fd3b2bd1254152cb56db0722.tar.gz, spark-162e12b085692d67fd3b2bd1254152cb56db0722.tar.bz2, spark-162e12b085692d67fd3b2bd1254152cb56db0722.zip
[SPARK-14877][SQL] Remove HiveMetastoreTypes class
## What changes were proposed in this pull request?
It is unnecessary as DataType.catalogString largely replaces the need for this class.
## How was this patch tested?
Mostly removing dead code and should be covered by existing tests.
Author: Reynold Xin <rxin@databricks.com>
Closes #12644 from rxin/SPARK-14877.
Diffstat (limited to 'sql/hive-thriftserver')
2 files changed, 5 insertions, 6 deletions
diff --git a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkExecuteStatementOperation.scala b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkExecuteStatementOperation.scala
index 3025660301..4e6dcaa8f4 100644
--- a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkExecuteStatementOperation.scala
+++ b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkExecuteStatementOperation.scala
@@ -35,7 +35,7 @@ import org.apache.hive.service.cli.session.HiveSession
 import org.apache.spark.internal.Logging
 import org.apache.spark.sql.{DataFrame, Row => SparkRow}
 import org.apache.spark.sql.execution.command.SetCommand
-import org.apache.spark.sql.hive.{HiveContext, HiveMetastoreTypes, HiveUtils}
+import org.apache.spark.sql.hive.{HiveContext, HiveUtils}
 import org.apache.spark.sql.internal.SQLConf
 import org.apache.spark.sql.types._
 import org.apache.spark.util.{Utils => SparkUtils}
@@ -60,7 +60,7 @@ private[hive] class SparkExecuteStatementOperation(
     } else {
       logInfo(s"Result Schema: ${result.queryExecution.analyzed.output}")
       val schema = result.queryExecution.analyzed.output.map { attr =>
-        new FieldSchema(attr.name, HiveMetastoreTypes.toMetastoreType(attr.dataType), "")
+        new FieldSchema(attr.name, attr.dataType.catalogString, "")
       }
       new TableSchema(schema.asJava)
     }
diff --git a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLDriver.scala b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLDriver.scala
index f730952507..1fa885177e 100644
--- a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLDriver.scala
+++ b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLDriver.scala
@@ -29,10 +29,9 @@ import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse
 import org.apache.spark.internal.Logging
 import org.apache.spark.sql.AnalysisException
 import org.apache.spark.sql.execution.QueryExecution
-import org.apache.spark.sql.hive.{HiveContext, HiveMetastoreTypes}
+import org.apache.spark.sql.hive.HiveContext

-private[hive] class SparkSQLDriver(
-    val context: HiveContext = SparkSQLEnv.hiveContext)
+private[hive] class SparkSQLDriver(val context: HiveContext = SparkSQLEnv.hiveContext)
   extends Driver with Logging {
@@ -49,7 +48,7 @@ private[hive] class SparkSQLDriver(
       new Schema(Arrays.asList(new FieldSchema("Response code", "string", "")), null)
     } else {
       val fieldSchemas = analyzed.output.map { attr =>
-        new FieldSchema(attr.name, HiveMetastoreTypes.toMetastoreType(attr.dataType), "")
+        new FieldSchema(attr.name, attr.dataType.catalogString, "")
       }
       new Schema(fieldSchemas.asJava, null)