author     hyukjinkwon <gurwls223@gmail.com>        2016-04-27 17:41:05 -0700
committer  Reynold Xin <rxin@databricks.com>        2016-04-27 17:41:05 -0700
commit     f5da592fc63b8d3bc09d49c196d6c5d98cd2a013 (patch)
tree       e79fd6231f4cc94674199c71f605d185824c79ca /sql/hive-thriftserver/src/main/scala
parent     b0ce0d13127431fa7cd4c11064762eb0b12e3436 (diff)
[SPARK-12143][SQL] Binary type support for Hive thrift server
## What changes were proposed in this pull request?

https://issues.apache.org/jira/browse/SPARK-12143

This PR adds support for `BinaryType` in the conversion between `SparkRow` in Spark and `RowSet` in Hive, exposing binary columns as `Array[Byte]` over JDBC.

## How was this patch tested?

Unit tests in `HiveThriftBinaryServerSuite` (regression test).

Closes #10139

Author: hyukjinkwon <gurwls223@gmail.com>

Closes #12733 from HyukjinKwon/SPARK-12143.
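For illustration only (not part of the patch): a minimal client-side sketch of what this enables, assuming a Thrift server on the default `localhost:10000` and a hypothetical table `bin_table` with a binary column `b`. With this change the column comes back over JDBC as raw bytes rather than a stringified value.

```scala
import java.sql.DriverManager

// Hypothetical JDBC client: read a BINARY column from the Spark Thrift server.
object BinaryColumnExample {
  def main(args: Array[String]): Unit = {
    // Assumes the Thrift server is running on the default port with no auth.
    val conn = DriverManager.getConnection("jdbc:hive2://localhost:10000/default", "user", "")
    try {
      val rs = conn.createStatement().executeQuery("SELECT b FROM bin_table")
      while (rs.next()) {
        // With BinaryType support, the value arrives as Array[Byte].
        val bytes: Array[Byte] = rs.getBytes(1)
        println(s"row has ${bytes.length} bytes")
      }
    } finally {
      conn.close()
    }
  }
}
```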
Diffstat (limited to 'sql/hive-thriftserver/src/main/scala')
-rw-r--r--  sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkExecuteStatementOperation.scala | 6 ++++--
1 file changed, 4 insertions(+), 2 deletions(-)
diff --git a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkExecuteStatementOperation.scala b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkExecuteStatementOperation.scala
index 18b78ab506..40dc81e02d 100644
--- a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkExecuteStatementOperation.scala
+++ b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkExecuteStatementOperation.scala
@@ -96,8 +96,10 @@ private[hive] class SparkExecuteStatementOperation(
       case DateType =>
         to += from.getAs[Date](ordinal)
       case TimestampType =>
-        to += from.getAs[Timestamp](ordinal)
-      case BinaryType | _: ArrayType | _: StructType | _: MapType =>
+        to += from.getAs[Timestamp](ordinal)
+      case BinaryType =>
+        to += from.getAs[Array[Byte]](ordinal)
+      case _: ArrayType | _: StructType | _: MapType =>
         val hiveString = HiveUtils.toHiveString((from.get(ordinal), dataTypes(ordinal)))
         to += hiveString
     }
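As a reading aid (not the project's code verbatim), a self-contained sketch of the conversion logic after the patch: `BinaryType` values are appended to the row buffer as `Array[Byte]`, while complex types are still rendered to strings (stubbed here in place of `HiveUtils.toHiveString`).

```scala
import scala.collection.mutable.ArrayBuffer
import org.apache.spark.sql.Row
import org.apache.spark.sql.types._

// Simplified stand-in for the patched conversion in SparkExecuteStatementOperation.
def addColumnValue(from: Row, to: ArrayBuffer[Any], ordinal: Int, dataType: DataType): Unit =
  dataType match {
    case BinaryType =>
      // New in this patch: pass the raw bytes through instead of stringifying.
      to += from.getAs[Array[Byte]](ordinal)
    case _: ArrayType | _: StructType | _: MapType =>
      // Stand-in for HiveUtils.toHiveString: complex types remain string-rendered.
      to += from.get(ordinal).toString
    case _ =>
      to += from.get(ordinal)
  }
```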