aboutsummaryrefslogtreecommitdiff
path: root/sql
diff options
context:
space:
mode:
authorCheng Hao <hao.cheng@intel.com>2014-07-18 16:38:11 -0500
committerMichael Armbrust <michael@databricks.com>2014-07-18 16:38:11 -0500
commit7f1720813793e155743b58eae5228298e894b90d (patch)
treef95c33e85d02af7978dff595b996884b921479c5 /sql
parent3a1709fa557f2bd6d101bc67a9e773882078c527 (diff)
downloadspark-7f1720813793e155743b58eae5228298e894b90d.tar.gz
spark-7f1720813793e155743b58eae5228298e894b90d.tar.bz2
spark-7f1720813793e155743b58eae5228298e894b90d.zip
[SPARK-2540] [SQL] Add HiveDecimal & HiveVarchar support in unwrapping data
Author: Cheng Hao <hao.cheng@intel.com> Closes #1436 from chenghao-intel/unwrapdata and squashes the following commits: 34cc21a [Cheng Hao] update the table scan accordingly since the unwrapData function changed afc39da [Cheng Hao] Polish the code 39d6475 [Cheng Hao] Add HiveDecimal & HiveVarchar support in unwrap data
Diffstat (limited to 'sql')
-rw-r--r--sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/HiveTableScan.scala | 12
-rw-r--r--sql/hive/src/main/scala/org/apache/spark/sql/hive/hiveUdfs.scala | 4
2 files changed, 5 insertions, 11 deletions
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/HiveTableScan.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/HiveTableScan.scala
index ef8bae7453..e7016fa16e 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/HiveTableScan.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/HiveTableScan.scala
@@ -96,19 +96,9 @@ case class HiveTableScan(
.getOrElse(sys.error(s"Can't find attribute $a"))
val fieldObjectInspector = ref.getFieldObjectInspector
- val unwrapHiveData = fieldObjectInspector match {
- case _: HiveVarcharObjectInspector =>
- (value: Any) => value.asInstanceOf[HiveVarchar].getValue
- case _: HiveDecimalObjectInspector =>
- (value: Any) => BigDecimal(value.asInstanceOf[HiveDecimal].bigDecimalValue())
- case _ =>
- identity[Any] _
- }
-
(row: Any, _: Array[String]) => {
val data = objectInspector.getStructFieldData(row, ref)
- val hiveData = unwrapData(data, fieldObjectInspector)
- if (hiveData != null) unwrapHiveData(hiveData) else null
+ unwrapData(data, fieldObjectInspector)
}
}
}
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/hiveUdfs.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/hiveUdfs.scala
index 9b105308ab..fc33c5b460 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/hiveUdfs.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/hiveUdfs.scala
@@ -280,6 +280,10 @@ private[hive] case class HiveGenericUdf(name: String, children: Seq[Expression])
private[hive] trait HiveInspectors {
def unwrapData(data: Any, oi: ObjectInspector): Any = oi match {
+ case hvoi: HiveVarcharObjectInspector =>
+ if (data == null) null else hvoi.getPrimitiveJavaObject(data).getValue
+ case hdoi: HiveDecimalObjectInspector =>
+ if (data == null) null else BigDecimal(hdoi.getPrimitiveJavaObject(data).bigDecimalValue())
case pi: PrimitiveObjectInspector => pi.getPrimitiveJavaObject(data)
case li: ListObjectInspector =>
Option(li.getList(data))