author     Cheng Hao <hao.cheng@intel.com>          2014-11-10 17:22:57 -0800
committer  Michael Armbrust <michael@databricks.com>  2014-11-10 17:22:57 -0800
commit     fa777833b52b6f339cdc335e8e3935cfe9a2a7eb (patch)
tree       a91ef53220104ac80faf5961fcbc0a2a20a947f8 /sql
parent     d793d80c8084923ea04dcf7d268eec8ede490127 (diff)
[SPARK-4250] [SQL] Fix bug of constant null value mapping to ConstantObjectInspector
Author: Cheng Hao <hao.cheng@intel.com>

Closes #3114 from chenghao-intel/constant_null_oi and squashes the following commits:

e603bda [Cheng Hao] fix the bug of null value for primitive types
50a13ba [Cheng Hao] fix the timezone issue
f54f369 [Cheng Hao] fix bug of constant null value for ObjectInspector
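The heart of the patch is a two-part null-handling pattern: unwrap now short-circuits on a null datum before consulting the ObjectInspector, and each constant-inspector helper in the shims builds its Hive writable only when the literal value is non-null. A minimal, self-contained sketch of that pattern (IntWritable here is a hypothetical stand-in for org.apache.hadoop.io.IntWritable, not the real class):

// Sketch of the null-handling pattern this patch introduces.
object NullSafeSketch {
  // Stand-in for a Hive/Hadoop writable.
  final case class IntWritable(value: Int)

  // Shim-style helper: take Any, wrap only when non-null. The old overloads
  // took `value: Int` and could not represent a null constant at all.
  def toIntWritable(value: Any): IntWritable =
    if (value == null) null else IntWritable(value.asInstanceOf[Int])

  // First case of the patched `unwrap`: a null datum maps straight to
  // Catalyst's null, whatever the inspector says.
  def unwrap(data: Any): Any =
    if (data == null) null else data

  def main(args: Array[String]): Unit = {
    println(toIntWritable(1))    // IntWritable(1)
    println(toIntWritable(null)) // null
    println(unwrap(null))        // null
  }
}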
Diffstat (limited to 'sql')
-rw-r--r--  sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveInspectors.scala | 78
-rw-r--r--  sql/hive/src/test/resources/golden/constant null testing-0-9a02bc7de09bcabcbd4c91f54a814c20 | 1
-rw-r--r--  sql/hive/src/test/resources/golden/udf_if-0-b7ffa85b5785cccef2af1b285348cc2c | 1
-rw-r--r--  sql/hive/src/test/resources/golden/udf_if-1-30cf7f51f92b5684e556deff3032d49a | 1
-rw-r--r--  sql/hive/src/test/resources/golden/udf_if-2-f2b010128e922d0096a65ddd9ae1d0b4 | 0
-rw-r--r--  sql/hive/src/test/resources/golden/udf_if-3-20206f17367ff284d67044abd745ce9f | 1
-rw-r--r--  sql/hive/src/test/resources/golden/udf_if-4-174dae8a1eb4cad6ccf6f67203de71ca | 0
-rw-r--r--  sql/hive/src/test/resources/golden/udf_if-5-a7db13aec05c97792f9331d63709d8cc | 1
-rw-r--r--  sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQuerySuite.scala | 52
-rw-r--r--  sql/hive/v0.12.0/src/main/scala/org/apache/spark/sql/hive/Shim12.scala | 70
-rw-r--r--  sql/hive/v0.13.1/src/main/scala/org/apache/spark/sql/hive/Shim13.scala | 80
11 files changed, 199 insertions(+), 86 deletions(-)
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveInspectors.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveInspectors.scala
index bdc7e1dac1..7e76aff642 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveInspectors.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveInspectors.scala
@@ -88,6 +88,7 @@ private[hive] trait HiveInspectors {
* @return convert the data into catalyst type
*/
def unwrap(data: Any, oi: ObjectInspector): Any = oi match {
+ case _ if data == null => null
case hvoi: HiveVarcharObjectInspector =>
if (data == null) null else hvoi.getPrimitiveJavaObject(data).getValue
case hdoi: HiveDecimalObjectInspector =>
@@ -250,46 +251,53 @@ private[hive] trait HiveInspectors {
}
def toInspector(expr: Expression): ObjectInspector = expr match {
- case Literal(value: String, StringType) =>
- HiveShim.getPrimitiveWritableConstantObjectInspector(value)
- case Literal(value: Int, IntegerType) =>
- HiveShim.getPrimitiveWritableConstantObjectInspector(value)
- case Literal(value: Double, DoubleType) =>
- HiveShim.getPrimitiveWritableConstantObjectInspector(value)
- case Literal(value: Boolean, BooleanType) =>
- HiveShim.getPrimitiveWritableConstantObjectInspector(value)
- case Literal(value: Long, LongType) =>
- HiveShim.getPrimitiveWritableConstantObjectInspector(value)
- case Literal(value: Float, FloatType) =>
- HiveShim.getPrimitiveWritableConstantObjectInspector(value)
- case Literal(value: Short, ShortType) =>
- HiveShim.getPrimitiveWritableConstantObjectInspector(value)
- case Literal(value: Byte, ByteType) =>
- HiveShim.getPrimitiveWritableConstantObjectInspector(value)
- case Literal(value: Array[Byte], BinaryType) =>
- HiveShim.getPrimitiveWritableConstantObjectInspector(value)
- case Literal(value: java.sql.Date, DateType) =>
- HiveShim.getPrimitiveWritableConstantObjectInspector(value)
- case Literal(value: java.sql.Timestamp, TimestampType) =>
- HiveShim.getPrimitiveWritableConstantObjectInspector(value)
- case Literal(value: BigDecimal, DecimalType()) =>
- HiveShim.getPrimitiveWritableConstantObjectInspector(value)
- case Literal(value: Decimal, DecimalType()) =>
- HiveShim.getPrimitiveWritableConstantObjectInspector(value.toBigDecimal)
+ case Literal(value, StringType) =>
+ HiveShim.getStringWritableConstantObjectInspector(value)
+ case Literal(value, IntegerType) =>
+ HiveShim.getIntWritableConstantObjectInspector(value)
+ case Literal(value, DoubleType) =>
+ HiveShim.getDoubleWritableConstantObjectInspector(value)
+ case Literal(value, BooleanType) =>
+ HiveShim.getBooleanWritableConstantObjectInspector(value)
+ case Literal(value, LongType) =>
+ HiveShim.getLongWritableConstantObjectInspector(value)
+ case Literal(value, FloatType) =>
+ HiveShim.getFloatWritableConstantObjectInspector(value)
+ case Literal(value, ShortType) =>
+ HiveShim.getShortWritableConstantObjectInspector(value)
+ case Literal(value, ByteType) =>
+ HiveShim.getByteWritableConstantObjectInspector(value)
+ case Literal(value, BinaryType) =>
+ HiveShim.getBinaryWritableConstantObjectInspector(value)
+ case Literal(value, DateType) =>
+ HiveShim.getDateWritableConstantObjectInspector(value)
+ case Literal(value, TimestampType) =>
+ HiveShim.getTimestampWritableConstantObjectInspector(value)
+ case Literal(value, DecimalType()) =>
+ HiveShim.getDecimalWritableConstantObjectInspector(value)
case Literal(_, NullType) =>
HiveShim.getPrimitiveNullWritableConstantObjectInspector
- case Literal(value: Seq[_], ArrayType(dt, _)) =>
+ case Literal(value, ArrayType(dt, _)) =>
val listObjectInspector = toInspector(dt)
- val list = new java.util.ArrayList[Object]()
- value.foreach(v => list.add(wrap(v, listObjectInspector)))
- ObjectInspectorFactory.getStandardConstantListObjectInspector(listObjectInspector, list)
- case Literal(map: Map[_, _], MapType(keyType, valueType, _)) =>
- val value = new java.util.HashMap[Object, Object]()
+ if (value == null) {
+ ObjectInspectorFactory.getStandardConstantListObjectInspector(listObjectInspector, null)
+ } else {
+ val list = new java.util.ArrayList[Object]()
+ value.asInstanceOf[Seq[_]].foreach(v => list.add(wrap(v, listObjectInspector)))
+ ObjectInspectorFactory.getStandardConstantListObjectInspector(listObjectInspector, list)
+ }
+ case Literal(value, MapType(keyType, valueType, _)) =>
val keyOI = toInspector(keyType)
val valueOI = toInspector(valueType)
- map.foreach (entry => value.put(wrap(entry._1, keyOI), wrap(entry._2, valueOI)))
- ObjectInspectorFactory.getStandardConstantMapObjectInspector(keyOI, valueOI, value)
- case Literal(_, dt) => sys.error(s"Hive doesn't support the constant type [$dt].")
+ if (value == null) {
+ ObjectInspectorFactory.getStandardConstantMapObjectInspector(keyOI, valueOI, null)
+ } else {
+ val map = new java.util.HashMap[Object, Object]()
+ value.asInstanceOf[Map[_, _]].foreach (entry => {
+ map.put(wrap(entry._1, keyOI), wrap(entry._2, valueOI))
+ })
+ ObjectInspectorFactory.getStandardConstantMapObjectInspector(keyOI, valueOI, map)
+ }
case _ => toInspector(expr.dataType)
}
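The reason the overloads became per-type methods taking Any: in Scala a typed pattern such as `value: Int` never matches null, so the old arms like `case Literal(value: Int, IntegerType)` fell through to the sys.error case whenever the constant was null. A self-contained illustration of that semantics (Lit is a hypothetical stand-in for Catalyst's Literal):

// Typed patterns never match null, which is why the old per-type Literal
// arms could not see constant nulls.
object TypedPatternNull {
  final case class Lit(value: Any, dataType: String) // stand-in for Literal

  def describe(l: Lit): String = l match {
    case Lit(v: String, "string") => s"string constant: $v"   // skipped for null
    case Lit(null, dt)            => s"null constant of type $dt"
    case Lit(_, dt)               => s"unsupported type: $dt"
  }

  def main(args: Array[String]): Unit = {
    println(describe(Lit("a", "string")))  // string constant: a
    println(describe(Lit(null, "string"))) // null constant of type string
  }
}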
diff --git a/sql/hive/src/test/resources/golden/constant null testing-0-9a02bc7de09bcabcbd4c91f54a814c20 b/sql/hive/src/test/resources/golden/constant null testing-0-9a02bc7de09bcabcbd4c91f54a814c20
new file mode 100644
index 0000000000..7c41615f8c
--- /dev/null
+++ b/sql/hive/src/test/resources/golden/constant null testing-0-9a02bc7de09bcabcbd4c91f54a814c20
@@ -0,0 +1 @@
+1 NULL 1 NULL 1.0 NULL true NULL 1 NULL 1.0 NULL 1 NULL 1 NULL 1 NULL 1970-01-01 NULL 1969-12-31 16:00:00.001 NULL 1 NULL
diff --git a/sql/hive/src/test/resources/golden/udf_if-0-b7ffa85b5785cccef2af1b285348cc2c b/sql/hive/src/test/resources/golden/udf_if-0-b7ffa85b5785cccef2af1b285348cc2c
new file mode 100644
index 0000000000..2cf0d9d618
--- /dev/null
+++ b/sql/hive/src/test/resources/golden/udf_if-0-b7ffa85b5785cccef2af1b285348cc2c
@@ -0,0 +1 @@
+There is no documentation for function 'if'
diff --git a/sql/hive/src/test/resources/golden/udf_if-1-30cf7f51f92b5684e556deff3032d49a b/sql/hive/src/test/resources/golden/udf_if-1-30cf7f51f92b5684e556deff3032d49a
new file mode 100644
index 0000000000..2cf0d9d618
--- /dev/null
+++ b/sql/hive/src/test/resources/golden/udf_if-1-30cf7f51f92b5684e556deff3032d49a
@@ -0,0 +1 @@
+There is no documentation for function 'if'
diff --git a/sql/hive/src/test/resources/golden/udf_if-2-f2b010128e922d0096a65ddd9ae1d0b4 b/sql/hive/src/test/resources/golden/udf_if-2-f2b010128e922d0096a65ddd9ae1d0b4
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/sql/hive/src/test/resources/golden/udf_if-2-f2b010128e922d0096a65ddd9ae1d0b4
diff --git a/sql/hive/src/test/resources/golden/udf_if-3-20206f17367ff284d67044abd745ce9f b/sql/hive/src/test/resources/golden/udf_if-3-20206f17367ff284d67044abd745ce9f
new file mode 100644
index 0000000000..a29e96cbd1
--- /dev/null
+++ b/sql/hive/src/test/resources/golden/udf_if-3-20206f17367ff284d67044abd745ce9f
@@ -0,0 +1 @@
+1 1 1 1 NULL 2
diff --git a/sql/hive/src/test/resources/golden/udf_if-4-174dae8a1eb4cad6ccf6f67203de71ca b/sql/hive/src/test/resources/golden/udf_if-4-174dae8a1eb4cad6ccf6f67203de71ca
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/sql/hive/src/test/resources/golden/udf_if-4-174dae8a1eb4cad6ccf6f67203de71ca
diff --git a/sql/hive/src/test/resources/golden/udf_if-5-a7db13aec05c97792f9331d63709d8cc b/sql/hive/src/test/resources/golden/udf_if-5-a7db13aec05c97792f9331d63709d8cc
new file mode 100644
index 0000000000..f0669b8698
--- /dev/null
+++ b/sql/hive/src/test/resources/golden/udf_if-5-a7db13aec05c97792f9331d63709d8cc
@@ -0,0 +1 @@
+128 1.1 ABC 12.3
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQuerySuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQuerySuite.scala
index b897dff015..684d22807c 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQuerySuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQuerySuite.scala
@@ -18,6 +18,9 @@
package org.apache.spark.sql.hive.execution
import java.io.File
+import java.util.{Locale, TimeZone}
+
+import org.scalatest.BeforeAndAfter
import scala.util.Try
@@ -28,14 +31,59 @@ import org.apache.spark.sql.catalyst.plans.logical.Project
import org.apache.spark.sql.hive._
import org.apache.spark.sql.hive.test.TestHive
import org.apache.spark.sql.hive.test.TestHive._
-import org.apache.spark.sql.{Row, SchemaRDD}
+import org.apache.spark.sql.{SQLConf, Row, SchemaRDD}
case class TestData(a: Int, b: String)
/**
* A set of test cases expressed in Hive QL that are not covered by the tests included in the hive distribution.
*/
-class HiveQuerySuite extends HiveComparisonTest {
+class HiveQuerySuite extends HiveComparisonTest with BeforeAndAfter {
+ private val originalTimeZone = TimeZone.getDefault
+ private val originalLocale = Locale.getDefault
+
+ override def beforeAll() {
+ TestHive.cacheTables = true
+ // Timezone is fixed to America/Los_Angeles for those timezone sensitive tests (timestamp_*)
+ TimeZone.setDefault(TimeZone.getTimeZone("America/Los_Angeles"))
+ // Add Locale setting
+ Locale.setDefault(Locale.US)
+ }
+
+ override def afterAll() {
+ TestHive.cacheTables = false
+ TimeZone.setDefault(originalTimeZone)
+ Locale.setDefault(originalLocale)
+ }
+
+ createQueryTest("constant null testing",
+ """SELECT
+ |IF(FALSE, CAST(NULL AS STRING), CAST(1 AS STRING)) AS COL1,
+ |IF(TRUE, CAST(NULL AS STRING), CAST(1 AS STRING)) AS COL2,
+ |IF(FALSE, CAST(NULL AS INT), CAST(1 AS INT)) AS COL3,
+ |IF(TRUE, CAST(NULL AS INT), CAST(1 AS INT)) AS COL4,
+ |IF(FALSE, CAST(NULL AS DOUBLE), CAST(1 AS DOUBLE)) AS COL5,
+ |IF(TRUE, CAST(NULL AS DOUBLE), CAST(1 AS DOUBLE)) AS COL6,
+ |IF(FALSE, CAST(NULL AS BOOLEAN), CAST(1 AS BOOLEAN)) AS COL7,
+ |IF(TRUE, CAST(NULL AS BOOLEAN), CAST(1 AS BOOLEAN)) AS COL8,
+ |IF(FALSE, CAST(NULL AS BIGINT), CAST(1 AS BIGINT)) AS COL9,
+ |IF(TRUE, CAST(NULL AS BIGINT), CAST(1 AS BIGINT)) AS COL10,
+ |IF(FALSE, CAST(NULL AS FLOAT), CAST(1 AS FLOAT)) AS COL11,
+ |IF(TRUE, CAST(NULL AS FLOAT), CAST(1 AS FLOAT)) AS COL12,
+ |IF(FALSE, CAST(NULL AS SMALLINT), CAST(1 AS SMALLINT)) AS COL13,
+ |IF(TRUE, CAST(NULL AS SMALLINT), CAST(1 AS SMALLINT)) AS COL14,
+ |IF(FALSE, CAST(NULL AS TINYINT), CAST(1 AS TINYINT)) AS COL15,
+ |IF(TRUE, CAST(NULL AS TINYINT), CAST(1 AS TINYINT)) AS COL16,
+ |IF(FALSE, CAST(NULL AS BINARY), CAST("1" AS BINARY)) AS COL17,
+ |IF(TRUE, CAST(NULL AS BINARY), CAST("1" AS BINARY)) AS COL18,
+ |IF(FALSE, CAST(NULL AS DATE), CAST("1970-01-01" AS DATE)) AS COL19,
+ |IF(TRUE, CAST(NULL AS DATE), CAST("1970-01-01" AS DATE)) AS COL20,
+ |IF(FALSE, CAST(NULL AS TIMESTAMP), CAST(1 AS TIMESTAMP)) AS COL21,
+ |IF(TRUE, CAST(NULL AS TIMESTAMP), CAST(1 AS TIMESTAMP)) AS COL22,
+ |IF(FALSE, CAST(NULL AS DECIMAL), CAST(1 AS DECIMAL)) AS COL23,
+ |IF(TRUE, CAST(NULL AS DECIMAL), CAST(1 AS DECIMAL)) AS COL24
+ |FROM src LIMIT 1""".stripMargin)
+
createQueryTest("constant array",
"""
|SELECT sort_array(
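The new beforeAll/afterAll pair exists because the golden answers are timezone- and locale-sensitive: epoch millisecond 1 renders as 1969-12-31 16:00:00.001 (the value in the golden file above) only under America/Los_Angeles. The same pin-and-restore pattern, sketched outside ScalaTest:

import java.util.{Locale, TimeZone}

// Pin the JVM default timezone/locale around a block and restore afterwards,
// mirroring what the patched HiveQuerySuite does in beforeAll/afterAll.
object WithFixedTimeZone {
  def withLaDefaults[A](body: => A): A = {
    val origTz     = TimeZone.getDefault
    val origLocale = Locale.getDefault
    TimeZone.setDefault(TimeZone.getTimeZone("America/Los_Angeles"))
    Locale.setDefault(Locale.US)
    try body
    finally {
      TimeZone.setDefault(origTz)
      Locale.setDefault(origLocale)
    }
  }

  def main(args: Array[String]): Unit = {
    withLaDefaults {
      println(new java.sql.Timestamp(1L)) // 1969-12-31 16:00:00.001
    }
  }
}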
diff --git a/sql/hive/v0.12.0/src/main/scala/org/apache/spark/sql/hive/Shim12.scala b/sql/hive/v0.12.0/src/main/scala/org/apache/spark/sql/hive/Shim12.scala
index 8e946b7e82..8ba25f889d 100644
--- a/sql/hive/v0.12.0/src/main/scala/org/apache/spark/sql/hive/Shim12.scala
+++ b/sql/hive/v0.12.0/src/main/scala/org/apache/spark/sql/hive/Shim12.scala
@@ -57,54 +57,74 @@ private[hive] object HiveShim {
new TableDesc(serdeClass, inputFormatClass, outputFormatClass, properties)
}
- def getPrimitiveWritableConstantObjectInspector(value: String): ObjectInspector =
+ def getStringWritableConstantObjectInspector(value: Any): ObjectInspector =
PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
- PrimitiveCategory.STRING, new hadoopIo.Text(value))
+ PrimitiveCategory.STRING,
+ if (value == null) null else new hadoopIo.Text(value.asInstanceOf[String]))
- def getPrimitiveWritableConstantObjectInspector(value: Int): ObjectInspector =
+ def getIntWritableConstantObjectInspector(value: Any): ObjectInspector =
PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
- PrimitiveCategory.INT, new hadoopIo.IntWritable(value))
+ PrimitiveCategory.INT,
+ if (value == null) null else new hadoopIo.IntWritable(value.asInstanceOf[Int]))
- def getPrimitiveWritableConstantObjectInspector(value: Double): ObjectInspector =
+ def getDoubleWritableConstantObjectInspector(value: Any): ObjectInspector =
PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
- PrimitiveCategory.DOUBLE, new hiveIo.DoubleWritable(value))
+ PrimitiveCategory.DOUBLE,
+ if (value == null) null else new hiveIo.DoubleWritable(value.asInstanceOf[Double]))
- def getPrimitiveWritableConstantObjectInspector(value: Boolean): ObjectInspector =
+ def getBooleanWritableConstantObjectInspector(value: Any): ObjectInspector =
PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
- PrimitiveCategory.BOOLEAN, new hadoopIo.BooleanWritable(value))
+ PrimitiveCategory.BOOLEAN,
+ if (value == null) null else new hadoopIo.BooleanWritable(value.asInstanceOf[Boolean]))
- def getPrimitiveWritableConstantObjectInspector(value: Long): ObjectInspector =
+ def getLongWritableConstantObjectInspector(value: Any): ObjectInspector =
PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
- PrimitiveCategory.LONG, new hadoopIo.LongWritable(value))
+ PrimitiveCategory.LONG,
+ if (value == null) null else new hadoopIo.LongWritable(value.asInstanceOf[Long]))
- def getPrimitiveWritableConstantObjectInspector(value: Float): ObjectInspector =
+ def getFloatWritableConstantObjectInspector(value: Any): ObjectInspector =
PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
- PrimitiveCategory.FLOAT, new hadoopIo.FloatWritable(value))
+ PrimitiveCategory.FLOAT,
+ if (value == null) null else new hadoopIo.FloatWritable(value.asInstanceOf[Float]))
- def getPrimitiveWritableConstantObjectInspector(value: Short): ObjectInspector =
+ def getShortWritableConstantObjectInspector(value: Any): ObjectInspector =
PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
- PrimitiveCategory.SHORT, new hiveIo.ShortWritable(value))
+ PrimitiveCategory.SHORT,
+ if (value == null) null else new hiveIo.ShortWritable(value.asInstanceOf[Short]))
- def getPrimitiveWritableConstantObjectInspector(value: Byte): ObjectInspector =
+ def getByteWritableConstantObjectInspector(value: Any): ObjectInspector =
PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
- PrimitiveCategory.BYTE, new hiveIo.ByteWritable(value))
+ PrimitiveCategory.BYTE,
+ if (value == null) null else new hiveIo.ByteWritable(value.asInstanceOf[Byte]))
- def getPrimitiveWritableConstantObjectInspector(value: Array[Byte]): ObjectInspector =
+ def getBinaryWritableConstantObjectInspector(value: Any): ObjectInspector =
PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
- PrimitiveCategory.BINARY, new hadoopIo.BytesWritable(value))
+ PrimitiveCategory.BINARY,
+ if (value == null) null else new hadoopIo.BytesWritable(value.asInstanceOf[Array[Byte]]))
- def getPrimitiveWritableConstantObjectInspector(value: java.sql.Date): ObjectInspector =
+ def getDateWritableConstantObjectInspector(value: Any): ObjectInspector =
PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
- PrimitiveCategory.DATE, new hiveIo.DateWritable(value))
+ PrimitiveCategory.DATE,
+ if (value == null) null else new hiveIo.DateWritable(value.asInstanceOf[java.sql.Date]))
- def getPrimitiveWritableConstantObjectInspector(value: java.sql.Timestamp): ObjectInspector =
+ def getTimestampWritableConstantObjectInspector(value: Any): ObjectInspector =
PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
- PrimitiveCategory.TIMESTAMP, new hiveIo.TimestampWritable(value))
-
- def getPrimitiveWritableConstantObjectInspector(value: BigDecimal): ObjectInspector =
+ PrimitiveCategory.TIMESTAMP,
+ if (value == null) {
+ null
+ } else {
+ new hiveIo.TimestampWritable(value.asInstanceOf[java.sql.Timestamp])
+ })
+
+ def getDecimalWritableConstantObjectInspector(value: Any): ObjectInspector =
PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
PrimitiveCategory.DECIMAL,
- new hiveIo.HiveDecimalWritable(HiveShim.createDecimal(value.underlying())))
+ if (value == null) {
+ null
+ } else {
+ new hiveIo.HiveDecimalWritable(
+ HiveShim.createDecimal(value.asInstanceOf[Decimal].toBigDecimal.underlying()))
+ })
def getPrimitiveNullWritableConstantObjectInspector: ObjectInspector =
PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
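A hedged usage sketch of the reworked shim (not part of the patch): a constant null now reaches Hive as a ConstantObjectInspector whose writable value is null, rather than failing to dispatch. HiveShim is private[hive], so the sketch must live in that package, and it assumes a Hive 0.12 classpath where ConstantObjectInspector exposes getWritableConstantValue:

package org.apache.spark.sql.hive

import org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector

// Assumes the patched Shim12 on the classpath; both calls below now succeed,
// differing only in the constant value the inspector reports.
object ConstantNullShimSketch {
  def main(args: Array[String]): Unit = {
    val oneOI  = HiveShim.getIntWritableConstantObjectInspector(1)
    val nullOI = HiveShim.getIntWritableConstantObjectInspector(null)
    println(oneOI.asInstanceOf[ConstantObjectInspector].getWritableConstantValue)  // 1
    println(nullOI.asInstanceOf[ConstantObjectInspector].getWritableConstantValue) // null
  }
}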
diff --git a/sql/hive/v0.13.1/src/main/scala/org/apache/spark/sql/hive/Shim13.scala b/sql/hive/v0.13.1/src/main/scala/org/apache/spark/sql/hive/Shim13.scala
index 0bc330cdbe..e4aee57f0a 100644
--- a/sql/hive/v0.13.1/src/main/scala/org/apache/spark/sql/hive/Shim13.scala
+++ b/sql/hive/v0.13.1/src/main/scala/org/apache/spark/sql/hive/Shim13.scala
@@ -56,54 +56,86 @@ private[hive] object HiveShim {
new TableDesc(inputFormatClass, outputFormatClass, properties)
}
- def getPrimitiveWritableConstantObjectInspector(value: String): ObjectInspector =
+ def getStringWritableConstantObjectInspector(value: Any): ObjectInspector =
PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
- TypeInfoFactory.stringTypeInfo, new hadoopIo.Text(value))
+ TypeInfoFactory.stringTypeInfo,
+ if (value == null) null else new hadoopIo.Text(value.asInstanceOf[String]))
- def getPrimitiveWritableConstantObjectInspector(value: Int): ObjectInspector =
+ def getIntWritableConstantObjectInspector(value: Any): ObjectInspector =
PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
- TypeInfoFactory.intTypeInfo, new hadoopIo.IntWritable(value))
+ TypeInfoFactory.intTypeInfo,
+ if (value == null) null else new hadoopIo.IntWritable(value.asInstanceOf[Int]))
- def getPrimitiveWritableConstantObjectInspector(value: Double): ObjectInspector =
+ def getDoubleWritableConstantObjectInspector(value: Any): ObjectInspector =
PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
- TypeInfoFactory.doubleTypeInfo, new hiveIo.DoubleWritable(value))
+ TypeInfoFactory.doubleTypeInfo, if (value == null) {
+ null
+ } else {
+ new hiveIo.DoubleWritable(value.asInstanceOf[Double])
+ })
- def getPrimitiveWritableConstantObjectInspector(value: Boolean): ObjectInspector =
+ def getBooleanWritableConstantObjectInspector(value: Any): ObjectInspector =
PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
- TypeInfoFactory.booleanTypeInfo, new hadoopIo.BooleanWritable(value))
+ TypeInfoFactory.booleanTypeInfo, if (value == null) {
+ null
+ } else {
+ new hadoopIo.BooleanWritable(value.asInstanceOf[Boolean])
+ })
- def getPrimitiveWritableConstantObjectInspector(value: Long): ObjectInspector =
+ def getLongWritableConstantObjectInspector(value: Any): ObjectInspector =
PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
- TypeInfoFactory.longTypeInfo, new hadoopIo.LongWritable(value))
+ TypeInfoFactory.longTypeInfo,
+ if (value == null) null else new hadoopIo.LongWritable(value.asInstanceOf[Long]))
- def getPrimitiveWritableConstantObjectInspector(value: Float): ObjectInspector =
+ def getFloatWritableConstantObjectInspector(value: Any): ObjectInspector =
PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
- TypeInfoFactory.floatTypeInfo, new hadoopIo.FloatWritable(value))
+ TypeInfoFactory.floatTypeInfo, if (value == null) {
+ null
+ } else {
+ new hadoopIo.FloatWritable(value.asInstanceOf[Float])
+ })
- def getPrimitiveWritableConstantObjectInspector(value: Short): ObjectInspector =
+ def getShortWritableConstantObjectInspector(value: Any): ObjectInspector =
PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
- TypeInfoFactory.shortTypeInfo, new hiveIo.ShortWritable(value))
+ TypeInfoFactory.shortTypeInfo,
+ if (value == null) null else new hiveIo.ShortWritable(value.asInstanceOf[Short]))
- def getPrimitiveWritableConstantObjectInspector(value: Byte): ObjectInspector =
+ def getByteWritableConstantObjectInspector(value: Any): ObjectInspector =
PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
- TypeInfoFactory.byteTypeInfo, new hiveIo.ByteWritable(value))
+ TypeInfoFactory.byteTypeInfo,
+ if (value == null) null else new hiveIo.ByteWritable(value.asInstanceOf[Byte]))
- def getPrimitiveWritableConstantObjectInspector(value: Array[Byte]): ObjectInspector =
+ def getBinaryWritableConstantObjectInspector(value: Any): ObjectInspector =
PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
- TypeInfoFactory.binaryTypeInfo, new hadoopIo.BytesWritable(value))
+ TypeInfoFactory.binaryTypeInfo, if (value == null) {
+ null
+ } else {
+ new hadoopIo.BytesWritable(value.asInstanceOf[Array[Byte]])
+ })
- def getPrimitiveWritableConstantObjectInspector(value: java.sql.Date): ObjectInspector =
+ def getDateWritableConstantObjectInspector(value: Any): ObjectInspector =
PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
- TypeInfoFactory.dateTypeInfo, new hiveIo.DateWritable(value))
+ TypeInfoFactory.dateTypeInfo,
+ if (value == null) null else new hiveIo.DateWritable(value.asInstanceOf[java.sql.Date]))
- def getPrimitiveWritableConstantObjectInspector(value: java.sql.Timestamp): ObjectInspector =
+ def getTimestampWritableConstantObjectInspector(value: Any): ObjectInspector =
PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
- TypeInfoFactory.timestampTypeInfo, new hiveIo.TimestampWritable(value))
+ TypeInfoFactory.timestampTypeInfo, if (value == null) {
+ null
+ } else {
+ new hiveIo.TimestampWritable(value.asInstanceOf[java.sql.Timestamp])
+ })
- def getPrimitiveWritableConstantObjectInspector(value: BigDecimal): ObjectInspector =
+ def getDecimalWritableConstantObjectInspector(value: Any): ObjectInspector =
PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
TypeInfoFactory.decimalTypeInfo,
- new hiveIo.HiveDecimalWritable(HiveShim.createDecimal(value.underlying())))
+ if (value == null) {
+ null
+ } else {
+ // TODO precise, scale?
+ new hiveIo.HiveDecimalWritable(
+ HiveShim.createDecimal(value.asInstanceOf[Decimal].toBigDecimal.underlying()))
+ })
def getPrimitiveNullWritableConstantObjectInspector: ObjectInspector =
PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(