diff options
author | Cheng Hao <hao.cheng@intel.com> | 2014-12-19 08:04:41 -0800 |
---|---|---|
committer | Josh Rosen <joshrosen@databricks.com> | 2014-12-19 08:04:41 -0800 |
commit | 5479450c45038ddb4485cd676f945a961f2420d8 (patch) | |
tree | 17b83e820a14dda77b159f47e3043e8529543844 /sql/hive | |
parent | 283263ffaa941e7e9ba147cf0ad377d9202d3761 (diff) | |
download | spark-5479450c45038ddb4485cd676f945a961f2420d8.tar.gz spark-5479450c45038ddb4485cd676f945a961f2420d8.tar.bz2 spark-5479450c45038ddb4485cd676f945a961f2420d8.zip |
[SPARK-4901] [SQL] Hot fix for BytesWritable.copyBytes
HiveInspectors.scala failed to compile with Hadoop 1, as BytesWritable.copyBytes is not available in Hadoop 1.
Author: Cheng Hao <hao.cheng@intel.com>
Closes #3742 from chenghao-intel/settable_oi_hotfix and squashes the following commits:
bb04d1f [Cheng Hao] hot fix for BytesWritable.copyBytes
Diffstat (limited to 'sql/hive')
-rw-r--r-- | sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveInspectors.scala | 8 |
1 files changed, 7 insertions, 1 deletions
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveInspectors.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveInspectors.scala index 06189341f8..e114747813 100644 --- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveInspectors.scala +++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveInspectors.scala @@ -297,7 +297,13 @@ private[hive] trait HiveInspectors { case x: ByteObjectInspector if x.preferWritable() => x.get(data) case x: HiveDecimalObjectInspector => HiveShim.toCatalystDecimal(x, data) case x: BinaryObjectInspector if x.preferWritable() => - x.getPrimitiveWritableObject(data).copyBytes() + // BytesWritable.copyBytes() only available since Hadoop2 + // In order to keep backward-compatible, we have to copy the + // bytes with old apis + val bw = x.getPrimitiveWritableObject(data) + val result = new Array[Byte](bw.getLength()) + System.arraycopy(bw.getBytes(), 0, result, 0, bw.getLength()) + result case x: DateObjectInspector if x.preferWritable() => x.getPrimitiveWritableObject(data).get() // org.apache.hadoop.hive.serde2.io.TimestampWritable.set will reset current time object |