aboutsummaryrefslogtreecommitdiff
path: root/sql/hive
diff options
context:
space:
mode:
author: Reynold Xin <rxin@databricks.com> 2015-01-18 11:01:42 -0800
committer: Reynold Xin <rxin@databricks.com> 2015-01-18 11:01:42 -0800
commit1727e0841cf9948e601ae2936fe89094c8c0c835 (patch)
tree6017e327a6ca2b7a42c8a102d1c6c3a889aa8628 /sql/hive
parentad16da1bcc500d0fe594853cd00470dc34b007fa (diff)
downloadspark-1727e0841cf9948e601ae2936fe89094c8c0c835.tar.gz
spark-1727e0841cf9948e601ae2936fe89094c8c0c835.tar.bz2
spark-1727e0841cf9948e601ae2936fe89094c8c0c835.zip
[SPARK-5279][SQL] Use java.math.BigDecimal as the exposed Decimal type.
Author: Reynold Xin <rxin@databricks.com> Closes #4092 from rxin/bigdecimal and squashes the following commits: 27b08c9 [Reynold Xin] Fixed test. 10cb496 [Reynold Xin] [SPARK-5279][SQL] Use java.math.BigDecimal as the exposed Decimal type.
Diffstat (limited to 'sql/hive')
-rw-r--r--  sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala     5
-rw-r--r--  sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveInspectors.scala  4
-rw-r--r--  sql/hive/v0.12.0/src/main/scala/org/apache/spark/sql/hive/Shim12.scala  2
-rw-r--r--  sql/hive/v0.13.1/src/main/scala/org/apache/spark/sql/hive/Shim13.scala  2
4 files changed, 7 insertions, 6 deletions
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala
index 4246b8b091..10833c1132 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala
@@ -409,8 +409,9 @@ private object HiveContext {
case (d: Date, DateType) => new DateWritable(d).toString
case (t: Timestamp, TimestampType) => new TimestampWritable(t).toString
case (bin: Array[Byte], BinaryType) => new String(bin, "UTF-8")
- case (decimal: BigDecimal, DecimalType()) => // Hive strips trailing zeros so use its toString
- HiveShim.createDecimal(decimal.underlying()).toString
+ case (decimal: java.math.BigDecimal, DecimalType()) =>
+ // Hive strips trailing zeros so use its toString
+ HiveShim.createDecimal(decimal).toString
case (other, tpe) if primitiveTypes contains tpe => other.toString
}
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveInspectors.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveInspectors.scala
index 5140d2064c..d87c4945c8 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveInspectors.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveInspectors.scala
@@ -341,7 +341,7 @@ private[hive] trait HiveInspectors {
(o: Any) => new HiveVarchar(o.asInstanceOf[String], o.asInstanceOf[String].size)
case _: JavaHiveDecimalObjectInspector =>
- (o: Any) => HiveShim.createDecimal(o.asInstanceOf[Decimal].toBigDecimal.underlying())
+ (o: Any) => HiveShim.createDecimal(o.asInstanceOf[Decimal].toJavaBigDecimal)
case soi: StandardStructObjectInspector =>
val wrappers = soi.getAllStructFieldRefs.map(ref => wrapperFor(ref.getFieldObjectInspector))
@@ -412,7 +412,7 @@ private[hive] trait HiveInspectors {
case _: HiveDecimalObjectInspector if x.preferWritable() =>
HiveShim.getDecimalWritable(a.asInstanceOf[Decimal])
case _: HiveDecimalObjectInspector =>
- HiveShim.createDecimal(a.asInstanceOf[Decimal].toBigDecimal.underlying())
+ HiveShim.createDecimal(a.asInstanceOf[Decimal].toJavaBigDecimal)
case _: BinaryObjectInspector if x.preferWritable() => HiveShim.getBinaryWritable(a)
case _: BinaryObjectInspector => a.asInstanceOf[Array[Byte]]
case _: DateObjectInspector if x.preferWritable() => HiveShim.getDateWritable(a)
diff --git a/sql/hive/v0.12.0/src/main/scala/org/apache/spark/sql/hive/Shim12.scala b/sql/hive/v0.12.0/src/main/scala/org/apache/spark/sql/hive/Shim12.scala
index 58417a15bb..c0b7741bc3 100644
--- a/sql/hive/v0.12.0/src/main/scala/org/apache/spark/sql/hive/Shim12.scala
+++ b/sql/hive/v0.12.0/src/main/scala/org/apache/spark/sql/hive/Shim12.scala
@@ -174,7 +174,7 @@ private[hive] object HiveShim {
null
} else {
new hiveIo.HiveDecimalWritable(
- HiveShim.createDecimal(value.asInstanceOf[Decimal].toBigDecimal.underlying()))
+ HiveShim.createDecimal(value.asInstanceOf[Decimal].toJavaBigDecimal))
}
def getPrimitiveNullWritable: NullWritable = NullWritable.get()
diff --git a/sql/hive/v0.13.1/src/main/scala/org/apache/spark/sql/hive/Shim13.scala b/sql/hive/v0.13.1/src/main/scala/org/apache/spark/sql/hive/Shim13.scala
index 1f768ca971..c04cda7bf1 100644
--- a/sql/hive/v0.13.1/src/main/scala/org/apache/spark/sql/hive/Shim13.scala
+++ b/sql/hive/v0.13.1/src/main/scala/org/apache/spark/sql/hive/Shim13.scala
@@ -276,7 +276,7 @@ private[hive] object HiveShim {
} else {
// TODO precise, scale?
new hiveIo.HiveDecimalWritable(
- HiveShim.createDecimal(value.asInstanceOf[Decimal].toBigDecimal.underlying()))
+ HiveShim.createDecimal(value.asInstanceOf[Decimal].toJavaBigDecimal))
}
def getPrimitiveNullWritable: NullWritable = NullWritable.get()