author     Liang-Chi Hsieh <viirya@gmail.com>     2015-06-22 23:11:56 -0700
committer  Davies Liu <davies@databricks.com>     2015-06-22 23:11:56 -0700
commit     31bd30687bc29c0e457c37308d489ae2b6e5b72a (patch)
tree       baa70f03f1b3248379567733f48c20968869025c
parent     d4f633514a393320c9ae64c00a75f702e6f58c67 (diff)
[SPARK-8359] [SQL] Fix incorrect decimal precision after multiplication
JIRA: https://issues.apache.org/jira/browse/SPARK-8359

Author: Liang-Chi Hsieh <viirya@gmail.com>

Closes #6814 from viirya/fix_decimal2 and squashes the following commits:

071a757 [Liang-Chi Hsieh] Remove maximum precision and use MathContext.UNLIMITED.
df217d4 [Liang-Chi Hsieh] Merge remote-tracking branch 'upstream/master' into fix_decimal2
a43bfc3 [Liang-Chi Hsieh] Add MathContext with maximum supported precision.
72eeb3f [Liang-Chi Hsieh] Merge remote-tracking branch 'upstream/master' into fix_decimal2
44c9348 [Liang-Chi Hsieh] Fix incorrect decimal precision after multiplication.
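For context (not part of the patch), a minimal Scala sketch of the underlying issue, assuming the behavior of scala.math.BigDecimal in the Scala versions Spark builds against, where arithmetic applies the operand's MathContext and the default is DECIMAL128 (34 significant digits):

import java.math.MathContext

// Long.MaxValue squared has 38 significant digits:
// 85070591730234615847396907784232501249
val exact = BigDecimal(Long.MaxValue)(MathContext.UNLIMITED) *
  BigDecimal(Long.MaxValue)(MathContext.UNLIMITED)

// With the default MathContext (DECIMAL128), the same product is rounded to
// 34 digits, which is the precision loss this patch avoids in Decimal.toBigDecimal.
val rounded = BigDecimal(Long.MaxValue) * BigDecimal(Long.MaxValue)

println(exact.bigDecimal.unscaledValue)   // full 38-digit unscaled value
println(rounded.bigDecimal.unscaledValue) // rounded to 34 significant digits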
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/types/Decimal.scala             | 6 ++++--
-rw-r--r--  sql/catalyst/src/test/scala/org/apache/spark/sql/types/decimal/DecimalSuite.scala | 5 +++++
2 files changed, 9 insertions(+), 2 deletions(-)
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/Decimal.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/Decimal.scala
index a85af9e04a..bd9823bc05 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/Decimal.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/Decimal.scala
@@ -17,6 +17,8 @@
 
 package org.apache.spark.sql.types
 
+import java.math.{MathContext, RoundingMode}
+
 import org.apache.spark.annotation.DeveloperApi
 
 /**
@@ -137,9 +139,9 @@ final class Decimal extends Ordered[Decimal] with Serializable {
   def toBigDecimal: BigDecimal = {
     if (decimalVal.ne(null)) {
-      decimalVal
+      decimalVal(MathContext.UNLIMITED)
     } else {
-      BigDecimal(longVal, _scale)
+      BigDecimal(longVal, _scale)(MathContext.UNLIMITED)
     }
   }
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/types/decimal/DecimalSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/types/decimal/DecimalSuite.scala
index 4c0365cf1b..ccc29c0dc8 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/types/decimal/DecimalSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/types/decimal/DecimalSuite.scala
@@ -162,4 +162,9 @@ class DecimalSuite extends SparkFunSuite with PrivateMethodTester {
     assert(new Decimal().set(100L, 10, 0).toUnscaledLong === 100L)
     assert(Decimal(Long.MaxValue, 100, 0).toUnscaledLong === Long.MaxValue)
   }
+
+  test("accurate precision after multiplication") {
+    val decimal = (Decimal(Long.MaxValue, 38, 0) * Decimal(Long.MaxValue, 38, 0)).toJavaBigDecimal
+    assert(decimal.unscaledValue.toString === "85070591730234615847396907784232501249")
+  }
 }
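As a usage note, here is a sketch of what the new test asserts, assuming the patched Decimal class above is on the classpath:

import org.apache.spark.sql.types.Decimal

// With toBigDecimal now carrying MathContext.UNLIMITED, the 38-digit product of
// Long.MaxValue * Long.MaxValue survives the multiplication instead of being rounded.
val product = Decimal(Long.MaxValue, 38, 0) * Decimal(Long.MaxValue, 38, 0)
println(product.toJavaBigDecimal.unscaledValue) // 85070591730234615847396907784232501249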