author    Davies Liu <davies@databricks.com>    2015-08-25 14:55:34 -0700
committer Yin Huai <yhuai@databricks.com>       2015-08-25 14:55:34 -0700
commit    ec89bd840a6862751999d612f586a962cae63f6d (patch)
tree      a1b23691969ca31e78fa046c3c23f81eb282d997 /sql
parent    00ae4be97f7b205432db2967ba6d506286ef2ca6 (diff)
[SPARK-10245] [SQL] Fix decimal literals with precision < scale
In BigDecimal or java.math.BigDecimal, the precision can be smaller than the scale; for example, BigDecimal("0.001") has precision = 1 and scale = 3. But DecimalType requires that the precision be at least the scale, so we should use the maximum of the precision and the scale when inferring the schema from a decimal literal.

Author: Davies Liu <davies@databricks.com>

Closes #8428 from davies/smaller_decimal.
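A minimal sketch of the underlying problem, in plain Scala (no Spark APIs; the values follow directly from java.math.BigDecimal semantics):

    // precision counts significant digits in the unscaled value;
    // scale counts digits after the decimal point.
    val d = BigDecimal("0.001")   // unscaled value 1, so one significant digit
    println(d.precision)          // 1
    println(d.scale)              // 3

    // DecimalType requires precision >= scale, so the patch widens the
    // inferred precision: max(1, 3) = 3 gives the valid DecimalType(3, 3).
    val precision = math.max(d.precision, d.scale)  // 3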
Diffstat (limited to 'sql')
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/literals.scala                |  7
-rw-r--r--  sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/LiteralExpressionSuite.scala  |  8
-rw-r--r--  sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala                                    | 10
3 files changed, 19 insertions, 6 deletions
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/literals.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/literals.scala
index 34bad23802..8c0c5d5b1e 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/literals.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/literals.scala
@@ -36,9 +36,10 @@ object Literal {
case s: Short => Literal(s, ShortType)
case s: String => Literal(UTF8String.fromString(s), StringType)
case b: Boolean => Literal(b, BooleanType)
- case d: BigDecimal => Literal(Decimal(d), DecimalType(d.precision, d.scale))
- case d: java.math.BigDecimal => Literal(Decimal(d), DecimalType(d.precision(), d.scale()))
- case d: Decimal => Literal(d, DecimalType(d.precision, d.scale))
+ case d: BigDecimal => Literal(Decimal(d), DecimalType(Math.max(d.precision, d.scale), d.scale))
+ case d: java.math.BigDecimal =>
+ Literal(Decimal(d), DecimalType(Math.max(d.precision, d.scale), d.scale()))
+ case d: Decimal => Literal(d, DecimalType(Math.max(d.precision, d.scale), d.scale))
case t: Timestamp => Literal(DateTimeUtils.fromJavaTimestamp(t), TimestampType)
case d: Date => Literal(DateTimeUtils.fromJavaDate(d), DateType)
case a: Array[Byte] => Literal(a, BinaryType)
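A hedged usage sketch of the patched Literal.apply above (REPL-style; the assertion reflects the new max(precision, scale) rule, not output copied from the test suite):

    import org.apache.spark.sql.catalyst.expressions.Literal
    import org.apache.spark.sql.types.DecimalType

    val lit = Literal(BigDecimal("0.001"))
    // Before this patch: DecimalType(1, 3), an invalid type (precision < scale).
    // After this patch:  DecimalType(3, 3).
    assert(lit.dataType == DecimalType(3, 3))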
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/LiteralExpressionSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/LiteralExpressionSuite.scala
index f6404d2161..015eb1897f 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/LiteralExpressionSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/LiteralExpressionSuite.scala
@@ -83,12 +83,14 @@ class LiteralExpressionSuite extends SparkFunSuite with ExpressionEvalHelper {
}
test("decimal") {
- List(0.0, 1.2, 1.1111, 5).foreach { d =>
+ List(-0.0001, 0.0, 0.001, 1.2, 1.1111, 5).foreach { d =>
checkEvaluation(Literal(Decimal(d)), Decimal(d))
checkEvaluation(Literal(Decimal(d.toInt)), Decimal(d.toInt))
checkEvaluation(Literal(Decimal(d.toLong)), Decimal(d.toLong))
- checkEvaluation(Literal(Decimal((d * 1000L).toLong, 10, 1)),
- Decimal((d * 1000L).toLong, 10, 1))
+ checkEvaluation(Literal(Decimal((d * 1000L).toLong, 10, 3)),
+ Decimal((d * 1000L).toLong, 10, 3))
+ checkEvaluation(Literal(BigDecimal(d.toString)), Decimal(d))
+ checkEvaluation(Literal(new java.math.BigDecimal(d.toString)), Decimal(d))
}
}
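For reference, a small sketch of what the updated test values mean, assuming Spark's Decimal(unscaled, precision, scale) constructor, which interprets the long as unscaled * 10^(-scale):

    import org.apache.spark.sql.types.Decimal

    // With scale = 3 (instead of the old 1), multiplying by 1000 and back
    // round-trips three-decimal values such as 0.001 exactly.
    val d = 0.001
    val dec = Decimal((d * 1000L).toLong, 10, 3)  // unscaled 1, i.e. 0.001
    assert(dec.toString == "0.001")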
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
index dcb4e83710..aa07665c6b 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
@@ -1627,6 +1627,16 @@ class SQLQuerySuite extends QueryTest with SharedSQLContext {
Row(null))
}
+ test("precision smaller than scale") {
+ checkAnswer(sql("select 10.00"), Row(BigDecimal("10.00")))
+ checkAnswer(sql("select 1.00"), Row(BigDecimal("1.00")))
+ checkAnswer(sql("select 0.10"), Row(BigDecimal("0.10")))
+ checkAnswer(sql("select 0.01"), Row(BigDecimal("0.01")))
+ checkAnswer(sql("select 0.001"), Row(BigDecimal("0.001")))
+ checkAnswer(sql("select -0.01"), Row(BigDecimal("-0.01")))
+ checkAnswer(sql("select -0.001"), Row(BigDecimal("-0.001")))
+ }
+
test("external sorting updates peak execution memory") {
withSQLConf((SQLConf.EXTERNAL_SORT.key, "true")) {
val sc = sqlContext.sparkContext
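And a hedged end-to-end check mirroring the new SQL test (Spark 1.5-era API; sqlContext is assumed to be in scope, e.g. in a spark-shell session):

    // A decimal literal with more fractional digits than significant digits
    // now parses and evaluates without producing an invalid DecimalType.
    val row = sqlContext.sql("select 0.001").head()
    assert(row.getDecimal(0) == new java.math.BigDecimal("0.001"))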