author    Davies Liu <davies@databricks.com>  2015-08-25 14:55:34 -0700
committer Yin Huai <yhuai@databricks.com>     2015-08-25 14:55:34 -0700
commit ec89bd840a6862751999d612f586a962cae63f6d
tree   a1b23691969ca31e78fa046c3c23f81eb282d997 /sql/core
parent 00ae4be97f7b205432db2967ba6d506286ef2ca6
[SPARK-10245] [SQL] Fix decimal literals with precision < scale
In BigDecimal or java.math.BigDecimal, the precision can be smaller than the scale; for example, BigDecimal("0.001") has precision = 1 and scale = 3. DecimalType, however, requires that the precision be at least as large as the scale, so we should use the maximum of precision and scale when inferring the schema from a decimal literal.

Author: Davies Liu <davies@databricks.com>

Closes #8428 from davies/smaller_decimal.
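A minimal sketch of the inference rule described above. The object and method names (DecimalLiteralInference, inferredType) are invented for illustration; the actual fix lives in Catalyst's literal handling, which is outside the sql/core diff shown below. It only demonstrates why taking max(precision, scale) yields a valid DecimalType.

// Hypothetical illustration, not the actual Spark/Catalyst code.
object DecimalLiteralInference {
  // Returns (precision, scale) to use for a DecimalType inferred from a literal.
  def inferredType(literal: java.math.BigDecimal): (Int, Int) = {
    val precision = literal.precision() // significant digits: 1 for 0.001
    val scale = literal.scale()         // digits after the point: 3 for 0.001
    // Use the maximum so the inferred DecimalType satisfies precision >= scale.
    (math.max(precision, scale), scale)
  }

  def main(args: Array[String]): Unit = {
    val d = new java.math.BigDecimal("0.001")
    println(s"precision=${d.precision()}, scale=${d.scale()}") // precision=1, scale=3
    println(inferredType(d))                                   // (3,3)
  }
}

The test cases added in the diff below exercise exactly such literals (0.001, -0.001, and friends) and check that they round-trip unchanged through a SELECT.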
Diffstat (limited to 'sql/core')
-rw-r--r--  sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala  10
1 file changed, 10 insertions(+), 0 deletions(-)
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
index dcb4e83710..aa07665c6b 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
@@ -1627,6 +1627,16 @@ class SQLQuerySuite extends QueryTest with SharedSQLContext {
Row(null))
}
+ test("precision smaller than scale") {
+ checkAnswer(sql("select 10.00"), Row(BigDecimal("10.00")))
+ checkAnswer(sql("select 1.00"), Row(BigDecimal("1.00")))
+ checkAnswer(sql("select 0.10"), Row(BigDecimal("0.10")))
+ checkAnswer(sql("select 0.01"), Row(BigDecimal("0.01")))
+ checkAnswer(sql("select 0.001"), Row(BigDecimal("0.001")))
+ checkAnswer(sql("select -0.01"), Row(BigDecimal("-0.01")))
+ checkAnswer(sql("select -0.001"), Row(BigDecimal("-0.001")))
+ }
+
test("external sorting updates peak execution memory") {
withSQLConf((SQLConf.EXTERNAL_SORT.key, "true")) {
val sc = sqlContext.sparkContext