path: root/sql/catalyst/src/test
author	Herman van Hovell <hvanhovell@databricks.com>	2016-10-04 23:48:26 -0700
committer	Reynold Xin <rxin@databricks.com>	2016-10-04 23:48:26 -0700
commit	89516c1c4a167249b0c82f60a62edb45ede3bd2c (patch)
tree	38fd2f4683fe15f4d311204c8a5060cf847ebe13 /sql/catalyst/src/test
parent	c9fe10d4ed8df5ac4bd0f1eb8c9cd19244e27736 (diff)
[SPARK-17258][SQL] Parse scientific decimal literals as decimals
## What changes were proposed in this pull request?

Currently Spark SQL parses regular decimal literals (e.g. `10.00`) as decimals and scientific decimal literals (e.g. `10.0e10`) as doubles. The difference between the two confuses most users. This PR unifies the parsing behavior and also parses scientific decimal literals as decimals. The implications for tests are limited to a single Hive compatibility test.

## How was this patch tested?

Updated tests in `ExpressionParserSuite` and `SQLQueryTestSuite`.

Author: Herman van Hovell <hvanhovell@databricks.com>

Closes #14828 from hvanhovell/SPARK-17258.
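For context, here is a minimal standalone sketch (not part of the patch; the object name `ScientificDecimalSketch` is hypothetical) of what the unified behavior means: a scientific literal such as `10.0e10` was previously turned into a binary `Double`, whereas after this change it is treated as an exact decimal, the same way a regular decimal literal such as `10.00` already was. The `BigDecimal(value).underlying` call mirrors the test helper added in the diff below.

```scala
// Minimal sketch, not part of this patch: contrasts the old Double-based
// interpretation of a scientific literal with the new decimal interpretation.
object ScientificDecimalSketch extends App {
  val literal = "10.0e10"

  // Old behaviour: scientific literals were parsed as binary doubles.
  val asDouble: Double = literal.toDouble                              // 1.0E11

  // New behaviour: scientific literals are parsed as exact decimals,
  // matching how "10.00"-style literals were already handled.
  val asDecimal: java.math.BigDecimal = BigDecimal(literal).underlying // 1.00E+11

  println(asDouble)
  println(asDecimal)
}
```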
Diffstat (limited to 'sql/catalyst/src/test')
-rw-r--r--	sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/ExpressionParserSuite.scala	24
1 file changed, 12 insertions(+), 12 deletions(-)
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/ExpressionParserSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/ExpressionParserSuite.scala
index 3718ac5f1e..0fb1138478 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/ExpressionParserSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/ExpressionParserSuite.scala
@@ -352,6 +352,10 @@ class ExpressionParserSuite extends PlanTest {
}
test("literals") {
+ def testDecimal(value: String): Unit = {
+ assertEqual(value, Literal(BigDecimal(value).underlying))
+ }
+
// NULL
assertEqual("null", Literal(null))
@@ -362,20 +366,18 @@ class ExpressionParserSuite extends PlanTest {
// Integral should have the narrowest possible type
assertEqual("787324", Literal(787324))
assertEqual("7873247234798249234", Literal(7873247234798249234L))
- assertEqual("78732472347982492793712334",
- Literal(BigDecimal("78732472347982492793712334").underlying()))
+ testDecimal("78732472347982492793712334")
// Decimal
- assertEqual("7873247234798249279371.2334",
- Literal(BigDecimal("7873247234798249279371.2334").underlying()))
+ testDecimal("7873247234798249279371.2334")
// Scientific Decimal
- assertEqual("9.0e1", 90d)
- assertEqual(".9e+2", 90d)
- assertEqual("0.9e+2", 90d)
- assertEqual("900e-1", 90d)
- assertEqual("900.0E-1", 90d)
- assertEqual("9.e+1", 90d)
+ testDecimal("9.0e1")
+ testDecimal(".9e+2")
+ testDecimal("0.9e+2")
+ testDecimal("900e-1")
+ testDecimal("900.0E-1")
+ testDecimal("9.e+1")
intercept(".e3")
// Tiny Int Literal
@@ -395,8 +397,6 @@ class ExpressionParserSuite extends PlanTest {
assertEqual("10.0D", Literal(10.0D))
intercept("-1.8E308D", s"does not fit in range")
intercept("1.8E308D", s"does not fit in range")
- // TODO we need to figure out if we should throw an exception here!
- assertEqual("1E309", Literal(Double.PositiveInfinity))
// BigDecimal Literal
assertEqual("90912830918230182310293801923652346786BD",