aboutsummaryrefslogtreecommitdiff
path: root/sql/catalyst/src/test
diff options
context:
space:
mode:
authorDaoyuan Wang <daoyuan.wang@intel.com>2015-02-02 15:49:22 -0800
committerMichael Armbrust <michael@databricks.com>2015-02-02 15:49:22 -0800
commit1646f89d967913ee1f231d9606f8502d13c25804 (patch)
tree86e89b964e7cd8a596e54621544af0052abb1d83 /sql/catalyst/src/test
parent830934976e8cf9e894bd3e5758fb941cad5d2f0b (diff)
downloadspark-1646f89d967913ee1f231d9606f8502d13c25804.tar.gz
spark-1646f89d967913ee1f231d9606f8502d13c25804.tar.bz2
spark-1646f89d967913ee1f231d9606f8502d13c25804.zip
[SPARK-4508] [SQL] build native date type to conform behavior to Hive
Store daysSinceEpoch as an Int value(4 bytes) to represent DateType, instead of using java.sql.Date(8 bytes as Long) in catalyst row. This ensures the same comparison behavior of Hive and Catalyst. Subsumes #3381 I think there are already some tests in JavaSQLSuite, and for python it will not affect python's datetime class. Author: Daoyuan Wang <daoyuan.wang@intel.com> Closes #3732 from adrian-wang/datenative and squashes the following commits: 0ed0fdc [Daoyuan Wang] fix test data a2fdd4e [Daoyuan Wang] getDate c37832b [Daoyuan Wang] row to catalyst f0005b1 [Daoyuan Wang] add date in sql parser and java type conversion 024c9a6 [Daoyuan Wang] clean some import order d6715fc [Daoyuan Wang] refactoring Date as Primitive Int internally 374abd5 [Daoyuan Wang] spark native date type support
Diffstat (limited to 'sql/catalyst/src/test')
-rw-r--r--sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ExpressionEvaluationSuite.scala28
-rw-r--r--sql/catalyst/src/test/scala/org/apache/spark/sql/types/DataTypeSuite.scala2
2 files changed, 15 insertions, 15 deletions
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ExpressionEvaluationSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ExpressionEvaluationSuite.scala
index 37e64adeea..25d1c105a0 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ExpressionEvaluationSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ExpressionEvaluationSuite.scala
@@ -303,6 +303,7 @@ class ExpressionEvaluationSuite extends FunSuite {
val sd = "1970-01-01"
val d = Date.valueOf(sd)
+ val zts = sd + " 00:00:00"
val sts = sd + " 00:00:02"
val nts = sts + ".1"
val ts = Timestamp.valueOf(nts)
@@ -319,14 +320,14 @@ class ExpressionEvaluationSuite extends FunSuite {
checkEvaluation(Cast(Literal(1.toDouble) cast TimestampType, DoubleType), 1.toDouble)
checkEvaluation(Cast(Literal(sd) cast DateType, StringType), sd)
- checkEvaluation(Cast(Literal(d) cast StringType, DateType), d)
+ checkEvaluation(Cast(Literal(d) cast StringType, DateType), 0)
checkEvaluation(Cast(Literal(nts) cast TimestampType, StringType), nts)
checkEvaluation(Cast(Literal(ts) cast StringType, TimestampType), ts)
// all convert to string type to check
checkEvaluation(
Cast(Cast(Literal(nts) cast TimestampType, DateType), StringType), sd)
checkEvaluation(
- Cast(Cast(Literal(ts) cast DateType, TimestampType), StringType), sts)
+ Cast(Cast(Literal(ts) cast DateType, TimestampType), StringType), zts)
checkEvaluation(Cast("abdef" cast BinaryType, StringType), "abdef")
@@ -377,8 +378,8 @@ class ExpressionEvaluationSuite extends FunSuite {
}
test("date") {
- val d1 = Date.valueOf("1970-01-01")
- val d2 = Date.valueOf("1970-01-02")
+ val d1 = DateUtils.fromJavaDate(Date.valueOf("1970-01-01"))
+ val d2 = DateUtils.fromJavaDate(Date.valueOf("1970-01-02"))
checkEvaluation(Literal(d1) < Literal(d2), true)
}
@@ -459,22 +460,21 @@ class ExpressionEvaluationSuite extends FunSuite {
test("date casting") {
val d = Date.valueOf("1970-01-01")
- checkEvaluation(Cast(d, ShortType), null)
- checkEvaluation(Cast(d, IntegerType), null)
- checkEvaluation(Cast(d, LongType), null)
- checkEvaluation(Cast(d, FloatType), null)
- checkEvaluation(Cast(d, DoubleType), null)
- checkEvaluation(Cast(d, DecimalType.Unlimited), null)
- checkEvaluation(Cast(d, DecimalType(10, 2)), null)
- checkEvaluation(Cast(d, StringType), "1970-01-01")
- checkEvaluation(Cast(Cast(d, TimestampType), StringType), "1970-01-01 00:00:00")
+ checkEvaluation(Cast(Literal(d), ShortType), null)
+ checkEvaluation(Cast(Literal(d), IntegerType), null)
+ checkEvaluation(Cast(Literal(d), LongType), null)
+ checkEvaluation(Cast(Literal(d), FloatType), null)
+ checkEvaluation(Cast(Literal(d), DoubleType), null)
+ checkEvaluation(Cast(Literal(d), DecimalType.Unlimited), null)
+ checkEvaluation(Cast(Literal(d), DecimalType(10, 2)), null)
+ checkEvaluation(Cast(Literal(d), StringType), "1970-01-01")
+ checkEvaluation(Cast(Cast(Literal(d), TimestampType), StringType), "1970-01-01 00:00:00")
}
test("timestamp casting") {
val millis = 15 * 1000 + 2
val seconds = millis * 1000 + 2
val ts = new Timestamp(millis)
- val ts1 = new Timestamp(15 * 1000) // a timestamp without the milliseconds part
val tss = new Timestamp(seconds)
checkEvaluation(Cast(ts, ShortType), 15)
checkEvaluation(Cast(ts, IntegerType), 15)
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/types/DataTypeSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/types/DataTypeSuite.scala
index c147be9f6b..7bcd6687d1 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/types/DataTypeSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/types/DataTypeSuite.scala
@@ -106,7 +106,7 @@ class DataTypeSuite extends FunSuite {
checkDefaultSize(DoubleType, 8)
checkDefaultSize(DecimalType(10, 5), 4096)
checkDefaultSize(DecimalType.Unlimited, 4096)
- checkDefaultSize(DateType, 8)
+ checkDefaultSize(DateType, 4)
checkDefaultSize(TimestampType, 8)
checkDefaultSize(StringType, 4096)
checkDefaultSize(BinaryType, 4096)