about summary refs log tree commit diff
diff options
context:
space:
mode:
-rw-r--r-- sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala | 9
-rw-r--r-- sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CastSuite.scala | 16
2 files changed, 16 insertions(+), 9 deletions(-)
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala
index b1e89b5de8..a12fba047b 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala
@@ -52,7 +52,8 @@ object Cast {
case (DateType, TimestampType) => true
case (_: NumericType, TimestampType) => true
- case (_, DateType) => true
+ case (StringType, DateType) => true
+ case (TimestampType, DateType) => true
case (StringType, CalendarIntervalType) => true
@@ -228,18 +229,12 @@ case class Cast(child: Expression, dataType: DataType) extends UnaryExpression w
// throw valid precision more than seconds, according to Hive.
// Timestamp.nanos is in 0 to 999,999,999, no more than a second.
buildCast[Long](_, t => DateTimeUtils.millisToDays(t / 1000L))
- // Hive throws this exception as a Semantic Exception
- // It is never possible to compare result when hive return with exception,
- // so we can return null
- // NULL is more reasonable here, since the query itself obeys the grammar.
- case _ => _ => null
}
// IntervalConverter
private[this] def castToInterval(from: DataType): Any => Any = from match {
case StringType =>
buildCast[UTF8String](_, s => CalendarInterval.fromString(s.toString))
- case _ => _ => null
}
// LongConverter
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CastSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CastSuite.scala
index dfda7c50f2..5ae0527a9c 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CastSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CastSuite.scala
@@ -70,7 +70,8 @@ class CastSuite extends SparkFunSuite with ExpressionEvalHelper {
checkNullCast(DateType, TimestampType)
numericTypes.foreach(dt => checkNullCast(dt, TimestampType))
- atomicTypes.foreach(dt => checkNullCast(dt, DateType))
+ checkNullCast(StringType, DateType)
+ checkNullCast(TimestampType, DateType)
checkNullCast(StringType, CalendarIntervalType)
numericTypes.foreach(dt => checkNullCast(StringType, dt))
@@ -366,7 +367,6 @@ class CastSuite extends SparkFunSuite with ExpressionEvalHelper {
checkEvaluation(cast("2012-12-11", DoubleType), null)
checkEvaluation(cast(123, IntegerType), 123)
-
checkEvaluation(cast(Literal.create(null, IntegerType), ShortType), null)
}
@@ -783,4 +783,16 @@ class CastSuite extends SparkFunSuite with ExpressionEvalHelper {
checkEvaluation(cast("abc", BooleanType), null)
checkEvaluation(cast("", BooleanType), null)
}
+
+ test("SPARK-16729 type checking for casting to date type") {
+ assert(cast("1234", DateType).checkInputDataTypes().isSuccess)
+ assert(cast(new Timestamp(1), DateType).checkInputDataTypes().isSuccess)
+ assert(cast(false, DateType).checkInputDataTypes().isFailure)
+ assert(cast(1.toByte, DateType).checkInputDataTypes().isFailure)
+ assert(cast(1.toShort, DateType).checkInputDataTypes().isFailure)
+ assert(cast(1, DateType).checkInputDataTypes().isFailure)
+ assert(cast(1L, DateType).checkInputDataTypes().isFailure)
+ assert(cast(1.0.toFloat, DateType).checkInputDataTypes().isFailure)
+ assert(cast(1.0, DateType).checkInputDataTypes().isFailure)
+ }
}