 sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/Optimizer.scala                 | 2 +-
 sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CastSuite.scala               | 7 -------
 sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/DateExpressionsSuite.scala    | 6 +++---
 sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/StringExpressionsSuite.scala  | 2 +-
 4 files changed, 5 insertions(+), 12 deletions(-)
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/Optimizer.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/Optimizer.scala
index a6fb34cbfb..5e998d6188 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/Optimizer.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/Optimizer.scala
@@ -673,7 +673,7 @@ object FoldablePropagation extends Rule[LogicalPlan] {
   def apply(plan: LogicalPlan): LogicalPlan = {
     val foldableMap = AttributeMap(plan.flatMap {
       case Project(projectList, _) => projectList.collect {
-        case a: Alias if a.resolved && a.child.foldable => (a.toAttribute, a)
+        case a: Alias if a.child.foldable => (a.toAttribute, a)
       }
       case _ => Nil
     })
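A minimal sketch, not part of this patch, of what the changed collect now gathers, using Catalyst classes from this tree (Alias, Literal, AttributeMap); exact constructor shapes at this revision are assumed:

import org.apache.spark.sql.catalyst.expressions.{Alias, AttributeMap, Literal}

// An alias over a literal: the child is foldable, which after this change is
// the only condition checked (the alias itself no longer has to be resolved).
val one = Alias(Literal(1), "one")()
assert(one.child.foldable)

// FoldablePropagation keys such aliases by the attribute they produce, so later
// references to that attribute can be replaced with the foldable alias.
val foldableMap = AttributeMap(Seq(one.toAttribute -> one))
assert(foldableMap.contains(one.toAttribute))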
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CastSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CastSuite.scala
index 43af359207..dfda7c50f2 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CastSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CastSuite.scala
@@ -548,7 +548,6 @@ class CastSuite extends SparkFunSuite with ExpressionEvalHelper {
{
val ret = cast(array_notNull, ArrayType(BooleanType, containsNull = false))
assert(ret.resolved === false)
- checkEvaluation(ret, Seq(null, true, false))
}
{
@@ -607,7 +606,6 @@ class CastSuite extends SparkFunSuite with ExpressionEvalHelper {
{
val ret = cast(map_notNull, MapType(StringType, BooleanType, valueContainsNull = false))
assert(ret.resolved === false)
- checkEvaluation(ret, Map("a" -> null, "b" -> true, "c" -> false))
}
{
val ret = cast(map_notNull, MapType(IntegerType, StringType, valueContainsNull = true))
@@ -714,7 +712,6 @@ class CastSuite extends SparkFunSuite with ExpressionEvalHelper {
StructField("b", BooleanType, nullable = true),
StructField("c", BooleanType, nullable = false))))
assert(ret.resolved === false)
- checkEvaluation(ret, InternalRow(null, true, false))
}
{
@@ -755,10 +752,6 @@ class CastSuite extends SparkFunSuite with ExpressionEvalHelper {
StructField("l", LongType, nullable = true)))))))
assert(ret.resolved === false)
- checkEvaluation(ret, Row(
- Seq(123, null, null),
- Map("a" -> null, "b" -> true, "c" -> false),
- Row(0L)))
}
test("cast between string and interval") {
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/DateExpressionsSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/DateExpressionsSuite.scala
index 53c66d8a75..6118a34d29 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/DateExpressionsSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/DateExpressionsSuite.scala
@@ -143,7 +143,7 @@ class DateExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper {
   }
 
   test("Seconds") {
-    checkEvaluation(Second(Literal.create(null, DateType)), null)
+    assert(Second(Literal.create(null, DateType)).resolved === false)
     checkEvaluation(Second(Cast(Literal(d), TimestampType)), 0)
     checkEvaluation(Second(Cast(Literal(sdf.format(d)), TimestampType)), 15)
     checkEvaluation(Second(Literal(ts)), 15)
@@ -176,7 +176,7 @@ class DateExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper {
   }
 
   test("Hour") {
-    checkEvaluation(Hour(Literal.create(null, DateType)), null)
+    assert(Hour(Literal.create(null, DateType)).resolved === false)
     checkEvaluation(Hour(Cast(Literal(d), TimestampType)), 0)
     checkEvaluation(Hour(Cast(Literal(sdf.format(d)), TimestampType)), 13)
     checkEvaluation(Hour(Literal(ts)), 13)
@@ -195,7 +195,7 @@ class DateExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper {
   }
 
   test("Minute") {
-    checkEvaluation(Minute(Literal.create(null, DateType)), null)
+    assert(Minute(Literal.create(null, DateType)).resolved === false)
     checkEvaluation(Minute(Cast(Literal(d), TimestampType)), 0)
     checkEvaluation(Minute(Cast(Literal(sdf.format(d)), TimestampType)), 10)
     checkEvaluation(Minute(Literal(ts)), 10)
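A minimal sketch of why the bare DateType literals above are now asserted unresolved rather than evaluated, assuming Second, Hour and Minute expect a TimestampType input and that implicit casts are only inserted by the analyzer:

import org.apache.spark.sql.catalyst.expressions.{Cast, Literal, Second}
import org.apache.spark.sql.types.{DateType, TimestampType}

// No analyzer runs here, so there is no implicit DateType -> TimestampType cast;
// the input-type check fails and the expression stays unresolved.
val onDate = Second(Literal.create(null, DateType))
assert(!onDate.resolved)

// An explicit Cast satisfies the expected input type, as the surviving tests do.
val onTimestamp = Second(Cast(Literal.create(null, DateType), TimestampType))
assert(onTimestamp.resolved)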
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/StringExpressionsSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/StringExpressionsSuite.scala
index 2cf8ca7000..c09c64fd6b 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/StringExpressionsSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/StringExpressionsSuite.scala
@@ -688,7 +688,7 @@ class StringExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper {
         Literal(Decimal(123123324123L) * Decimal(123123.21234d)), Literal(4)),
       "15,159,339,180,002,773.2778")
     checkEvaluation(FormatNumber(Literal.create(null, IntegerType), Literal(3)), null)
-    checkEvaluation(FormatNumber(Literal.create(null, NullType), Literal(3)), null)
+    assert(FormatNumber(Literal.create(null, NullType), Literal(3)).resolved === false)
   }
 
   test("find in set") {