Diffstat (limited to 'sql/catalyst/src/test/scala/org')
-rw-r--r--  sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/ResolveInlineTablesSuite.scala | 10
-rw-r--r--  sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/TypeCoercionSuite.scala        | 35
-rw-r--r--  sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CastSuite.scala             |  4
-rw-r--r--  sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/DateExpressionsSuite.scala  |  6
-rw-r--r--  sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ExpressionEvalHelper.scala  |  7
5 files changed, 35 insertions(+), 27 deletions(-)
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/ResolveInlineTablesSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/ResolveInlineTablesSuite.scala
index f45a826869..d0fe815052 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/ResolveInlineTablesSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/ResolveInlineTablesSuite.scala
@@ -22,6 +22,7 @@ import org.scalatest.BeforeAndAfter
import org.apache.spark.sql.AnalysisException
import org.apache.spark.sql.catalyst.expressions.{Cast, Literal, Rand}
import org.apache.spark.sql.catalyst.expressions.aggregate.Count
+import org.apache.spark.sql.catalyst.plans.logical.LocalRelation
import org.apache.spark.sql.types.{LongType, NullType, TimestampType}
/**
@@ -91,12 +92,13 @@ class ResolveInlineTablesSuite extends AnalysisTest with BeforeAndAfter {
test("convert TimeZoneAwareExpression") {
val table = UnresolvedInlineTable(Seq("c1"),
Seq(Seq(Cast(lit("1991-12-06 00:00:00.0"), TimestampType))))
- val converted = ResolveInlineTables(conf).convert(table)
+ val withTimeZone = ResolveTimeZone(conf).apply(table)
+ val LocalRelation(output, data) = ResolveInlineTables(conf).apply(withTimeZone)
val correct = Cast(lit("1991-12-06 00:00:00.0"), TimestampType)
.withTimeZone(conf.sessionLocalTimeZone).eval().asInstanceOf[Long]
- assert(converted.output.map(_.dataType) == Seq(TimestampType))
- assert(converted.data.size == 1)
- assert(converted.data(0).getLong(0) == correct)
+ assert(output.map(_.dataType) == Seq(TimestampType))
+ assert(data.size == 1)
+ assert(data.head.getLong(0) == correct)
}
test("nullability inference in convert") {
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/TypeCoercionSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/TypeCoercionSuite.scala
index 011d09ff60..2624f5586f 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/TypeCoercionSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/TypeCoercionSuite.scala
@@ -25,6 +25,7 @@ import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.plans.PlanTest
import org.apache.spark.sql.catalyst.plans.logical._
import org.apache.spark.sql.catalyst.rules.{Rule, RuleExecutor}
+import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.types._
import org.apache.spark.unsafe.types.CalendarInterval
@@ -787,6 +788,12 @@ class TypeCoercionSuite extends PlanTest {
}
}
+ private val timeZoneResolver = ResolveTimeZone(new SQLConf)
+
+ private def widenSetOperationTypes(plan: LogicalPlan): LogicalPlan = {
+ timeZoneResolver(TypeCoercion.WidenSetOperationTypes(plan))
+ }
+
test("WidenSetOperationTypes for except and intersect") {
val firstTable = LocalRelation(
AttributeReference("i", IntegerType)(),
@@ -799,11 +806,10 @@ class TypeCoercionSuite extends PlanTest {
AttributeReference("f", FloatType)(),
AttributeReference("l", LongType)())
- val wt = TypeCoercion.WidenSetOperationTypes
val expectedTypes = Seq(StringType, DecimalType.SYSTEM_DEFAULT, FloatType, DoubleType)
- val r1 = wt(Except(firstTable, secondTable)).asInstanceOf[Except]
- val r2 = wt(Intersect(firstTable, secondTable)).asInstanceOf[Intersect]
+ val r1 = widenSetOperationTypes(Except(firstTable, secondTable)).asInstanceOf[Except]
+ val r2 = widenSetOperationTypes(Intersect(firstTable, secondTable)).asInstanceOf[Intersect]
checkOutput(r1.left, expectedTypes)
checkOutput(r1.right, expectedTypes)
checkOutput(r2.left, expectedTypes)
@@ -838,10 +844,9 @@ class TypeCoercionSuite extends PlanTest {
AttributeReference("p", ByteType)(),
AttributeReference("q", DoubleType)())
- val wt = TypeCoercion.WidenSetOperationTypes
val expectedTypes = Seq(StringType, DecimalType.SYSTEM_DEFAULT, FloatType, DoubleType)
- val unionRelation = wt(
+ val unionRelation = widenSetOperationTypes(
Union(firstTable :: secondTable :: thirdTable :: forthTable :: Nil)).asInstanceOf[Union]
assert(unionRelation.children.length == 4)
checkOutput(unionRelation.children.head, expectedTypes)
@@ -862,17 +867,15 @@ class TypeCoercionSuite extends PlanTest {
}
}
- val dp = TypeCoercion.WidenSetOperationTypes
-
val left1 = LocalRelation(
AttributeReference("l", DecimalType(10, 8))())
val right1 = LocalRelation(
AttributeReference("r", DecimalType(5, 5))())
val expectedType1 = Seq(DecimalType(10, 8))
- val r1 = dp(Union(left1, right1)).asInstanceOf[Union]
- val r2 = dp(Except(left1, right1)).asInstanceOf[Except]
- val r3 = dp(Intersect(left1, right1)).asInstanceOf[Intersect]
+ val r1 = widenSetOperationTypes(Union(left1, right1)).asInstanceOf[Union]
+ val r2 = widenSetOperationTypes(Except(left1, right1)).asInstanceOf[Except]
+ val r3 = widenSetOperationTypes(Intersect(left1, right1)).asInstanceOf[Intersect]
checkOutput(r1.children.head, expectedType1)
checkOutput(r1.children.last, expectedType1)
@@ -891,17 +894,17 @@ class TypeCoercionSuite extends PlanTest {
val plan2 = LocalRelation(
AttributeReference("r", rType)())
- val r1 = dp(Union(plan1, plan2)).asInstanceOf[Union]
- val r2 = dp(Except(plan1, plan2)).asInstanceOf[Except]
- val r3 = dp(Intersect(plan1, plan2)).asInstanceOf[Intersect]
+ val r1 = widenSetOperationTypes(Union(plan1, plan2)).asInstanceOf[Union]
+ val r2 = widenSetOperationTypes(Except(plan1, plan2)).asInstanceOf[Except]
+ val r3 = widenSetOperationTypes(Intersect(plan1, plan2)).asInstanceOf[Intersect]
checkOutput(r1.children.last, Seq(expectedType))
checkOutput(r2.right, Seq(expectedType))
checkOutput(r3.right, Seq(expectedType))
- val r4 = dp(Union(plan2, plan1)).asInstanceOf[Union]
- val r5 = dp(Except(plan2, plan1)).asInstanceOf[Except]
- val r6 = dp(Intersect(plan2, plan1)).asInstanceOf[Intersect]
+ val r4 = widenSetOperationTypes(Union(plan2, plan1)).asInstanceOf[Union]
+ val r5 = widenSetOperationTypes(Except(plan2, plan1)).asInstanceOf[Except]
+ val r6 = widenSetOperationTypes(Intersect(plan2, plan1)).asInstanceOf[Intersect]
checkOutput(r4.children.last, Seq(expectedType))
checkOutput(r5.left, Seq(expectedType))
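Note on the hunks above: every direct use of TypeCoercion.WidenSetOperationTypes is replaced by the new widenSetOperationTypes helper, which pipes the widened plan through ResolveTimeZone so that Casts inserted during widening pick up a session time zone. A hedged usage sketch in the suite's own LocalRelation/AttributeReference style; the Timestamp-vs-String widening shown here is an illustrative case, not one of the suite's tables:

    import org.apache.spark.sql.catalyst.expressions.AttributeReference
    import org.apache.spark.sql.catalyst.plans.logical.{LocalRelation, Union}
    import org.apache.spark.sql.types.{StringType, TimestampType}

    // Timestamp vs. String widens to String, and the Cast(timestamp to string)
    // inserted by widening is time-zone aware: exactly the node the helper
    // hands to ResolveTimeZone.
    val left  = LocalRelation(AttributeReference("t", TimestampType)())
    val right = LocalRelation(AttributeReference("t", StringType)())
    val widened = widenSetOperationTypes(Union(left, right)).asInstanceOf[Union]
    assert(widened.children.forall(_.output.head.dataType == StringType))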
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CastSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CastSuite.scala
index a7ffa884d2..22f3f3514f 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CastSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CastSuite.scala
@@ -34,7 +34,7 @@ import org.apache.spark.unsafe.types.UTF8String
*/
class CastSuite extends SparkFunSuite with ExpressionEvalHelper {
- private def cast(v: Any, targetType: DataType, timeZoneId: Option[String] = None): Cast = {
+ private def cast(v: Any, targetType: DataType, timeZoneId: Option[String] = Some("GMT")): Cast = {
v match {
case lit: Expression => Cast(lit, targetType, timeZoneId)
case _ => Cast(Literal(v), targetType, timeZoneId)
@@ -47,7 +47,7 @@ class CastSuite extends SparkFunSuite with ExpressionEvalHelper {
}
private def checkNullCast(from: DataType, to: DataType): Unit = {
- checkEvaluation(cast(Literal.create(null, from), to, Option("GMT")), null)
+ checkEvaluation(cast(Literal.create(null, from), to), null)
}
test("null cast") {
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/DateExpressionsSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/DateExpressionsSuite.scala
index 9978f35a03..ca89bf7db0 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/DateExpressionsSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/DateExpressionsSuite.scala
@@ -160,7 +160,7 @@ class DateExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper {
test("Seconds") {
assert(Second(Literal.create(null, DateType), gmtId).resolved === false)
- assert(Second(Cast(Literal(d), TimestampType), None).resolved === true)
+ assert(Second(Cast(Literal(d), TimestampType, gmtId), gmtId).resolved === true)
checkEvaluation(Second(Cast(Literal(d), TimestampType, gmtId), gmtId), 0)
checkEvaluation(Second(Cast(Literal(sdf.format(d)), TimestampType, gmtId), gmtId), 15)
checkEvaluation(Second(Literal(ts), gmtId), 15)
@@ -220,7 +220,7 @@ class DateExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper {
test("Hour") {
assert(Hour(Literal.create(null, DateType), gmtId).resolved === false)
- assert(Hour(Literal(ts), None).resolved === true)
+ assert(Hour(Literal(ts), gmtId).resolved === true)
checkEvaluation(Hour(Cast(Literal(d), TimestampType, gmtId), gmtId), 0)
checkEvaluation(Hour(Cast(Literal(sdf.format(d)), TimestampType, gmtId), gmtId), 13)
checkEvaluation(Hour(Literal(ts), gmtId), 13)
@@ -246,7 +246,7 @@ class DateExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper {
test("Minute") {
assert(Minute(Literal.create(null, DateType), gmtId).resolved === false)
- assert(Minute(Literal(ts), None).resolved === true)
+ assert(Minute(Literal(ts), gmtId).resolved === true)
checkEvaluation(Minute(Cast(Literal(d), TimestampType, gmtId), gmtId), 0)
checkEvaluation(
Minute(Cast(Literal(sdf.format(d)), TimestampType, gmtId), gmtId), 10)
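Note on the hunks above: the three assertions changed because, under the stricter resolution rule, a time-zone-aware expression whose timeZoneId is None now reports resolved === false, so passing None can no longer demonstrate resolution. A minimal sketch of that rule; the timestamp value is a hypothetical stand-in for the suite's ts fixture:

    import java.sql.Timestamp
    import org.apache.spark.sql.catalyst.expressions.{Hour, Literal}

    val gmtId = Option("GMT")                            // same shape as the suite's gmtId
    val ts = Timestamp.valueOf("2013-11-08 13:10:15")    // stand-in for the suite's ts

    assert(!Hour(Literal(ts), None).resolved)   // no zone: unresolved, eval() would fail
    assert(Hour(Literal(ts), gmtId).resolved)   // zone supplied: resolved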
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ExpressionEvalHelper.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ExpressionEvalHelper.scala
index 1ba6dd1c5e..b6399edb68 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ExpressionEvalHelper.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ExpressionEvalHelper.scala
@@ -25,10 +25,12 @@ import org.scalatest.prop.GeneratorDrivenPropertyChecks
import org.apache.spark.{SparkConf, SparkFunSuite}
import org.apache.spark.serializer.JavaSerializer
import org.apache.spark.sql.catalyst.{CatalystTypeConverters, InternalRow}
+import org.apache.spark.sql.catalyst.analysis.ResolveTimeZone
import org.apache.spark.sql.catalyst.expressions.codegen._
import org.apache.spark.sql.catalyst.optimizer.SimpleTestOptimizer
import org.apache.spark.sql.catalyst.plans.logical.{OneRowRelation, Project}
-import org.apache.spark.sql.catalyst.util.{ArrayBasedMapData, ArrayData, GenericArrayData, MapData}
+import org.apache.spark.sql.catalyst.util.{ArrayData, MapData}
+import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.types._
import org.apache.spark.util.Utils
@@ -45,7 +47,8 @@ trait ExpressionEvalHelper extends GeneratorDrivenPropertyChecks {
protected def checkEvaluation(
expression: => Expression, expected: Any, inputRow: InternalRow = EmptyRow): Unit = {
val serializer = new JavaSerializer(new SparkConf()).newInstance
- val expr: Expression = serializer.deserialize(serializer.serialize(expression))
+ val resolver = ResolveTimeZone(new SQLConf)
+ val expr = resolver.resolveTimeZones(serializer.deserialize(serializer.serialize(expression)))
val catalystValue = CatalystTypeConverters.convertToCatalyst(expected)
checkEvaluationWithoutCodegen(expr, catalystValue, inputRow)
checkEvaluationWithGeneratedMutableProjection(expr, catalystValue, inputRow)
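Note on the hunk above: checkEvaluation still round-trips the expression through JavaSerializer (a serializability check), and now additionally stamps any missing time zone via ResolveTimeZone.resolveTimeZones, so individual tests need not thread a zone through every time-zone-aware node. A hedged sketch of that step in isolation; a fresh SQLConf's sessionLocalTimeZone defaults to the JVM's zone:

    import org.apache.spark.sql.catalyst.analysis.ResolveTimeZone
    import org.apache.spark.sql.catalyst.expressions.{Cast, Literal}
    import org.apache.spark.sql.internal.SQLConf
    import org.apache.spark.sql.types.TimestampType

    val resolver = ResolveTimeZone(new SQLConf)

    // A Cast built without a zone is unresolved; resolveTimeZones stamps it
    // with the session zone so every checkEvaluation code path can evaluate it.
    val bare = Cast(Literal("2016-01-01 00:00:00"), TimestampType)
    val stamped = resolver.resolveTimeZones(bare)
    assert(!bare.resolved && stamped.resolved)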