-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/ExpectsInputTypes.scala     4
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/types/AbstractDataType.scala                    16
-rw-r--r--  sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisErrorSuite.scala       8
-rw-r--r--  sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/HiveTypeCoercionSuite.scala    1
4 files changed, 23 insertions, 6 deletions
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/ExpectsInputTypes.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/ExpectsInputTypes.scala
index 986cc09499..3eb0eb195c 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/ExpectsInputTypes.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/ExpectsInputTypes.scala
@@ -39,8 +39,8 @@ trait ExpectsInputTypes { self: Expression =>
override def checkInputDataTypes(): TypeCheckResult = {
val mismatches = children.zip(inputTypes).zipWithIndex.collect {
case ((child, expected), idx) if !expected.acceptsType(child.dataType) =>
- s"Argument ${idx + 1} is expected to be of type ${expected.simpleString}, " +
- s"however, ${child.prettyString} is of type ${child.dataType.simpleString}."
+ s"argument ${idx + 1} is expected to be of type ${expected.simpleString}, " +
+ s"however, '${child.prettyString}' is of type ${child.dataType.simpleString}."
}
if (mismatches.isEmpty) {
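For reference, the message-building pattern tweaked in the hunk above can be sketched with stand-in types (assumption: InputTypeCheckSketch, Child, and mismatchMessages are hypothetical names for illustration only, not Catalyst's Expression/TypeCheckResult machinery):

// Minimal sketch of how per-argument mismatch messages in the new format are assembled.
object InputTypeCheckSketch {
  case class Child(prettyString: String, dataType: String)

  def mismatchMessages(children: Seq[Child], inputTypes: Seq[String]): Seq[String] = {
    // Pair each child with its expected type, keep the argument position,
    // and collect a message only for the arguments whose types do not match.
    children.zip(inputTypes).zipWithIndex.collect {
      case ((child, expected), idx) if child.dataType != expected =>
        s"argument ${idx + 1} is expected to be of type $expected, " +
          s"however, '${child.prettyString}' is of type ${child.dataType}."
    }
  }

  def main(args: Array[String]): Unit = {
    val children = Seq(Child("null", "date"), Child("a", "int"))
    // Only the first argument mismatches, so exactly one message is produced.
    mismatchMessages(children, Seq("int", "int")).foreach(println)
  }
}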
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/AbstractDataType.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/AbstractDataType.scala
index ad75fa2e31..32f87440b4 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/AbstractDataType.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/AbstractDataType.scala
@@ -36,12 +36,28 @@ private[sql] abstract class AbstractDataType {
/**
* Returns true if this data type is the same type as `other`. This is different from equality
* as equality will also consider data type parametrization, such as decimal precision.
+ *
+ * {{{
+ * // this should return true
+ * DecimalType.isSameType(DecimalType(10, 2))
+ *
+ * // this should return false
+ * NumericType.isSameType(DecimalType(10, 2))
+ * }}}
*/
private[sql] def isSameType(other: DataType): Boolean
/**
* Returns true if `other` is an acceptable input type for a function that expects this,
* possibly abstract, DataType.
+ *
+ * {{{
+ * // this should return true
+ * DecimalType.acceptsType(DecimalType(10, 2))
+ *
+ * // this should return true as well
+ * NumericType.acceptsType(DecimalType(10, 2))
+ * }}}
*/
private[sql] def acceptsType(other: DataType): Boolean = isSameType(other)
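To make the documented contract concrete, here is a minimal stand-in model of isSameType versus acceptsType (assumption: AbstractDataTypeSketch, ConcreteType, and Numeric are simplified illustration-only types; the real methods in org.apache.spark.sql.types are private[sql] and not callable from user code):

object AbstractDataTypeSketch {
  sealed trait AbsType {
    def isSameType(other: ConcreteType): Boolean
    // By default an expected type accepts exactly its own type...
    def acceptsType(other: ConcreteType): Boolean = isSameType(other)
  }
  // A concrete type ignores parametrization (e.g. decimal precision/scale) for isSameType.
  case class ConcreteType(name: String, params: Seq[Int] = Nil) extends AbsType {
    def isSameType(other: ConcreteType): Boolean = name == other.name
  }
  // ...while an abstract type like NumericType overrides acceptsType to take any member.
  case object Numeric extends AbsType {
    def isSameType(other: ConcreteType): Boolean = false
    override def acceptsType(other: ConcreteType): Boolean =
      Set("decimal", "int", "double").contains(other.name)
  }

  def main(args: Array[String]): Unit = {
    val dec = ConcreteType("decimal", Seq(10, 2))
    println(ConcreteType("decimal").isSameType(dec)) // true: parametrization ignored
    println(Numeric.isSameType(dec))                 // false: not the same type
    println(Numeric.acceptsType(dec))                // true: acceptable input
  }
}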
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisErrorSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisErrorSuite.scala
index 73236c3acb..9d0c69a245 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisErrorSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisErrorSuite.scala
@@ -58,7 +58,7 @@ class AnalysisErrorSuite extends SparkFunSuite with BeforeAndAfter {
}
}
- errorMessages.foreach(m => assert(error.getMessage.toLowerCase contains m.toLowerCase))
+ errorMessages.foreach(m => assert(error.getMessage.toLowerCase.contains(m.toLowerCase)))
}
}
@@ -68,21 +68,21 @@ class AnalysisErrorSuite extends SparkFunSuite with BeforeAndAfter {
"single invalid type, single arg",
testRelation.select(TestFunction(dateLit :: Nil, IntegerType :: Nil).as('a)),
"cannot resolve" :: "testfunction" :: "argument 1" :: "expected to be of type int" ::
- "null is of type date" ::Nil)
+ "'null' is of type date" ::Nil)
errorTest(
"single invalid type, second arg",
testRelation.select(
TestFunction(dateLit :: dateLit :: Nil, DateType :: IntegerType :: Nil).as('a)),
"cannot resolve" :: "testfunction" :: "argument 2" :: "expected to be of type int" ::
- "null is of type date" ::Nil)
+ "'null' is of type date" ::Nil)
errorTest(
"multiple invalid type",
testRelation.select(
TestFunction(dateLit :: dateLit :: Nil, IntegerType :: IntegerType :: Nil).as('a)),
"cannot resolve" :: "testfunction" :: "argument 1" :: "argument 2" ::
- "expected to be of type int" :: "null is of type date" ::Nil)
+ "expected to be of type int" :: "'null' is of type date" ::Nil)
errorTest(
"unresolved window function",
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/HiveTypeCoercionSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/HiveTypeCoercionSuite.scala
index 6e3aa0eebe..acb9a433de 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/HiveTypeCoercionSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/HiveTypeCoercionSuite.scala
@@ -79,6 +79,7 @@ class HiveTypeCoercionSuite extends PlanTest {
shouldCast(IntegerType, TypeCollection(DecimalType(10, 2), StringType), DecimalType(10, 2))
shouldCast(StringType, NumericType, DoubleType)
+ shouldCast(StringType, TypeCollection(NumericType, BinaryType), DoubleType)
// NumericType should not be changed when function accepts any of them.
Seq(ByteType, ShortType, IntegerType, LongType, FloatType, DoubleType,
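The new shouldCast line above exercises implicit casting of a string into a TypeCollection; a rough stand-in model of that "first acceptable member wins" behaviour is sketched below (assumption: TypeCollectionSketch and its string-based type names are illustration only, not Catalyst's actual coercion rules):

object TypeCollectionSketch {
  // Implicit casts this sketch knows about: identity, and string -> numeric (as double).
  val implicitCast: PartialFunction[(String, String), String] = {
    case (from, to) if from == to => to
    case ("string", "numeric")    => "double"
  }

  // Try each member of the expected collection in order and return the first cast that works.
  def castTo(from: String, expected: Seq[String]): Option[String] =
    expected.collectFirst {
      case to if implicitCast.isDefinedAt((from, to)) => implicitCast((from, to))
    }

  def main(args: Array[String]): Unit = {
    // Mirrors shouldCast(StringType, TypeCollection(NumericType, BinaryType), DoubleType):
    // the string input is coerced to the first member it can implicitly cast to.
    println(castTo("string", Seq("numeric", "binary"))) // Some(double)
    println(castTo("string", Seq("binary")))            // None
  }
}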