about summary refs log tree commit diff
path: root/sql
diff options
context:
space:
mode:
author	Reynold Xin <rxin@databricks.com>	2015-07-05 23:54:25 -0700
committer	Reynold Xin <rxin@databricks.com>	2015-07-05 23:54:25 -0700
commit	86768b7b3b0c2964e744bc491bc20a1d3140ce93 (patch)
tree	f7a846a3ec77d07db9416a50a13b5ab1d3b86f71 /sql
parent	6d0411b4f3a202cfb53f638ee5fd49072b42d3a6 (diff)
downloadspark-86768b7b3b0c2964e744bc491bc20a1d3140ce93.tar.gz
spark-86768b7b3b0c2964e744bc491bc20a1d3140ce93.tar.bz2
spark-86768b7b3b0c2964e744bc491bc20a1d3140ce93.zip
[SPARK-8831][SQL] Support AbstractDataType in TypeCollection.
Otherwise it is impossible to declare an expression supporting DecimalType.

Author: Reynold Xin <rxin@databricks.com>

Closes #7232 from rxin/typecollection-adt and squashes the following commits:

934d3d1 [Reynold Xin] [SPARK-8831][SQL] Support AbstractDataType in TypeCollection.
Diffstat (limited to 'sql')
-rw-r--r--	sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/HiveTypeCoercion.scala	| 2
-rw-r--r--	sql/catalyst/src/main/scala/org/apache/spark/sql/types/AbstractDataType.scala	| 10
-rw-r--r--	sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/HiveTypeCoercionSuite.scala	| 6
3 files changed, 12 insertions(+), 6 deletions(-)
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/HiveTypeCoercion.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/HiveTypeCoercion.scala
index 84acc0e7e9..5367b7f330 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/HiveTypeCoercion.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/HiveTypeCoercion.scala
@@ -708,8 +708,6 @@ object HiveTypeCoercion {
case (NullType, target) => Cast(e, target.defaultConcreteType)
// Implicit cast among numeric types
- // If input is decimal, and we expect a decimal type, just use the input.
- case (_: DecimalType, DecimalType) => e
// If input is a numeric type but not decimal, and we expect a decimal type,
// cast the input to unlimited precision decimal.
case (_: NumericType, DecimalType) if !inType.isInstanceOf[DecimalType] =>
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/AbstractDataType.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/AbstractDataType.scala
index ffefb0e783..fb1b47e946 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/AbstractDataType.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/AbstractDataType.scala
@@ -53,10 +53,12 @@ private[sql] abstract class AbstractDataType {
*
* This means that we prefer StringType over BinaryType if it is possible to cast to StringType.
*/
-private[sql] class TypeCollection(private val types: Seq[DataType]) extends AbstractDataType {
+private[sql] class TypeCollection(private val types: Seq[AbstractDataType])
+ extends AbstractDataType {
+
require(types.nonEmpty, s"TypeCollection ($types) cannot be empty")
- private[sql] override def defaultConcreteType: DataType = types.head
+ private[sql] override def defaultConcreteType: DataType = types.head.defaultConcreteType
private[sql] override def isParentOf(childCandidate: DataType): Boolean = false
@@ -68,9 +70,9 @@ private[sql] class TypeCollection(private val types: Seq[DataType]) extends Abst
private[sql] object TypeCollection {
- def apply(types: DataType*): TypeCollection = new TypeCollection(types)
+ def apply(types: AbstractDataType*): TypeCollection = new TypeCollection(types)
- def unapply(typ: AbstractDataType): Option[Seq[DataType]] = typ match {
+ def unapply(typ: AbstractDataType): Option[Seq[AbstractDataType]] = typ match {
case typ: TypeCollection => Some(typ.types)
case _ => None
}
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/HiveTypeCoercionSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/HiveTypeCoercionSuite.scala
index 67d05ab536..b564266177 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/HiveTypeCoercionSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/HiveTypeCoercionSuite.scala
@@ -71,6 +71,12 @@ class HiveTypeCoercionSuite extends PlanTest {
shouldCast(IntegerType, TypeCollection(StringType, BinaryType), StringType)
shouldCast(IntegerType, TypeCollection(BinaryType, StringType), StringType)
+
+ shouldCast(
+ DecimalType.Unlimited, TypeCollection(IntegerType, DecimalType), DecimalType.Unlimited)
+ shouldCast(DecimalType(10, 2), TypeCollection(IntegerType, DecimalType), DecimalType(10, 2))
+ shouldCast(DecimalType(10, 2), TypeCollection(DecimalType, IntegerType), DecimalType(10, 2))
+ shouldCast(IntegerType, TypeCollection(DecimalType(10, 2), StringType), DecimalType(10, 2))
}
test("ineligible implicit type cast") {