about summary refs log tree commit diff
path: root/sql
diff options
context:
space:
mode:
author	Reynold Xin <rxin@databricks.com>	2015-07-01 00:08:16 -0700
committer	Reynold Xin <rxin@databricks.com>	2015-07-01 00:08:16 -0700
commit	fc3a6fe67f5aeda2443958c31f097daeba8549e5 (patch)
tree	a9e415b5c795e49592dca3353f4c12d0fdff2223 /sql
parent	365c14055e90db5ea4b25afec03022be81c8a704 (diff)
download	spark-fc3a6fe67f5aeda2443958c31f097daeba8549e5.tar.gz
spark-fc3a6fe67f5aeda2443958c31f097daeba8549e5.tar.bz2
spark-fc3a6fe67f5aeda2443958c31f097daeba8549e5.zip
[SPARK-8749][SQL] Remove HiveTypeCoercion trait.
Moved all the rules into the companion object. Author: Reynold Xin <rxin@databricks.com> Closes #7147 from rxin/SPARK-8749 and squashes the following commits: c1c6dc0 [Reynold Xin] [SPARK-8749][SQL] Remove HiveTypeCoercion trait.
Diffstat (limited to 'sql')
-rw-r--r--	sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala	4
-rw-r--r--	sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/HiveTypeCoercion.scala	59
-rw-r--r--	sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/HiveTypeCoercionSuite.scala	14
3 files changed, 33 insertions, 44 deletions
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
index 117c87a785..15e84e68b9 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
@@ -43,7 +43,7 @@ class Analyzer(
registry: FunctionRegistry,
conf: CatalystConf,
maxIterations: Int = 100)
- extends RuleExecutor[LogicalPlan] with HiveTypeCoercion with CheckAnalysis {
+ extends RuleExecutor[LogicalPlan] with CheckAnalysis {
def resolver: Resolver = {
if (conf.caseSensitiveAnalysis) {
@@ -76,7 +76,7 @@ class Analyzer(
ExtractWindowExpressions ::
GlobalAggregates ::
UnresolvedHavingClauseAttributes ::
- typeCoercionRules ++
+ HiveTypeCoercion.typeCoercionRules ++
extendedResolutionRules : _*)
)
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/HiveTypeCoercion.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/HiveTypeCoercion.scala
index e525ad623f..a9d396d1fa 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/HiveTypeCoercion.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/HiveTypeCoercion.scala
@@ -22,7 +22,32 @@ import org.apache.spark.sql.catalyst.plans.logical.{LogicalPlan, Project, Union}
import org.apache.spark.sql.catalyst.rules.Rule
import org.apache.spark.sql.types._
+
+/**
+ * A collection of [[Rule Rules]] that can be used to coerce differing types that
+ * participate in operations into compatible ones. Most of these rules are based on Hive semantics,
+ * but they do not introduce any dependencies on the hive codebase. For this reason they remain in
+ * Catalyst until we have a more standard set of coercions.
+ */
object HiveTypeCoercion {
+
+ val typeCoercionRules =
+ PropagateTypes ::
+ ConvertNaNs ::
+ InConversion ::
+ WidenTypes ::
+ PromoteStrings ::
+ DecimalPrecision ::
+ BooleanEquality ::
+ StringToIntegralCasts ::
+ FunctionArgumentConversion ::
+ CaseWhenCoercion ::
+ IfCoercion ::
+ Division ::
+ PropagateTypes ::
+ AddCastForAutoCastInputTypes ::
+ Nil
+
// See https://cwiki.apache.org/confluence/display/Hive/LanguageManual+Types.
// The conversion for integral and floating point types have a linear widening hierarchy:
private val numericPrecedence =
@@ -79,7 +104,6 @@ object HiveTypeCoercion {
})
}
-
/**
* Find the tightest common type of a set of types by continuously applying
* `findTightestCommonTypeOfTwo` on these types.
@@ -90,34 +114,6 @@ object HiveTypeCoercion {
case Some(d) => findTightestCommonTypeOfTwo(d, c)
})
}
-}
-
-/**
- * A collection of [[Rule Rules]] that can be used to coerce differing types that
- * participate in operations into compatible ones. Most of these rules are based on Hive semantics,
- * but they do not introduce any dependencies on the hive codebase. For this reason they remain in
- * Catalyst until we have a more standard set of coercions.
- */
-trait HiveTypeCoercion {
-
- import HiveTypeCoercion._
-
- val typeCoercionRules =
- PropagateTypes ::
- ConvertNaNs ::
- InConversion ::
- WidenTypes ::
- PromoteStrings ::
- DecimalPrecision ::
- BooleanEquality ::
- StringToIntegralCasts ::
- FunctionArgumentConversion ::
- CaseWhenCoercion ::
- IfCoercion ::
- Division ::
- PropagateTypes ::
- AddCastForAutoCastInputTypes ::
- Nil
/**
* Applies any changes to [[AttributeReference]] data types that are made by other rules to
@@ -202,8 +198,6 @@ trait HiveTypeCoercion {
* - LongType to DoubleType
*/
object WidenTypes extends Rule[LogicalPlan] {
- import HiveTypeCoercion._
-
def apply(plan: LogicalPlan): LogicalPlan = plan transform {
// TODO: unions with fixed-precision decimals
case u @ Union(left, right) if u.childrenResolved && !u.resolved =>
@@ -655,8 +649,6 @@ trait HiveTypeCoercion {
* Coerces the type of different branches of a CASE WHEN statement to a common type.
*/
object CaseWhenCoercion extends Rule[LogicalPlan] {
- import HiveTypeCoercion._
-
def apply(plan: LogicalPlan): LogicalPlan = plan transformAllExpressions {
case c: CaseWhenLike if c.childrenResolved && !c.valueTypesEqual =>
logDebug(s"Input values for null casting ${c.valueTypes.mkString(",")}")
@@ -714,7 +706,6 @@ trait HiveTypeCoercion {
* [[AutoCastInputTypes]].
*/
object AddCastForAutoCastInputTypes extends Rule[LogicalPlan] {
-
def apply(plan: LogicalPlan): LogicalPlan = plan transformAllExpressions {
// Skip nodes who's children have not been resolved yet.
case e if !e.childrenResolved => e
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/HiveTypeCoercionSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/HiveTypeCoercionSuite.scala
index f7b8e21bed..eae3666595 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/HiveTypeCoercionSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/HiveTypeCoercionSuite.scala
@@ -113,8 +113,7 @@ class HiveTypeCoercionSuite extends PlanTest {
}
test("coalesce casts") {
- val fac = new HiveTypeCoercion { }.FunctionArgumentConversion
- ruleTest(fac,
+ ruleTest(HiveTypeCoercion.FunctionArgumentConversion,
Coalesce(Literal(1.0)
:: Literal(1)
:: Literal.create(1.0, FloatType)
@@ -123,7 +122,7 @@ class HiveTypeCoercionSuite extends PlanTest {
:: Cast(Literal(1), DoubleType)
:: Cast(Literal.create(1.0, FloatType), DoubleType)
:: Nil))
- ruleTest(fac,
+ ruleTest(HiveTypeCoercion.FunctionArgumentConversion,
Coalesce(Literal(1L)
:: Literal(1)
:: Literal(new java.math.BigDecimal("1000000000000000000000"))
@@ -135,7 +134,7 @@ class HiveTypeCoercionSuite extends PlanTest {
}
test("type coercion for If") {
- val rule = new HiveTypeCoercion { }.IfCoercion
+ val rule = HiveTypeCoercion.IfCoercion
ruleTest(rule,
If(Literal(true), Literal(1), Literal(1L)),
If(Literal(true), Cast(Literal(1), LongType), Literal(1L))
@@ -148,19 +147,18 @@ class HiveTypeCoercionSuite extends PlanTest {
}
test("type coercion for CaseKeyWhen") {
- val cwc = new HiveTypeCoercion {}.CaseWhenCoercion
- ruleTest(cwc,
+ ruleTest(HiveTypeCoercion.CaseWhenCoercion,
CaseKeyWhen(Literal(1.toShort), Seq(Literal(1), Literal("a"))),
CaseKeyWhen(Cast(Literal(1.toShort), IntegerType), Seq(Literal(1), Literal("a")))
)
- ruleTest(cwc,
+ ruleTest(HiveTypeCoercion.CaseWhenCoercion,
CaseKeyWhen(Literal(true), Seq(Literal(1), Literal("a"))),
CaseKeyWhen(Literal(true), Seq(Literal(1), Literal("a")))
)
}
test("type coercion simplification for equal to") {
- val be = new HiveTypeCoercion {}.BooleanEquality
+ val be = HiveTypeCoercion.BooleanEquality
ruleTest(be,
EqualTo(Literal(true), Literal(1)),