aboutsummaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorhyukjinkwon <gurwls223@gmail.com>2017-02-03 22:10:17 -0800
committergatorsmile <gatorsmile@gmail.com>2017-02-03 22:10:17 -0800
commit2f3c20bbddd266015d9478c35ce2b37d67e01200 (patch)
treedd9a02b402cfe71a1bbbea0e5c46567489149aaf
parent22d4aae8be338051f6652cdf54dd593023751189 (diff)
downloadspark-2f3c20bbddd266015d9478c35ce2b37d67e01200.tar.gz
spark-2f3c20bbddd266015d9478c35ce2b37d67e01200.tar.bz2
spark-2f3c20bbddd266015d9478c35ce2b37d67e01200.zip
[SPARK-19446][SQL] Remove unused findTightestCommonType in TypeCoercion
## What changes were proposed in this pull request? This PR proposes to - remove unused `findTightestCommonType` in `TypeCoercion` as suggested in https://github.com/apache/spark/pull/16777#discussion_r99283834 - rename `findTightestCommonTypeOfTwo` to `findTightestCommonType`. - fix comments accordingly The usage was removed while refactoring/fixing in several JIRAs such as SPARK-16714, SPARK-16735 and SPARK-16646 ## How was this patch tested? Existing tests. Author: hyukjinkwon <gurwls223@gmail.com> Closes #16786 from HyukjinKwon/SPARK-19446.
-rw-r--r--sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/TypeCoercion.scala29
-rw-r--r--sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/TypeCoercionSuite.scala4
-rw-r--r--sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/json/InferSchema.scala2
3 files changed, 12 insertions, 23 deletions
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/TypeCoercion.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/TypeCoercion.scala
index 4177c2b038..c6242e01ee 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/TypeCoercion.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/TypeCoercion.scala
@@ -79,7 +79,7 @@ object TypeCoercion {
* with primitive types, because in that case the precision and scale of the result depends on
* the operation. Those rules are implemented in [[DecimalPrecision]].
*/
- val findTightestCommonTypeOfTwo: (DataType, DataType) => Option[DataType] = {
+ val findTightestCommonType: (DataType, DataType) => Option[DataType] = {
case (t1, t2) if t1 == t2 => Some(t1)
case (NullType, t1) => Some(t1)
case (t1, NullType) => Some(t1)
@@ -103,7 +103,7 @@ object TypeCoercion {
/** Similar to [[findTightestCommonType]], but can promote all the way to StringType. */
def findTightestCommonTypeToString(left: DataType, right: DataType): Option[DataType] = {
- findTightestCommonTypeOfTwo(left, right).orElse((left, right) match {
+ findTightestCommonType(left, right).orElse((left, right) match {
case (StringType, t2: AtomicType) if t2 != BinaryType && t2 != BooleanType => Some(StringType)
case (t1: AtomicType, StringType) if t1 != BinaryType && t1 != BooleanType => Some(StringType)
case _ => None
@@ -111,21 +111,10 @@ object TypeCoercion {
}
/**
- * Find the tightest common type of a set of types by continuously applying
- * `findTightestCommonTypeOfTwo` on these types.
- */
- private def findTightestCommonType(types: Seq[DataType]): Option[DataType] = {
- types.foldLeft[Option[DataType]](Some(NullType))((r, c) => r match {
- case None => None
- case Some(d) => findTightestCommonTypeOfTwo(d, c)
- })
- }
-
- /**
* Case 2 type widening (see the classdoc comment above for TypeCoercion).
*
- * i.e. the main difference with [[findTightestCommonTypeOfTwo]] is that here we allow some
- * loss of precision when widening decimal and double.
+ * i.e. the main difference with [[findTightestCommonType]] is that here we allow some
+ * loss of precision when widening decimal and double, and promotion to string.
*/
private def findWiderTypeForTwo(t1: DataType, t2: DataType): Option[DataType] = (t1, t2) match {
case (t1: DecimalType, t2: DecimalType) =>
@@ -148,13 +137,13 @@ object TypeCoercion {
}
/**
- * Similar to [[findWiderCommonType]], but can't promote to string. This is also similar to
- * [[findTightestCommonType]], but can handle decimal types. If the wider decimal type exceeds
- * system limitation, this rule will truncate the decimal type before return it.
+ * Similar to [[findWiderCommonType]] that can handle decimal types, but can't promote to
+ * string. If the wider decimal type exceeds system limitation, this rule will truncate
+ the decimal type before returning it.
*/
def findWiderTypeWithoutStringPromotion(types: Seq[DataType]): Option[DataType] = {
types.foldLeft[Option[DataType]](Some(NullType))((r, c) => r match {
- case Some(d) => findTightestCommonTypeOfTwo(d, c).orElse((d, c) match {
+ case Some(d) => findTightestCommonType(d, c).orElse((d, c) match {
case (t1: DecimalType, t2: DecimalType) =>
Some(DecimalPrecision.widerDecimalType(t1, t2))
case (t: IntegralType, d: DecimalType) =>
@@ -621,7 +610,7 @@ object TypeCoercion {
case e if !e.childrenResolved => e
case b @ BinaryOperator(left, right) if left.dataType != right.dataType =>
- findTightestCommonTypeOfTwo(left.dataType, right.dataType).map { commonType =>
+ findTightestCommonType(left.dataType, right.dataType).map { commonType =>
if (b.inputType.acceptsType(commonType)) {
// If the expression accepts the tightest common type, cast to that.
val newLeft = if (left.dataType == commonType) left else Cast(left, commonType)
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/TypeCoercionSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/TypeCoercionSuite.scala
index 110bd02eed..ceb5b53e08 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/TypeCoercionSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/TypeCoercionSuite.scala
@@ -309,11 +309,11 @@ class TypeCoercionSuite extends PlanTest {
test("tightest common bound for types") {
def widenTest(t1: DataType, t2: DataType, tightestCommon: Option[DataType]) {
- var found = TypeCoercion.findTightestCommonTypeOfTwo(t1, t2)
+ var found = TypeCoercion.findTightestCommonType(t1, t2)
assert(found == tightestCommon,
s"Expected $tightestCommon as tightest common type for $t1 and $t2, found $found")
// Test both directions to make sure the widening is symmetric.
- found = TypeCoercion.findTightestCommonTypeOfTwo(t2, t1)
+ found = TypeCoercion.findTightestCommonType(t2, t1)
assert(found == tightestCommon,
s"Expected $tightestCommon as tightest common type for $t2 and $t1, found $found")
}
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/json/InferSchema.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/json/InferSchema.scala
index dc8bd817f2..330d04de66 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/json/InferSchema.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/json/InferSchema.scala
@@ -253,7 +253,7 @@ private[sql] object InferSchema {
* Returns the most general data type for two given data types.
*/
def compatibleType(t1: DataType, t2: DataType): DataType = {
- TypeCoercion.findTightestCommonTypeOfTwo(t1, t2).getOrElse {
+ TypeCoercion.findTightestCommonType(t1, t2).getOrElse {
// t1 or t2 is a StructType, ArrayType, or an unexpected type.
(t1, t2) match {
// Double support larger range than fixed decimal, DecimalType.Maximum should be enough