diff options
author | Reynold Xin <rxin@databricks.com> | 2015-06-13 17:10:13 -0700 |
---|---|---|
committer | Reynold Xin <rxin@databricks.com> | 2015-06-13 17:10:13 -0700 |
commit | a138953391975886c88bfe81d4ce6b6dd189cd32 (patch) | |
tree | 7cc61e281f24c1d716ab2998c765068367a8e978 /sql/catalyst | |
parent | ddec45279ed1061f4c05fd0760309a53581d03f5 (diff) | |
download | spark-a138953391975886c88bfe81d4ce6b6dd189cd32.tar.gz spark-a138953391975886c88bfe81d4ce6b6dd189cd32.tar.bz2 spark-a138953391975886c88bfe81d4ce6b6dd189cd32.zip |
[SPARK-8347][SQL] Add unit tests for abs.
Also addressed code review feedback from #6754
Author: Reynold Xin <rxin@databricks.com>
Closes #6803 from rxin/abs and squashes the following commits:
d07beba [Reynold Xin] [SPARK-8347] Add unit tests for abs.
Diffstat (limited to 'sql/catalyst')
2 files changed, 19 insertions(+), 4 deletions(-)
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/HiveTypeCoercion.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/HiveTypeCoercion.scala index 6ed192360d..e7bf7cc1f1 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/HiveTypeCoercion.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/HiveTypeCoercion.scala @@ -672,13 +672,13 @@ trait HiveTypeCoercion { findTightestCommonTypeToString(left.dataType, right.dataType).map { widestType => val newLeft = if (left.dataType == widestType) left else Cast(left, widestType) val newRight = if (right.dataType == widestType) right else Cast(right, widestType) - i.makeCopy(Array(pred, newLeft, newRight)) + If(pred, newLeft, newRight) }.getOrElse(i) // If there is no applicable conversion, leave expression unchanged. // Convert If(null literal, _, _) into boolean type. // In the optimizer, we should short-circuit this directly into false value. 
- case i @ If(pred, left, right) if pred.dataType == NullType => - i.makeCopy(Array(Literal.create(null, BooleanType), left, right)) + case If(pred, left, right) if pred.dataType == NullType => + If(Literal.create(null, BooleanType), left, right) } } diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ArithmeticExpressionSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ArithmeticExpressionSuite.scala index e1afa81a7a..5ff1bca260 100644 --- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ArithmeticExpressionSuite.scala +++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ArithmeticExpressionSuite.scala @@ -21,7 +21,7 @@ import org.scalatest.Matchers._ import org.apache.spark.SparkFunSuite import org.apache.spark.sql.catalyst.dsl.expressions._ -import org.apache.spark.sql.types.{DoubleType, IntegerType} +import org.apache.spark.sql.types.{Decimal, DoubleType, IntegerType} class ArithmeticExpressionSuite extends SparkFunSuite with ExpressionEvalHelper { @@ -75,6 +75,21 @@ class ArithmeticExpressionSuite extends SparkFunSuite with ExpressionEvalHelper checkDoubleEvaluation(c3 % c2, (1.1 +- 0.001), row) } + test("Abs") { + def testAbs(convert: (Int) => Any): Unit = { + checkEvaluation(Abs(Literal(convert(0))), convert(0)) + checkEvaluation(Abs(Literal(convert(1))), convert(1)) + checkEvaluation(Abs(Literal(convert(-1))), convert(1)) + } + testAbs(_.toByte) + testAbs(_.toShort) + testAbs(identity) + testAbs(_.toLong) + testAbs(_.toFloat) + testAbs(_.toDouble) + testAbs(Decimal(_)) + } + test("Divide") { checkEvaluation(Divide(Literal(2), Literal(1)), 2) checkEvaluation(Divide(Literal(1.0), Literal(2.0)), 0.5) |