author    Venkata Ramana Gollamudi <ramana.gollamudi@huawei.com>  2014-09-23 12:17:47 -0700
committer Michael Armbrust <michael@databricks.com>  2014-09-23 12:17:47 -0700
commit    1c62f97e94de96ca3dc6daf778f008176e92888a (patch)
tree      5f15d1d7d1c0d4f369c558c936098ee9cba50f0b /sql
parent    ae60f8fb2d879ee1ebc0746bcbe05b89ab6ed3c9 (diff)
[SPARK-3268][SQL] DoubleType, FloatType and DecimalType modulus support
Supported modulus operation using the % operator on the fractional datatypes FloatType, DoubleType and DecimalType.

Example: SELECT 1388632775.0 % 60 FROM tablename LIMIT 1

Author: Venkata Ramana Gollamudi <ramana.gollamudi@huawei.com>

Closes #2457 from gvramana/double_modulus_support and squashes the following commits:

79172a8 [Venkata Ramana Gollamudi] Add hive cache to testcase
c09bd5b [Venkata Ramana Gollamudi] Added a HiveQuerySuite testcase
193fa81 [Venkata Ramana Gollamudi] corrected testcase
3624471 [Venkata Ramana Gollamudi] modified testcase
e112c09 [Venkata Ramana Gollamudi] corrected the testcase
513d0e0 [Venkata Ramana Gollamudi] modified to add modulus support to fractional types float,double,decimal
296d253 [Venkata Ramana Gollamudi] modified to add modulus support to fractional types float,double,decimal
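For context (this sketch is not part of the patch, and the object name is invented), the semantics the commit adds can be reproduced with the same AsIfIntegral instances from the Scala standard library that it wires into Catalyst:

    import scala.math.Numeric.{DoubleAsIfIntegral, BigDecimalAsIfIntegral}

    object ModulusSketch extends App {
      // % on fractional SQL types delegates to rem on these
      // "as-if-integral" typeclass instances.
      println(DoubleAsIfIntegral.rem(1388632775.0, 60.0))  // 35.0
      println(BigDecimalAsIfIntegral.rem(BigDecimal("101.1"), BigDecimal("100.0")))  // 1.1
    }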
Diffstat (limited to 'sql')
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Expression.scala              |  3
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/types/dataTypes.scala                     |  5
-rw-r--r--  sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ExpressionEvaluationSuite.scala | 32
-rw-r--r--  sql/hive/src/test/resources/golden/modulus-0-6afd4a359a478cfa3ebd9ad00ae3868e                       |  1
-rw-r--r--  sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQuerySuite.scala                    |  3
5 files changed, 44 insertions(+), 0 deletions(-)
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Expression.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Expression.scala
index 70507e7ee2..1eb260efa6 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Expression.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Expression.scala
@@ -179,6 +179,9 @@ abstract class Expression extends TreeNode[Expression] {
case i: IntegralType =>
f.asInstanceOf[(Integral[i.JvmType], i.JvmType, i.JvmType) => i.JvmType](
i.integral, evalE1.asInstanceOf[i.JvmType], evalE2.asInstanceOf[i.JvmType])
+ case i: FractionalType =>
+ f.asInstanceOf[(Integral[i.JvmType], i.JvmType, i.JvmType) => i.JvmType](
+ i.asIntegral, evalE1.asInstanceOf[i.JvmType], evalE2.asInstanceOf[i.JvmType])
case other => sys.error(s"Type $other does not support numeric operations")
}
}
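The case added above casts a function written against Integral[T] and applies it with the type's asIntegral instance once the runtime DataType is known. A minimal stand-alone sketch of that dispatch (helper name hypothetical, not the Catalyst source):

    import scala.math.Numeric.DoubleAsIfIntegral

    // Generic ternary function over Integral[T], applied to two
    // already-evaluated operands, mirroring the case added above.
    def applyIntegral[T](integral: Integral[T], a: T, b: T): T =
      integral.rem(a, b)

    // For DoubleType, asIntegral is DoubleAsIfIntegral, so 3.1 % 2.0
    // evaluates to approximately 1.1.
    val remainder = applyIntegral(DoubleAsIfIntegral, 3.1, 2.0)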
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/types/dataTypes.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/types/dataTypes.scala
index e3050e5397..c7d73d3990 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/types/dataTypes.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/types/dataTypes.scala
@@ -19,6 +19,7 @@ package org.apache.spark.sql.catalyst.types
import java.sql.Timestamp
+import scala.math.Numeric.{FloatAsIfIntegral, BigDecimalAsIfIntegral, DoubleAsIfIntegral}
import scala.reflect.ClassTag
import scala.reflect.runtime.universe.{typeTag, TypeTag, runtimeMirror}
import scala.util.parsing.combinator.RegexParsers
@@ -250,6 +251,7 @@ object FractionalType {
}
abstract class FractionalType extends NumericType {
private[sql] val fractional: Fractional[JvmType]
+ private[sql] val asIntegral: Integral[JvmType]
}
case object DecimalType extends FractionalType {
@@ -258,6 +260,7 @@ case object DecimalType extends FractionalType {
private[sql] val numeric = implicitly[Numeric[BigDecimal]]
private[sql] val fractional = implicitly[Fractional[BigDecimal]]
private[sql] val ordering = implicitly[Ordering[JvmType]]
+ private[sql] val asIntegral = BigDecimalAsIfIntegral
def simpleString: String = "decimal"
}
@@ -267,6 +270,7 @@ case object DoubleType extends FractionalType {
private[sql] val numeric = implicitly[Numeric[Double]]
private[sql] val fractional = implicitly[Fractional[Double]]
private[sql] val ordering = implicitly[Ordering[JvmType]]
+ private[sql] val asIntegral = DoubleAsIfIntegral
def simpleString: String = "double"
}
@@ -276,6 +280,7 @@ case object FloatType extends FractionalType {
private[sql] val numeric = implicitly[Numeric[Float]]
private[sql] val fractional = implicitly[Fractional[Float]]
private[sql] val ordering = implicitly[Ordering[JvmType]]
+ private[sql] val asIntegral = FloatAsIfIntegral
def simpleString: String = "float"
}
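Each FractionalType gains an Integral view of its JvmType because Fractional supplies division but no remainder. A quick REPL-style check of the three standard-library instances used above (not part of the patch):

    import scala.math.Numeric.{FloatAsIfIntegral, DoubleAsIfIntegral, BigDecimalAsIfIntegral}

    // rem is exactly the operation % needs; Fractional[T] does not offer it.
    FloatAsIfIntegral.rem(3.1f, 2.0f)                                 // ~1.1f
    DoubleAsIfIntegral.rem(3.1, 2.0)                                  // ~1.1
    BigDecimalAsIfIntegral.rem(BigDecimal("3.1"), BigDecimal("2.0"))  // 1.1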
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ExpressionEvaluationSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ExpressionEvaluationSuite.scala
index 8b6721d5d8..63931af4ba 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ExpressionEvaluationSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ExpressionEvaluationSuite.scala
@@ -20,6 +20,8 @@ package org.apache.spark.sql.catalyst.expressions
import java.sql.Timestamp
import org.scalatest.FunSuite
+import org.scalatest.Matchers._
+import org.scalautils.TripleEqualsSupport.Spread
import org.apache.spark.sql.catalyst.types._
@@ -129,6 +131,13 @@ class ExpressionEvaluationSuite extends FunSuite {
}
}
+ def checkDoubleEvaluation(expression: Expression, expected: Spread[Double], inputRow: Row = EmptyRow): Unit = {
+ val actual = try evaluate(expression, inputRow) catch {
+ case e: Exception => fail(s"Exception evaluating $expression", e)
+ }
+ actual.asInstanceOf[Double] shouldBe expected
+ }
+
test("IN") {
checkEvaluation(In(Literal(1), Seq(Literal(1), Literal(2))), true)
checkEvaluation(In(Literal(2), Seq(Literal(1), Literal(2))), true)
@@ -471,6 +480,29 @@ class ExpressionEvaluationSuite extends FunSuite {
checkEvaluation(c1 % c2, 1, row)
}
+ test("fractional arithmetic") {
+ val row = new GenericRow(Array[Any](1.1, 2.0, 3.1, null))
+ val c1 = 'a.double.at(0)
+ val c2 = 'a.double.at(1)
+ val c3 = 'a.double.at(2)
+ val c4 = 'a.double.at(3)
+
+ checkEvaluation(UnaryMinus(c1), -1.1, row)
+ checkEvaluation(UnaryMinus(Literal(100.0, DoubleType)), -100.0)
+ checkEvaluation(Add(c1, c4), null, row)
+ checkEvaluation(Add(c1, c2), 3.1, row)
+ checkEvaluation(Add(c1, Literal(null, DoubleType)), null, row)
+ checkEvaluation(Add(Literal(null, DoubleType), c2), null, row)
+ checkEvaluation(Add(Literal(null, DoubleType), Literal(null, DoubleType)), null, row)
+
+ checkEvaluation(-c1, -1.1, row)
+ checkEvaluation(c1 + c2, 3.1, row)
+ checkDoubleEvaluation(c1 - c2, (-0.9 +- 0.001), row)
+ checkDoubleEvaluation(c1 * c2, (2.2 +- 0.001), row)
+ checkDoubleEvaluation(c1 / c2, (0.55 +- 0.001), row)
+ checkDoubleEvaluation(c3 % c2, (1.1 +- 0.001), row)
+ }
+
test("BinaryComparison") {
val row = new GenericRow(Array[Any](1, 2, 3, null, 3, null))
val c1 = 'a.int.at(0)
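The new checkDoubleEvaluation helper relies on ScalaTest's tolerance matcher: expected +- tolerance builds a Spread, and shouldBe asserts that the actual value lies inside it, which sidesteps exact floating-point comparison. A self-contained sketch against ScalaTest 2.x, matching the imports in the patched suite (suite name invented):

    import org.scalatest.FunSuite
    import org.scalatest.Matchers._

    class SpreadSketchSuite extends FunSuite {
      test("shouldBe with a tolerance accepts values inside the interval") {
        // 1.1 - 2.0 is -0.8999999999999999 in IEEE 754 doubles, so an
        // exact comparison against -0.9 would fail; the Spread passes.
        (1.1 - 2.0) shouldBe (-0.9 +- 0.001)
        (3.1 % 2.0) shouldBe (1.1 +- 0.001)
      }
    }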
diff --git a/sql/hive/src/test/resources/golden/modulus-0-6afd4a359a478cfa3ebd9ad00ae3868e b/sql/hive/src/test/resources/golden/modulus-0-6afd4a359a478cfa3ebd9ad00ae3868e
new file mode 100644
index 0000000000..52eab0653c
--- /dev/null
+++ b/sql/hive/src/test/resources/golden/modulus-0-6afd4a359a478cfa3ebd9ad00ae3868e
@@ -0,0 +1 @@
+1 true 0.5
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQuerySuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQuerySuite.scala
index 6fc891ba4c..426f5fcee6 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQuerySuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQuerySuite.scala
@@ -138,6 +138,9 @@ class HiveQuerySuite extends HiveComparisonTest {
createQueryTest("division",
"SELECT 2 / 1, 1 / 2, 1 / 3, 1 / COUNT(*) FROM src LIMIT 1")
+ createQueryTest("modulus",
+ "SELECT 11 % 10, IF((101.1 % 100.0) BETWEEN 1.01 AND 1.11, \"true\", \"false\"), (101 / 2) % 10 FROM src LIMIT 1")
+
test("Query expressed in SQL") {
setConf("spark.sql.dialect", "sql")
assert(sql("SELECT 1").collect() === Array(Seq(1)))
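For reference, the golden output "1 true 0.5" recorded earlier follows directly from this query: 11 % 10 = 1; 101.1 % 100.0 is approximately 1.1, which lies in [1.01, 1.11], so the IF yields "true"; and 101 / 2 is 50.5 (HiveQL division yields a double even for integer operands), so 50.5 % 10 = 0.5. The same arithmetic checks out in plain Scala:

    11 % 10               // 1
    101.1 % 100.0         // ~1.1, inside [1.01, 1.11], hence "true"
    (101.0 / 2.0) % 10.0  // 0.5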