aboutsummaryrefslogtreecommitdiff
path: root/sql/core/src
diff options
context:
space:
mode:
authorKousuke Saruta <sarutak@oss.nttdata.co.jp>2014-12-02 12:07:52 -0800
committerMichael Armbrust <michael@databricks.com>2014-12-02 12:07:52 -0800
commite75e04f980281389b881df76f59ba1adc6338629 (patch)
treede17da2da8b128c908cdc9f8cee55927b4548b3b /sql/core/src
parentb1f8fe316a6904841f0159ec02159b1af0ad730e (diff)
downloadspark-e75e04f980281389b881df76f59ba1adc6338629.tar.gz
spark-e75e04f980281389b881df76f59ba1adc6338629.tar.bz2
spark-e75e04f980281389b881df76f59ba1adc6338629.zip
[SPARK-4536][SQL] Add sqrt and abs to Spark SQL DSL
Spark SQL has embedded sqrt and abs but DSL doesn't support those functions. Author: Kousuke Saruta <sarutak@oss.nttdata.co.jp> Closes #3401 from sarutak/dsl-missing-operator and squashes the following commits: 07700cf [Kousuke Saruta] Modified Literal(null, NullType) to Literal(null) in DslQuerySuite 8f366f8 [Kousuke Saruta] Merge branch 'master' of git://git.apache.org/spark into dsl-missing-operator 1b88e2e [Kousuke Saruta] Merge branch 'master' of git://git.apache.org/spark into dsl-missing-operator 0396f89 [Kousuke Saruta] Added sqrt and abs to Spark SQL DSL
Diffstat (limited to 'sql/core/src')
-rw-r--r--sql/core/src/test/scala/org/apache/spark/sql/DslQuerySuite.scala68
-rw-r--r--sql/core/src/test/scala/org/apache/spark/sql/TestData.scala4
2 files changed, 72 insertions, 0 deletions
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/DslQuerySuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/DslQuerySuite.scala
index 94bd97758f..1a330a2bb6 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/DslQuerySuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/DslQuerySuite.scala
@@ -282,4 +282,72 @@ class DslQuerySuite extends QueryTest {
(1, "1", "11") :: (2, "2", "22") :: (3, "3", "33") :: Nil
)
}
+
+ test("sqrt") {
+ checkAnswer(
+ testData.select(sqrt('key)).orderBy('key asc),
+ (1 to 100).map(n => Seq(math.sqrt(n)))
+ )
+
+ checkAnswer(
+ testData.select(sqrt('value), 'key).orderBy('key asc, 'value asc),
+ (1 to 100).map(n => Seq(math.sqrt(n), n))
+ )
+
+ checkAnswer(
+ testData.select(sqrt(Literal(null))),
+ (1 to 100).map(_ => Seq(null))
+ )
+ }
+
+ test("abs") {
+ checkAnswer(
+ testData.select(abs('key)).orderBy('key asc),
+ (1 to 100).map(n => Seq(n))
+ )
+
+ checkAnswer(
+ negativeData.select(abs('key)).orderBy('key desc),
+ (1 to 100).map(n => Seq(n))
+ )
+
+ checkAnswer(
+ testData.select(abs(Literal(null))),
+ (1 to 100).map(_ => Seq(null))
+ )
+ }
+
+ test("upper") {
+ checkAnswer(
+ lowerCaseData.select(upper('l)),
+ ('a' to 'd').map(c => Seq(c.toString.toUpperCase()))
+ )
+
+ checkAnswer(
+ testData.select(upper('value), 'key),
+ (1 to 100).map(n => Seq(n.toString, n))
+ )
+
+ checkAnswer(
+ testData.select(upper(Literal(null))),
+ (1 to 100).map(n => Seq(null))
+ )
+ }
+
+ test("lower") {
+ checkAnswer(
+ upperCaseData.select(lower('L)),
+ ('A' to 'F').map(c => Seq(c.toString.toLowerCase()))
+ )
+
+ checkAnswer(
+ testData.select(lower('value), 'key),
+ (1 to 100).map(n => Seq(n.toString, n))
+ )
+
+ checkAnswer(
+ testData.select(lower(Literal(null))),
+ (1 to 100).map(n => Seq(null))
+ )
+ }
}
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/TestData.scala b/sql/core/src/test/scala/org/apache/spark/sql/TestData.scala
index 933e027436..bb553a0a1e 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/TestData.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/TestData.scala
@@ -32,6 +32,10 @@ object TestData {
(1 to 100).map(i => TestData(i, i.toString))).toSchemaRDD
testData.registerTempTable("testData")
+ val negativeData = TestSQLContext.sparkContext.parallelize(
+ (1 to 100).map(i => TestData(-i, (-i).toString))).toSchemaRDD
+ negativeData.registerTempTable("negativeData")
+
case class LargeAndSmallInts(a: Int, b: Int)
val largeAndSmallInts =
TestSQLContext.sparkContext.parallelize(