author    Venkata Ramana Gollamudi <ramana.gollamudi@huawei.com>  2014-10-01 15:57:06 -0700
committer Michael Armbrust <michael@databricks.com>               2014-10-01 15:57:09 -0700
commit    f84b228c4002073ee4ff53be50462a63f48bd508 (patch)
tree      037fbbac555a2b60e2016dbcc930d905edd32cee /sql/core/src
parent    f315fb7efc95afb2cc1208159b48359ba56a010d (diff)
[SPARK-3593][SQL] Add support for sorting BinaryType
BinaryType is derived from NativeType, and Ordering support is added for it.

Author: Venkata Ramana G <ramana.gollamudihuawei.com>
Author: Venkata Ramana Gollamudi <ramana.gollamudi@huawei.com>

Closes #2617 from gvramana/binarytype_sort and squashes the following commits:

1cf26f3 [Venkata Ramana Gollamudi] Supported Sorting of BinaryType
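The Catalyst-side change that these tests exercise is not part of the sql/core tree shown in this diff. As an illustrative sketch only, the snippet below shows the kind of lexicographic Ordering over Array[Byte] that a BinaryType column needs for ORDER BY to work; the object name BinaryOrdering and its shape are assumptions, not the actual patch.

// Hypothetical sketch: a lexicographic Ordering over Array[Byte], comparing
// byte-by-byte and breaking ties by length. Not taken from the patch itself.
object BinaryOrdering extends Ordering[Array[Byte]] {
  def compare(x: Array[Byte], y: Array[Byte]): Int = {
    var i = 0
    val len = math.min(x.length, y.length)
    while (i < len) {
      val res = java.lang.Byte.compare(x(i), y(i)) // signed, byte-by-byte comparison
      if (res != 0) return res
      i += 1
    }
    x.length - y.length // a shorter array that is a prefix of the other sorts first
  }
}

// Example: Seq("22", "121", "12").map(_.getBytes("UTF-8")).sorted(BinaryOrdering)
// orders the keys as "12", "121", "22".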
Diffstat (limited to 'sql/core/src')
-rw-r--r--  sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala   8
-rw-r--r--  sql/core/src/test/scala/org/apache/spark/sql/TestData.scala       10
2 files changed, 18 insertions, 0 deletions
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
index 08376eb5e5..fdf3a229a7 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
@@ -191,6 +191,14 @@ class SQLQuerySuite extends QueryTest with BeforeAndAfterAll {
       Seq((3,1), (3,2), (2,1), (2,2), (1,1), (1,2)))
 
     checkAnswer(
+      sql("SELECT b FROM binaryData ORDER BY a ASC"),
+      (1 to 5).map(Row(_)).toSeq)
+
+    checkAnswer(
+      sql("SELECT b FROM binaryData ORDER BY a DESC"),
+      (1 to 5).map(Row(_)).toSeq.reverse)
+
+    checkAnswer(
       sql("SELECT * FROM arrayData ORDER BY data[0] ASC"),
       arrayData.collect().sortBy(_.data(0)).toSeq)
 
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/TestData.scala b/sql/core/src/test/scala/org/apache/spark/sql/TestData.scala
index eb33a61c6e..10b7979df7 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/TestData.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/TestData.scala
@@ -54,6 +54,16 @@ object TestData {
       TestData2(3, 2) :: Nil)
   testData2.registerTempTable("testData2")
 
+  case class BinaryData(a: Array[Byte], b: Int)
+  val binaryData: SchemaRDD =
+    TestSQLContext.sparkContext.parallelize(
+      BinaryData("12".getBytes(), 1) ::
+      BinaryData("22".getBytes(), 5) ::
+      BinaryData("122".getBytes(), 3) ::
+      BinaryData("121".getBytes(), 2) ::
+      BinaryData("123".getBytes(), 4) :: Nil)
+  binaryData.registerTempTable("binaryData")
+
   // TODO: There is no way to express null primitives as case classes currently...
   val testData3 =
     logical.LocalRelation('a.int, 'b.int).loadData(
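The fixture keys in the hunk above are chosen so that byte-wise lexicographic order of column a corresponds to b = 1, 2, 3, 4, 5, which is exactly what the new SQLQuerySuite assertions expect. A standalone sanity check of that reasoning (illustrative, not part of the patch; for these ASCII-only keys, String ordering coincides with byte-wise ordering):

// Standalone check: sorting the fixture keys lexicographically yields b = 1..5.
object BinaryDataOrderCheck extends App {
  val fixtures = Seq("12" -> 1, "22" -> 5, "122" -> 3, "121" -> 2, "123" -> 4)
  val sortedB  = fixtures.sortBy(_._1).map(_._2) // "12" < "121" < "122" < "123" < "22"
  assert(sortedB == Seq(1, 2, 3, 4, 5))
  println(sortedB.mkString(", ")) // prints: 1, 2, 3, 4, 5
}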