author      xin wu <xinwu@us.ibm.com>  2016-09-25 16:46:12 -0700
committer   Herman van Hovell <hvanhovell@databricks.com>  2016-09-25 16:46:12 -0700
commit      de333d121da4cb80d45819cbcf8b4246e48ec4d0 (patch)
tree        64d7b8dc9b34749574f5ddd78a9f951ca9ed8575 /sql/core/src/test/resources/sql-tests/results/order-by-nulls-ordering.sql.out
parent      7945daed12542587d51ece8f07e5c828b40db14a (diff)
download    spark-de333d121da4cb80d45819cbcf8b4246e48ec4d0.tar.gz
            spark-de333d121da4cb80d45819cbcf8b4246e48ec4d0.tar.bz2
            spark-de333d121da4cb80d45819cbcf8b4246e48ec4d0.zip
[SPARK-17551][SQL] Add DataFrame API for null ordering
## What changes were proposed in this pull request?

This pull request adds a Scala/Java DataFrame API for null ordering (NULLS FIRST | LAST). It also does some minor cleanup of related code (e.g. incorrect indentation) and renames "orderby-nulls-ordering.sql" to be consistent with existing test files.

## How was this patch tested?

Added a new test case in DataFrameSuite.

Author: petermaxlee <petermaxlee@gmail.com>
Author: Xin Wu <xinwu@us.ibm.com>

Closes #15123 from petermaxlee/SPARK-17551.
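For context, a minimal sketch of how the new null-ordering Column methods can be used from the Scala DataFrame API. The local SparkSession setup and the sample data are illustrative only and mirror the spark_10747 test table used in the generated output below; they are not part of this patch.

```scala
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions.col

object NullOrderingSketch {
  def main(args: Array[String]): Unit = {
    // Local session purely for illustration.
    val spark = SparkSession.builder()
      .appName("null-ordering-sketch")
      .master("local[*]")
      .getOrCreate()
    import spark.implicits._

    // Hypothetical data shaped like the spark_10747 test table (col3 is nullable).
    val df = Seq(
      (6, 12, Some(10)), (6, 11, Some(4)), (6, 13, None), (6, 10, None)
    ).toDF("col1", "col2", "col3")

    // Explicit null ordering on a Column, as added by SPARK-17551.
    df.orderBy(col("col3").desc_nulls_last, col("col2")).show()
    df.orderBy($"col3".asc_nulls_first, $"col2").show()

    spark.stop()
  }
}
```

These Column methods correspond to the SQL `NULLS FIRST` / `NULLS LAST` modifiers exercised by the test queries below.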
Diffstat (limited to 'sql/core/src/test/resources/sql-tests/results/order-by-nulls-ordering.sql.out')
-rw-r--r--  sql/core/src/test/resources/sql-tests/results/order-by-nulls-ordering.sql.out  254
1 file changed, 254 insertions, 0 deletions
diff --git a/sql/core/src/test/resources/sql-tests/results/order-by-nulls-ordering.sql.out b/sql/core/src/test/resources/sql-tests/results/order-by-nulls-ordering.sql.out
new file mode 100644
index 0000000000..c1b63dfb8c
--- /dev/null
+++ b/sql/core/src/test/resources/sql-tests/results/order-by-nulls-ordering.sql.out
@@ -0,0 +1,254 @@
+-- Automatically generated by SQLQueryTestSuite
+-- Number of queries: 17
+
+
+-- !query 0
+create table spark_10747(col1 int, col2 int, col3 int) using parquet
+-- !query 0 schema
+struct<>
+-- !query 0 output
+
+
+
+-- !query 1
+INSERT INTO spark_10747 VALUES (6, 12, 10), (6, 11, 4), (6, 9, 10), (6, 15, 8),
+(6, 15, 8), (6, 7, 4), (6, 7, 8), (6, 13, null), (6, 10, null)
+-- !query 1 schema
+struct<>
+-- !query 1 output
+
+
+
+-- !query 2
+select col1, col2, col3, sum(col2)
+ over (partition by col1
+ order by col3 desc nulls last, col2
+ rows between 2 preceding and 2 following ) as sum_col2
+from spark_10747 where col1 = 6 order by sum_col2
+-- !query 2 schema
+struct<col1:int,col2:int,col3:int,sum_col2:bigint>
+-- !query 2 output
+6 9 10 28
+6 13 NULL 34
+6 10 NULL 41
+6 12 10 43
+6 15 8 55
+6 15 8 56
+6 11 4 56
+6 7 8 58
+6 7 4 58
+
+
+-- !query 3
+select col1, col2, col3, sum(col2)
+ over (partition by col1
+ order by col3 desc nulls first, col2
+ rows between 2 preceding and 2 following ) as sum_col2
+from spark_10747 where col1 = 6 order by sum_col2
+-- !query 3 schema
+struct<col1:int,col2:int,col3:int,sum_col2:bigint>
+-- !query 3 output
+6 10 NULL 32
+6 11 4 33
+6 13 NULL 44
+6 7 4 48
+6 9 10 51
+6 15 8 55
+6 12 10 56
+6 15 8 56
+6 7 8 58
+
+
+-- !query 4
+select col1, col2, col3, sum(col2)
+ over (partition by col1
+ order by col3 asc nulls last, col2
+ rows between 2 preceding and 2 following ) as sum_col2
+from spark_10747 where col1 = 6 order by sum_col2
+-- !query 4 schema
+struct<col1:int,col2:int,col3:int,sum_col2:bigint>
+-- !query 4 output
+6 7 4 25
+6 13 NULL 35
+6 11 4 40
+6 10 NULL 44
+6 7 8 55
+6 15 8 57
+6 15 8 58
+6 12 10 59
+6 9 10 61
+
+
+-- !query 5
+select col1, col2, col3, sum(col2)
+ over (partition by col1
+ order by col3 asc nulls first, col2
+ rows between 2 preceding and 2 following ) as sum_col2
+from spark_10747 where col1 = 6 order by sum_col2
+-- !query 5 schema
+struct<col1:int,col2:int,col3:int,sum_col2:bigint>
+-- !query 5 output
+6 10 NULL 30
+6 12 10 36
+6 13 NULL 41
+6 7 4 48
+6 9 10 51
+6 11 4 53
+6 7 8 55
+6 15 8 57
+6 15 8 58
+
+
+-- !query 6
+SELECT COL1, COL2, COL3 FROM spark_10747 ORDER BY COL3 ASC NULLS FIRST, COL2
+-- !query 6 schema
+struct<COL1:int,COL2:int,COL3:int>
+-- !query 6 output
+6 10 NULL
+6 13 NULL
+6 7 4
+6 11 4
+6 7 8
+6 15 8
+6 15 8
+6 9 10
+6 12 10
+
+
+-- !query 7
+SELECT COL1, COL2, COL3 FROM spark_10747 ORDER BY COL3 NULLS LAST, COL2
+-- !query 7 schema
+struct<COL1:int,COL2:int,COL3:int>
+-- !query 7 output
+6 7 4
+6 11 4
+6 7 8
+6 15 8
+6 15 8
+6 9 10
+6 12 10
+6 10 NULL
+6 13 NULL
+
+
+-- !query 8
+SELECT COL1, COL2, COL3 FROM spark_10747 ORDER BY COL3 DESC NULLS FIRST, COL2
+-- !query 8 schema
+struct<COL1:int,COL2:int,COL3:int>
+-- !query 8 output
+6 10 NULL
+6 13 NULL
+6 9 10
+6 12 10
+6 7 8
+6 15 8
+6 15 8
+6 7 4
+6 11 4
+
+
+-- !query 9
+SELECT COL1, COL2, COL3 FROM spark_10747 ORDER BY COL3 DESC NULLS LAST, COL2
+-- !query 9 schema
+struct<COL1:int,COL2:int,COL3:int>
+-- !query 9 output
+6 9 10
+6 12 10
+6 7 8
+6 15 8
+6 15 8
+6 7 4
+6 11 4
+6 10 NULL
+6 13 NULL
+
+
+-- !query 10
+drop table spark_10747
+-- !query 10 schema
+struct<>
+-- !query 10 output
+
+
+
+-- !query 11
+create table spark_10747_mix(
+col1 string,
+col2 int,
+col3 double,
+col4 decimal(10,2),
+col5 decimal(20,1))
+using parquet
+-- !query 11 schema
+struct<>
+-- !query 11 output
+
+
+
+-- !query 12
+INSERT INTO spark_10747_mix VALUES
+('b', 2, 1.0, 1.00, 10.0),
+('d', 3, 2.0, 3.00, 0.0),
+('c', 3, 2.0, 2.00, 15.1),
+('d', 3, 0.0, 3.00, 1.0),
+(null, 3, 0.0, 3.00, 1.0),
+('d', 3, null, 4.00, 1.0),
+('a', 1, 1.0, 1.00, null),
+('c', 3, 2.0, 2.00, null)
+-- !query 12 schema
+struct<>
+-- !query 12 output
+
+
+
+-- !query 13
+select * from spark_10747_mix order by col1 nulls last, col5 nulls last
+-- !query 13 schema
+struct<col1:string,col2:int,col3:double,col4:decimal(10,2),col5:decimal(20,1)>
+-- !query 13 output
+a 1 1.0 1 NULL
+b 2 1.0 1 10
+c 3 2.0 2 15.1
+c 3 2.0 2 NULL
+d 3 2.0 3 0
+d 3 0.0 3 1
+d 3 NULL 4 1
+NULL 3 0.0 3 1
+
+
+-- !query 14
+select * from spark_10747_mix order by col1 desc nulls first, col5 desc nulls first
+-- !query 14 schema
+struct<col1:string,col2:int,col3:double,col4:decimal(10,2),col5:decimal(20,1)>
+-- !query 14 output
+NULL 3 0.0 3 1
+d 3 0.0 3 1
+d 3 NULL 4 1
+d 3 2.0 3 0
+c 3 2.0 2 NULL
+c 3 2.0 2 15.1
+b 2 1.0 1 10
+a 1 1.0 1 NULL
+
+
+-- !query 15
+select * from spark_10747_mix order by col5 desc nulls first, col3 desc nulls last
+-- !query 15 schema
+struct<col1:string,col2:int,col3:double,col4:decimal(10,2),col5:decimal(20,1)>
+-- !query 15 output
+c 3 2.0 2 NULL
+a 1 1.0 1 NULL
+c 3 2.0 2 15.1
+b 2 1.0 1 10
+d 3 0.0 3 1
+NULL 3 0.0 3 1
+d 3 NULL 4 1
+d 3 2.0 3 0
+
+
+-- !query 16
+drop table spark_10747_mix
+-- !query 16 schema
+struct<>
+-- !query 16 output
+