Diffstat (limited to 'sql')
-rwxr-xr-x  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/SqlParser.scala  |  8
-rw-r--r--  sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala           |  7
2 files changed, 9 insertions, 6 deletions
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/SqlParser.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/SqlParser.scala
index d4fc9bbfd3..66860a4c09 100755
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/SqlParser.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/SqlParser.scala
@@ -209,15 +209,11 @@ class SqlParser extends AbstractSparkSQLParser {
     )
 
   protected lazy val ordering: Parser[Seq[SortOrder]] =
-    ( rep1sep(singleOrder, ",")
-    | rep1sep(expression, ",") ~ direction.? ^^ {
-        case exps ~ d => exps.map(SortOrder(_, d.getOrElse(Ascending)))
+    ( rep1sep(expression ~ direction.? , ",") ^^ {
+        case exps => exps.map(pair => SortOrder(pair._1, pair._2.getOrElse(Ascending)))
       }
     )
 
-  protected lazy val singleOrder: Parser[SortOrder] =
-    expression ~ direction ^^ { case e ~ o => SortOrder(e, o) }
-
   protected lazy val direction: Parser[SortDirection] =
     ( ASC ^^^ Ascending
     | DESC ^^^ Descending
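
Note on the SqlParser change above: the old `ordering` rule accepted either a list in which every expression carried an explicit ASC/DESC (`singleOrder`) or a plain expression list followed by a single optional direction applied to all of them. The rewritten rule instead attaches an optional `direction` to each expression via `rep1sep(expression ~ direction.?, ",")` and defaults a missing direction to `Ascending`, which is what the mixed `ORDER BY a desc, b` in the new test relies on. The stand-alone sketch below is not part of the patch; it assumes the scala-parser-combinators library and uses simplified stand-in types, since Catalyst's real `expression` grammar and `SortOrder` are far richer. It only illustrates the combinator shape:

import scala.util.parsing.combinator.RegexParsers

// Simplified stand-ins for Catalyst's SortDirection/SortOrder (illustration only).
sealed trait SortDirection
case object Ascending extends SortDirection
case object Descending extends SortDirection
case class SortOrder(child: String, direction: SortDirection)

// A toy ORDER BY-list parser using the same rep1sep(expression ~ direction.?, ",")
// shape as the rewritten rule; expressions here are bare identifiers.
object OrderingSketch extends RegexParsers {
  def expression: Parser[String] = """[a-zA-Z_][a-zA-Z0-9_]*""".r

  def direction: Parser[SortDirection] =
    ( "(?i)asc".r  ^^^ Ascending
    | "(?i)desc".r ^^^ Descending
    )

  // One optional direction per expression; a missing direction defaults to Ascending.
  def ordering: Parser[Seq[SortOrder]] =
    rep1sep(expression ~ direction.?, ",") ^^ {
      exps => exps.map { case e ~ d => SortOrder(e, d.getOrElse(Ascending)) }
    }

  def main(args: Array[String]): Unit = {
    // Mixed explicit/default directions, like the new test's "a desc, b":
    println(parseAll(ordering, "a desc, b"))
    // parsed: List(SortOrder(a,Descending), SortOrder(b,Ascending))
  }
}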
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
index ddf4776ecf..add4e218a2 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
@@ -987,6 +987,13 @@ class SQLQuerySuite extends QueryTest with BeforeAndAfterAll {
     )
   }
 
+  test("order by asc by default when ascending or descending is not specified") {
+    checkAnswer(
+      sql("SELECT a, b FROM testData2 ORDER BY a desc, b"),
+      Seq((3, 1), (3, 2), (2, 1), (2, 2), (1, 1), (1, 2))
+    )
+  }
+
   test("Supporting relational operator '<=>' in Spark SQL") {
     val nullCheckData1 = TestData(1,"1") :: TestData(2,null) :: Nil
     val rdd1 = sparkContext.parallelize((0 to 1).map(i => nullCheckData1(i)))
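
As a quick sanity check on the expected answer in the new test: assuming testData2 holds the six (a, b) pairs (1,1) through (3,2), as the expected Seq implies, sorting by `a` descending and `b` ascending (the default `b` now gets without an explicit direction) produces exactly that ordering. A minimal check outside Spark:

object OrderByDefaultAscCheck extends App {
  // The six (a, b) pairs that the expected answer implies testData2 contains.
  val rows = Seq((1, 1), (1, 2), (2, 1), (2, 2), (3, 1), (3, 2))

  // "ORDER BY a desc, b": a descending, b ascending (the default for b).
  val sorted = rows.sortBy { case (a, b) => (-a, b) }

  println(sorted)
  // List((3,1), (3,2), (2,1), (2,2), (1,1), (1,2)) -- matches the checkAnswer Seq.
}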