diff options
author | jliwork <jiali@us.ibm.com> | 2016-04-24 11:20:48 -0700 |
---|---|---|
committer | Reynold Xin <rxin@databricks.com> | 2016-04-24 11:22:06 -0700 |
commit | f0f1a8afde285b5d19c841dacb090f658906c4fd (patch) | |
tree | 8c5276c151eca7ec2d3b868c680957a66a0e5294 | |
parent | 337289d7128be100103fce10ec7747ad5bc9cbf0 (diff) | |
download | spark-f0f1a8afde285b5d19c841dacb090f658906c4fd.tar.gz spark-f0f1a8afde285b5d19c841dacb090f658906c4fd.tar.bz2 spark-f0f1a8afde285b5d19c841dacb090f658906c4fd.zip |
[SPARK-14548][SQL] Support not greater than and not less than operator in Spark SQL
`!<` means "not less than", which is equivalent to `>=`.
`!>` means "not greater than", which is equivalent to `<=`.
I'd like to create a PR to support these two operators.
I've added new test cases in: DataFrameSuite, ExpressionParserSuite, JDBCSuite, PlanParserSuite, SQLQuerySuite
dilipbiswal viirya gatorsmile
Author: jliwork <jiali@us.ibm.com>
Closes #12316 from jliwork/SPARK-14548.
5 files changed, 15 insertions, 3 deletions
diff --git a/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4 b/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4 index f96a86db02..fa4b8c4868 100644 --- a/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4 +++ b/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4 @@ -776,9 +776,9 @@ NSEQ: '<=>'; NEQ : '<>'; NEQJ: '!='; LT : '<'; -LTE : '<='; +LTE : '<=' | '!>'; GT : '>'; -GTE : '>='; +GTE : '>=' | '!<'; PLUS: '+'; MINUS: '-'; diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/DataTypeParser.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/DataTypeParser.scala index 0b570c9e42..0eb13c600c 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/DataTypeParser.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/DataTypeParser.scala @@ -134,7 +134,7 @@ class SqlLexical extends scala.util.parsing.combinator.lexical.StdLexical { def normalizeKeyword(str: String): String = str.toLowerCase delimiters += ( - "@", "*", "+", "-", "<", "=", "<>", "!=", "<=", ">=", ">", "/", "(", ")", + "@", "*", "+", "-", "<", "=", "<>", "!=", "<=", "!>", ">=", "!<", ">", "/", "(", ")", ",", ";", "%", "{", "}", ":", "[", "]", ".", "&", "|", "^", "~", "<=>" ) diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/ExpressionParserSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/ExpressionParserSuite.scala index d1dc8d621f..f0ddc92bcd 100644 --- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/ExpressionParserSuite.scala +++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/ExpressionParserSuite.scala @@ -126,8 +126,10 @@ class ExpressionParserSuite extends PlanTest { assertEqual("a != b", 'a =!= 'b) assertEqual("a < b", 'a < 'b) assertEqual("a <= b", 'a <= 'b) + assertEqual("a !> b", 'a <= 'b) assertEqual("a 
> b", 'a > 'b) assertEqual("a >= b", 'a >= 'b) + assertEqual("a !< b", 'a >= 'b) } test("between expressions") { diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/PlanParserSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/PlanParserSuite.scala index a1ca55c262..56c91a0fd5 100644 --- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/PlanParserSuite.scala +++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/PlanParserSuite.scala @@ -428,4 +428,13 @@ class PlanParserSuite extends PlanTest { "Number of aliases must match the number of fields in an inline table.") intercept[ArrayIndexOutOfBoundsException](parsePlan("values (1, 'a'), (2, 'b', 5Y)")) } + + test("simple select query with !> and !<") { + // !< is equivalent to >= + assertEqual("select a, b from db.c where x !< 1", + table("db", "c").where('x >= 1).select('a, 'b)) + // !> is equivalent to <= + assertEqual("select a, b from db.c where x !> 1", + table("db", "c").where('x <= 1).select('a, 'b)) + } } diff --git a/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala index c24abf1650..783511b781 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala @@ -221,6 +221,7 @@ class JDBCSuite extends SparkFunSuite assert(checkPushdown(sql("SELECT * FROM foobar WHERE NAME <=> 'fred'")).collect().size == 1) assert(checkPushdown(sql("SELECT * FROM foobar WHERE NAME > 'fred'")).collect().size == 2) assert(checkPushdown(sql("SELECT * FROM foobar WHERE NAME != 'fred'")).collect().size == 2) + assert(checkPushdown(sql("SELECT * FROM foobar WHERE NAME IN ('mary', 'fred')")) .collect().size == 2) assert(checkPushdown(sql("SELECT * FROM foobar WHERE NAME NOT IN ('fred')")) |