aboutsummaryrefslogtreecommitdiff
path: root/sql
diff options
context:
space:
mode:
authorhyukjinkwon <gurwls223@gmail.com>2016-07-06 23:57:18 -0400
committerHerman van Hovell <hvanhovell@databricks.com>2016-07-06 23:57:18 -0400
commit34283de160808324da02964cd5dc5df80e59ae71 (patch)
treef4122927efb6b80b4c77772dbeaa0fea93f5ff73 /sql
parent44c7c62bcfca74c82ffc4e3c53997fff47bfacac (diff)
downloadspark-34283de160808324da02964cd5dc5df80e59ae71.tar.gz
spark-34283de160808324da02964cd5dc5df80e59ae71.tar.bz2
spark-34283de160808324da02964cd5dc5df80e59ae71.zip
[SPARK-14839][SQL] Support for other types for `tableProperty` rule in SQL syntax
## What changes were proposed in this pull request? Currently, the Scala API supports taking options with the types `String`, `Long`, `Double` and `Boolean`, and the Python API also supports other types. This PR corrects the `tableProperty` rule to support other types (string, boolean, double and integer) so that the options for data sources are supported in a consistent way. This will affect other rules such as DBPROPERTIES and TBLPROPERTIES (allowing other types as values). Also, `TODO add bucketing and partitioning.` was removed because it was resolved in https://github.com/apache/spark/commit/24bea000476cdd0b43be5160a76bc5b170ef0b42 ## How was this patch tested? Unit test in `MetastoreDataSourcesSuite.scala`. Author: hyukjinkwon <gurwls223@gmail.com> Closes #13517 from HyukjinKwon/SPARK-14839.
Diffstat (limited to 'sql')
-rw-r--r-- sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4 | 9
-rw-r--r-- sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala | 22
-rw-r--r-- sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLCommandSuite.scala | 61
3 files changed, 87 insertions, 5 deletions
diff --git a/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4 b/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4
index 4c15f9cec6..7ccbb2dac9 100644
--- a/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4
+++ b/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4
@@ -249,7 +249,7 @@ tablePropertyList
;
tableProperty
- : key=tablePropertyKey (EQ? value=STRING)?
+ : key=tablePropertyKey (EQ? value=tablePropertyValue)?
;
tablePropertyKey
@@ -257,6 +257,13 @@ tablePropertyKey
| STRING
;
+tablePropertyValue
+ : INTEGER_VALUE
+ | DECIMAL_VALUE
+ | booleanValue
+ | STRING
+ ;
+
constantList
: '(' constant (',' constant)* ')'
;
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
index 42ec210baa..f77801fd86 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
@@ -311,8 +311,6 @@ class SparkSqlAstBuilder(conf: SQLConf) extends AstBuilder {
/**
* Create a [[CreateTableUsing]] or a [[CreateTableUsingAsSelect]] logical plan.
- *
- * TODO add bucketing and partitioning.
*/
override def visitCreateTableUsing(ctx: CreateTableUsingContext): LogicalPlan = withOrigin(ctx) {
val (table, temp, ifNotExists, external) = visitCreateTableHeader(ctx.createTableHeader)
@@ -413,7 +411,7 @@ class SparkSqlAstBuilder(conf: SQLConf) extends AstBuilder {
ctx: TablePropertyListContext): Map[String, String] = withOrigin(ctx) {
val properties = ctx.tableProperty.asScala.map { property =>
val key = visitTablePropertyKey(property.key)
- val value = Option(property.value).map(string).orNull
+ val value = visitTablePropertyValue(property.value)
key -> value
}
// Check for duplicate property names.
@@ -426,7 +424,7 @@ class SparkSqlAstBuilder(conf: SQLConf) extends AstBuilder {
*/
private def visitPropertyKeyValues(ctx: TablePropertyListContext): Map[String, String] = {
val props = visitTablePropertyList(ctx)
- val badKeys = props.filter { case (_, v) => v == null }.keys
+ val badKeys = props.collect { case (key, null) => key }
if (badKeys.nonEmpty) {
operationNotAllowed(
s"Values must be specified for key(s): ${badKeys.mkString("[", ",", "]")}", ctx)
@@ -461,6 +459,22 @@ class SparkSqlAstBuilder(conf: SQLConf) extends AstBuilder {
}
/**
+ * A table property value can be String, Integer, Boolean or Decimal. This function extracts
+ * the property value based on whether its a string, integer, boolean or decimal literal.
+ */
+ override def visitTablePropertyValue(value: TablePropertyValueContext): String = {
+ if (value == null) {
+ null
+ } else if (value.STRING != null) {
+ string(value.STRING)
+ } else if (value.booleanValue != null) {
+ value.getText.toLowerCase
+ } else {
+ value.getText
+ }
+ }
+
+ /**
* Create a [[CreateDatabaseCommand]] command.
*
* For example:
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLCommandSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLCommandSuite.scala
index e1a7b9b004..23c2bef53e 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLCommandSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLCommandSuite.scala
@@ -856,4 +856,65 @@ class DDLCommandSuite extends PlanTest {
comparePlans(parsed2, expected2)
comparePlans(parsed3, expected3)
}
+
+ test("support for other types in DBPROPERTIES") {
+ val sql =
+ """
+ |CREATE DATABASE database_name
+ |LOCATION '/home/user/db'
+ |WITH DBPROPERTIES ('a'=1, 'b'=0.1, 'c'=TRUE)
+ """.stripMargin
+ val parsed = parser.parsePlan(sql)
+ val expected = CreateDatabaseCommand(
+ "database_name",
+ ifNotExists = false,
+ Some("/home/user/db"),
+ None,
+ Map("a" -> "1", "b" -> "0.1", "c" -> "true"))
+
+ comparePlans(parsed, expected)
+ }
+
+ test("support for other types in TBLPROPERTIES") {
+ val sql =
+ """
+ |ALTER TABLE table_name
+ |SET TBLPROPERTIES ('a' = 1, 'b' = 0.1, 'c' = TRUE)
+ """.stripMargin
+ val parsed = parser.parsePlan(sql)
+ val expected = AlterTableSetPropertiesCommand(
+ TableIdentifier("table_name"),
+ Map("a" -> "1", "b" -> "0.1", "c" -> "true"),
+ isView = false)
+
+ comparePlans(parsed, expected)
+ }
+
+ test("support for other types in OPTIONS") {
+ val sql =
+ """
+ |CREATE TABLE table_name USING json
+ |OPTIONS (a 1, b 0.1, c TRUE)
+ """.stripMargin
+ val expected = CreateTableUsing(
+ TableIdentifier("table_name"),
+ None,
+ "json",
+ false,
+ Map("a" -> "1", "b" -> "0.1", "c" -> "true"),
+ null,
+ None,
+ false,
+ true)
+
+ parser.parsePlan(sql) match {
+ case ct: CreateTableUsing =>
+ // We can't compare array in `CreateTableUsing` directly, so here we explicitly
+ // set partitionColumns to `null` and then compare it.
+ comparePlans(ct.copy(partitionColumns = null), expected)
+ case other =>
+ fail(s"Expected to parse ${classOf[CreateTableCommand].getClass.getName} from query," +
+ s"got ${other.getClass.getName}: $sql")
+ }
+ }
}