author     Yin Huai <yhuai@databricks.com>            2015-02-10 17:06:12 -0800
committer  Michael Armbrust <michael@databricks.com>  2015-02-10 17:06:12 -0800
commit     e28b6bdbb5c5e4fd62ec0b547b77719c3f7e476e (patch)
tree       8562a50ece26427a8574cb60b3ce3f04ff8cb89e
parent     2d50a010ff57a861b13c2088ac048662d535f5e7 (diff)
[SQL] Make the OPTIONS clause optional in data source API CREATE TABLE statements.
Users no longer need to include an empty `OPTIONS()` clause in a CREATE TABLE statement when no options are provided.

Author: Yin Huai <yhuai@databricks.com>

Closes #4515 from yhuai/makeOptionsOptional and squashes the following commits:

1a898d3 [Yin Huai] Make options optional.
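For example, the CTAS statement exercised by the updated MetastoreDataSourcesSuite test below now parses without an empty OPTIONS clause; the OPTIONS (...) clause is still accepted when options are actually needed:

  CREATE TABLE ctasJsonTable
  USING org.apache.spark.sql.json.DefaultSource
  AS SELECT * FROM jsonTable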
-rw-r--r--  sql/core/src/main/scala/org/apache/spark/sql/sources/ddl.scala                     7
-rw-r--r--  sql/hive/src/test/scala/org/apache/spark/sql/hive/MetastoreDataSourcesSuite.scala   4
2 files changed, 5 insertions(+), 6 deletions(-)
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/sources/ddl.scala b/sql/core/src/main/scala/org/apache/spark/sql/sources/ddl.scala
index bf2ad14763..9f64f76100 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/sources/ddl.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/sources/ddl.scala
@@ -106,13 +106,14 @@ private[sql] class DDLParser extends AbstractSparkSQLParser with Logging {
  protected lazy val createTable: Parser[LogicalPlan] =
    (
      (CREATE ~> TEMPORARY.? <~ TABLE) ~ (IF ~> NOT <~ EXISTS).? ~ ident
-       ~ (tableCols).? ~ (USING ~> className) ~ (OPTIONS ~> options) ~ (AS ~> restInput).? ^^ {
+       ~ (tableCols).? ~ (USING ~> className) ~ (OPTIONS ~> options).? ~ (AS ~> restInput).? ^^ {
      case temp ~ allowExisting ~ tableName ~ columns ~ provider ~ opts ~ query =>
        if (temp.isDefined && allowExisting.isDefined) {
          throw new DDLException(
            "a CREATE TEMPORARY TABLE statement does not allow IF NOT EXISTS clause.")
        }
+       val options = opts.getOrElse(Map.empty[String, String])
        if (query.isDefined) {
          if (columns.isDefined) {
            throw new DDLException(
@@ -121,7 +122,7 @@ private[sql] class DDLParser extends AbstractSparkSQLParser with Logging {
          CreateTableUsingAsSelect(tableName,
            provider,
            temp.isDefined,
-           opts,
+           options,
            allowExisting.isDefined,
            query.get)
        } else {
@@ -131,7 +132,7 @@ private[sql] class DDLParser extends AbstractSparkSQLParser with Logging {
            userSpecifiedSchema,
            provider,
            temp.isDefined,
-           opts,
+           options,
            allowExisting.isDefined)
        }
      }
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/MetastoreDataSourcesSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/MetastoreDataSourcesSuite.scala
index 036efa84d7..9ce058909f 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/MetastoreDataSourcesSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/MetastoreDataSourcesSuite.scala
@@ -361,9 +361,7 @@ class MetastoreDataSourcesSuite extends QueryTest with BeforeAndAfterEach {
s"""
|CREATE TABLE ctasJsonTable
|USING org.apache.spark.sql.json.DefaultSource
- |OPTIONS (
- |
- |) AS
+ |AS
|SELECT * FROM jsonTable
""".stripMargin)