author     gatorsmile <gatorsmile@gmail.com>  2016-03-29 17:39:52 -0700
committer  Andrew Or <andrew@databricks.com>  2016-03-29 17:39:52 -0700
commit     b66b97cd04067e1ec344fa2e28dd91e7ef937af5 (patch)
tree       0d879872c83765c79e9a3de6483835dd5b7bf6c3 /sql/core/src/test/scala/org/apache
parent     e1f6845391078726f60e760f0ea68ccf81f9eca9 (diff)
[SPARK-14124][SQL] Implement Database-related DDL Commands
#### What changes were proposed in this pull request?

This PR implements the following four Database-related DDL commands:
- `CREATE DATABASE|SCHEMA [IF NOT EXISTS] database_name`
- `DROP DATABASE [IF EXISTS] database_name [RESTRICT|CASCADE]`
- `DESCRIBE DATABASE [EXTENDED] db_name`
- `ALTER (DATABASE|SCHEMA) database_name SET DBPROPERTIES (property_name=property_value, ...)`

Another PR will be submitted to handle the unsupported commands. Within the Database-related DDL commands, an error exception is issued for `ALTER (DATABASE|SCHEMA) database_name SET OWNER [USER|ROLE] user_or_role`.

cc yhuai andrewor14 rxin. Could you review the changes? Is this going in the right direction? Thanks!

#### How was this patch tested?

Added a few test cases in `command/DDLSuite.scala` to test DDL command execution in `SQLContext`. Since `HiveContext` shares the same implementation, the existing test cases in `\hive` also verify the correctness of these commands.

Author: gatorsmile <gatorsmile@gmail.com>
Author: xiaoli <lixiao1983@gmail.com>
Author: Xiao Li <xiaoli@Xiaos-MacBook-Pro.local>

Closes #12009 from gatorsmile/dbDDL.
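For illustration only (not part of this commit), a minimal sketch of how the four commands can be exercised through `SQLContext.sql`, in the same style as the new `DDLSuite`; the database name `mydb` and the property values are hypothetical:

```scala
// Illustrative sketch only: exercises the four DDL commands added by this PR.
// `sqlContext` is an existing SQLContext (or HiveContext); `mydb` is a made-up name.
sqlContext.sql("CREATE DATABASE IF NOT EXISTS mydb")
sqlContext.sql("ALTER DATABASE mydb SET DBPROPERTIES ('a'='a', 'b'='b')")
sqlContext.sql("DESCRIBE DATABASE EXTENDED mydb").show()
sqlContext.sql("DROP DATABASE IF EXISTS mydb CASCADE")
```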
Diffstat (limited to 'sql/core/src/test/scala/org/apache')
-rw-r--r--  sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLCommandSuite.scala |  42
-rw-r--r--  sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala        | 151
2 files changed, 166 insertions, 27 deletions
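As a reading aid for the hunks below (not part of the diff): the command case classes drop their trailing SQL-string argument, and `DROP DATABASE`'s `restrict` flag is replaced by `cascade`, so several previously distinct expected plans collapse into the same value. A hedged sketch of the resulting test pattern, where the parser entry point and the exact SQL strings are assumptions based on the surrounding suite:

```scala
// Sketch only: RESTRICT (also the default) now parses to cascade = false,
// CASCADE to cascade = true; the SQL text is no longer stored in the plan.
val parsedRestrict = parser.parsePlan("DROP DATABASE IF EXISTS database_name RESTRICT")
val parsedCascade  = parser.parsePlan("DROP DATABASE IF EXISTS database_name CASCADE")
comparePlans(parsedRestrict, DropDatabase("database_name", ifExists = true, cascade = false))
comparePlans(parsedCascade, DropDatabase("database_name", ifExists = true, cascade = true))
```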
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLCommandSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLCommandSuite.scala
index 03079c6890..ccbfd41cca 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLCommandSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLCommandSuite.scala
@@ -39,7 +39,7 @@ class DDLCommandSuite extends PlanTest {
ifNotExists = true,
Some("/home/user/db"),
Some("database_comment"),
- Map("a" -> "a", "b" -> "b", "c" -> "c"))(sql)
+ Map("a" -> "a", "b" -> "b", "c" -> "c"))
comparePlans(parsed, expected)
}
@@ -65,39 +65,27 @@ class DDLCommandSuite extends PlanTest {
val expected1 = DropDatabase(
"database_name",
ifExists = true,
- restrict = true)(sql1)
+ cascade = false)
val expected2 = DropDatabase(
"database_name",
ifExists = true,
- restrict = false)(sql2)
+ cascade = true)
val expected3 = DropDatabase(
"database_name",
- ifExists = true,
- restrict = true)(sql3)
- val expected4 = DropDatabase(
- "database_name",
- ifExists = true,
- restrict = false)(sql4)
- val expected5 = DropDatabase(
- "database_name",
- ifExists = true,
- restrict = true)(sql5)
- val expected6 = DropDatabase(
- "database_name",
ifExists = false,
- restrict = true)(sql6)
- val expected7 = DropDatabase(
+ cascade = false)
+ val expected4 = DropDatabase(
"database_name",
ifExists = false,
- restrict = false)(sql7)
+ cascade = true)
comparePlans(parsed1, expected1)
comparePlans(parsed2, expected2)
- comparePlans(parsed3, expected3)
- comparePlans(parsed4, expected4)
- comparePlans(parsed5, expected5)
- comparePlans(parsed6, expected6)
- comparePlans(parsed7, expected7)
+ comparePlans(parsed3, expected1)
+ comparePlans(parsed4, expected2)
+ comparePlans(parsed5, expected1)
+ comparePlans(parsed6, expected3)
+ comparePlans(parsed7, expected4)
}
test("alter database set dbproperties") {
@@ -110,10 +98,10 @@ class DDLCommandSuite extends PlanTest {
val expected1 = AlterDatabaseProperties(
"database_name",
- Map("a" -> "a", "b" -> "b", "c" -> "c"))(sql1)
+ Map("a" -> "a", "b" -> "b", "c" -> "c"))
val expected2 = AlterDatabaseProperties(
"database_name",
- Map("a" -> "a"))(sql2)
+ Map("a" -> "a"))
comparePlans(parsed1, expected1)
comparePlans(parsed2, expected2)
@@ -129,10 +117,10 @@ class DDLCommandSuite extends PlanTest {
val expected1 = DescribeDatabase(
"db_name",
- extended = true)(sql1)
+ extended = true)
val expected2 = DescribeDatabase(
"db_name",
- extended = false)(sql2)
+ extended = false)
comparePlans(parsed1, expected1)
comparePlans(parsed2, expected2)
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
new file mode 100644
index 0000000000..47c9a22acd
--- /dev/null
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
@@ -0,0 +1,151 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.sql.execution.command
+
+import java.io.File
+
+import org.apache.spark.sql.{AnalysisException, QueryTest, Row}
+import org.apache.spark.sql.catalyst.catalog.CatalogDatabase
+import org.apache.spark.sql.catalyst.parser.ParserUtils._
+import org.apache.spark.sql.test.SharedSQLContext
+
+class DDLSuite extends QueryTest with SharedSQLContext {
+
+ /**
+ * Drops the databases given in `dbNames` after calling `f`.
+ */
+ private def withDatabase(dbNames: String*)(f: => Unit): Unit = {
+ try f finally {
+ dbNames.foreach { name =>
+ sqlContext.sql(s"DROP DATABASE IF EXISTS $name CASCADE")
+ }
+ }
+ }
+
+ test("Create/Drop Database") {
+ val catalog = sqlContext.sessionState.catalog
+
+ val databaseNames = Seq("db1", "`database`")
+
+ databaseNames.foreach { dbName =>
+ withDatabase(dbName) {
+ val dbNameWithoutBackTicks = cleanIdentifier(dbName)
+
+ sql(s"CREATE DATABASE $dbName")
+ val db1 = catalog.getDatabase(dbNameWithoutBackTicks)
+ assert(db1 == CatalogDatabase(
+ dbNameWithoutBackTicks,
+ "",
+ System.getProperty("java.io.tmpdir") + File.separator + s"$dbNameWithoutBackTicks.db",
+ Map.empty))
+ sql(s"DROP DATABASE $dbName CASCADE")
+ assert(!catalog.databaseExists(dbNameWithoutBackTicks))
+ }
+ }
+ }
+
+ test("Create Database - database already exists") {
+ val catalog = sqlContext.sessionState.catalog
+ val databaseNames = Seq("db1", "`database`")
+
+ databaseNames.foreach { dbName =>
+ val dbNameWithoutBackTicks = cleanIdentifier(dbName)
+ withDatabase(dbName) {
+ sql(s"CREATE DATABASE $dbName")
+ val db1 = catalog.getDatabase(dbNameWithoutBackTicks)
+ assert(db1 == CatalogDatabase(
+ dbNameWithoutBackTicks,
+ "",
+ System.getProperty("java.io.tmpdir") + File.separator + s"$dbNameWithoutBackTicks.db",
+ Map.empty))
+
+ val message = intercept[AnalysisException] {
+ sql(s"CREATE DATABASE $dbName")
+ }.getMessage
+ assert(message.contains(s"Database '$dbNameWithoutBackTicks' already exists."))
+ }
+ }
+ }
+
+ test("Alter/Describe Database") {
+ val catalog = sqlContext.sessionState.catalog
+ val databaseNames = Seq("db1", "`database`")
+
+ databaseNames.foreach { dbName =>
+ withDatabase(dbName) {
+ val dbNameWithoutBackTicks = cleanIdentifier(dbName)
+ val location =
+ System.getProperty("java.io.tmpdir") + File.separator + s"$dbNameWithoutBackTicks.db"
+ sql(s"CREATE DATABASE $dbName")
+
+ checkAnswer(
+ sql(s"DESCRIBE DATABASE EXTENDED $dbName"),
+ Row("Database Name", dbNameWithoutBackTicks) ::
+ Row("Description", "") ::
+ Row("Location", location) ::
+ Row("Properties", "") :: Nil)
+
+ sql(s"ALTER DATABASE $dbName SET DBPROPERTIES ('a'='a', 'b'='b', 'c'='c')")
+
+ checkAnswer(
+ sql(s"DESCRIBE DATABASE EXTENDED $dbName"),
+ Row("Database Name", dbNameWithoutBackTicks) ::
+ Row("Description", "") ::
+ Row("Location", location) ::
+ Row("Properties", "((a,a), (b,b), (c,c))") :: Nil)
+
+ sql(s"ALTER DATABASE $dbName SET DBPROPERTIES ('d'='d')")
+
+ checkAnswer(
+ sql(s"DESCRIBE DATABASE EXTENDED $dbName"),
+ Row("Database Name", dbNameWithoutBackTicks) ::
+ Row("Description", "") ::
+ Row("Location", location) ::
+ Row("Properties", "((a,a), (b,b), (c,c), (d,d))") :: Nil)
+ }
+ }
+ }
+
+ test("Drop/Alter/Describe Database - database does not exists") {
+ val databaseNames = Seq("db1", "`database`")
+
+ databaseNames.foreach { dbName =>
+ val dbNameWithoutBackTicks = cleanIdentifier(dbName)
+ assert(!sqlContext.sessionState.catalog.databaseExists(dbNameWithoutBackTicks))
+
+ var message = intercept[AnalysisException] {
+ sql(s"DROP DATABASE $dbName")
+ }.getMessage
+ assert(message.contains(s"Database '$dbNameWithoutBackTicks' does not exist"))
+
+ message = intercept[AnalysisException] {
+ sql(s"ALTER DATABASE $dbName SET DBPROPERTIES ('d'='d')")
+ }.getMessage
+ assert(message.contains(s"Database '$dbNameWithoutBackTicks' does not exist"))
+
+ message = intercept[AnalysisException] {
+ sql(s"DESCRIBE DATABASE EXTENDED $dbName")
+ }.getMessage
+ assert(message.contains(s"Database '$dbNameWithoutBackTicks' does not exist"))
+
+ sql(s"DROP DATABASE IF EXISTS $dbName")
+ }
+ }
+
+ // TODO: Add a test case for DROP DATABASE with RESTRICT once tables can be created in SQLContext
+}