path: root/sql/core/src/test/scala
author	Wenchen Fan <wenchen@databricks.com>	2016-09-05 13:09:20 +0800
committer	Wenchen Fan <wenchen@databricks.com>	2016-09-05 13:09:20 +0800
commit	3ccb23e445711ea5d9059eb6de7c490c8fc9d112 (patch)
tree	0d3d7652b065b16c1fcaaf54c466d0907a00b2c7 /sql/core/src/test/scala
parent	c1e9a6d274c281ec30e6d022eedfbe3a2988f721 (diff)
[SPARK-17394][SQL] should not allow specifying a database in the table/view name after RENAME TO
## What changes were proposed in this pull request?

It's really weird that we allow users to specify a database in both the source and the destination table name in `ALTER TABLE RENAME TO`, while logically we can't support renaming a table into a different database. Both PostgreSQL and MySQL disallow this syntax, so it's reasonable to follow them and simplify our code.

## How was this patch tested?

New test in `DDLCommandSuite`.

Author: Wenchen Fan <wenchen@databricks.com>

Closes #14955 from cloud-fan/rename.
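For illustration only (not part of this patch), a minimal Scala sketch of the behavior the change enforces, assuming a local `SparkSession` and hypothetical objects `dbx.tab1`/`tab2`: a database qualifier is still allowed on the source name, but one on the destination now fails at parse time, as the new test in `DDLCommandSuite` below asserts.

```scala
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.catalyst.parser.ParseException

object RenameToExample {
  def main(args: Array[String]): Unit = {
    // Hypothetical local session; names below are illustrative, not from the patch.
    val spark = SparkSession.builder()
      .appName("rename-to-example")
      .master("local[1]")
      .getOrCreate()

    spark.sql("CREATE DATABASE IF NOT EXISTS dbx")
    spark.sql("CREATE TABLE IF NOT EXISTS dbx.tab1 (id INT) USING parquet")

    // Allowed: database qualifier only on the source; the table stays in `dbx`.
    spark.sql("ALTER TABLE dbx.tab1 RENAME TO tab2")

    // Rejected after this change: a database qualifier on the destination name.
    try {
      spark.sql("ALTER TABLE dbx.tab2 RENAME TO dby.tab3")
    } catch {
      case e: ParseException =>
        // Expected to contain:
        // "Can not specify database in table/view name after RENAME TO"
        println(e.getMessage)
    }

    spark.stop()
  }
}
```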
Diffstat (limited to 'sql/core/src/test/scala')
-rw-r--r--	sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLCommandSuite.scala	9
-rw-r--r--	sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala	35
2 files changed, 10 insertions, 34 deletions
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLCommandSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLCommandSuite.scala
index 8dd883b37b..547fb63813 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLCommandSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLCommandSuite.scala
@@ -388,14 +388,19 @@ class DDLCommandSuite extends PlanTest {
val parsed_view = parser.parsePlan(sql_view)
val expected_table = AlterTableRenameCommand(
TableIdentifier("table_name", None),
- TableIdentifier("new_table_name", None),
+ "new_table_name",
isView = false)
val expected_view = AlterTableRenameCommand(
TableIdentifier("table_name", None),
- TableIdentifier("new_table_name", None),
+ "new_table_name",
isView = true)
comparePlans(parsed_table, expected_table)
comparePlans(parsed_view, expected_view)
+
+ val e = intercept[ParseException](
+ parser.parsePlan("ALTER TABLE db1.tbl RENAME TO db1.tbl2")
+ )
+ assert(e.getMessage.contains("Can not specify database in table/view name after RENAME TO"))
}
// ALTER TABLE table_name SET TBLPROPERTIES ('comment' = new_comment);
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
index 0073659a31..fd35c987ca 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
@@ -657,7 +657,7 @@ class DDLSuite extends QueryTest with SharedSQLContext with BeforeAndAfterEach {
createDatabase(catalog, "dby")
createTable(catalog, tableIdent1)
assert(catalog.listTables("dbx") == Seq(tableIdent1))
- sql("ALTER TABLE dbx.tab1 RENAME TO dbx.tab2")
+ sql("ALTER TABLE dbx.tab1 RENAME TO tab2")
assert(catalog.listTables("dbx") == Seq(tableIdent2))
catalog.setCurrentDatabase("dbx")
// rename without explicitly specifying database
@@ -665,11 +665,7 @@ class DDLSuite extends QueryTest with SharedSQLContext with BeforeAndAfterEach {
assert(catalog.listTables("dbx") == Seq(tableIdent1))
// table to rename does not exist
intercept[AnalysisException] {
- sql("ALTER TABLE dbx.does_not_exist RENAME TO dbx.tab2")
- }
- // destination database is different
- intercept[AnalysisException] {
- sql("ALTER TABLE dbx.tab1 RENAME TO dby.tab2")
+ sql("ALTER TABLE dbx.does_not_exist RENAME TO tab2")
}
}
@@ -691,31 +687,6 @@ class DDLSuite extends QueryTest with SharedSQLContext with BeforeAndAfterEach {
assert(spark.table("teachers").collect().toSeq == df.collect().toSeq)
}
- test("rename temporary table - destination table with database name") {
- withTempView("tab1") {
- sql(
- """
- |CREATE TEMPORARY TABLE tab1
- |USING org.apache.spark.sql.sources.DDLScanSource
- |OPTIONS (
- | From '1',
- | To '10',
- | Table 'test1'
- |)
- """.stripMargin)
-
- val e = intercept[AnalysisException] {
- sql("ALTER TABLE tab1 RENAME TO default.tab2")
- }
- assert(e.getMessage.contains(
- "RENAME TEMPORARY TABLE from '`tab1`' to '`default`.`tab2`': " +
- "cannot specify database name 'default' in the destination table"))
-
- val catalog = spark.sessionState.catalog
- assert(catalog.listTables("default") == Seq(TableIdentifier("tab1")))
- }
- }
-
test("rename temporary table - destination table already exists") {
withTempView("tab1", "tab2") {
sql(
@@ -744,7 +715,7 @@ class DDLSuite extends QueryTest with SharedSQLContext with BeforeAndAfterEach {
sql("ALTER TABLE tab1 RENAME TO tab2")
}
assert(e.getMessage.contains(
- "RENAME TEMPORARY TABLE from '`tab1`' to '`tab2`': destination table already exists"))
+ "RENAME TEMPORARY TABLE from '`tab1`' to 'tab2': destination table already exists"))
val catalog = spark.sessionState.catalog
assert(catalog.listTables("default") == Seq(TableIdentifier("tab1"), TableIdentifier("tab2")))