about summary refs log tree commit diff
path: root/sql/core/src/test/scala
diff options
context:
space:
mode:
author: gatorsmile <gatorsmile@gmail.com>  2016-09-15 14:43:10 +0800
committer: Wenchen Fan <wenchen@databricks.com>  2016-09-15 14:43:10 +0800
commit: 6a6adb1673775df63a62270879eac70f5f8d7d75 (patch)
tree: ef89cd39c929180ca4f899a43037c96d7f85d881 /sql/core/src/test/scala
parent: bb322943623d14b85283705e74d913e31230387f (diff)
download: spark-6a6adb1673775df63a62270879eac70f5f8d7d75.tar.gz
spark-6a6adb1673775df63a62270879eac70f5f8d7d75.tar.bz2
spark-6a6adb1673775df63a62270879eac70f5f8d7d75.zip
[SPARK-17440][SPARK-17441] Fixed Multiple Bugs in ALTER TABLE
### What changes were proposed in this pull request?

For the following `ALTER TABLE` DDL, we should issue an exception when the target table is a `VIEW`:

```SQL
ALTER TABLE viewName SET LOCATION '/path/to/your/lovely/heart'
ALTER TABLE viewName SET SERDE 'whatever'
ALTER TABLE viewName SET SERDEPROPERTIES ('x' = 'y')
ALTER TABLE viewName PARTITION (a=1, b=2) SET SERDEPROPERTIES ('x' = 'y')
ALTER TABLE viewName ADD IF NOT EXISTS PARTITION (a='4', b='8')
ALTER TABLE viewName DROP IF EXISTS PARTITION (a='2')
ALTER TABLE viewName RECOVER PARTITIONS
ALTER TABLE viewName PARTITION (a='1', b='q') RENAME TO PARTITION (a='100', b='p')
```

In addition, `ALTER TABLE RENAME PARTITION` is unable to handle data source tables, just like the other `ALTER PARTITION` commands. We should issue an exception instead.

### How was this patch tested?

Added a few test cases.

Author: gatorsmile <gatorsmile@gmail.com>

Closes #15004 from gatorsmile/altertable.
Diffstat (limited to 'sql/core/src/test/scala')
-rw-r--r--  sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala  63
1 file changed, 49 insertions, 14 deletions
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
index 95672e01f5..4a171808c0 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
@@ -696,6 +696,18 @@ class DDLSuite extends QueryTest with SharedSQLContext with BeforeAndAfterEach {
assert(spark.table("teachers").collect().toSeq == df.collect().toSeq)
}
+ test("rename temporary table") {
+ withTempView("tab1", "tab2") {
+ spark.range(10).createOrReplaceTempView("tab1")
+ sql("ALTER TABLE tab1 RENAME TO tab2")
+ checkAnswer(spark.table("tab2"), spark.range(10).toDF())
+ intercept[NoSuchTableException] { spark.table("tab1") }
+ sql("ALTER VIEW tab2 RENAME TO tab1")
+ checkAnswer(spark.table("tab1"), spark.range(10).toDF())
+ intercept[NoSuchTableException] { spark.table("tab2") }
+ }
+ }
+
test("rename temporary table - destination table already exists") {
withTempView("tab1", "tab2") {
sql(
@@ -880,25 +892,16 @@ class DDLSuite extends QueryTest with SharedSQLContext with BeforeAndAfterEach {
test("alter table: rename partition") {
val catalog = spark.sessionState.catalog
val tableIdent = TableIdentifier("tab1", Some("dbx"))
- val part1 = Map("a" -> "1", "b" -> "q")
- val part2 = Map("a" -> "2", "b" -> "c")
- val part3 = Map("a" -> "3", "b" -> "p")
- createDatabase(catalog, "dbx")
- createTable(catalog, tableIdent)
- createTablePartition(catalog, part1, tableIdent)
- createTablePartition(catalog, part2, tableIdent)
- createTablePartition(catalog, part3, tableIdent)
- assert(catalog.listPartitions(tableIdent).map(_.spec).toSet ==
- Set(part1, part2, part3))
+ createPartitionedTable(tableIdent, isDatasourceTable = false)
sql("ALTER TABLE dbx.tab1 PARTITION (a='1', b='q') RENAME TO PARTITION (a='100', b='p')")
- sql("ALTER TABLE dbx.tab1 PARTITION (a='2', b='c') RENAME TO PARTITION (a='200', b='c')")
+ sql("ALTER TABLE dbx.tab1 PARTITION (a='2', b='c') RENAME TO PARTITION (a='20', b='c')")
assert(catalog.listPartitions(tableIdent).map(_.spec).toSet ==
- Set(Map("a" -> "100", "b" -> "p"), Map("a" -> "200", "b" -> "c"), part3))
+ Set(Map("a" -> "100", "b" -> "p"), Map("a" -> "20", "b" -> "c"), Map("a" -> "3", "b" -> "p")))
// rename without explicitly specifying database
catalog.setCurrentDatabase("dbx")
sql("ALTER TABLE tab1 PARTITION (a='100', b='p') RENAME TO PARTITION (a='10', b='p')")
assert(catalog.listPartitions(tableIdent).map(_.spec).toSet ==
- Set(Map("a" -> "10", "b" -> "p"), Map("a" -> "200", "b" -> "c"), part3))
+ Set(Map("a" -> "10", "b" -> "p"), Map("a" -> "20", "b" -> "c"), Map("a" -> "3", "b" -> "p")))
// table to alter does not exist
intercept[NoSuchTableException] {
sql("ALTER TABLE does_not_exist PARTITION (c='3') RENAME TO PARTITION (c='333')")
@@ -909,6 +912,38 @@ class DDLSuite extends QueryTest with SharedSQLContext with BeforeAndAfterEach {
}
}
+ test("alter table: rename partition (datasource table)") {
+ createPartitionedTable(TableIdentifier("tab1", Some("dbx")), isDatasourceTable = true)
+ val e = intercept[AnalysisException] {
+ sql("ALTER TABLE dbx.tab1 PARTITION (a='1', b='q') RENAME TO PARTITION (a='100', b='p')")
+ }.getMessage
+ assert(e.contains(
+ "ALTER TABLE RENAME PARTITION is not allowed for tables defined using the datasource API"))
+ // table to alter does not exist
+ intercept[NoSuchTableException] {
+ sql("ALTER TABLE does_not_exist PARTITION (c='3') RENAME TO PARTITION (c='333')")
+ }
+ }
+
+ private def createPartitionedTable(
+ tableIdent: TableIdentifier,
+ isDatasourceTable: Boolean): Unit = {
+ val catalog = spark.sessionState.catalog
+ val part1 = Map("a" -> "1", "b" -> "q")
+ val part2 = Map("a" -> "2", "b" -> "c")
+ val part3 = Map("a" -> "3", "b" -> "p")
+ createDatabase(catalog, "dbx")
+ createTable(catalog, tableIdent)
+ createTablePartition(catalog, part1, tableIdent)
+ createTablePartition(catalog, part2, tableIdent)
+ createTablePartition(catalog, part3, tableIdent)
+ assert(catalog.listPartitions(tableIdent).map(_.spec).toSet ==
+ Set(part1, part2, part3))
+ if (isDatasourceTable) {
+ convertToDatasourceTable(catalog, tableIdent)
+ }
+ }
+
test("show tables") {
withTempView("show1a", "show2b") {
sql(
@@ -1255,7 +1290,7 @@ class DDLSuite extends QueryTest with SharedSQLContext with BeforeAndAfterEach {
}
// table to alter does not exist
intercept[AnalysisException] {
- sql("ALTER TABLE does_not_exist SET SERDEPROPERTIES ('x' = 'y')")
+ sql("ALTER TABLE does_not_exist PARTITION (a=1, b=2) SET SERDEPROPERTIES ('x' = 'y')")
}
}