author    gatorsmile <gatorsmile@gmail.com>        2016-09-15 14:43:10 +0800
committer Wenchen Fan <wenchen@databricks.com>     2016-09-15 14:43:10 +0800
commit    6a6adb1673775df63a62270879eac70f5f8d7d75 (patch)
tree      ef89cd39c929180ca4f899a43037c96d7f85d881 /sql/core
parent    bb322943623d14b85283705e74d913e31230387f (diff)
[SPARK-17440][SPARK-17441] Fixed Multiple Bugs in ALTER TABLE
### What changes were proposed in this pull request?

For the following `ALTER TABLE` DDL, we should issue an exception when the target table is a `VIEW`:

```SQL
ALTER TABLE viewName SET LOCATION '/path/to/your/lovely/heart'
ALTER TABLE viewName SET SERDE 'whatever'
ALTER TABLE viewName SET SERDEPROPERTIES ('x' = 'y')
ALTER TABLE viewName PARTITION (a=1, b=2) SET SERDEPROPERTIES ('x' = 'y')
ALTER TABLE viewName ADD IF NOT EXISTS PARTITION (a='4', b='8')
ALTER TABLE viewName DROP IF EXISTS PARTITION (a='2')
ALTER TABLE viewName RECOVER PARTITIONS
ALTER TABLE viewName PARTITION (a='1', b='q') RENAME TO PARTITION (a='100', b='p')
```

In addition, `ALTER TABLE RENAME PARTITION` is unable to handle data source tables, just like the other `ALTER PARTITION` commands. We should issue an exception instead.

### How was this patch tested?

Added a few test cases.

Author: gatorsmile <gatorsmile@gmail.com>

Closes #15004 from gatorsmile/altertable.
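For illustration, here is a minimal sketch (not part of the patch) of the behavior this change enforces, as a hypothetical Spark shell session. The `base` table and `viewName` view are assumed names; the expected message is the one added in `DDLUtils.verifyAlterTableType` below:

```scala
// Hypothetical session illustrating the intended post-patch behavior.
// Assumes a SparkSession `spark` whose catalog supports persistent views.
import org.apache.spark.sql.AnalysisException

spark.sql("CREATE TABLE base (a INT, b STRING)")
spark.sql("CREATE VIEW viewName AS SELECT * FROM base")

try {
  spark.sql("ALTER TABLE viewName SET LOCATION '/path/to/your/lovely/heart'")
} catch {
  case e: AnalysisException =>
    // Expected: "Cannot alter a view with ALTER TABLE. Please use ALTER VIEW instead"
    println(e.getMessage)
}
```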
Diffstat (limited to 'sql/core')
-rw-r--r--  sql/core/src/main/scala/org/apache/spark/sql/execution/command/ddl.scala      45
-rw-r--r--  sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala    4
-rw-r--r--  sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala  63
3 files changed, 83 insertions, 29 deletions
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/ddl.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/ddl.scala
index dcda2f8d1c..c0ccdca98e 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/ddl.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/ddl.scala
@@ -230,8 +230,8 @@ case class AlterTableSetPropertiesCommand(
override def run(sparkSession: SparkSession): Seq[Row] = {
val catalog = sparkSession.sessionState.catalog
- DDLUtils.verifyAlterTableType(catalog, tableName, isView)
val table = catalog.getTableMetadata(tableName)
+ DDLUtils.verifyAlterTableType(catalog, table, isView)
// This overrides old properties
val newTable = table.copy(properties = table.properties ++ properties)
catalog.alterTable(newTable)
@@ -258,8 +258,8 @@ case class AlterTableUnsetPropertiesCommand(
override def run(sparkSession: SparkSession): Seq[Row] = {
val catalog = sparkSession.sessionState.catalog
- DDLUtils.verifyAlterTableType(catalog, tableName, isView)
val table = catalog.getTableMetadata(tableName)
+ DDLUtils.verifyAlterTableType(catalog, table, isView)
if (!ifExists) {
propKeys.foreach { k =>
if (!table.properties.contains(k)) {
@@ -299,6 +299,7 @@ case class AlterTableSerDePropertiesCommand(
override def run(sparkSession: SparkSession): Seq[Row] = {
val catalog = sparkSession.sessionState.catalog
val table = catalog.getTableMetadata(tableName)
+ DDLUtils.verifyAlterTableType(catalog, table, isView = false)
// For datasource tables, disallow setting serde or specifying partition
if (partSpec.isDefined && DDLUtils.isDatasourceTable(table)) {
throw new AnalysisException("Operation not allowed: ALTER TABLE SET " +
@@ -348,6 +349,7 @@ case class AlterTableAddPartitionCommand(
override def run(sparkSession: SparkSession): Seq[Row] = {
val catalog = sparkSession.sessionState.catalog
val table = catalog.getTableMetadata(tableName)
+ DDLUtils.verifyAlterTableType(catalog, table, isView = false)
if (DDLUtils.isDatasourceTable(table)) {
throw new AnalysisException(
"ALTER TABLE ADD PARTITION is not allowed for tables defined using the datasource API")
@@ -377,7 +379,14 @@ case class AlterTableRenamePartitionCommand(
extends RunnableCommand {
override def run(sparkSession: SparkSession): Seq[Row] = {
- sparkSession.sessionState.catalog.renamePartitions(
+ val catalog = sparkSession.sessionState.catalog
+ val table = catalog.getTableMetadata(tableName)
+ if (DDLUtils.isDatasourceTable(table)) {
+ throw new AnalysisException(
+ "ALTER TABLE RENAME PARTITION is not allowed for tables defined using the datasource API")
+ }
+ DDLUtils.verifyAlterTableType(catalog, table, isView = false)
+ catalog.renamePartitions(
tableName, Seq(oldPartition), Seq(newPartition))
Seq.empty[Row]
}
@@ -408,6 +417,7 @@ case class AlterTableDropPartitionCommand(
override def run(sparkSession: SparkSession): Seq[Row] = {
val catalog = sparkSession.sessionState.catalog
val table = catalog.getTableMetadata(tableName)
+ DDLUtils.verifyAlterTableType(catalog, table, isView = false)
if (DDLUtils.isDatasourceTable(table)) {
throw new AnalysisException(
"ALTER TABLE DROP PARTITIONS is not allowed for tables defined using the datasource API")
@@ -469,6 +479,7 @@ case class AlterTableRecoverPartitionsCommand(
s"Operation not allowed: $cmd on temporary tables: $tableName")
}
val table = catalog.getTableMetadata(tableName)
+ DDLUtils.verifyAlterTableType(catalog, table, isView = false)
if (DDLUtils.isDatasourceTable(table)) {
throw new AnalysisException(
s"Operation not allowed: $cmd on datasource tables: $tableName")
@@ -644,6 +655,7 @@ case class AlterTableSetLocationCommand(
override def run(sparkSession: SparkSession): Seq[Row] = {
val catalog = sparkSession.sessionState.catalog
val table = catalog.getTableMetadata(tableName)
+ DDLUtils.verifyAlterTableType(catalog, table, isView = false)
partitionSpec match {
case Some(spec) =>
// Partition spec is specified, so we set the location only for this partition
@@ -682,19 +694,26 @@ object DDLUtils {
/**
* If the command ALTER VIEW is to alter a table or ALTER TABLE is to alter a view,
* issue an exception [[AnalysisException]].
+ *
+ * Note: temporary views can be altered by both ALTER VIEW and ALTER TABLE commands,
+ * since temporary views can also be created by CREATE TEMPORARY TABLE. In the future,
+ * when we decide to drop that support, we should disallow users from altering temporary
+ * views via ALTER TABLE.
*/
def verifyAlterTableType(
catalog: SessionCatalog,
- tableIdentifier: TableIdentifier,
+ tableMetadata: CatalogTable,
isView: Boolean): Unit = {
- catalog.getTableMetadataOption(tableIdentifier).map(_.tableType match {
- case CatalogTableType.VIEW if !isView =>
- throw new AnalysisException(
- "Cannot alter a view with ALTER TABLE. Please use ALTER VIEW instead")
- case o if o != CatalogTableType.VIEW && isView =>
- throw new AnalysisException(
- s"Cannot alter a table with ALTER VIEW. Please use ALTER TABLE instead")
- case _ =>
- })
+ if (!catalog.isTemporaryTable(tableMetadata.identifier)) {
+ tableMetadata.tableType match {
+ case CatalogTableType.VIEW if !isView =>
+ throw new AnalysisException(
+ "Cannot alter a view with ALTER TABLE. Please use ALTER VIEW instead")
+ case o if o != CatalogTableType.VIEW && isView =>
+ throw new AnalysisException(
+ s"Cannot alter a table with ALTER VIEW. Please use ALTER TABLE instead")
+ case _ =>
+ }
+ }
}
}
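One behavioral consequence of this refactoring is worth calling out: the old code looked metadata up with `getTableMetadataOption` and silently skipped the check when the table did not exist, while every call site now fetches metadata up front with `getTableMetadata`, which throws `NoSuchTableException` for a missing table before the view/table check runs (the tests below rely on this). A minimal sketch of the call pattern now shared by the commands above, with `tableName` standing in for each command's identifier field:

```scala
// Post-patch call pattern shared by the ALTER TABLE ... commands in this file.
// getTableMetadata throws NoSuchTableException if the table is missing, so
// verifyAlterTableType only ever sees metadata for an existing relation.
val catalog = sparkSession.sessionState.catalog
val table = catalog.getTableMetadata(tableName)
DDLUtils.verifyAlterTableType(catalog, table, isView = false)
```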
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala
index 9fbcd48b4a..60e6b5db62 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala
@@ -158,7 +158,8 @@ case class AlterTableRenameCommand(
override def run(sparkSession: SparkSession): Seq[Row] = {
val catalog = sparkSession.sessionState.catalog
- DDLUtils.verifyAlterTableType(catalog, oldName, isView)
+ val table = catalog.getTableMetadata(oldName)
+ DDLUtils.verifyAlterTableType(catalog, table, isView)
// If this is a temp view, just rename the view.
// Otherwise, if this is a real table, we also need to uncache and invalidate the table.
val isTemporary = catalog.isTemporaryTable(oldName)
@@ -177,7 +178,6 @@ case class AlterTableRenameCommand(
}
}
// For datasource tables, we also need to update the "path" serde property
- val table = catalog.getTableMetadata(oldName)
if (DDLUtils.isDatasourceTable(table) && table.tableType == CatalogTableType.MANAGED) {
val newPath = catalog.defaultTablePath(newTblName)
val newTable = table.withNewStorage(
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
index 95672e01f5..4a171808c0 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
@@ -696,6 +696,18 @@ class DDLSuite extends QueryTest with SharedSQLContext with BeforeAndAfterEach {
assert(spark.table("teachers").collect().toSeq == df.collect().toSeq)
}
+ test("rename temporary table") {
+ withTempView("tab1", "tab2") {
+ spark.range(10).createOrReplaceTempView("tab1")
+ sql("ALTER TABLE tab1 RENAME TO tab2")
+ checkAnswer(spark.table("tab2"), spark.range(10).toDF())
+ intercept[NoSuchTableException] { spark.table("tab1") }
+ sql("ALTER VIEW tab2 RENAME TO tab1")
+ checkAnswer(spark.table("tab1"), spark.range(10).toDF())
+ intercept[NoSuchTableException] { spark.table("tab2") }
+ }
+ }
+
test("rename temporary table - destination table already exists") {
withTempView("tab1", "tab2") {
sql(
@@ -880,25 +892,16 @@ class DDLSuite extends QueryTest with SharedSQLContext with BeforeAndAfterEach {
test("alter table: rename partition") {
val catalog = spark.sessionState.catalog
val tableIdent = TableIdentifier("tab1", Some("dbx"))
- val part1 = Map("a" -> "1", "b" -> "q")
- val part2 = Map("a" -> "2", "b" -> "c")
- val part3 = Map("a" -> "3", "b" -> "p")
- createDatabase(catalog, "dbx")
- createTable(catalog, tableIdent)
- createTablePartition(catalog, part1, tableIdent)
- createTablePartition(catalog, part2, tableIdent)
- createTablePartition(catalog, part3, tableIdent)
- assert(catalog.listPartitions(tableIdent).map(_.spec).toSet ==
- Set(part1, part2, part3))
+ createPartitionedTable(tableIdent, isDatasourceTable = false)
sql("ALTER TABLE dbx.tab1 PARTITION (a='1', b='q') RENAME TO PARTITION (a='100', b='p')")
- sql("ALTER TABLE dbx.tab1 PARTITION (a='2', b='c') RENAME TO PARTITION (a='200', b='c')")
+ sql("ALTER TABLE dbx.tab1 PARTITION (a='2', b='c') RENAME TO PARTITION (a='20', b='c')")
assert(catalog.listPartitions(tableIdent).map(_.spec).toSet ==
- Set(Map("a" -> "100", "b" -> "p"), Map("a" -> "200", "b" -> "c"), part3))
+ Set(Map("a" -> "100", "b" -> "p"), Map("a" -> "20", "b" -> "c"), Map("a" -> "3", "b" -> "p")))
// rename without explicitly specifying database
catalog.setCurrentDatabase("dbx")
sql("ALTER TABLE tab1 PARTITION (a='100', b='p') RENAME TO PARTITION (a='10', b='p')")
assert(catalog.listPartitions(tableIdent).map(_.spec).toSet ==
- Set(Map("a" -> "10", "b" -> "p"), Map("a" -> "200", "b" -> "c"), part3))
+ Set(Map("a" -> "10", "b" -> "p"), Map("a" -> "20", "b" -> "c"), Map("a" -> "3", "b" -> "p")))
// table to alter does not exist
intercept[NoSuchTableException] {
sql("ALTER TABLE does_not_exist PARTITION (c='3') RENAME TO PARTITION (c='333')")
@@ -909,6 +912,38 @@ class DDLSuite extends QueryTest with SharedSQLContext with BeforeAndAfterEach {
}
}
+ test("alter table: rename partition (datasource table)") {
+ createPartitionedTable(TableIdentifier("tab1", Some("dbx")), isDatasourceTable = true)
+ val e = intercept[AnalysisException] {
+ sql("ALTER TABLE dbx.tab1 PARTITION (a='1', b='q') RENAME TO PARTITION (a='100', b='p')")
+ }.getMessage
+ assert(e.contains(
+ "ALTER TABLE RENAME PARTITION is not allowed for tables defined using the datasource API"))
+ // table to alter does not exist
+ intercept[NoSuchTableException] {
+ sql("ALTER TABLE does_not_exist PARTITION (c='3') RENAME TO PARTITION (c='333')")
+ }
+ }
+
+ private def createPartitionedTable(
+ tableIdent: TableIdentifier,
+ isDatasourceTable: Boolean): Unit = {
+ val catalog = spark.sessionState.catalog
+ val part1 = Map("a" -> "1", "b" -> "q")
+ val part2 = Map("a" -> "2", "b" -> "c")
+ val part3 = Map("a" -> "3", "b" -> "p")
+ createDatabase(catalog, "dbx")
+ createTable(catalog, tableIdent)
+ createTablePartition(catalog, part1, tableIdent)
+ createTablePartition(catalog, part2, tableIdent)
+ createTablePartition(catalog, part3, tableIdent)
+ assert(catalog.listPartitions(tableIdent).map(_.spec).toSet ==
+ Set(part1, part2, part3))
+ if (isDatasourceTable) {
+ convertToDatasourceTable(catalog, tableIdent)
+ }
+ }
+
test("show tables") {
withTempView("show1a", "show2b") {
sql(
@@ -1255,7 +1290,7 @@ class DDLSuite extends QueryTest with SharedSQLContext with BeforeAndAfterEach {
}
// table to alter does not exist
intercept[AnalysisException] {
- sql("ALTER TABLE does_not_exist SET SERDEPROPERTIES ('x' = 'y')")
+ sql("ALTER TABLE does_not_exist PARTITION (a=1, b=2) SET SERDEPROPERTIES ('x' = 'y')")
}
}