author    gatorsmile <gatorsmile@gmail.com>    2016-04-14 08:34:11 -0700
committer Yin Huai <yhuai@databricks.com>      2016-04-14 08:34:11 -0700
commit    0d22092cd9c8876a7f226add578ff1c025012fe9 (patch)
tree      5ae02c9621897418c34ca5c63f13662cc0c21b58 /sql/core/src
parent    f83ba454a507bec0cc389d9a382cd71add7f17c1 (diff)
[SPARK-14125][SQL] Native DDL Support: Alter View
#### What changes were proposed in this pull request?

This PR provides native DDL support for the following three ALTER VIEW commands, based on the Hive DDL document: https://cwiki.apache.org/confluence/display/Hive/LanguageManual+DDL

##### 1. ALTER VIEW RENAME

**Syntax:**
```SQL
ALTER VIEW view_name RENAME TO new_view_name
```
- changes the name of a view to a different name
- renaming a view via ALTER TABLE is not allowed

##### 2. ALTER VIEW SET TBLPROPERTIES

**Syntax:**
```SQL
ALTER VIEW view_name SET TBLPROPERTIES ('comment' = new_comment);
```
- adds metadata to a view
- setting a view's properties via ALTER TABLE is not allowed
- setting an existing property key to the same value is a no-op
- setting an existing property key to a different value overwrites it

##### 3. ALTER VIEW UNSET TBLPROPERTIES

**Syntax:**
```SQL
ALTER VIEW view_name UNSET TBLPROPERTIES [IF EXISTS] ('comment', 'key')
```
- removes metadata from a view
- unsetting a view's properties via ALTER TABLE is not allowed
- unsetting a non-existent key raises an exception unless IF EXISTS is given

#### How was this patch tested?

Added test cases to verify that the commands work properly.

Author: gatorsmile <gatorsmile@gmail.com>
Author: xiaoli <lixiao1983@gmail.com>
Author: Xiao Li <xiaoli@Xiaos-MacBook-Pro.local>

Closes #12324 from gatorsmile/alterView.
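A minimal end-to-end sketch of the three commands as issued through the SQL API; the view name and property keys here are illustrative, not taken from the patch, and a `sqlContext` with an existing view `testView` is assumed:

```scala
// Hypothetical usage sketch: exercises the three ALTER VIEW commands
// added by this patch. Assumes `testView` already exists as a view.
sqlContext.sql("ALTER VIEW testView RENAME TO testView2")

// Add or overwrite view metadata; re-setting the same value is a no-op.
sqlContext.sql("ALTER VIEW testView2 SET TBLPROPERTIES ('comment' = 'a test view')")

// Remove metadata; without IF EXISTS, a missing key raises an AnalysisException.
sqlContext.sql("ALTER VIEW testView2 UNSET TBLPROPERTIES IF EXISTS ('comment')")
```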
Diffstat (limited to 'sql/core/src')
-rw-r--r--  sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala          |  9
-rw-r--r--  sql/core/src/main/scala/org/apache/spark/sql/execution/command/ddl.scala             | 29
-rw-r--r--  sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala          |  4
-rw-r--r--  sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLCommandSuite.scala | 18
4 files changed, 45 insertions(+), 15 deletions(-)
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
index af92cecee5..8ed6ed21d0 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
@@ -393,7 +393,8 @@ class SparkSqlAstBuilder extends AstBuilder {
override def visitRenameTable(ctx: RenameTableContext): LogicalPlan = withOrigin(ctx) {
AlterTableRename(
visitTableIdentifier(ctx.from),
- visitTableIdentifier(ctx.to))
+ visitTableIdentifier(ctx.to),
+ ctx.VIEW != null)
}
/**
@@ -409,7 +410,8 @@ class SparkSqlAstBuilder extends AstBuilder {
ctx: SetTablePropertiesContext): LogicalPlan = withOrigin(ctx) {
AlterTableSetProperties(
visitTableIdentifier(ctx.tableIdentifier),
- visitTablePropertyList(ctx.tablePropertyList))
+ visitTablePropertyList(ctx.tablePropertyList),
+ ctx.VIEW != null)
}
/**
@@ -426,7 +428,8 @@ class SparkSqlAstBuilder extends AstBuilder {
AlterTableUnsetProperties(
visitTableIdentifier(ctx.tableIdentifier),
visitTablePropertyList(ctx.tablePropertyList).keys.toSeq,
- ctx.EXISTS != null)
+ ctx.EXISTS != null,
+ ctx.VIEW != null)
}
/**
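The three parser hunks above all thread the same bit of information through: `ctx.VIEW != null` records whether the statement was written as ALTER VIEW or ALTER TABLE, so one grammar rule serves both forms. A test-style sketch of the resulting plans, assuming the `parser` handle used in DDLCommandSuite:

```scala
// Both statements hit visitRenameTable; the plans differ only in isView.
val parsedTable = parser.parsePlan("ALTER TABLE table_name RENAME TO new_table_name")
// => AlterTableRename(TableIdentifier("table_name"), TableIdentifier("new_table_name"), isView = false)
val parsedView = parser.parsePlan("ALTER VIEW table_name RENAME TO new_table_name")
// => AlterTableRename(TableIdentifier("table_name"), TableIdentifier("new_table_name"), isView = true)
```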
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/ddl.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/ddl.scala
index 234099ad15..fc37a142cd 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/ddl.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/ddl.scala
@@ -23,7 +23,7 @@ import org.apache.spark.internal.Logging
import org.apache.spark.sql.{AnalysisException, Row, SQLContext}
import org.apache.spark.sql.catalyst.TableIdentifier
import org.apache.spark.sql.catalyst.catalog.{CatalogDatabase, CatalogTable}
-import org.apache.spark.sql.catalyst.catalog.{CatalogTablePartition, CatalogTableType}
+import org.apache.spark.sql.catalyst.catalog.{CatalogTablePartition, CatalogTableType, SessionCatalog}
import org.apache.spark.sql.catalyst.catalog.ExternalCatalog.TablePartitionSpec
import org.apache.spark.sql.catalyst.expressions.{Attribute, AttributeReference}
import org.apache.spark.sql.types._
@@ -235,11 +235,13 @@ case class DropTable(
*/
case class AlterTableSetProperties(
tableName: TableIdentifier,
- properties: Map[String, String])
+ properties: Map[String, String],
+ isView: Boolean)
extends RunnableCommand {
override def run(sqlContext: SQLContext): Seq[Row] = {
val catalog = sqlContext.sessionState.catalog
+ DDLUtils.verifyAlterTableType(catalog, tableName, isView)
val table = catalog.getTableMetadata(tableName)
val newProperties = table.properties ++ properties
if (DDLUtils.isDatasourceTable(newProperties)) {
@@ -265,11 +267,13 @@ case class AlterTableSetProperties(
case class AlterTableUnsetProperties(
tableName: TableIdentifier,
propKeys: Seq[String],
- ifExists: Boolean)
+ ifExists: Boolean,
+ isView: Boolean)
extends RunnableCommand {
override def run(sqlContext: SQLContext): Seq[Row] = {
val catalog = sqlContext.sessionState.catalog
+ DDLUtils.verifyAlterTableType(catalog, tableName, isView)
val table = catalog.getTableMetadata(tableName)
if (DDLUtils.isDatasourceTable(table)) {
throw new AnalysisException(
@@ -513,5 +517,24 @@ private object DDLUtils {
def isDatasourceTable(table: CatalogTable): Boolean = {
isDatasourceTable(table.properties)
}
+
+ /**
+ * If the command ALTER VIEW is to alter a table or ALTER TABLE is to alter a view,
+ * issue an exception [[AnalysisException]].
+ */
+ def verifyAlterTableType(
+ catalog: SessionCatalog,
+ tableIdentifier: TableIdentifier,
+ isView: Boolean): Unit = {
+ catalog.getTableMetadataOption(tableIdentifier).map(_.tableType match {
+ case CatalogTableType.VIRTUAL_VIEW if !isView =>
+ throw new AnalysisException(
+ "Cannot alter a view with ALTER TABLE. Please use ALTER VIEW instead")
+ case o if o != CatalogTableType.VIRTUAL_VIEW && isView =>
+ throw new AnalysisException(
+ s"Cannot alter a table with ALTER VIEW. Please use ALTER TABLE instead")
+ case _ =>
+ })
+ }
}
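The heart of the patch is `verifyAlterTableType`: the metadata is looked up once, and the command fails only when the statement kind and the catalog object kind disagree. A standalone sketch of that logic with the catalog lookup replaced by a plain `Option`; the patch itself throws `AnalysisException`, but a standard exception is used here to keep the sketch self-contained:

```scala
// Simplified model of DDLUtils.verifyAlterTableType.
sealed trait TableType
case object VirtualView extends TableType
case object ManagedTable extends TableType

def verifyAlterTableType(metadata: Option[TableType], isView: Boolean): Unit =
  metadata.foreach {
    case VirtualView if !isView =>  // ALTER TABLE used on a view
      throw new IllegalArgumentException(
        "Cannot alter a view with ALTER TABLE. Please use ALTER VIEW instead")
    case t if t != VirtualView && isView =>  // ALTER VIEW used on a table
      throw new IllegalArgumentException(
        "Cannot alter a table with ALTER VIEW. Please use ALTER TABLE instead")
    case _ =>  // kinds agree; a missing entry is left to later checks
  }

verifyAlterTableType(Some(ManagedTable), isView = false)  // ok
verifyAlterTableType(Some(VirtualView), isView = false)   // throws
```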
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala
index 9c6030502d..e315598daa 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala
@@ -67,11 +67,13 @@ case class CreateTable(table: CatalogTable, ifNotExists: Boolean) extends RunnableCommand
*/
case class AlterTableRename(
oldName: TableIdentifier,
- newName: TableIdentifier)
+ newName: TableIdentifier,
+ isView: Boolean)
extends RunnableCommand {
override def run(sqlContext: SQLContext): Seq[Row] = {
val catalog = sqlContext.sessionState.catalog
+ DDLUtils.verifyAlterTableType(catalog, oldName, isView)
catalog.invalidateTable(oldName)
catalog.renameTable(oldName, newName)
Seq.empty[Row]
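With the check wired into `AlterTableRename`, a mismatched statement now fails before `invalidateTable` or `renameTable` runs. A test-style sketch of the expected behavior, assuming a ScalaTest context, a `sqlContext`, and an existing view `v`:

```scala
import org.apache.spark.sql.AnalysisException

// ALTER TABLE on a view is rejected up front; the message comes from
// DDLUtils.verifyAlterTableType. `intercept` is ScalaTest's.
val e = intercept[AnalysisException] {
  sqlContext.sql("ALTER TABLE v RENAME TO v2")
}
assert(e.getMessage.contains("Cannot alter a view with ALTER TABLE"))
```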
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLCommandSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLCommandSuite.scala
index 6e6475ee29..d6ccaf9348 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLCommandSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLCommandSuite.scala
@@ -214,10 +214,12 @@ class DDLCommandSuite extends PlanTest {
val parsed_view = parser.parsePlan(sql_view)
val expected_table = AlterTableRename(
TableIdentifier("table_name", None),
- TableIdentifier("new_table_name", None))
+ TableIdentifier("new_table_name", None),
+ isView = false)
val expected_view = AlterTableRename(
TableIdentifier("table_name", None),
- TableIdentifier("new_table_name", None))
+ TableIdentifier("new_table_name", None),
+ isView = true)
comparePlans(parsed_table, expected_table)
comparePlans(parsed_view, expected_view)
}
@@ -244,14 +246,14 @@ class DDLCommandSuite extends PlanTest {
val tableIdent = TableIdentifier("table_name", None)
val expected1_table = AlterTableSetProperties(
- tableIdent, Map("test" -> "test", "comment" -> "new_comment"))
+ tableIdent, Map("test" -> "test", "comment" -> "new_comment"), isView = false)
val expected2_table = AlterTableUnsetProperties(
- tableIdent, Seq("comment", "test"), ifExists = false)
+ tableIdent, Seq("comment", "test"), ifExists = false, isView = false)
val expected3_table = AlterTableUnsetProperties(
- tableIdent, Seq("comment", "test"), ifExists = true)
- val expected1_view = expected1_table
- val expected2_view = expected2_table
- val expected3_view = expected3_table
+ tableIdent, Seq("comment", "test"), ifExists = true, isView = false)
+ val expected1_view = expected1_table.copy(isView = true)
+ val expected2_view = expected2_table.copy(isView = true)
+ val expected3_view = expected3_table.copy(isView = true)
comparePlans(parsed1_table, expected1_table)
comparePlans(parsed2_table, expected2_table)