diff options
author | Yin Huai <yhuai@databricks.com> | 2016-04-27 00:30:54 -0700 |
---|---|---|
committer | Reynold Xin <rxin@databricks.com> | 2016-04-27 00:30:54 -0700 |
commit | 54a3eb8312bf4dfe97f9b294a2466785a1a17de6 (patch) | |
tree | 418d32c2ba458ff0618302430ee6694d1287d0be /sql/core/src | |
parent | d73d67f623dd65b90d0edbd7ba62e9a6ce7ebd1e (diff) | |
download | spark-54a3eb8312bf4dfe97f9b294a2466785a1a17de6.tar.gz spark-54a3eb8312bf4dfe97f9b294a2466785a1a17de6.tar.bz2 spark-54a3eb8312bf4dfe97f9b294a2466785a1a17de6.zip |
[SPARK-14130][SQL] Throw exceptions for ALTER TABLE ADD/REPLACE/CHANGE COLUMN, ALTER TABLE SET FILEFORMAT, DFS, and transaction related commands
## What changes were proposed in this pull request?
This PR will make Spark SQL not allow ALTER TABLE ADD/REPLACE/CHANGE COLUMN, ALTER TABLE SET FILEFORMAT, DFS, and transaction related commands.
## How was this patch tested?
Existing tests. For the tests that I put on the blacklist, I am adding their useful parts back to SQLQuerySuite.
Author: Yin Huai <yhuai@databricks.com>
Closes #12714 from yhuai/banNativeCommand.
Diffstat (limited to 'sql/core/src')
3 files changed, 61 insertions, 16 deletions
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala index e04e130eb6..79fdf9fb22 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala @@ -645,7 +645,7 @@ class SparkSqlAstBuilder(conf: SQLConf) extends AstBuilder { Option(ctx.partitionSpec).map(visitNonOptionalPartitionSpec), fileFormat, genericFormat)( - command(ctx)) + parseException("ALTER TABLE SET FILEFORMAT", ctx)) } /** @@ -693,7 +693,7 @@ class SparkSqlAstBuilder(conf: SQLConf) extends AstBuilder { // Note that Restrict and Cascade are mutually exclusive. ctx.RESTRICT != null, ctx.CASCADE != null)( - command(ctx)) + parseException("ALTER TABLE CHANGE COLUMN", ctx)) } /** @@ -713,7 +713,7 @@ class SparkSqlAstBuilder(conf: SQLConf) extends AstBuilder { // Note that Restrict and Cascade are mutually exclusive. ctx.RESTRICT != null, ctx.CASCADE != null)( - command(ctx)) + parseException("ALTER TABLE ADD COLUMNS", ctx)) } /** @@ -733,7 +733,7 @@ class SparkSqlAstBuilder(conf: SQLConf) extends AstBuilder { // Note that Restrict and Cascade are mutually exclusive. ctx.RESTRICT != null, ctx.CASCADE != null)( - command(ctx)) + parseException("ALTER TABLE REPLACE COLUMNS", ctx)) } /** @@ -782,7 +782,7 @@ class SparkSqlAstBuilder(conf: SQLConf) extends AstBuilder { // SET ROLE is the exception to the rule, because we handle this before other SET commands. 
"SET ROLE" } - throw new ParseException(s"Operation not allowed: $keywords", ctx) + throw parseException(keywords, ctx) } /** diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/ddl.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/ddl.scala index 12167ee307..4a9a1603d0 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/ddl.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/ddl.scala @@ -26,6 +26,7 @@ import org.apache.spark.sql.catalyst.catalog.{CatalogDatabase, CatalogTable} import org.apache.spark.sql.catalyst.catalog.{CatalogTablePartition, CatalogTableType, SessionCatalog} import org.apache.spark.sql.catalyst.catalog.ExternalCatalog.TablePartitionSpec import org.apache.spark.sql.catalyst.expressions.{Attribute, AttributeReference} +import org.apache.spark.sql.catalyst.parser.ParseException import org.apache.spark.sql.types._ @@ -34,12 +35,17 @@ import org.apache.spark.sql.types._ // https://cwiki.apache.org/confluence/display/Hive/LanguageManual+DDL /** - * A DDL command expected to be parsed and run in an underlying system instead of in Spark. + * A DDL command that is not supported right now. Since we have already implemented + * the parsing rules for some commands that are not allowed, we use this as the base class + * of those commands. */ -abstract class NativeDDLCommand(val sql: String) extends RunnableCommand { +abstract class UnsupportedCommand(exception: ParseException) extends RunnableCommand { + + // Throws the ParseException when we create this command. 
+ throw exception override def run(sparkSession: SparkSession): Seq[Row] = { - sparkSession.runNativeSql(sql) + Seq.empty[Row] } override val output: Seq[Attribute] = { @@ -426,8 +432,8 @@ case class AlterTableSetFileFormat( tableName: TableIdentifier, partitionSpec: Option[TablePartitionSpec], fileFormat: Seq[String], - genericFormat: Option[String])(sql: String) - extends NativeDDLCommand(sql) with Logging + genericFormat: Option[String])(exception: ParseException) + extends UnsupportedCommand(exception) with Logging /** * A command that sets the location of a table or a partition. @@ -488,24 +494,24 @@ case class AlterTableChangeCol( comment: Option[String], afterColName: Option[String], restrict: Boolean, - cascade: Boolean)(sql: String) - extends NativeDDLCommand(sql) with Logging + cascade: Boolean)(exception: ParseException) + extends UnsupportedCommand(exception) with Logging case class AlterTableAddCol( tableName: TableIdentifier, partitionSpec: Option[TablePartitionSpec], columns: StructType, restrict: Boolean, - cascade: Boolean)(sql: String) - extends NativeDDLCommand(sql) with Logging + cascade: Boolean)(exception: ParseException) + extends UnsupportedCommand(exception) with Logging case class AlterTableReplaceCol( tableName: TableIdentifier, partitionSpec: Option[TablePartitionSpec], columns: StructType, restrict: Boolean, - cascade: Boolean)(sql: String) - extends NativeDDLCommand(sql) with Logging + cascade: Boolean)(exception: ParseException) + extends UnsupportedCommand(exception) with Logging private[sql] object DDLUtils { diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLCommandSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLCommandSuite.scala index 9db5ccbbd6..be0f4d78a5 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLCommandSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLCommandSuite.scala @@ -441,6 +441,7 @@ class 
DDLCommandSuite extends PlanTest { assertUnsupported("ALTER TABLE table_name UNARCHIVE PARTITION (dt='2008-08-08', country='us')") } + /* test("alter table: set file format") { val sql1 = "ALTER TABLE table_name SET FILEFORMAT INPUTFORMAT 'test' " + "OUTPUTFORMAT 'test' SERDE 'test'" @@ -461,6 +462,15 @@ class DDLCommandSuite extends PlanTest { Some("PARQUET"))(sql2) comparePlans(parsed1, expected1) comparePlans(parsed2, expected2) + } */ + + test("alter table: set file format (not allowed)") { + assertUnsupported( + "ALTER TABLE table_name SET FILEFORMAT INPUTFORMAT 'test' " + + "OUTPUTFORMAT 'test' SERDE 'test'") + assertUnsupported( + "ALTER TABLE table_name PARTITION (dt='2008-08-08', country='us') " + + "SET FILEFORMAT PARQUET") } test("alter table: set location") { @@ -517,6 +527,7 @@ class DDLCommandSuite extends PlanTest { assertUnsupported("ALTER TABLE table_name SKEWED BY (key) ON (1,5,6) STORED AS DIRECTORIES") } + /* test("alter table: change column name/type/position/comment") { val sql1 = "ALTER TABLE table_name CHANGE col_old_name col_new_name INT" val sql2 = @@ -566,8 +577,22 @@ class DDLCommandSuite extends PlanTest { comparePlans(parsed1, expected1) comparePlans(parsed2, expected2) comparePlans(parsed3, expected3) + } */ + + test("alter table: change column name/type/position/comment (not allowed)") { + assertUnsupported("ALTER TABLE table_name CHANGE col_old_name col_new_name INT") + assertUnsupported( + """ + |ALTER TABLE table_name CHANGE COLUMN col_old_name col_new_name INT + |COMMENT 'col_comment' FIRST CASCADE + """.stripMargin) + assertUnsupported(""" + |ALTER TABLE table_name CHANGE COLUMN col_old_name col_new_name INT + |COMMENT 'col_comment' AFTER column_name RESTRICT + """.stripMargin) } + /* test("alter table: add/replace columns") { val sql1 = """ @@ -603,6 +628,20 @@ class DDLCommandSuite extends PlanTest { cascade = false)(sql2) comparePlans(parsed1, expected1) comparePlans(parsed2, expected2) + } */ + + test("alter table: 
add/replace columns (not allowed)") { + assertUnsupported( + """ + |ALTER TABLE table_name PARTITION (dt='2008-08-08', country='us') + |ADD COLUMNS (new_col1 INT COMMENT 'test_comment', new_col2 LONG + |COMMENT 'test_comment2') CASCADE + """.stripMargin) + assertUnsupported( + """ + |ALTER TABLE table_name REPLACE COLUMNS (new_col1 INT + |COMMENT 'test_comment', new_col2 LONG COMMENT 'test_comment2') RESTRICT + """.stripMargin) } test("show databases") { |