aboutsummaryrefslogtreecommitdiff
path: root/sql/core/src/main/scala/org/apache
diff options
context:
space:
mode:
authorgatorsmile <gatorsmile@gmail.com>2016-04-24 18:53:27 +0200
committerHerman van Hovell <hvanhovell@questtec.nl>2016-04-24 18:53:27 +0200
commit337289d7128be100103fce10ec7747ad5bc9cbf0 (patch)
tree32c4abf808fab6afb021355ca6e4cf510ef3cc27 /sql/core/src/main/scala/org/apache
parent8df8a81825709dbefe5aecd7642748c1b3a38e99 (diff)
downloadspark-337289d7128be100103fce10ec7747ad5bc9cbf0.tar.gz
spark-337289d7128be100103fce10ec7747ad5bc9cbf0.tar.bz2
spark-337289d7128be100103fce10ec7747ad5bc9cbf0.zip
[SPARK-14691][SQL] Simplify and Unify Error Generation for Unsupported Alter Table DDL
#### What changes were proposed in this pull request? So far, we are capturing each unsupported Alter Table in separate visit functions. They should be unified and issue the same ParseException instead. This PR is to refactor the existing implementation and make error message consistent for Alter Table DDL. #### How was this patch tested? Updated the existing test cases and also added new test cases to ensure all the unsupported statements are covered. Author: gatorsmile <gatorsmile@gmail.com> Author: xiaoli <lixiao1983@gmail.com> Author: Xiao Li <xiaoli@Xiaos-MacBook-Pro.local> Closes #12459 from gatorsmile/cleanAlterTable.
Diffstat (limited to 'sql/core/src/main/scala/org/apache')
-rw-r--r--sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala125
1 file changed, 7 insertions, 118 deletions
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
index 7dc888cdde..2b301a68db 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
@@ -22,7 +22,7 @@ import scala.util.Try
import org.antlr.v4.runtime.{ParserRuleContext, Token}
-import org.apache.spark.sql.{AnalysisException, SaveMode}
+import org.apache.spark.sql.SaveMode
import org.apache.spark.sql.catalyst.TableIdentifier
import org.apache.spark.sql.catalyst.catalog.{CatalogColumn, CatalogStorageFormat, CatalogTable, CatalogTableType}
import org.apache.spark.sql.catalyst.parser._
@@ -511,40 +511,6 @@ class SparkSqlAstBuilder(conf: SQLConf) extends AstBuilder {
Option(ctx.partitionSpec).map(visitNonOptionalPartitionSpec))
}
- // TODO: don't even bother parsing alter table commands related to bucketing and skewing
-
- override def visitBucketTable(ctx: BucketTableContext): LogicalPlan = withOrigin(ctx) {
- throw new AnalysisException(
- "Operation not allowed: ALTER TABLE ... CLUSTERED BY ... INTO N BUCKETS")
- }
-
- override def visitUnclusterTable(ctx: UnclusterTableContext): LogicalPlan = withOrigin(ctx) {
- throw new AnalysisException("Operation not allowed: ALTER TABLE ... NOT CLUSTERED")
- }
-
- override def visitUnsortTable(ctx: UnsortTableContext): LogicalPlan = withOrigin(ctx) {
- throw new AnalysisException("Operation not allowed: ALTER TABLE ... NOT SORTED")
- }
-
- override def visitSkewTable(ctx: SkewTableContext): LogicalPlan = withOrigin(ctx) {
- throw new AnalysisException("Operation not allowed: ALTER TABLE ... SKEWED BY ...")
- }
-
- override def visitUnskewTable(ctx: UnskewTableContext): LogicalPlan = withOrigin(ctx) {
- throw new AnalysisException("Operation not allowed: ALTER TABLE ... NOT SKEWED")
- }
-
- override def visitUnstoreTable(ctx: UnstoreTableContext): LogicalPlan = withOrigin(ctx) {
- throw new AnalysisException(
- "Operation not allowed: ALTER TABLE ... NOT STORED AS DIRECTORIES")
- }
-
- override def visitSetTableSkewLocations(
- ctx: SetTableSkewLocationsContext): LogicalPlan = withOrigin(ctx) {
- throw new AnalysisException(
- "Operation not allowed: ALTER TABLE ... SET SKEWED LOCATION ...")
- }
-
/**
* Create an [[AlterTableAddPartition]] command.
*
@@ -560,7 +526,7 @@ class SparkSqlAstBuilder(conf: SQLConf) extends AstBuilder {
override def visitAddTablePartition(
ctx: AddTablePartitionContext): LogicalPlan = withOrigin(ctx) {
if (ctx.VIEW != null) {
- throw new AnalysisException(s"Operation not allowed: partitioned views")
+ throw new ParseException(s"Operation not allowed: partitioned views", ctx)
}
// Create partition spec to location mapping.
val specsAndLocs = if (ctx.partitionSpec.isEmpty) {
@@ -581,20 +547,6 @@ class SparkSqlAstBuilder(conf: SQLConf) extends AstBuilder {
}
/**
- * Create an (Hive's) AlterTableExchangePartition command.
- *
- * For example:
- * {{{
- * ALTER TABLE table1 EXCHANGE PARTITION spec WITH TABLE table2;
- * }}}
- */
- override def visitExchangeTablePartition(
- ctx: ExchangeTablePartitionContext): LogicalPlan = withOrigin(ctx) {
- throw new AnalysisException(
- "Operation not allowed: ALTER TABLE ... EXCHANGE PARTITION ...")
- }
-
- /**
* Create an [[AlterTableRenamePartition]] command
*
* For example:
@@ -625,10 +577,10 @@ class SparkSqlAstBuilder(conf: SQLConf) extends AstBuilder {
override def visitDropTablePartitions(
ctx: DropTablePartitionsContext): LogicalPlan = withOrigin(ctx) {
if (ctx.VIEW != null) {
- throw new AnalysisException(s"Operation not allowed: partitioned views")
+ throw new ParseException(s"Operation not allowed: partitioned views", ctx)
}
if (ctx.PURGE != null) {
- throw new AnalysisException(s"Operation not allowed: PURGE")
+ throw new ParseException(s"Operation not allowed: PURGE", ctx)
}
AlterTableDropPartition(
visitTableIdentifier(ctx.tableIdentifier),
@@ -637,34 +589,6 @@ class SparkSqlAstBuilder(conf: SQLConf) extends AstBuilder {
}
/**
- * Create an (Hive's) AlterTableArchivePartition command
- *
- * For example:
- * {{{
- * ALTER TABLE table ARCHIVE PARTITION spec;
- * }}}
- */
- override def visitArchiveTablePartition(
- ctx: ArchiveTablePartitionContext): LogicalPlan = withOrigin(ctx) {
- throw new AnalysisException(
- "Operation not allowed: ALTER TABLE ... ARCHIVE PARTITION ...")
- }
-
- /**
- * Create an (Hive's) AlterTableUnarchivePartition command
- *
- * For example:
- * {{{
- * ALTER TABLE table UNARCHIVE PARTITION spec;
- * }}}
- */
- override def visitUnarchiveTablePartition(
- ctx: UnarchiveTablePartitionContext): LogicalPlan = withOrigin(ctx) {
- throw new AnalysisException(
- "Operation not allowed: ALTER TABLE ... UNARCHIVE PARTITION ...")
- }
-
- /**
* Create an [[AlterTableSetFileFormat]] command
*
* For example:
@@ -709,42 +633,6 @@ class SparkSqlAstBuilder(conf: SQLConf) extends AstBuilder {
}
/**
- * Create an (Hive's) AlterTableTouch command
- *
- * For example:
- * {{{
- * ALTER TABLE table TOUCH [PARTITION spec];
- * }}}
- */
- override def visitTouchTable(ctx: TouchTableContext): LogicalPlan = withOrigin(ctx) {
- throw new AnalysisException("Operation not allowed: ALTER TABLE ... TOUCH ...")
- }
-
- /**
- * Create an (Hive's) AlterTableCompact command
- *
- * For example:
- * {{{
- * ALTER TABLE table [PARTITION spec] COMPACT 'compaction_type';
- * }}}
- */
- override def visitCompactTable(ctx: CompactTableContext): LogicalPlan = withOrigin(ctx) {
- throw new AnalysisException("Operation not allowed: ALTER TABLE ... COMPACT ...")
- }
-
- /**
- * Create an (Hive's) AlterTableMerge command
- *
- * For example:
- * {{{
- * ALTER TABLE table [PARTITION spec] CONCATENATE;
- * }}}
- */
- override def visitConcatenateTable(ctx: ConcatenateTableContext): LogicalPlan = withOrigin(ctx) {
- throw new AnalysisException("Operation not allowed: ALTER TABLE ... CONCATENATE")
- }
-
- /**
* Create an [[AlterTableChangeCol]] command
*
* For example:
@@ -857,12 +745,13 @@ class SparkSqlAstBuilder(conf: SQLConf) extends AstBuilder {
override def visitFailNativeCommand(
ctx: FailNativeCommandContext): LogicalPlan = withOrigin(ctx) {
val keywords = if (ctx.kws != null) {
- Seq(ctx.kws.kw1, ctx.kws.kw2, ctx.kws.kw3).filter(_ != null).map(_.getText).mkString(" ")
+ Seq(ctx.kws.kw1, ctx.kws.kw2, ctx.kws.kw3, ctx.kws.kw4, ctx.kws.kw5, ctx.kws.kw6)
+ .filter(_ != null).map(_.getText).mkString(" ")
} else {
// SET ROLE is the exception to the rule, because we handle this before other SET commands.
"SET ROLE"
}
- throw new ParseException(s"Unsupported operation: $keywords", ctx)
+ throw new ParseException(s"Operation not allowed: $keywords", ctx)
}
/**