From 9be5558e009069925d1f2d737d42e1683ed6b47f Mon Sep 17 00:00:00 2001
From: gatorsmile
Date: Sat, 9 Apr 2016 14:10:44 -0700
Subject: [SPARK-14481][SQL] Issue Exceptions for All Unsupported Options
 during Parsing

#### What changes were proposed in this pull request?
"Not good to slightly ignore all the un-supported options/clauses. We should
either support it or throw an exception." A comment from yhuai in another PR
https://github.com/apache/spark/pull/12146

- Can `Explain` be an exception? The `Formatted` clause is used in
  `HiveCompatibilitySuite`.
- Two unsupported clauses in `Drop Table` are handled in a separate PR:
  https://github.com/apache/spark/pull/12146

#### How was this patch tested?
Test cases are added to verify all the cases.

Author: gatorsmile

Closes #12255 from gatorsmile/warningToException.
---
 .../apache/spark/sql/hive/execution/HiveSqlParser.scala | 16 ++++++++++------
 1 file changed, 10 insertions(+), 6 deletions(-)

(limited to 'sql/hive/src/main')

diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/HiveSqlParser.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/HiveSqlParser.scala
index ab69d3502e..657edb493a 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/HiveSqlParser.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/HiveSqlParser.scala
@@ -162,14 +162,16 @@ class HiveSqlAstBuilder extends SparkSqlAstBuilder
 
     // Unsupported clauses.
     if (temp) {
-      logWarning("TEMPORARY clause is ignored.")
+      throw new ParseException(s"Unsupported operation: TEMPORARY clause.", ctx)
     }
     if (ctx.bucketSpec != null) {
       // TODO add this - we need cluster columns in the CatalogTable for this to work.
-      logWarning("CLUSTERED BY ... [ORDERED BY ...] INTO ... BUCKETS clause is ignored.")
+      throw new ParseException("Unsupported operation: " +
+        "CLUSTERED BY ... [ORDERED BY ...] INTO ... BUCKETS clause.", ctx)
     }
     if (ctx.skewSpec != null) {
-      logWarning("SKEWED BY ... ON ... [STORED AS DIRECTORIES] clause is ignored.")
+      throw new ParseException("Operation not allowed: " +
+        "SKEWED BY ... ON ... [STORED AS DIRECTORIES] clause.", ctx)
     }
 
     // Create the schema.
@@ -230,7 +232,7 @@ class HiveSqlAstBuilder extends SparkSqlAstBuilder
       throw new ParseException(s"Operation not allowed: partitioned views", ctx)
     } else {
       if (ctx.STRING != null) {
-        logWarning("COMMENT clause is ignored.")
+        throw new ParseException("Unsupported operation: COMMENT clause", ctx)
       }
       val identifiers = Option(ctx.identifierCommentList).toSeq.flatMap(_.identifierComment.asScala)
       val schema = identifiers.map { ic =>
@@ -296,7 +298,8 @@ class HiveSqlAstBuilder extends SparkSqlAstBuilder
       recordReader: Token,
       schemaLess: Boolean): HiveScriptIOSchema = {
     if (recordWriter != null || recordReader != null) {
-      logWarning("Used defined record reader/writer classes are currently ignored.")
+      throw new ParseException(
+        "Unsupported operation: Used defined record reader/writer classes.", ctx)
     }
 
     // Decode and input/output format.
@@ -370,7 +373,8 @@ class HiveSqlAstBuilder extends SparkSqlAstBuilder
       ctx: TableFileFormatContext): CatalogStorageFormat = withOrigin(ctx) {
     import ctx._
     if (inDriver != null || outDriver != null) {
-      logWarning("INPUTDRIVER ... OUTPUTDRIVER ... clauses are ignored.")
+      throw new ParseException(
+        s"Operation not allowed: INPUTDRIVER ... OUTPUTDRIVER ... clauses", ctx)
     }
     EmptyStorageFormat.copy(
       inputFormat = Option(string(inFmt)),
--
cgit v1.2.3