author     gatorsmile <gatorsmile@gmail.com>  2016-04-09 14:10:44 -0700
committer  Yin Huai <yhuai@databricks.com>    2016-04-09 14:10:44 -0700
commit     9be5558e009069925d1f2d737d42e1683ed6b47f (patch)
tree       4506ed79f1edb0be9529cb27f2da95382fa1be27 /sql/hive
parent     415446cc9b2652f6da11ee8ead5eb4e66685c45f (diff)
[SPARK-14481][SQL] Issue Exceptions for All Unsupported Options during Parsing
#### What changes were proposed in this pull request?

"Not good to slightly ignore all the un-supported options/clauses. We should either support it or throw an exception."

This is a comment from yhuai in another PR: https://github.com/apache/spark/pull/12146

- Can `Explain` be an exception? The `Formatted` clause is used in `HiveCompatibilitySuite`.
- Two unsupported clauses in `Drop Table` are handled in a separate PR: https://github.com/apache/spark/pull/12146

#### How was this patch tested?

Test cases are added to verify all the cases.

Author: gatorsmile <gatorsmile@gmail.com>

Closes #12255 from gatorsmile/warningToException.
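The pattern applied throughout the parser is shown below as a minimal, self-contained sketch. The `ParseException` here is a simplified stand-in for Spark's real one (which also records the position of the offending clause from the parser context), and the `UnsupportedClauseCheck` object and `checkTemporary` method are illustrative names, not part of the code base:

```scala
// Simplified stand-in for Spark's ParseException; the real class also carries
// the line/column of the offending clause from the ANTLR parser context.
class ParseException(message: String) extends RuntimeException(message)

object UnsupportedClauseCheck {
  // Before this commit: the clause was logged via logWarning and silently dropped.
  // After this commit: parsing fails fast, so the user learns the clause has no effect.
  def checkTemporary(temp: Boolean): Unit =
    if (temp) throw new ParseException("Unsupported operation: TEMPORARY clause.")

  def main(args: Array[String]): Unit =
    try checkTemporary(temp = true)
    catch {
      case e: ParseException => println(s"Parse failed as expected: ${e.getMessage}")
    }
}
```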
Diffstat (limited to 'sql/hive')
-rw-r--r--  sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/HiveSqlParser.scala    | 16
-rw-r--r--  sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveDDLCommandSuite.scala        | 60
-rw-r--r--  sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveCommandSuite.scala |  2
-rw-r--r--  sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala    |  1
4 files changed, 70 insertions(+), 9 deletions(-)
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/HiveSqlParser.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/HiveSqlParser.scala
index ab69d3502e..657edb493a 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/HiveSqlParser.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/HiveSqlParser.scala
@@ -162,14 +162,16 @@ class HiveSqlAstBuilder extends SparkSqlAstBuilder {
// Unsupported clauses.
if (temp) {
- logWarning("TEMPORARY clause is ignored.")
+ throw new ParseException(s"Unsupported operation: TEMPORARY clause.", ctx)
}
if (ctx.bucketSpec != null) {
// TODO add this - we need cluster columns in the CatalogTable for this to work.
- logWarning("CLUSTERED BY ... [ORDERED BY ...] INTO ... BUCKETS clause is ignored.")
+ throw new ParseException("Unsupported operation: " +
+ "CLUSTERED BY ... [ORDERED BY ...] INTO ... BUCKETS clause.", ctx)
}
if (ctx.skewSpec != null) {
- logWarning("SKEWED BY ... ON ... [STORED AS DIRECTORIES] clause is ignored.")
+ throw new ParseException("Operation not allowed: " +
+ "SKEWED BY ... ON ... [STORED AS DIRECTORIES] clause.", ctx)
}
// Create the schema.
@@ -230,7 +232,7 @@ class HiveSqlAstBuilder extends SparkSqlAstBuilder {
throw new ParseException(s"Operation not allowed: partitioned views", ctx)
} else {
if (ctx.STRING != null) {
- logWarning("COMMENT clause is ignored.")
+ throw new ParseException("Unsupported operation: COMMENT clause", ctx)
}
val identifiers = Option(ctx.identifierCommentList).toSeq.flatMap(_.identifierComment.asScala)
val schema = identifiers.map { ic =>
@@ -296,7 +298,8 @@ class HiveSqlAstBuilder extends SparkSqlAstBuilder {
recordReader: Token,
schemaLess: Boolean): HiveScriptIOSchema = {
if (recordWriter != null || recordReader != null) {
- logWarning("Used defined record reader/writer classes are currently ignored.")
+ throw new ParseException(
+ "Unsupported operation: Used defined record reader/writer classes.", ctx)
}
// Decode the input/output format.
@@ -370,7 +373,8 @@ class HiveSqlAstBuilder extends SparkSqlAstBuilder {
ctx: TableFileFormatContext): CatalogStorageFormat = withOrigin(ctx) {
import ctx._
if (inDriver != null || outDriver != null) {
- logWarning("INPUTDRIVER ... OUTPUTDRIVER ... clauses are ignored.")
+ throw new ParseException(
+ s"Operation not allowed: INPUTDRIVER ... OUTPUTDRIVER ... clauses", ctx)
}
EmptyStorageFormat.copy(
inputFormat = Option(string(inFmt)),
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveDDLCommandSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveDDLCommandSuite.scala
index c5f01da4fa..12a582c10a 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveDDLCommandSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveDDLCommandSuite.scala
@@ -180,6 +180,65 @@ class HiveDDLCommandSuite extends PlanTest {
assert(desc.properties == Map(("tbl_p1" -> "p11"), ("tbl_p2" -> "p22")))
}
+ test("unsupported operations") {
+ intercept[ParseException] {
+ parser.parsePlan(
+ """
+ |CREATE TEMPORARY TABLE ctas2
+ |ROW FORMAT SERDE "org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe"
+ |WITH SERDEPROPERTIES("serde_p1"="p1","serde_p2"="p2")
+ |STORED AS RCFile
+ |TBLPROPERTIES("tbl_p1"="p11", "tbl_p2"="p22")
+ |AS SELECT key, value FROM src ORDER BY key, value
+ """.stripMargin)
+ }
+ intercept[ParseException] {
+ parser.parsePlan(
+ """CREATE TABLE ctas2
+ |STORED AS
+ |INPUTFORMAT "org.apache.hadoop.mapred.TextInputFormat"
+ |OUTPUTFORMAT "org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat"
+ |INPUTDRIVER "org.apache.hadoop.hive.howl.rcfile.RCFileInputDriver"
+ |OUTPUTDRIVER "org.apache.hadoop.hive.howl.rcfile.RCFileOutputDriver"
+ |AS SELECT key, value FROM src ORDER BY key, value
+ """.stripMargin)
+ }
+ intercept[ParseException] {
+ parser.parsePlan(
+ """
+ |CREATE TABLE user_info_bucketed(user_id BIGINT, firstname STRING, lastname STRING)
+ |CLUSTERED BY(user_id) INTO 256 BUCKETS
+ |AS SELECT key, value FROM src ORDER BY key, value
+ """.stripMargin)
+ }
+ intercept[ParseException] {
+ parser.parsePlan(
+ """
+ |CREATE TABLE user_info_bucketed(user_id BIGINT, firstname STRING, lastname STRING)
+ |SKEWED BY (key) ON (1,5,6)
+ |AS SELECT key, value FROM src ORDER BY key, value
+ """.stripMargin)
+ }
+ intercept[ParseException] {
+ parser.parsePlan(
+ """
+ |SELECT TRANSFORM (key, value) USING 'cat' AS (tKey, tValue)
+ |ROW FORMAT SERDE 'org.apache.hadoop.hive.contrib.serde2.TypedBytesSerDe'
+ |RECORDREADER 'org.apache.hadoop.hive.contrib.util.typedbytes.TypedBytesRecordReader'
+ |FROM testData
+ """.stripMargin)
+ }
+ intercept[ParseException] {
+ parser.parsePlan(
+ """
+ |CREATE OR REPLACE VIEW IF NOT EXISTS view1 (col1, col3)
+ |COMMENT 'blabla'
+ |TBLPROPERTIES('prop1Key'="prop1Val")
+ |AS SELECT * FROM tab1
+ """.stripMargin)
+ }
+ }
+
test("Invalid interval term should throw AnalysisException") {
def assertError(sql: String, errorMessage: String): Unit = {
val e = intercept[AnalysisException] {
@@ -277,7 +336,6 @@ class HiveDDLCommandSuite extends PlanTest {
"""
|CREATE OR REPLACE VIEW IF NOT EXISTS view1
|(col1, col3)
- |COMMENT 'I cannot spell'
|TBLPROPERTIES('prop1Key'="prop1Val")
|AS SELECT * FROM tab1
""".stripMargin
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveCommandSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveCommandSuite.scala
index 4c3f450522..8de2bdcfc0 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveCommandSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveCommandSuite.scala
@@ -26,7 +26,7 @@ class HiveCommandSuite extends QueryTest with SQLTestUtils with TestHiveSingleto
super.beforeAll()
sql(
"""
- |CREATE EXTERNAL TABLE parquet_tab1 (c1 INT, c2 STRING)
+ |CREATE TABLE parquet_tab1 (c1 INT, c2 STRING)
|USING org.apache.spark.sql.parquet.DefaultSource
""".stripMargin)
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala
index d7ec85c15d..f3796a9966 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala
@@ -1491,7 +1491,6 @@ class SQLQuerySuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
sql(
"""CREATE VIEW IF NOT EXISTS
|default.testView (c1 COMMENT 'blabla', c2 COMMENT 'blabla')
- |COMMENT 'blabla'
|TBLPROPERTIES ('a' = 'b')
|AS SELECT * FROM jt""".stripMargin)
checkAnswer(sql("SELECT c1, c2 FROM testView ORDER BY c1"), (1 to 9).map(i => Row(i, i)))