author    gatorsmile <gatorsmile@gmail.com>    2016-04-05 11:19:46 +0200
committer Herman van Hovell <hvanhovell@questtec.nl>    2016-04-05 11:19:46 +0200
commit    78071736799b6c86b5c01b27395f4ab87075342b (patch)
tree      08a4b18ebd0563c84cc8540ac81f511838e3810d /sql/core
parent    2715bc68bd1661d207b1af5f44ae8d02aec9d4ec (diff)
[SPARK-14349][SQL] Issue Error Messages for Unsupported Operators/DML/DDL in SQL Context.
#### What changes were proposed in this pull request?

Currently, weird error messages are issued if we use Hive Context-only operations in SQL Context. For example:

- When calling `Drop Table` in SQL Context, we got the following message:
```
Expected exception org.apache.spark.sql.catalyst.parser.ParseException to be thrown, but java.lang.ClassCastException was thrown.
```

- When calling `Script Transform` in SQL Context, we got the message:
```
assertion failed: No plan for ScriptTransformation [key#9,value#10], cat, [tKey#155,tValue#156], null
+- LogicalRDD [key#9,value#10], MapPartitionsRDD[3] at beforeAll at BeforeAndAfterAll.scala:187
```

Updates: Based on the investigation from hvanhovell, the root cause is `visitChildren`, which is the default implementation. It always returns the result of the last defined context child. After merging the code changes from hvanhovell, it works! Thank you hvanhovell!

#### How was this patch tested?

A few test cases are added. Not sure if the same issue exists for the other operators/DDL/DML. hvanhovell

Author: gatorsmile <gatorsmile@gmail.com>
Author: xiaoli <lixiao1983@gmail.com>
Author: Herman van Hovell <hvanhovell@questtec.nl>
Author: Xiao Li <xiaoli@Xiaos-MacBook-Pro.local>

Closes #12134 from gatorsmile/hiveParserCommand.
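To illustrate the root cause described above, here is a minimal, self-contained Scala sketch. The class and node names are hypothetical (this is not Spark's AstBuilder or the ANTLR-generated visitor): it only shows how a default `visitChildren` that keeps just the last child's result lets an unsupported statement appear to "succeed" with a nonsensical value instead of failing with a parse error.

```scala
// Hypothetical stand-in for an ANTLR-style visitor; not Spark code.
sealed trait Node
case class Statement(children: Seq[Node]) extends Node  // e.g. an unsupported DDL command
case class Token(text: String) extends Node

class DefaultVisitor {
  def visit(node: Node): String = node match {
    case s: Statement => visitChildren(s)
    case Token(text)  => text
  }

  // Mirrors the default behaviour described in the commit message:
  // visit every child and return only the last child's result.
  def visitChildren(node: Statement): String =
    node.children.map(visit).last
}

object VisitChildrenPitfall extends App {
  val dropTable = Statement(Seq(Token("DROP"), Token("TABLE"), Token("D1.T1")))
  // Prints "D1.T1" instead of failing -- the confusing fall-through that the
  // fix replaces with an explicit error for unsupported statements.
  println(new DefaultVisitor().visit(dropTable))
}
```

In the actual fix, the affected parser rules raise a `ParseException` instead, which is exactly what the new tests below assert with `intercept[ParseException]`.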
Diffstat (limited to 'sql/core')
-rw-r--r--  sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLCommandSuite.scala  23
1 file changed, 23 insertions, 0 deletions
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLCommandSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLCommandSuite.scala
index 8b2a5979e2..47e295a7e7 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLCommandSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLCommandSuite.scala
@@ -18,6 +18,7 @@
 package org.apache.spark.sql.execution.command
 
 import org.apache.spark.sql.catalyst.TableIdentifier
+import org.apache.spark.sql.catalyst.parser.ParseException
 import org.apache.spark.sql.catalyst.plans.PlanTest
 import org.apache.spark.sql.execution.SparkSqlParser
 import org.apache.spark.sql.execution.datasources.BucketSpec
@@ -781,4 +782,26 @@ class DDLCommandSuite extends PlanTest {
     comparePlans(parsed1, expected1)
     comparePlans(parsed2, expected2)
   }
+
+  test("commands only available in HiveContext") {
+    intercept[ParseException] {
+      parser.parsePlan("DROP TABLE D1.T1")
+    }
+    intercept[ParseException] {
+      parser.parsePlan("CREATE VIEW testView AS SELECT id FROM tab")
+    }
+    intercept[ParseException] {
+      parser.parsePlan("ALTER VIEW testView AS SELECT id FROM tab")
+    }
+    intercept[ParseException] {
+      parser.parsePlan(
+        """
+          |CREATE EXTERNAL TABLE parquet_tab2(c1 INT, c2 STRING)
+          |TBLPROPERTIES('prop1Key '= "prop1Val", ' `prop2Key` '= "prop2Val")
+        """.stripMargin)
+    }
+    intercept[ParseException] {
+      parser.parsePlan("SELECT TRANSFORM (key, value) USING 'cat' AS (tKey, tValue) FROM testData")
+    }
+  }
 }