author    jiangxingbo <jiangxb1987@gmail.com>  2017-03-06 12:35:03 -0800
committer Xiao Li <gatorsmile@gmail.com>       2017-03-06 12:35:03 -0800
commit    9991c2dad6d09d77d5a61e4c4dcd1770e5d984d4 (patch)
tree      364cd2f6f6ce57106fc3433057b5944cc559eaf5
parent    12bf832407eaaed90d7c599522457cb36b303b6c (diff)
[SPARK-19211][SQL] Explicitly prevent Insert into View or Create View As Insert
## What changes were proposed in this pull request?

Currently we don't explicitly forbid the following behaviors:

1. The statement CREATE VIEW AS INSERT INTO throws the following exception:
```
scala> spark.sql("CREATE VIEW testView AS INSERT INTO tab VALUES (1, \"a\")")
org.apache.spark.sql.AnalysisException: org.apache.hadoop.hive.ql.metadata.HiveException: org.apache.hadoop.hive.ql.metadata.HiveException: at least one column must be specified for the table;

scala> spark.sql("CREATE VIEW testView(a, b) AS INSERT INTO tab VALUES (1, \"a\")")
org.apache.spark.sql.AnalysisException: The number of columns produced by the SELECT clause (num: `0`) does not match the number of column names specified by CREATE VIEW (num: `2`).;
```

2. The statement INSERT INTO view VALUES throws the following exception from checkAnalysis:
```
scala> spark.sql("INSERT INTO testView VALUES (1, \"a\")")
org.apache.spark.sql.AnalysisException: Inserting into an RDD-based table is not allowed.;;
'InsertIntoTable View (`default`.`testView`, [a#16,b#17]), false, false
+- LocalRelation [col1#14, col2#15]
```

After this PR, the behavior changes to:
```
scala> spark.sql("CREATE VIEW testView AS INSERT INTO tab VALUES (1, \"a\")")
org.apache.spark.sql.catalyst.parser.ParseException: Operation not allowed: CREATE VIEW ... AS INSERT INTO;

scala> spark.sql("CREATE VIEW testView(a, b) AS INSERT INTO tab VALUES (1, \"a\")")
org.apache.spark.sql.catalyst.parser.ParseException: Operation not allowed: CREATE VIEW ... AS INSERT INTO;

scala> spark.sql("INSERT INTO testView VALUES (1, \"a\")")
org.apache.spark.sql.AnalysisException: `default`.`testView` is a view, inserting into a view is not allowed;
```

## How was this patch tested?

Added a new test case in `SparkSqlParserSuite`; updated the corresponding test case in `SQLViewSuite`.

Author: jiangxingbo <jiangxb1987@gmail.com>

Closes #17125 from jiangxb1987/insert-with-view.
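For readers who want to see the new guard end-to-end before diving into the diff, the following is a minimal, self-contained sketch (not part of this patch) that exercises both rejection paths through the public API. The object name, table/view names, DDL, and the `local[1]` master are illustrative assumptions, not code from this change.

```
import org.apache.spark.sql.{AnalysisException, SparkSession}

object InsertIntoViewDemo {
  def main(args: Array[String]): Unit = {
    // Hypothetical local session purely for demonstration.
    val spark = SparkSession.builder()
      .master("local[1]")
      .appName("insert-into-view-demo")
      .getOrCreate()

    spark.sql("CREATE TABLE tab (id INT, name STRING) USING parquet")
    spark.sql("CREATE VIEW testView AS SELECT id, name FROM tab")

    // 1. CREATE VIEW ... AS INSERT INTO is now rejected at parse time.
    //    The parser's ParseException is a subclass of AnalysisException,
    //    so one catch clause covers both paths.
    try {
      spark.sql("CREATE VIEW badView AS INSERT INTO tab VALUES (1, 'a')")
    } catch {
      case e: AnalysisException => println(s"Rejected by the parser: ${e.getMessage}")
    }

    // 2. INSERT INTO a view is now rejected during analysis with an explicit message.
    try {
      spark.sql("INSERT INTO testView VALUES (1, 'a')")
    } catch {
      case e: AnalysisException => println(s"Rejected by the analyzer: ${e.getMessage}")
    }

    spark.stop()
  }
}
```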
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala  |  6
-rw-r--r--  sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala        |  9
-rw-r--r--  sql/core/src/test/scala/org/apache/spark/sql/execution/SQLViewSuite.scala          |  2
-rw-r--r--  sql/core/src/test/scala/org/apache/spark/sql/execution/SparkSqlParserSuite.scala   | 11
4 files changed, 26 insertions(+), 2 deletions(-)
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
index 2f8489de6b..ffa5aed30e 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
@@ -606,7 +606,11 @@ class Analyzer(
     def apply(plan: LogicalPlan): LogicalPlan = plan resolveOperators {
       case i @ InsertIntoTable(u: UnresolvedRelation, parts, child, _, _) if child.resolved =>
-        i.copy(table = EliminateSubqueryAliases(lookupTableFromCatalog(u)))
+        lookupTableFromCatalog(u).canonicalized match {
+          case v: View =>
+            u.failAnalysis(s"Inserting into a view is not allowed. View: ${v.desc.identifier}.")
+          case other => i.copy(table = other)
+        }
       case u: UnresolvedRelation => resolveRelation(u)
     }
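The rule above swaps the old `EliminateSubqueryAliases(...)` call for a match on the catalog relation's `canonicalized` form, so that wrapper nodes such as subquery aliases are stripped and a `View` node can be detected before it is substituted as the insert target. Below is a toy sketch of the same strip-then-dispatch pattern using made-up `Plan`, `Alias`, `ViewNode`, and `TableNode` types; it is not Catalyst code, only an illustration of the idea.

```
// Toy model only; these types stand in for Catalyst's logical plan nodes.
object InsertTargetToy {
  sealed trait Plan { def canonicalized: Plan = this }

  // Canonicalizing an alias drops the wrapper so callers see the underlying node.
  case class Alias(name: String, child: Plan) extends Plan {
    override def canonicalized: Plan = child.canonicalized
  }
  case class ViewNode(name: String) extends Plan
  case class TableNode(name: String) extends Plan

  def resolveInsertTarget(lookedUp: Plan): Plan = lookedUp.canonicalized match {
    case ViewNode(name) =>
      // Mirrors the rule's failAnalysis branch: views are rejected outright.
      throw new IllegalArgumentException(s"Inserting into a view is not allowed. View: $name.")
    case other => other
  }

  def main(args: Array[String]): Unit = {
    // A table wrapped in an alias resolves normally; a wrapped view is still detected.
    println(resolveInsertTarget(Alias("t", TableNode("default.tab"))))    // TableNode(default.tab)
    println(resolveInsertTarget(Alias("t", ViewNode("default.testView")))) // throws
  }
}
```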
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
index c106163741..00d1d6d270 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
@@ -1331,6 +1331,15 @@ class SparkSqlAstBuilder(conf: SQLConf) extends AstBuilder {
     if (ctx.identifierList != null) {
       operationNotAllowed("CREATE VIEW ... PARTITIONED ON", ctx)
     } else {
+      // CREATE VIEW ... AS INSERT INTO is not allowed.
+      ctx.query.queryNoWith match {
+        case s: SingleInsertQueryContext if s.insertInto != null =>
+          operationNotAllowed("CREATE VIEW ... AS INSERT INTO", ctx)
+        case _: MultiInsertQueryContext =>
+          operationNotAllowed("CREATE VIEW ... AS FROM ... [INSERT INTO ...]+", ctx)
+        case _ => // OK
+      }
+
       val userSpecifiedColumns = Option(ctx.identifierCommentList).toSeq.flatMap { icl =>
         icl.identifierComment.asScala.map { ic =>
           ic.identifier.getText -> Option(ic.STRING).map(string)
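For reference, the two parser contexts matched in the new check correspond to the two insert syntaxes Spark SQL accepts under CREATE VIEW. The snippet below (with placeholder table names) only illustrates which branch each statement shape would trigger; it is not part of the patch.

```
// Placeholder names (v, tab, src, tbl1, tbl2); these strings only show query shapes.

// SingleInsertQueryContext with a non-null insertInto clause:
val singleInsert = "CREATE VIEW v AS INSERT INTO tab VALUES (1, 'a')"

// MultiInsertQueryContext: one FROM clause feeding several INSERT INTO branches:
val multiInsert =
  """CREATE VIEW v AS FROM src
    |INSERT INTO tbl1 SELECT * WHERE src.id < 5
    |INSERT INTO tbl2 SELECT * WHERE src.id > 4""".stripMargin
```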
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/SQLViewSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/SQLViewSuite.scala
index 2d95cb6d64..0e5a1dc6ab 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/SQLViewSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/SQLViewSuite.scala
@@ -172,7 +172,7 @@ abstract class SQLViewSuite extends QueryTest with SQLTestUtils {
       var e = intercept[AnalysisException] {
         sql(s"INSERT INTO TABLE $viewName SELECT 1")
       }.getMessage
-      assert(e.contains("Inserting into an RDD-based table is not allowed"))
+      assert(e.contains("Inserting into a view is not allowed. View: `default`.`testview`"))
 
       val dataFilePath =
         Thread.currentThread().getContextClassLoader.getResource("data/files/employee.dat")
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/SparkSqlParserSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/SparkSqlParserSuite.scala
index bb6c486e88..d44a6e41cb 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/SparkSqlParserSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/SparkSqlParserSuite.scala
@@ -210,6 +210,17 @@ class SparkSqlParserSuite extends PlanTest {
"no viable alternative at input")
}
+ test("create view as insert into table") {
+ // Single insert query
+ intercept("CREATE VIEW testView AS INSERT INTO jt VALUES(1, 1)",
+ "Operation not allowed: CREATE VIEW ... AS INSERT INTO")
+
+ // Multi insert query
+ intercept("CREATE VIEW testView AS FROM jt INSERT INTO tbl1 SELECT * WHERE jt.id < 5 " +
+ "INSERT INTO tbl2 SELECT * WHERE jt.id > 4",
+ "Operation not allowed: CREATE VIEW ... AS FROM ... [INSERT INTO ...]+")
+ }
+
test("SPARK-17328 Fix NPE with EXPLAIN DESCRIBE TABLE") {
assertEqual("describe table t",
DescribeTableCommand(