author     gatorsmile <gatorsmile@gmail.com>  2016-07-07 00:07:25 -0700
committer  Reynold Xin <rxin@databricks.com>  2016-07-07 00:07:25 -0700
commit     ab05db0b48f395543cd7d91e2ad9dd760516868b (patch)
tree       44649953351e49186afc362d975a64269313cdea
parent     ce3ea96980e4b31ee0e26d3054c9be94be6f2003 (diff)
[SPARK-16368][SQL] Fix Strange Errors When Creating View With Unmatched Column Num
#### What changes were proposed in this pull request?

When creating a view, a common user error is that the number of columns produced by the `SELECT` clause does not match the number of column names specified by `CREATE VIEW`. For example, given that table `t1` has only 3 columns:

```SQL
create view v1(col2, col4, col3, col5) as select * from t1
```

Currently, Spark SQL reports the following error:

```
requirement failed
java.lang.IllegalArgumentException: requirement failed
	at scala.Predef$.require(Predef.scala:212)
	at org.apache.spark.sql.execution.command.CreateViewCommand.run(views.scala:90)
```

This error message is very confusing. This PR detects the mismatch and issues a meaningful error message instead.

#### How was this patch tested?

Added test cases.

Author: gatorsmile <gatorsmile@gmail.com>

Closes #14047 from gatorsmile/viewMismatchedColumns.
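For illustration, here is a minimal, self-contained sketch of the shape of the check this patch adds, written outside of Spark. The names `userColumns` and `queryOutput` are hypothetical stand-ins for `tableDesc.schema` and `analyzedPlan.output`, and the local `AnalysisException` class only mimics Spark's; the actual change lives in `CreateViewCommand.run`, as shown in the diff below.

```scala
// Hypothetical stand-alone sketch; not Spark code. It only mirrors the shape of
// the validation that views.scala now performs before creating a view.
class AnalysisException(message: String) extends Exception(message)

object ViewColumnCountCheck {
  // userColumns: column names given in CREATE VIEW; queryOutput: columns from the SELECT clause.
  def validate(userColumns: Seq[String], queryOutput: Seq[String]): Unit = {
    if (userColumns.nonEmpty && userColumns.length != queryOutput.length) {
      throw new AnalysisException(
        s"The number of columns produced by the SELECT clause " +
        s"(num: `${queryOutput.length}`) does not match the number of column names " +
        s"specified by CREATE VIEW (num: `${userColumns.length}`).")
    }
  }

  def main(args: Array[String]): Unit = {
    // Mirrors the SQL example above: the SELECT yields 3 columns (t1's columns are
    // made-up names here), but CREATE VIEW listed 4 column names.
    try {
      validate(Seq("col2", "col4", "col3", "col5"), Seq("a", "b", "c"))
    } catch {
      case e: AnalysisException => println(e.getMessage)
    }
  }
}
```

Running the sketch prints the same message that the new `AnalysisException` in views.scala carries for the example above.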
-rw-r--r--  sql/core/src/main/scala/org/apache/spark/sql/execution/command/views.scala       6
-rw-r--r--  sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala    23
-rw-r--r--  sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala   23
3 files changed, 51 insertions, 1 deletion
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/views.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/views.scala
index 007fa46943..16b333a402 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/views.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/views.scala
@@ -88,7 +88,11 @@ case class CreateViewCommand(
    qe.assertAnalyzed()
    val analyzedPlan = qe.analyzed

-    require(tableDesc.schema == Nil || tableDesc.schema.length == analyzedPlan.output.length)
+    if (tableDesc.schema != Nil && tableDesc.schema.length != analyzedPlan.output.length) {
+      throw new AnalysisException(s"The number of columns produced by the SELECT clause " +
+        s"(num: `${analyzedPlan.output.length}`) does not match the number of column names " +
+        s"specified by CREATE VIEW (num: `${tableDesc.schema.length}`).")
+    }
    val sessionState = sparkSession.sessionState

    if (isTemporary) {
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
index 0ee8d179d7..7d1f1d1e62 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
@@ -1314,6 +1314,29 @@ class DDLSuite extends QueryTest with SharedSQLContext with BeforeAndAfterEach {
    }
  }

+  test("create temporary view with mismatched schema") {
+    withTable("tab1") {
+      spark.range(10).write.saveAsTable("tab1")
+      withView("view1") {
+        val e = intercept[AnalysisException] {
+          sql("CREATE TEMPORARY VIEW view1 (col1, col3) AS SELECT * FROM tab1")
+        }.getMessage
+        assert(e.contains("the SELECT clause (num: `1`) does not match")
+          && e.contains("CREATE VIEW (num: `2`)"))
+      }
+    }
+  }
+
+  test("create temporary view with specified schema") {
+    withView("view1") {
+      sql("CREATE TEMPORARY VIEW view1 (col1, col2) AS SELECT 1, 2")
+      checkAnswer(
+        sql("SELECT * FROM view1"),
+        Row(1, 2) :: Nil
+      )
+    }
+  }
+
  test("truncate table - external table, temporary table, view (not allowed)") {
    import testImplicits._
    val path = Utils.createTempDir().getAbsolutePath
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala
index 9d3c4cd3d5..93e50f4ee9 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala
@@ -391,6 +391,29 @@ class HiveDDLSuite
    }
  }

+  test("create view with mismatched schema") {
+    withTable("tab1") {
+      spark.range(10).write.saveAsTable("tab1")
+      withView("view1") {
+        val e = intercept[AnalysisException] {
+          sql("CREATE VIEW view1 (col1, col3) AS SELECT * FROM tab1")
+        }.getMessage
+        assert(e.contains("the SELECT clause (num: `1`) does not match")
+          && e.contains("CREATE VIEW (num: `2`)"))
+      }
+    }
+  }
+
+  test("create view with specified schema") {
+    withView("view1") {
+      sql("CREATE VIEW view1 (col1, col2) AS SELECT 1, 2")
+      checkAnswer(
+        sql("SELECT * FROM view1"),
+        Row(1, 2) :: Nil
+      )
+    }
+  }
+
  test("desc table for Hive table") {
    withTable("tab1") {
      val tabName = "tab1"