about summary refs log tree commit diff
path: root/sql/hive/src
diff options
context:
space:
mode:
authorgatorsmile <gatorsmile@gmail.com>2016-09-20 20:11:48 +0800
committerWenchen Fan <wenchen@databricks.com>2016-09-20 20:11:48 +0800
commitd5ec5dbb0dc0358b0394626c80781e422f9af581 (patch)
tree73d9bb2105d441cffae360fd8450a5ae180d15ec /sql/hive/src
parent4a426ff8aea4faa31a3016a453dec5b7954578dd (diff)
downloadspark-d5ec5dbb0dc0358b0394626c80781e422f9af581.tar.gz
spark-d5ec5dbb0dc0358b0394626c80781e422f9af581.tar.bz2
spark-d5ec5dbb0dc0358b0394626c80781e422f9af581.zip
[SPARK-17502][SQL] Fix Multiple Bugs in DDL Statements on Temporary Views
### What changes were proposed in this pull request?

- When the permanent tables/views do not exist but the temporary view exists, the expected error should be `NoSuchTableException` for partition-related ALTER TABLE commands. However, it always reports a confusing error message. For example,
```
Partition spec is invalid. The spec (a, b) must match the partition spec () defined in table '`testview`';
```
- When the permanent tables/views do not exist but the temporary view exists, the expected error should be `NoSuchTableException` for `ALTER TABLE ... UNSET TBLPROPERTIES`. However, it reports a missing table property. For example,
```
Attempted to unset non-existent property 'p' in table '`testView`';
```
- When `ANALYZE TABLE` is called on a view or a temporary view, we should issue an error message. However, it reports a strange error:
```
ANALYZE TABLE is not supported for Project
```
- When inserting into a temporary view that is generated from `Range`, we will get the following error message:
```
assertion failed: No plan for 'InsertIntoTable Range (0, 10, step=1, splits=Some(1)), false, false
+- Project [1 AS 1#20]
   +- OneRowRelation$
```

This PR is to fix the above four issues.

### How was this patch tested?

Added multiple test cases

Author: gatorsmile <gatorsmile@gmail.com>

Closes #15054 from gatorsmile/tempViewDDL.
Diffstat (limited to 'sql/hive/src')
-rw-r--r-- sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveCommandSuite.scala | 17
-rw-r--r-- sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala | 6
-rw-r--r-- sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLViewSuite.scala | 63
3 files changed, 54 insertions, 32 deletions
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveCommandSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveCommandSuite.scala
index df33731df2..b2103b3bfc 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveCommandSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveCommandSuite.scala
@@ -406,25 +406,24 @@ class HiveCommandSuite extends QueryTest with SQLTestUtils with TestHiveSingleto
|USING org.apache.spark.sql.parquet.DefaultSource
""".stripMargin)
// An empty sequence of row is returned for session temporary table.
- val message1 = intercept[AnalysisException] {
+ intercept[NoSuchTableException] {
sql("SHOW PARTITIONS parquet_temp")
- }.getMessage
- assert(message1.contains("is not allowed on a temporary table"))
+ }
- val message2 = intercept[AnalysisException] {
+ val message1 = intercept[AnalysisException] {
sql("SHOW PARTITIONS parquet_tab3")
}.getMessage
- assert(message2.contains("not allowed on a table that is not partitioned"))
+ assert(message1.contains("not allowed on a table that is not partitioned"))
- val message3 = intercept[AnalysisException] {
+ val message2 = intercept[AnalysisException] {
sql("SHOW PARTITIONS parquet_tab4 PARTITION(abcd=2015, xyz=1)")
}.getMessage
- assert(message3.contains("Non-partitioning column(s) [abcd, xyz] are specified"))
+ assert(message2.contains("Non-partitioning column(s) [abcd, xyz] are specified"))
- val message4 = intercept[AnalysisException] {
+ val message3 = intercept[AnalysisException] {
sql("SHOW PARTITIONS parquet_view1")
}.getMessage
- assert(message4.contains("is not allowed on a view"))
+ assert(message3.contains("is not allowed on a view"))
}
}
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala
index aa35a335fa..38482f66a3 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala
@@ -300,7 +300,7 @@ class HiveDDLSuite
sql(s"ALTER VIEW $viewName UNSET TBLPROPERTIES ('p')")
}.getMessage
assert(message.contains(
- "Attempted to unset non-existent property 'p' in table '`view1`'"))
+ "Attempted to unset non-existent property 'p' in table '`default`.`view1`'"))
}
}
}
@@ -678,8 +678,8 @@ class HiveDDLSuite
.createTempView(sourceViewName)
sql(s"CREATE TABLE $targetTabName LIKE $sourceViewName")
- val sourceTable = spark.sessionState.catalog.getTableMetadata(
- TableIdentifier(sourceViewName, None))
+ val sourceTable =
+ spark.sessionState.catalog.getTempViewOrPermanentTableMetadata(sourceViewName)
val targetTable = spark.sessionState.catalog.getTableMetadata(
TableIdentifier(targetTabName, Some("default")))
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLViewSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLViewSuite.scala
index bc999d4724..a215c70da0 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLViewSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLViewSuite.scala
@@ -82,25 +82,53 @@ class SQLViewSuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
}
}
- test("error handling: insert/load/truncate table commands against a temp view") {
+ test("Issue exceptions for ALTER VIEW on the temporary view") {
val viewName = "testView"
withTempView(viewName) {
- sql(s"CREATE TEMPORARY VIEW $viewName AS SELECT id FROM jt")
- var e = intercept[AnalysisException] {
+ spark.range(10).createTempView(viewName)
+ assertNoSuchTable(s"ALTER VIEW $viewName SET TBLPROPERTIES ('p' = 'an')")
+ assertNoSuchTable(s"ALTER VIEW $viewName UNSET TBLPROPERTIES ('p')")
+ }
+ }
+
+ test("Issue exceptions for ALTER TABLE on the temporary view") {
+ val viewName = "testView"
+ withTempView(viewName) {
+ spark.range(10).createTempView(viewName)
+ assertNoSuchTable(s"ALTER TABLE $viewName SET SERDE 'whatever'")
+ assertNoSuchTable(s"ALTER TABLE $viewName PARTITION (a=1, b=2) SET SERDE 'whatever'")
+ assertNoSuchTable(s"ALTER TABLE $viewName SET SERDEPROPERTIES ('p' = 'an')")
+ assertNoSuchTable(s"ALTER TABLE $viewName SET LOCATION '/path/to/your/lovely/heart'")
+ assertNoSuchTable(s"ALTER TABLE $viewName PARTITION (a='4') SET LOCATION '/path/to/home'")
+ assertNoSuchTable(s"ALTER TABLE $viewName ADD IF NOT EXISTS PARTITION (a='4', b='8')")
+ assertNoSuchTable(s"ALTER TABLE $viewName DROP PARTITION (a='4', b='8')")
+ assertNoSuchTable(s"ALTER TABLE $viewName PARTITION (a='4') RENAME TO PARTITION (a='5')")
+ assertNoSuchTable(s"ALTER TABLE $viewName RECOVER PARTITIONS")
+ }
+ }
+
+ test("Issue exceptions for other table DDL on the temporary view") {
+ val viewName = "testView"
+ withTempView(viewName) {
+ spark.range(10).createTempView(viewName)
+
+ val e = intercept[AnalysisException] {
sql(s"INSERT INTO TABLE $viewName SELECT 1")
}.getMessage
assert(e.contains("Inserting into an RDD-based table is not allowed"))
val testData = hiveContext.getHiveFile("data/files/employee.dat").getCanonicalPath
- e = intercept[AnalysisException] {
- sql(s"""LOAD DATA LOCAL INPATH "$testData" INTO TABLE $viewName""")
- }.getMessage
- assert(e.contains(s"Target table in LOAD DATA cannot be temporary: `$viewName`"))
+ assertNoSuchTable(s"""LOAD DATA LOCAL INPATH "$testData" INTO TABLE $viewName""")
+ assertNoSuchTable(s"TRUNCATE TABLE $viewName")
+ assertNoSuchTable(s"SHOW CREATE TABLE $viewName")
+ assertNoSuchTable(s"SHOW PARTITIONS $viewName")
+ assertNoSuchTable(s"ANALYZE TABLE $viewName COMPUTE STATISTICS")
+ }
+ }
- e = intercept[AnalysisException] {
- sql(s"TRUNCATE TABLE $viewName")
- }.getMessage
- assert(e.contains(s"Operation not allowed: TRUNCATE TABLE on temporary tables: `$viewName`"))
+ private def assertNoSuchTable(query: String): Unit = {
+ intercept[NoSuchTableException] {
+ sql(query)
}
}
@@ -117,12 +145,12 @@ class SQLViewSuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
e = intercept[AnalysisException] {
sql(s"""LOAD DATA LOCAL INPATH "$testData" INTO TABLE $viewName""")
}.getMessage
- assert(e.contains(s"Target table in LOAD DATA cannot be a view: `$viewName`"))
+ assert(e.contains(s"Target table in LOAD DATA cannot be a view: `default`.`testview`"))
e = intercept[AnalysisException] {
sql(s"TRUNCATE TABLE $viewName")
}.getMessage
- assert(e.contains(s"Operation not allowed: TRUNCATE TABLE on views: `$viewName`"))
+ assert(e.contains(s"Operation not allowed: TRUNCATE TABLE on views: `default`.`testview`"))
}
}
@@ -277,13 +305,8 @@ class SQLViewSuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
}
test("should not allow ALTER VIEW AS when the view does not exist") {
- intercept[NoSuchTableException](
- sql("ALTER VIEW testView AS SELECT 1, 2")
- )
-
- intercept[NoSuchTableException](
- sql("ALTER VIEW default.testView AS SELECT 1, 2")
- )
+ assertNoSuchTable("ALTER VIEW testView AS SELECT 1, 2")
+ assertNoSuchTable("ALTER VIEW default.testView AS SELECT 1, 2")
}
test("ALTER VIEW AS should try to alter temp view first if view name has no database part") {