aboutsummaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorgatorsmile <gatorsmile@gmail.com>2016-11-09 00:11:48 -0800
committergatorsmile <gatorsmile@gmail.com>2016-11-09 00:11:48 -0800
commite256392a128c8fffa8abb86ab99224ae09b0e1ff (patch)
tree835e4344b65e5a4288768e4f31e2d0d53f9b70bc
parentb9192bb3ffc319ebee7dbd15c24656795e454749 (diff)
downloadspark-e256392a128c8fffa8abb86ab99224ae09b0e1ff.tar.gz
spark-e256392a128c8fffa8abb86ab99224ae09b0e1ff.tar.bz2
spark-e256392a128c8fffa8abb86ab99224ae09b0e1ff.zip
[SPARK-17659][SQL] Partitioned View is Not Supported By SHOW CREATE TABLE
### What changes were proposed in this pull request? `Partitioned View` is not supported by Spark SQL. For a Hive partitioned view, SHOW CREATE TABLE is unable to generate the right DDL. Thus, SHOW CREATE TABLE should not support it, like the other Hive-only features. This PR issues an exception when it detects that the view is a partitioned view. ### How was this patch tested? Added a test case Author: gatorsmile <gatorsmile@gmail.com> Closes #15233 from gatorsmile/partitionedView.
-rw-r--r--sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala2
-rw-r--r--sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClientImpl.scala4
-rw-r--r--sql/hive/src/test/scala/org/apache/spark/sql/hive/ShowCreateTableSuite.scala28
3 files changed, 33 insertions, 1 deletion
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala
index 3a856fa0f5..e49a1f5acd 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala
@@ -780,7 +780,7 @@ case class ShowCreateTableCommand(table: TableIdentifier) extends RunnableComman
private def showCreateHiveTable(metadata: CatalogTable): String = {
def reportUnsupportedError(features: Seq[String]): Unit = {
throw new AnalysisException(
- s"Failed to execute SHOW CREATE TABLE against table ${metadata.identifier.quotedString}, " +
+ s"Failed to execute SHOW CREATE TABLE against table/view ${metadata.identifier}, " +
"which is created by Hive and uses the following unsupported feature(s)\n" +
features.map(" - " + _).mkString("\n")
)
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClientImpl.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClientImpl.scala
index 84873bbbb8..2bf9a26b0b 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClientImpl.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClientImpl.scala
@@ -377,6 +377,10 @@ private[hive] class HiveClientImpl(
unsupportedFeatures += "bucketing"
}
+ if (h.getTableType == HiveTableType.VIRTUAL_VIEW && partCols.nonEmpty) {
+ unsupportedFeatures += "partitioned view"
+ }
+
val properties = Option(h.getParameters).map(_.asScala.toMap).orNull
CatalogTable(
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/ShowCreateTableSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/ShowCreateTableSuite.scala
index e925921165..68df809434 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/ShowCreateTableSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/ShowCreateTableSuite.scala
@@ -265,6 +265,34 @@ class ShowCreateTableSuite extends QueryTest with SQLTestUtils with TestHiveSing
}
}
+ test("hive partitioned view is not supported") {
+ withTable("t1") {
+ withView("v1") {
+ sql(
+ s"""
+ |CREATE TABLE t1 (c1 INT, c2 STRING)
+ |PARTITIONED BY (
+ | p1 BIGINT COMMENT 'bla',
+ | p2 STRING )
+ """.stripMargin)
+
+ createRawHiveTable(
+ s"""
+ |CREATE VIEW v1
+ |PARTITIONED ON (p1, p2)
+ |AS SELECT * from t1
+ """.stripMargin
+ )
+
+ val cause = intercept[AnalysisException] {
+ sql("SHOW CREATE TABLE v1")
+ }
+
+ assert(cause.getMessage.contains(" - partitioned view"))
+ }
+ }
+ }
+
private def createRawHiveTable(ddl: String): Unit = {
hiveContext.sharedState.externalCatalog.asInstanceOf[HiveExternalCatalog].client.runSqlHive(ddl)
}