diff options
author | Wenchen Fan <wenchen@databricks.com> | 2016-08-23 23:46:09 -0700 |
---|---|---|
committer | Reynold Xin <rxin@databricks.com> | 2016-08-23 23:46:09 -0700 |
commit | 52fa45d62a5a0bc832442f38f9e634c5d8e29e08 (patch) | |
tree | 7917979e885c84c835507a63a5324d9eafd2bec0 /sql/core | |
parent | b9994ad05628077016331e6b411fbc09017b1e63 (diff) | |
download | spark-52fa45d62a5a0bc832442f38f9e634c5d8e29e08.tar.gz spark-52fa45d62a5a0bc832442f38f9e634c5d8e29e08.tar.bz2 spark-52fa45d62a5a0bc832442f38f9e634c5d8e29e08.zip |
[SPARK-17186][SQL] remove catalog table type INDEX
## What changes were proposed in this pull request?
Spark SQL doesn't actually support indexes; the catalog table type `INDEX` comes from Hive. However, most operations in Spark SQL can't handle index tables, e.g. create table, alter table, etc.
Logically, index tables should be invisible to end users, and Hive also generates special table names for index tables to keep users from accessing them directly. Hive has special SQL syntax to create/show/drop index tables.
On the Spark SQL side, although we can describe an index table directly, the result is unreadable; we should use the dedicated SQL syntax to do it (e.g. `SHOW INDEX ON tbl`). Spark SQL can also read an index table directly, but the result is always empty. (Can Hive read index tables directly?)
This PR removes the table type `INDEX`, to make it clear that Spark SQL doesn't currently support indexes.
## How was this patch tested?
existing tests.
Author: Wenchen Fan <wenchen@databricks.com>
Closes #14752 from cloud-fan/minor2.
Diffstat (limited to 'sql/core')
-rw-r--r-- | sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala | 8 |
1 files changed, 3 insertions, 5 deletions
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala index 21544a37d9..b4a15b8b28 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala @@ -620,12 +620,11 @@ case class ShowPartitionsCommand( * Validate and throws an [[AnalysisException]] exception under the following conditions: * 1. If the table is not partitioned. * 2. If it is a datasource table. - * 3. If it is a view or index table. + * 3. If it is a view. */ - if (tab.tableType == VIEW || - tab.tableType == INDEX) { + if (tab.tableType == VIEW) { throw new AnalysisException( - s"SHOW PARTITIONS is not allowed on a view or index table: ${tab.qualifiedName}") + s"SHOW PARTITIONS is not allowed on a view: ${tab.qualifiedName}") } if (tab.partitionColumnNames.isEmpty) { @@ -708,7 +707,6 @@ case class ShowCreateTableCommand(table: TableIdentifier) extends RunnableComman case EXTERNAL => " EXTERNAL TABLE" case VIEW => " VIEW" case MANAGED => " TABLE" - case INDEX => reportUnsupportedError(Seq("index table")) } builder ++= s"CREATE$tableTypeString ${table.quotedString}" |