path: root/sql/core/src
author      jiangxingbo <jiangxb1987@gmail.com>             2016-11-30 03:59:25 -0800
committer   Herman van Hovell <hvanhovell@databricks.com>   2016-11-30 03:59:25 -0800
commit      c24076dcf867f8d7bb328055ca817bc09ad0c1d1 (patch)
tree        844d600e0ed88f9189cd7d83a42c946d010d5e81 /sql/core/src
parent      2eb093decb5e87a1ea71bbaa28092876a8c84996 (diff)
[SPARK-17932][SQL] Support SHOW TABLES EXTENDED LIKE 'identifier_with_wildcards' statement
## What changes were proposed in this pull request?

Currently we haven't implemented `SHOW TABLE EXTENDED` in Spark 2.0. This PR implements the statement.

Goals:
1. Support `SHOW TABLES EXTENDED LIKE 'identifier_with_wildcards'`;
2. Explicitly output an unsupported error message for the `SHOW TABLES [EXTENDED] ... PARTITION` statement;
3. Improve test cases for the `SHOW TABLES` statement.

## How was this patch tested?

1. Add new test cases in file `show-tables.sql`.
2. Modify tests for `SHOW TABLES` in `DDLSuite`.

Author: jiangxingbo <jiangxb1987@gmail.com>

Closes #15958 from jiangxb1987/show-table-extended.
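For orientation, a minimal SQL sketch of the statement this patch adds, reusing the table names from the new `show-tables.sql` test file (the behavior matches the golden output further below):

-- Returns the usual database, tableName and isTemporary columns plus an extra
-- `information` column carrying the table metadata rendered as a string.
SHOW TABLES EXTENDED LIKE 'show_t*';

-- Both of the following are rejected at parse time, as exercised in the golden file:
-- SHOW TABLES EXTENDED;                                    -- the wildcard pattern is mandatory
-- SHOW TABLES EXTENDED LIKE 'show_t1' PARTITION(c='Us');   -- partition specs are unsupported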
Diffstat (limited to 'sql/core/src')
-rw-r--r--  sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala    |  14
-rw-r--r--  sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala    |  24
-rw-r--r--  sql/core/src/test/resources/sql-tests/inputs/show-tables.sql                   |  31
-rw-r--r--  sql/core/src/test/resources/sql-tests/results/show-tables.sql.out              | 187
-rw-r--r--  sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala  |  22
5 files changed, 255 insertions(+), 23 deletions(-)
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
index 5f89a229d6..ffd6b0146b 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
@@ -126,13 +126,23 @@ class SparkSqlAstBuilder(conf: SQLConf) extends AstBuilder {
* Create a [[ShowTablesCommand]] logical plan.
* Example SQL :
* {{{
- * SHOW TABLES [(IN|FROM) database_name] [[LIKE] 'identifier_with_wildcards'];
+ * SHOW TABLES [EXTENDED] [(IN|FROM) database_name] [[LIKE] 'identifier_with_wildcards']
+ * [PARTITION(partition_spec)];
* }}}
*/
override def visitShowTables(ctx: ShowTablesContext): LogicalPlan = withOrigin(ctx) {
+ if (ctx.partitionSpec != null) {
+ operationNotAllowed("SHOW TABLES [EXTENDED] ... PARTITION", ctx)
+ }
+ if (ctx.EXTENDED != null && ctx.pattern == null) {
+ throw new AnalysisException(
+ s"SHOW TABLES EXTENDED must have identifier_with_wildcards specified.")
+ }
+
ShowTablesCommand(
Option(ctx.db).map(_.getText),
- Option(ctx.pattern).map(string))
+ Option(ctx.pattern).map(string),
+ ctx.EXTENDED != null)
}
/**
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala
index 57d66f1f14..dc0720d78d 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala
@@ -590,18 +590,25 @@ case class DescribeTableCommand(
* If a databaseName is not given, the current database will be used.
* The syntax of using this command in SQL is:
* {{{
- * SHOW TABLES [(IN|FROM) database_name] [[LIKE] 'identifier_with_wildcards'];
+ * SHOW TABLES [EXTENDED] [(IN|FROM) database_name] [[LIKE] 'identifier_with_wildcards'];
* }}}
*/
case class ShowTablesCommand(
databaseName: Option[String],
- tableIdentifierPattern: Option[String]) extends RunnableCommand {
+ tableIdentifierPattern: Option[String],
+ isExtended: Boolean = false) extends RunnableCommand {
- // The result of SHOW TABLES has three columns: database, tableName and isTemporary.
+ // The result of SHOW TABLES has three basic columns: database, tableName and isTemporary.
+ // If `isExtended` is true, append column `information` to the output columns.
override val output: Seq[Attribute] = {
+ val tableExtendedInfo = if (isExtended) {
+ AttributeReference("information", StringType, nullable = false)() :: Nil
+ } else {
+ Nil
+ }
AttributeReference("database", StringType, nullable = false)() ::
AttributeReference("tableName", StringType, nullable = false)() ::
- AttributeReference("isTemporary", BooleanType, nullable = false)() :: Nil
+ AttributeReference("isTemporary", BooleanType, nullable = false)() :: tableExtendedInfo
}
override def run(sparkSession: SparkSession): Seq[Row] = {
@@ -612,8 +619,15 @@ case class ShowTablesCommand(
val tables =
tableIdentifierPattern.map(catalog.listTables(db, _)).getOrElse(catalog.listTables(db))
tables.map { tableIdent =>
+ val database = tableIdent.database.getOrElse("")
+ val tableName = tableIdent.table
val isTemp = catalog.isTemporaryTable(tableIdent)
- Row(tableIdent.database.getOrElse(""), tableIdent.table, isTemp)
+ if (isExtended) {
+ val information = catalog.getTempViewOrPermanentTableMetadata(tableIdent).toString
+ Row(database, tableName, isTemp, s"${information}\n")
+ } else {
+ Row(database, tableName, isTemp)
+ }
}
}
}
diff --git a/sql/core/src/test/resources/sql-tests/inputs/show-tables.sql b/sql/core/src/test/resources/sql-tests/inputs/show-tables.sql
new file mode 100644
index 0000000000..a16c39819a
--- /dev/null
+++ b/sql/core/src/test/resources/sql-tests/inputs/show-tables.sql
@@ -0,0 +1,31 @@
+-- Test data.
+CREATE DATABASE showdb;
+USE showdb;
+CREATE TABLE show_t1(a String, b Int) PARTITIONED BY (c String, d String);
+ALTER TABLE show_t1 ADD PARTITION (c='Us', d=1);
+CREATE TABLE show_t2(b String, d Int);
+CREATE TEMPORARY VIEW show_t3(e int) USING parquet;
+CREATE GLOBAL TEMP VIEW show_t4 AS SELECT 1 as col1;
+
+-- SHOW TABLES
+SHOW TABLES;
+SHOW TABLES IN showdb;
+
+-- SHOW TABLES WITH wildcard match
+SHOW TABLES 'show_t*';
+SHOW TABLES LIKE 'show_t1*|show_t2*';
+SHOW TABLES IN showdb 'show_t*';
+
+-- SHOW TABLES EXTENDED
+-- Ignore these because there exist timestamp results, e.g. `Created`.
+-- SHOW TABLES EXTENDED LIKE 'show_t*';
+SHOW TABLES EXTENDED;
+SHOW TABLES EXTENDED LIKE 'show_t1' PARTITION(c='Us');
+
+-- Clean Up
+DROP TABLE show_t1;
+DROP TABLE show_t2;
+DROP VIEW show_t3;
+DROP VIEW global_temp.show_t4;
+USE default;
+DROP DATABASE showdb;
diff --git a/sql/core/src/test/resources/sql-tests/results/show-tables.sql.out b/sql/core/src/test/resources/sql-tests/results/show-tables.sql.out
new file mode 100644
index 0000000000..a4f411258d
--- /dev/null
+++ b/sql/core/src/test/resources/sql-tests/results/show-tables.sql.out
@@ -0,0 +1,187 @@
+-- Automatically generated by SQLQueryTestSuite
+-- Number of queries: 20
+
+
+-- !query 0
+CREATE DATABASE showdb
+-- !query 0 schema
+struct<>
+-- !query 0 output
+
+
+
+-- !query 1
+USE showdb
+-- !query 1 schema
+struct<>
+-- !query 1 output
+
+
+
+-- !query 2
+CREATE TABLE show_t1(a String, b Int) PARTITIONED BY (c String, d String)
+-- !query 2 schema
+struct<>
+-- !query 2 output
+
+
+
+-- !query 3
+ALTER TABLE show_t1 ADD PARTITION (c='Us', d=1)
+-- !query 3 schema
+struct<>
+-- !query 3 output
+
+
+
+-- !query 4
+CREATE TABLE show_t2(b String, d Int)
+-- !query 4 schema
+struct<>
+-- !query 4 output
+
+
+
+-- !query 5
+CREATE TEMPORARY VIEW show_t3(e int) USING parquet
+-- !query 5 schema
+struct<>
+-- !query 5 output
+
+
+
+-- !query 6
+CREATE GLOBAL TEMP VIEW show_t4 AS SELECT 1 as col1
+-- !query 6 schema
+struct<>
+-- !query 6 output
+
+
+
+-- !query 7
+SHOW TABLES
+-- !query 7 schema
+struct<database:string,tableName:string,isTemporary:boolean>
+-- !query 7 output
+arraydata
+mapdata
+show_t1
+show_t2
+show_t3
+testdata
+
+
+-- !query 8
+SHOW TABLES IN showdb
+-- !query 8 schema
+struct<database:string,tableName:string,isTemporary:boolean>
+-- !query 8 output
+arraydata
+mapdata
+show_t1
+show_t2
+show_t3
+testdata
+
+
+-- !query 9
+SHOW TABLES 'show_t*'
+-- !query 9 schema
+struct<database:string,tableName:string,isTemporary:boolean>
+-- !query 9 output
+show_t1
+show_t2
+show_t3
+
+
+-- !query 10
+SHOW TABLES LIKE 'show_t1*|show_t2*'
+-- !query 10 schema
+struct<database:string,tableName:string,isTemporary:boolean>
+-- !query 10 output
+show_t1
+show_t2
+
+
+-- !query 11
+SHOW TABLES IN showdb 'show_t*'
+-- !query 11 schema
+struct<database:string,tableName:string,isTemporary:boolean>
+-- !query 11 output
+show_t1
+show_t2
+show_t3
+
+
+-- !query 12
+SHOW TABLES EXTENDED
+-- !query 12 schema
+struct<>
+-- !query 12 output
+org.apache.spark.sql.catalyst.parser.ParseException
+
+SHOW TABLES EXTENDED must have identifier_with_wildcards specified.
+== SQL ==
+SHOW TABLES EXTENDED
+
+
+-- !query 13
+SHOW TABLES EXTENDED LIKE 'show_t1' PARTITION(c='Us')
+-- !query 13 schema
+struct<>
+-- !query 13 output
+org.apache.spark.sql.catalyst.parser.ParseException
+
+Operation not allowed: SHOW TABLES [EXTENDED] ... PARTITION(line 1, pos 0)
+
+== SQL ==
+SHOW TABLES EXTENDED LIKE 'show_t1' PARTITION(c='Us')
+^^^
+
+
+-- !query 14
+DROP TABLE show_t1
+-- !query 14 schema
+struct<>
+-- !query 14 output
+
+
+
+-- !query 15
+DROP TABLE show_t2
+-- !query 15 schema
+struct<>
+-- !query 15 output
+
+
+
+-- !query 16
+DROP VIEW show_t3
+-- !query 16 schema
+struct<>
+-- !query 16 output
+
+
+
+-- !query 17
+DROP VIEW global_temp.show_t4
+-- !query 17 schema
+struct<>
+-- !query 17 output
+
+
+
+-- !query 18
+USE default
+-- !query 18 schema
+struct<>
+-- !query 18 output
+
+
+
+-- !query 19
+DROP DATABASE showdb
+-- !query 19 schema
+struct<>
+-- !query 19 output
+
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
index a602d750d7..2a004ba2f1 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
@@ -901,24 +901,14 @@ class DDLSuite extends QueryTest with SharedSQLContext with BeforeAndAfterEach {
| Table 'test1'
|)
""".stripMargin)
- checkAnswer(
- sql("SHOW TABLES IN default 'show1*'"),
- Row("", "show1a", true) :: Nil)
-
- checkAnswer(
- sql("SHOW TABLES IN default 'show1*|show2*'"),
- Row("", "show1a", true) ::
- Row("", "show2b", true) :: Nil)
-
- checkAnswer(
- sql("SHOW TABLES 'show1*|show2*'"),
- Row("", "show1a", true) ::
- Row("", "show2b", true) :: Nil)
-
assert(
- sql("SHOW TABLES").count() >= 2)
+ sql("SHOW TABLES EXTENDED LIKE 'show*'").count() >= 2)
assert(
- sql("SHOW TABLES IN default").count() >= 2)
+ sql("SHOW TABLES EXTENDED LIKE 'show*'").schema ==
+ StructType(StructField("database", StringType, false) ::
+ StructField("tableName", StringType, false) ::
+ StructField("isTemporary", BooleanType, false) ::
+ StructField("information", StringType, false) :: Nil))
}
}