author     Yin Huai <yhuai@databricks.com>    2015-02-16 15:59:23 -0800
committer  Michael Armbrust <michael@databricks.com>    2015-02-16 15:59:23 -0800
commit     e189cbb052d59eb499dd4312403925fdd72f5718 (patch)
tree       a7e2424be5efbc3b45a0efb6ee7c987a3fc6c1b1 /sql/hive
parent     cb6c48c874af2bd78ee73c1dc8a44fd28ecc0991 (diff)
[SPARK-4865][SQL]Include temporary tables in SHOW TABLES
This PR adds a `ShowTablesCommand` to support the `SHOW TABLES [IN databaseName]` SQL command. The result of `SHOW TABLES` has two columns, `tableName` and `isTemporary`. For temporary tables, the value of the `isTemporary` column is `true`.

JIRA: https://issues.apache.org/jira/browse/SPARK-4865

Author: Yin Huai <yhuai@databricks.com>

Closes #4618 from yhuai/showTablesCommand and squashes the following commits:

0c09791 [Yin Huai] Use ShowTablesCommand.
85ee76d [Yin Huai] Since SHOW TABLES is no longer a Hive native command and we will not see "OK" (originally generated by Hive's driver), use SHOW DATABASES in the test.
94bacac [Yin Huai] Add SHOW TABLES to the list of noExplainCommands.
d71ed09 [Yin Huai] Fix test.
a4a6ec3 [Yin Huai] Add SHOW TABLE command.
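As a rough usage sketch (not part of this patch; `hiveCtx` and `df` below are placeholder names for an existing HiveContext and DataFrame), the new command can be exercised as follows:

// Sketch only: hiveCtx is an existing HiveContext, df is any existing DataFrame.
// A temporary table exists only in this context's catalog, not in the metastore.
df.registerTempTable("people_temp")

// SHOW TABLES is now handled by ShowTablesCommand instead of Hive's driver,
// so temporary tables are included; the result has two columns:
//   tableName: String, isTemporary: Boolean
hiveCtx.sql("SHOW TABLES").show()

// Restricting to a database still works; temporary tables are listed alongside it.
hiveCtx.sql("SHOW TABLES IN default").show()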
Diffstat (limited to 'sql/hive')
-rw-r--r--  sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveMetastoreCatalog.scala    10
-rw-r--r--  sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveQl.scala                   2
-rw-r--r--  sql/hive/src/test/scala/org/apache/spark/sql/hive/ListTablesSuite.scala         46
-rw-r--r--  sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQuerySuite.scala 14
4 files changed, 41 insertions, 31 deletions
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveMetastoreCatalog.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveMetastoreCatalog.scala
index 87bc9fe4fe..0e43faa8af 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveMetastoreCatalog.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveMetastoreCatalog.scala
@@ -220,8 +220,14 @@ private[hive] class HiveMetastoreCatalog(hive: HiveContext) extends Catalog with
}
override def getTables(databaseName: Option[String]): Seq[(String, Boolean)] = {
- val dbName = databaseName.getOrElse(hive.sessionState.getCurrentDatabase)
- client.getAllTables(dbName).map(tableName => (tableName, false))
+ val dbName = if (!caseSensitive) {
+ if (databaseName.isDefined) Some(databaseName.get.toLowerCase) else None
+ } else {
+ databaseName
+ }
+ val db = dbName.getOrElse(hive.sessionState.getCurrentDatabase)
+
+ client.getAllTables(db).map(tableName => (tableName, false))
}
/**
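A minimal, self-contained sketch of the database-name normalization that the updated `getTables` performs (the function name and parameters below are stand-ins for this illustration, not part of the patch):

// Sketch only: mirrors the lowercasing fallback logic shown above.
def resolveDbName(databaseName: Option[String],
                  caseSensitive: Boolean,
                  currentDb: String): String = {
  val normalized =
    if (!caseSensitive) databaseName.map(_.toLowerCase) else databaseName
  normalized.getOrElse(currentDb)
}

// Examples:
resolveDbName(Some("ListTablesSuiteDB"), caseSensitive = false, "default")  // "listtablessuitedb"
resolveDbName(None, caseSensitive = false, "default")                       // "default"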
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveQl.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveQl.scala
index 5a1825a87d..98263f602e 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveQl.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveQl.scala
@@ -64,7 +64,6 @@ private[hive] object HiveQl {
"TOK_SHOWINDEXES",
"TOK_SHOWINDEXES",
"TOK_SHOWPARTITIONS",
- "TOK_SHOWTABLES",
"TOK_SHOW_TBLPROPERTIES",
"TOK_LOCKTABLE",
@@ -129,6 +128,7 @@ private[hive] object HiveQl {
// Commands that we do not need to explain.
protected val noExplainCommands = Seq(
"TOK_DESCTABLE",
+ "TOK_SHOWTABLES",
"TOK_TRUNCATETABLE" // truncate table" is a NativeCommand, does not need to explain.
) ++ nativeCommands
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/ListTablesSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/ListTablesSuite.scala
index 321b784a3f..e12a6c21cc 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/ListTablesSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/ListTablesSuite.scala
@@ -49,29 +49,33 @@ class ListTablesSuite extends QueryTest with BeforeAndAfterAll {
}
test("get all tables of current database") {
- val allTables = tables()
- // We are using default DB.
- checkAnswer(
- allTables.filter("tableName = 'listtablessuitetable'"),
- Row("listtablessuitetable", true))
- assert(allTables.filter("tableName = 'indblisttablessuitetable'").count() === 0)
- checkAnswer(
- allTables.filter("tableName = 'hivelisttablessuitetable'"),
- Row("hivelisttablessuitetable", false))
- assert(allTables.filter("tableName = 'hiveindblisttablessuitetable'").count() === 0)
+ Seq(tables(), sql("SHOW TABLes")).foreach {
+ case allTables =>
+ // We are using default DB.
+ checkAnswer(
+ allTables.filter("tableName = 'listtablessuitetable'"),
+ Row("listtablessuitetable", true))
+ assert(allTables.filter("tableName = 'indblisttablessuitetable'").count() === 0)
+ checkAnswer(
+ allTables.filter("tableName = 'hivelisttablessuitetable'"),
+ Row("hivelisttablessuitetable", false))
+ assert(allTables.filter("tableName = 'hiveindblisttablessuitetable'").count() === 0)
+ }
}
test("getting all tables with a database name") {
- val allTables = tables("ListTablesSuiteDB")
- checkAnswer(
- allTables.filter("tableName = 'listtablessuitetable'"),
- Row("listtablessuitetable", true))
- checkAnswer(
- allTables.filter("tableName = 'indblisttablessuitetable'"),
- Row("indblisttablessuitetable", true))
- assert(allTables.filter("tableName = 'hivelisttablessuitetable'").count() === 0)
- checkAnswer(
- allTables.filter("tableName = 'hiveindblisttablessuitetable'"),
- Row("hiveindblisttablessuitetable", false))
+ Seq(tables("listtablessuiteDb"), sql("SHOW TABLes in listTablesSuitedb")).foreach {
+ case allTables =>
+ checkAnswer(
+ allTables.filter("tableName = 'listtablessuitetable'"),
+ Row("listtablessuitetable", true))
+ checkAnswer(
+ allTables.filter("tableName = 'indblisttablessuitetable'"),
+ Row("indblisttablessuitetable", true))
+ assert(allTables.filter("tableName = 'hivelisttablessuitetable'").count() === 0)
+ checkAnswer(
+ allTables.filter("tableName = 'hiveindblisttablessuitetable'"),
+ Row("hiveindblisttablessuitetable", false))
+ }
}
}
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQuerySuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQuerySuite.scala
index d01dbf80ef..955f3f51cf 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQuerySuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQuerySuite.scala
@@ -62,7 +62,7 @@ class HiveQuerySuite extends HiveComparisonTest with BeforeAndAfter {
test("SPARK-4908: concurrent hive native commands") {
(1 to 100).par.map { _ =>
sql("USE default")
- sql("SHOW TABLES")
+ sql("SHOW DATABASES")
}
}
@@ -630,24 +630,24 @@ class HiveQuerySuite extends HiveComparisonTest with BeforeAndAfter {
}
test("Query Hive native command execution result") {
- val tableName = "test_native_commands"
+ val databaseName = "test_native_commands"
assertResult(0) {
- sql(s"DROP TABLE IF EXISTS $tableName").count()
+ sql(s"DROP DATABASE IF EXISTS $databaseName").count()
}
assertResult(0) {
- sql(s"CREATE TABLE $tableName(key INT, value STRING)").count()
+ sql(s"CREATE DATABASE $databaseName").count()
}
assert(
- sql("SHOW TABLES")
+ sql("SHOW DATABASES")
.select('result)
.collect()
.map(_.getString(0))
- .contains(tableName))
+ .contains(databaseName))
- assert(isExplanation(sql(s"EXPLAIN SELECT key, COUNT(*) FROM $tableName GROUP BY key")))
+ assert(isExplanation(sql(s"EXPLAIN SELECT key, COUNT(*) FROM src GROUP BY key")))
TestHive.reset()
}