author    Dilip Biswal <dbiswal@us.ibm.com>    2016-04-05 08:41:59 +0200
committer Herman van Hovell <hvanhovell@questtec.nl>    2016-04-05 08:41:59 +0200
commit    2715bc68bd1661d207b1af5f44ae8d02aec9d4ec (patch)
tree      60a4b9d0a7a98db115f21af890e550f66fb49c44 /sql/core
parent    064623014e0d6dfb0376722f24e81027fde649de (diff)
[SPARK-14348][SQL] Support native execution of SHOW TBLPROPERTIES command
## What changes were proposed in this pull request?

This PR adds native execution of the SHOW TBLPROPERTIES command.

Command syntax:
``` SQL
SHOW TBLPROPERTIES table_name[(property_key_literal)]
```

## How was this patch tested?

Tests added in HiveCommandSuite and DDLCommandSuite.

Author: Dilip Biswal <dbiswal@us.ibm.com>

Closes #12133 from dilipbiswal/dkb_show_tblproperties.
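For context, a minimal usage sketch of the two command forms (the table and property names are hypothetical, and `sqlContext` is assumed to be an initialized SQLContext):

```scala
// With no key, every table property is returned as (key, value) rows:
sqlContext.sql("SHOW TBLPROPERTIES tab1").show()

// With a quoted key, only that property's value is returned:
sqlContext.sql("SHOW TBLPROPERTIES tab1('created.by')").show()
```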
Diffstat (limited to 'sql/core')
-rw-r--r--  sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala          | 37
-rw-r--r--  sql/core/src/main/scala/org/apache/spark/sql/execution/command/commands.scala        | 44
-rw-r--r--  sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLCommandSuite.scala |  8
3 files changed, 81 insertions(+), 8 deletions(-)
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
index ff3ab7746c..fb106d1aef 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
@@ -93,6 +93,22 @@ class SparkSqlAstBuilder extends AstBuilder {
}
/**
+ * A command for users to list the properties for a table. If propertyKey is specified, the value
+ * for the propertyKey is returned. If propertyKey is not specified, all the keys and their
+ * corresponding values are returned.
+ * The syntax of using this command in SQL is:
+ * {{{
+ * SHOW TBLPROPERTIES table_name[('propertyKey')];
+ * }}}
+ */
+ override def visitShowTblProperties(
+ ctx: ShowTblPropertiesContext): LogicalPlan = withOrigin(ctx) {
+ ShowTablePropertiesCommand(
+ visitTableIdentifier(ctx.tableIdentifier),
+ Option(ctx.key).map(visitTablePropertyKey))
+ }
+
+ /**
* Create a [[RefreshTable]] logical plan.
*/
override def visitRefreshTable(ctx: RefreshTableContext): LogicalPlan = withOrigin(ctx) {
@@ -220,19 +236,26 @@ class SparkSqlAstBuilder extends AstBuilder {
override def visitTablePropertyList(
ctx: TablePropertyListContext): Map[String, String] = withOrigin(ctx) {
ctx.tableProperty.asScala.map { property =>
- // A key can either be a String or a collection of dot separated elements. We need to treat
- // these differently.
- val key = if (property.key.STRING != null) {
- string(property.key.STRING)
- } else {
- property.key.getText
- }
+ val key = visitTablePropertyKey(property.key)
val value = Option(property.value).map(string).orNull
key -> value
}.toMap
}
/**
+ * A table property key can either be a String or a collection of dot-separated elements. This
+ * function extracts the property key based on whether it's a string literal or a table property
+ * identifier.
+ */
+ override def visitTablePropertyKey(key: TablePropertyKeyContext): String = {
+ if (key.STRING != null) {
+ string(key.STRING)
+ } else {
+ key.getText
+ }
+ }
+
+ /**
* Create a [[CreateDatabase]] command.
*
* For example:
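A brief sketch of what the refactored visitTablePropertyKey means at the SQL level (assuming a SparkSqlParser instance named `parser`, as in DDLCommandSuite):

```scala
// Both spellings of a property key should resolve to the same string:
// a quoted literal is unescaped via string(key.STRING), while a
// dot-separated identifier is taken verbatim via key.getText.
val fromLiteral    = parser.parsePlan("SHOW TBLPROPERTIES tab1('created.by.user')")
val fromIdentifier = parser.parsePlan("SHOW TBLPROPERTIES tab1(created.by.user)")
// Both are expected to equal:
// ShowTablePropertiesCommand(TableIdentifier("tab1", None), Some("created.by.user"))
```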
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/commands.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/commands.scala
index 4eb8d7ff0d..a4be3bc333 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/commands.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/commands.scala
@@ -21,7 +21,7 @@ import java.util.NoSuchElementException
import org.apache.spark.internal.Logging
import org.apache.spark.rdd.RDD
-import org.apache.spark.sql.{Dataset, Row, SQLContext}
+import org.apache.spark.sql.{AnalysisException, Dataset, Row, SQLContext}
import org.apache.spark.sql.catalyst.{CatalystTypeConverters, InternalRow, TableIdentifier}
import org.apache.spark.sql.catalyst.errors.TreeNodeException
import org.apache.spark.sql.catalyst.expressions.{Attribute, AttributeReference}
@@ -381,6 +381,48 @@ case class ShowDatabasesCommand(databasePattern: Option[String]) extends Runnabl
}
/**
+ * A command for users to list the properties for a table. If propertyKey is specified, the value
+ * for the propertyKey is returned. If propertyKey is not specified, all the keys and their
+ * corresponding values are returned.
+ * The syntax of using this command in SQL is:
+ * {{{
+ * SHOW TBLPROPERTIES table_name[('propertyKey')];
+ * }}}
+ */
+case class ShowTablePropertiesCommand(
+ table: TableIdentifier,
+ propertyKey: Option[String]) extends RunnableCommand {
+
+ override val output: Seq[Attribute] = {
+ val schema = AttributeReference("value", StringType, nullable = false)() :: Nil
+ propertyKey match {
+ case None => AttributeReference("key", StringType, nullable = false)() :: schema
+ case _ => schema
+ }
+ }
+
+ override def run(sqlContext: SQLContext): Seq[Row] = {
+ val catalog = sqlContext.sessionState.catalog
+
+ if (catalog.isTemporaryTable(table)) {
+ Seq.empty[Row]
+ } else {
+ val catalogTable = sqlContext.sessionState.catalog.getTable(table)
+
+ propertyKey match {
+ case Some(p) =>
+ val propValue = catalogTable
+ .properties
+ .getOrElse(p, s"Table ${catalogTable.qualifiedName} does not have property: $p")
+ Seq(Row(propValue))
+ case None =>
+ catalogTable.properties.map(p => Row(p._1, p._2)).toSeq
+ }
+ }
+ }
+}
+
+/**
* A command for users to list all of the registered functions.
* The syntax of using this command in SQL is:
* {{{
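A quick sketch of the output schema switch above, constructing the command directly (same imports as commands.scala):

```scala
// With a property key, the result has a single "value" column:
val one = ShowTablePropertiesCommand(TableIdentifier("tab1"), Some("p1"))
one.output.map(_.name)  // Seq("value")

// Without a key, the result has "key" and "value" columns:
val all = ShowTablePropertiesCommand(TableIdentifier("tab1"), None)
all.output.map(_.name)  // Seq("key", "value")
```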
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLCommandSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLCommandSuite.scala
index 458f36e832..8b2a5979e2 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLCommandSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLCommandSuite.scala
@@ -773,4 +773,12 @@ class DDLCommandSuite extends PlanTest {
comparePlans(parsed2, expected2)
}
+ test("show tblproperties") {
+ val parsed1 = parser.parsePlan("SHOW TBLPROPERTIES tab1")
+ val expected1 = ShowTablePropertiesCommand(TableIdentifier("tab1", None), None)
+ val parsed2 = parser.parsePlan("SHOW TBLPROPERTIES tab1('propKey1')")
+ val expected2 = ShowTablePropertiesCommand(TableIdentifier("tab1", None), Some("propKey1"))
+ comparePlans(parsed1, expected1)
+ comparePlans(parsed2, expected2)
+ }
}
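Two behavioral details of run() are worth noting, shown here as a hypothetical session (table names are made up, and the default database is assumed for the qualified name):

```scala
// For a temporary table, the command returns no rows rather than failing:
sqlContext.sql("SHOW TBLPROPERTIES temp_view").collect()
// Array()

// For a missing key on a catalog table, the message string is returned
// as the value instead of raising an error:
sqlContext.sql("SHOW TBLPROPERTIES tab1('no.such.key')").collect()
// Array([Table default.tab1 does not have property: no.such.key])
```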