diff options
author | Dongjoon Hyun <dongjoon@apache.org> | 2016-07-14 17:18:34 +0200 |
---|---|---|
committer | Herman van Hovell <hvanhovell@databricks.com> | 2016-07-14 17:18:34 +0200 |
commit | 56183b84fb64ea13977d89ec55a9dd3997b4dacf (patch) | |
tree | 041dc3bc4a343404ec8d4d1f5124df0691ba1eb2 | |
parent | 1b5c9e52a7d5cdd3b4da1334ddff0518a8e14505 (diff) | |
download | spark-56183b84fb64ea13977d89ec55a9dd3997b4dacf.tar.gz spark-56183b84fb64ea13977d89ec55a9dd3997b4dacf.tar.bz2 spark-56183b84fb64ea13977d89ec55a9dd3997b4dacf.zip |
[SPARK-16543][SQL] Rename the columns of `SHOW PARTITION/COLUMNS` commands
## What changes were proposed in this pull request?
This PR changes the name of the columns returned by the `SHOW PARTITIONS` and `SHOW COLUMNS` commands. Currently, both commands use `result` as the column name.
**Comparison: Column Name**
Command|Spark(Before)|Spark(After)|Hive
----------|--------------|------------|-----
SHOW PARTITIONS|result|partition|partition
SHOW COLUMNS|result|col_name|field
Note that Spark/Hive uses `col_name` in `DESC TABLE`. So, this PR chooses `col_name` for consistency among Spark commands.
**Before**
```scala
scala> sql("show partitions p").show()
+------+
|result|
+------+
| b=2|
+------+
scala> sql("show columns in p").show()
+------+
|result|
+------+
| a|
| b|
+------+
```
**After**
```scala
scala> sql("show partitions p").show
+---------+
|partition|
+---------+
| b=2|
+---------+
scala> sql("show columns in p").show
+--------+
|col_name|
+--------+
| a|
| b|
+--------+
```
## How was this patch tested?
Manual.
Author: Dongjoon Hyun <dongjoon@apache.org>
Closes #14199 from dongjoon-hyun/SPARK-16543.
-rw-r--r-- | sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala | 6 |
1 file changed, 2 insertions, 4 deletions
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala
index 6651c33a3a..6e52a4609a 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala
@@ -622,9 +622,8 @@ case class ShowTablePropertiesCommand(table: TableIdentifier, propertyKey: Optio
  * }}}
  */
 case class ShowColumnsCommand(table: TableIdentifier) extends RunnableCommand {
-  // The result of SHOW COLUMNS has one column called 'result'
   override val output: Seq[Attribute] = {
-    AttributeReference("result", StringType, nullable = false)() :: Nil
+    AttributeReference("col_name", StringType, nullable = false)() :: Nil
   }

   override def run(sparkSession: SparkSession): Seq[Row] = {
@@ -652,9 +651,8 @@ case class ShowColumnsCommand(table: TableIdentifier) extends RunnableCommand {
 case class ShowPartitionsCommand(
     table: TableIdentifier,
     spec: Option[TablePartitionSpec]) extends RunnableCommand {
-  // The result of SHOW PARTITIONS has one column called 'result'
   override val output: Seq[Attribute] = {
-    AttributeReference("result", StringType, nullable = false)() :: Nil
+    AttributeReference("partition", StringType, nullable = false)() :: Nil
   }

   private def getPartName(spec: TablePartitionSpec, partColNames: Seq[String]): String = {