about summary refs log tree commit diff
path: root/sql/catalyst/src
diff options
context:
space:
mode:
author Cheng Lian <lian@databricks.com> 2016-05-04 16:44:09 +0800
committer Cheng Lian <lian@databricks.com> 2016-05-04 16:44:09 +0800
commit f152fae306dc75565cb4648ee1211416d7c0bb23 (patch)
tree f5e7075d75968f037e8f670e1488bbb10a2c1c33 /sql/catalyst/src
parent 6c12e801e84565265d2939b920901d1805d5935f (diff)
downloadspark-f152fae306dc75565cb4648ee1211416d7c0bb23.tar.gz
spark-f152fae306dc75565cb4648ee1211416d7c0bb23.tar.bz2
spark-f152fae306dc75565cb4648ee1211416d7c0bb23.zip
[SPARK-14127][SQL] Native "DESC [EXTENDED | FORMATTED] <table>" DDL command
## What changes were proposed in this pull request? This PR implements native `DESC [EXTENDED | FORMATTED] <table>` DDL command. Sample output: ``` scala> spark.sql("desc extended src").show(100, truncate = false) +----------------------------+---------------------------------+-------+ |col_name |data_type |comment| +----------------------------+---------------------------------+-------+ |key |int | | |value |string | | | | | | |# Detailed Table Information|CatalogTable(`default`.`src`, ...| | +----------------------------+---------------------------------+-------+ scala> spark.sql("desc formatted src").show(100, truncate = false) +----------------------------+----------------------------------------------------------+-------+ |col_name |data_type |comment| +----------------------------+----------------------------------------------------------+-------+ |key |int | | |value |string | | | | | | |# Detailed Table Information| | | |Database: |default | | |Owner: |lian | | |Create Time: |Mon Jan 04 17:06:00 CST 2016 | | |Last Access Time: |Thu Jan 01 08:00:00 CST 1970 | | |Location: |hdfs://localhost:9000/user/hive/warehouse_hive121/src | | |Table Type: |MANAGED | | |Table Parameters: | | | | transient_lastDdlTime |1451898360 | | | | | | |# Storage Information | | | |SerDe Library: |org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe | | |InputFormat: |org.apache.hadoop.mapred.TextInputFormat | | |OutputFormat: |org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat| | |Num Buckets: |-1 | | |Bucket Columns: |[] | | |Sort Columns: |[] | | |Storage Desc Parameters: | | | | serialization.format |1 | | +----------------------------+----------------------------------------------------------+-------+ ``` ## How was this patch tested? A test case is added to `HiveDDLSuite` to check command output. Author: Cheng Lian <lian@databricks.com> Closes #12844 from liancheng/spark-14127-desc-table.
Diffstat (limited to 'sql/catalyst/src')
-rw-r--r-- sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/interface.scala | 5
-rw-r--r-- sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/ExternalCatalogSuite.scala | 1
2 files changed, 5 insertions(+), 1 deletion(-)
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/interface.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/interface.scala
index 3851e4c706..2c6e9f53b2 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/interface.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/interface.scala
@@ -48,6 +48,7 @@ case class CatalogStorageFormat(
inputFormat: Option[String],
outputFormat: Option[String],
serde: Option[String],
+ compressed: Boolean,
serdeProperties: Map[String, String])
@@ -89,6 +90,7 @@ case class CatalogTable(
sortColumnNames: Seq[String] = Seq.empty,
bucketColumnNames: Seq[String] = Seq.empty,
numBuckets: Int = -1,
+ owner: String = "",
createTime: Long = System.currentTimeMillis,
lastAccessTime: Long = -1,
properties: Map[String, String] = Map.empty,
@@ -123,10 +125,11 @@ case class CatalogTable(
locationUri: Option[String] = storage.locationUri,
inputFormat: Option[String] = storage.inputFormat,
outputFormat: Option[String] = storage.outputFormat,
+ compressed: Boolean = false,
serde: Option[String] = storage.serde,
serdeProperties: Map[String, String] = storage.serdeProperties): CatalogTable = {
copy(storage = CatalogStorageFormat(
- locationUri, inputFormat, outputFormat, serde, serdeProperties))
+ locationUri, inputFormat, outputFormat, serde, compressed, serdeProperties))
}
}
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/ExternalCatalogSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/ExternalCatalogSuite.scala
index d739b17743..ae7c503e65 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/ExternalCatalogSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/ExternalCatalogSuite.scala
@@ -507,6 +507,7 @@ abstract class CatalogTestUtils {
inputFormat = Some(tableInputFormat),
outputFormat = Some(tableOutputFormat),
serde = None,
+ compressed = false,
serdeProperties = Map.empty)
lazy val part1 = CatalogTablePartition(Map("a" -> "1", "b" -> "2"), storageFormat)
lazy val part2 = CatalogTablePartition(Map("a" -> "3", "b" -> "4"), storageFormat)