author      gatorsmile <gatorsmile@gmail.com>    2016-06-23 09:12:20 +0800
committer   Wenchen Fan <wenchen@databricks.com>    2016-06-23 09:12:20 +0800
commit      9f990fa3f9e0b798d8018cf4132b93a3468f33bb (patch)
tree        1d03022a5bcd4ac0e98ddc8514deb9df34ef0a63 /sql/hive
parent      4f869f88ee96fa57be79f972f218111b6feac67f (diff)
[SPARK-16024][SQL][TEST] Verify Column Comment for Data Source Tables
#### What changes were proposed in this pull request?
This PR improves test coverage. It verifies that the `Comment` of a `Column` is handled appropriately. The test cases cover the relevant parts of the parser, both the SQL and the DataFrameWriter interfaces, and both the Hive Metastore catalog and the in-memory catalog.

#### How was this patch tested?
N/A

Author: gatorsmile <gatorsmile@gmail.com>

Closes #13764 from gatorsmile/dataSourceComment.
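The hunk shown below covers only the SQL / Hive Metastore side. As a minimal sketch of what the DataFrameWriter side of this coverage could look like (not part of this commit), the following hypothetical test attaches the column comment through `StructField` metadata under the `comment` key and checks the `DESC` output the same way. The table name `tab2` and the scaffolding (`test`, `withTable`, `checkAnswer`, `spark`) are assumed to come from the same suite helpers as in `HiveDDLSuite`; whether `DESC` surfaces metadata comments this way depends on the Spark version.

```scala
// Hypothetical sketch only -- not part of this patch.
import org.apache.spark.sql.Row
import org.apache.spark.sql.types.{IntegerType, MetadataBuilder, StructField, StructType}

test("desc table for data source table created via DataFrameWriter (sketch)") {
  val tabName = "tab2"  // illustrative name, not from the patch
  withTable(tabName) {
    // Carry the column comment in StructField metadata under the "comment" key.
    val schema = StructType(Seq(
      StructField("a", IntegerType,
        metadata = new MetadataBuilder().putString("comment", "test").build())))

    // Write an empty DataFrame with that schema as a parquet data source table.
    val df = spark.createDataFrame(spark.sparkContext.emptyRDD[Row], schema)
    df.write.format("parquet").saveAsTable(tabName)

    // The comment should round-trip through the catalog and show up in DESC.
    checkAnswer(
      sql(s"DESC $tabName").select("col_name", "data_type", "comment"),
      Row("a", "int", "test")
    )
  }
}
```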
Diffstat (limited to 'sql/hive')
-rw-r--r--  sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala  14
1 file changed, 14 insertions, 0 deletions
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala
index b2f01fcc83..89f69c8e4d 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala
@@ -22,6 +22,7 @@ import java.io.File
import org.apache.hadoop.fs.Path
import org.scalatest.BeforeAndAfterEach
+import org.apache.spark.internal.config._
import org.apache.spark.sql.{AnalysisException, QueryTest, Row, SaveMode}
import org.apache.spark.sql.catalyst.catalog.{CatalogDatabase, CatalogTableType}
import org.apache.spark.sql.catalyst.TableIdentifier
@@ -407,6 +408,19 @@ class HiveDDLSuite
}
}
+ test("desc table for data source table using Hive Metastore") {
+ assume(spark.sparkContext.conf.get(CATALOG_IMPLEMENTATION) == "hive")
+ val tabName = "tab1"
+ withTable(tabName) {
+ sql(s"CREATE TABLE $tabName(a int comment 'test') USING parquet ")
+
+ checkAnswer(
+ sql(s"DESC $tabName").select("col_name", "data_type", "comment"),
+ Row("a", "int", "test")
+ )
+ }
+ }
+
private def createDatabaseWithLocation(tmpDir: File, dirExists: Boolean): Unit = {
val catalog = spark.sessionState.catalog
val dbName = "db1"