aboutsummaryrefslogtreecommitdiff
path: root/sql
diff options
context:
space:
mode:
authorgatorsmile <gatorsmile@gmail.com>2016-06-23 09:12:20 +0800
committerWenchen Fan <wenchen@databricks.com>2016-06-23 09:12:20 +0800
commit9f990fa3f9e0b798d8018cf4132b93a3468f33bb (patch)
tree1d03022a5bcd4ac0e98ddc8514deb9df34ef0a63 /sql
parent4f869f88ee96fa57be79f972f218111b6feac67f (diff)
downloadspark-9f990fa3f9e0b798d8018cf4132b93a3468f33bb.tar.gz
spark-9f990fa3f9e0b798d8018cf4132b93a3468f33bb.tar.bz2
spark-9f990fa3f9e0b798d8018cf4132b93a3468f33bb.zip
[SPARK-16024][SQL][TEST] Verify Column Comment for Data Source Tables
#### What changes were proposed in this pull request? This PR is to improve test coverage. It verifies whether the `Comment` of a `Column` can be appropriately handled. The test cases verify the related parts in the Parser, both the SQL and DataFrameWriter interfaces, and both the Hive Metastore catalog and the In-memory catalog. #### How was this patch tested? N/A Author: gatorsmile <gatorsmile@gmail.com> Closes #13764 from gatorsmile/dataSourceComment.
Diffstat (limited to 'sql')
-rw-r--r--sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLCommandSuite.scala10
-rw-r--r--sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala13
-rw-r--r--sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala14
3 files changed, 34 insertions, 3 deletions
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLCommandSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLCommandSuite.scala
index 5bee28b446..7b96f4c99a 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLCommandSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLCommandSuite.scala
@@ -28,7 +28,7 @@ import org.apache.spark.sql.catalyst.plans.logical.Project
import org.apache.spark.sql.execution.SparkSqlParser
import org.apache.spark.sql.execution.datasources.{BucketSpec, CreateTableUsing}
import org.apache.spark.sql.internal.{HiveSerDe, SQLConf}
-import org.apache.spark.sql.types.{IntegerType, StringType, StructType}
+import org.apache.spark.sql.types.{IntegerType, MetadataBuilder, StringType, StructType}
// TODO: merge this with DDLSuite (SPARK-14441)
@@ -349,10 +349,14 @@ class DDLCommandSuite extends PlanTest {
}
test("create table using - with partitioned by") {
- val query = "CREATE TABLE my_tab(a INT, b STRING) USING parquet PARTITIONED BY (a)"
+ val query = "CREATE TABLE my_tab(a INT comment 'test', b STRING) " +
+ "USING parquet PARTITIONED BY (a)"
val expected = CreateTableUsing(
TableIdentifier("my_tab"),
- Some(new StructType().add("a", IntegerType).add("b", StringType)),
+ Some(new StructType()
+ .add("a", IntegerType, nullable = true,
+ new MetadataBuilder().putString("comment", s"test").build())
+ .add("b", StringType)),
"parquet",
false,
Map.empty,
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
index f40ddcc95a..47d8a28f49 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
@@ -252,6 +252,19 @@ class DDLSuite extends QueryTest with SharedSQLContext with BeforeAndAfterEach {
}
}
+ test("desc table for parquet data source table using in-memory catalog") {
+ assume(spark.sparkContext.conf.get(CATALOG_IMPLEMENTATION) == "in-memory")
+ val tabName = "tab1"
+ withTable(tabName) {
+ sql(s"CREATE TABLE $tabName(a int comment 'test') USING parquet ")
+
+ checkAnswer(
+ sql(s"DESC $tabName").select("col_name", "data_type", "comment"),
+ Row("a", "int", "test")
+ )
+ }
+ }
+
test("Alter/Describe Database") {
withTempDir { tmpDir =>
val path = tmpDir.toString
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala
index b2f01fcc83..89f69c8e4d 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala
@@ -22,6 +22,7 @@ import java.io.File
import org.apache.hadoop.fs.Path
import org.scalatest.BeforeAndAfterEach
+import org.apache.spark.internal.config._
import org.apache.spark.sql.{AnalysisException, QueryTest, Row, SaveMode}
import org.apache.spark.sql.catalyst.catalog.{CatalogDatabase, CatalogTableType}
import org.apache.spark.sql.catalyst.TableIdentifier
@@ -407,6 +408,19 @@ class HiveDDLSuite
}
}
+ test("desc table for data source table using Hive Metastore") {
+ assume(spark.sparkContext.conf.get(CATALOG_IMPLEMENTATION) == "hive")
+ val tabName = "tab1"
+ withTable(tabName) {
+ sql(s"CREATE TABLE $tabName(a int comment 'test') USING parquet ")
+
+ checkAnswer(
+ sql(s"DESC $tabName").select("col_name", "data_type", "comment"),
+ Row("a", "int", "test")
+ )
+ }
+ }
+
private def createDatabaseWithLocation(tmpDir: File, dirExists: Boolean): Unit = {
val catalog = spark.sessionState.catalog
val dbName = "db1"