aboutsummaryrefslogtreecommitdiff
path: root/sql/hive
diff options
context:
space:
mode:
authorgatorsmile <gatorsmile@gmail.com>2016-07-13 15:23:37 -0700
committerYin Huai <yhuai@databricks.com>2016-07-13 15:23:37 -0700
commitc5ec879828369ec1d21acd7f18a792306634ff74 (patch)
tree4a50a0a09e2143e7163f7854009cbec4c959bcdb /sql/hive
parentfb2e8eeb0b1e56bea535165f7a3bec6558b3f4a3 (diff)
downloadspark-c5ec879828369ec1d21acd7f18a792306634ff74.tar.gz
spark-c5ec879828369ec1d21acd7f18a792306634ff74.tar.bz2
spark-c5ec879828369ec1d21acd7f18a792306634ff74.zip
[SPARK-16482][SQL] Describe Table Command for Tables Requiring Runtime Inferred Schema
#### What changes were proposed in this pull request? If we create a table pointing to parquet/json datasets without specifying the schema, the describe table command does not show the schema at all. It only shows `# Schema of this table is inferred at runtime`. In 1.6, describe table does show the schema of such a table. ~~For data source tables, to infer the schema, we need to load the data source tables at runtime. Thus, this PR calls the function `lookupRelation`.~~ For data source tables, we infer the schema before table creation. Thus, this PR sets the inferred schema as the table schema at table creation. #### How was this patch tested? Added test cases Author: gatorsmile <gatorsmile@gmail.com> Closes #14148 from gatorsmile/describeSchema.
Diffstat (limited to 'sql/hive')
-rw-r--r--sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala16
1 file changed, 9 insertions, 7 deletions
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala
index 343d7bae98..9228242021 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala
@@ -612,15 +612,17 @@ class HiveDDLSuite
}
test("desc table for data source table - no user-defined schema") {
- withTable("t1") {
- withTempPath { dir =>
- val path = dir.getCanonicalPath
- spark.range(1).write.parquet(path)
- sql(s"CREATE TABLE t1 USING parquet OPTIONS (PATH '$path')")
+ Seq("parquet", "json", "orc").foreach { fileFormat =>
+ withTable("t1") {
+ withTempPath { dir =>
+ val path = dir.getCanonicalPath
+ spark.range(1).write.format(fileFormat).save(path)
+ sql(s"CREATE TABLE t1 USING $fileFormat OPTIONS (PATH '$path')")
- val desc = sql("DESC FORMATTED t1").collect().toSeq
+ val desc = sql("DESC FORMATTED t1").collect().toSeq
- assert(desc.contains(Row("# Schema of this table is inferred at runtime", "", "")))
+ assert(desc.contains(Row("id", "bigint", "")))
+ }
}
}
}