author    Wenchen Fan <wenchen@databricks.com>  2016-11-17 00:00:38 -0800
committer Reynold Xin <rxin@databricks.com>    2016-11-17 00:00:38 -0800
commit    07b3f045cd6f79b92bc86b3b1b51d3d5e6bd37ce (patch)
tree      57845267ae2e85f58386ff59880b114a4b9b7945 /sql/hive/src/test
parent    170eeb345f951de89a39fe565697b3e913011768 (diff)
[SPARK-18464][SQL] support old table which doesn't store schema in metastore
## What changes were proposed in this pull request?

Before Spark 2.1, users could create an external data source table without a schema, and Spark would infer the table schema at runtime. In Spark 2.1, we decided to infer the schema once, when the table is created, so that we don't need to re-infer it on every access. This is a good improvement, but we should still respect and support old tables that don't store their schema in the metastore.

## How was this patch tested?

Regression test.

Author: Wenchen Fan <wenchen@databricks.com>

Closes #15900 from cloud-fan/hive-catalog.
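To make the fallback concrete, here is a minimal sketch of the idea, assuming a data source table whose files live at `path`. The helper `restoreTableSchema` and its parameters are hypothetical illustrations, not the actual `HiveExternalCatalog` code path:

```scala
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.types.StructType

// Hypothetical helper, for illustration only: decide between the schema
// stored in the metastore and runtime inference from the table's files.
def restoreTableSchema(
    spark: SparkSession,
    storedSchema: StructType,
    provider: String,
    path: String): StructType = {
  if (storedSchema.nonEmpty) {
    // Tables created by Spark 2.1+ carry their schema in the metastore
    // table properties, so it can be used directly.
    storedSchema
  } else {
    // Pre-2.1 tables stored no schema; fall back to inferring it from
    // the underlying files at runtime, as Spark did before 2.1.
    spark.read.format(provider).load(path).schema
  }
}
```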
Diffstat (limited to 'sql/hive/src/test')
-rw-r--r--  sql/hive/src/test/scala/org/apache/spark/sql/hive/MetastoreDataSourcesSuite.scala | 22 ++++++++++++++++++++++
1 file changed, 22 insertions(+), 0 deletions(-)
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/MetastoreDataSourcesSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/MetastoreDataSourcesSuite.scala
index c50f92e783..4ab1a54edc 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/MetastoreDataSourcesSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/MetastoreDataSourcesSuite.scala
@@ -1371,4 +1371,26 @@ class MetastoreDataSourcesSuite extends QueryTest with SQLTestUtils with TestHiv
}
}
}
+
+ test("SPARK-18464: support old table which doesn't store schema in table properties") {
+ withTable("old") {
+ withTempPath { path =>
+ Seq(1 -> "a").toDF("i", "j").write.parquet(path.getAbsolutePath)
+ val tableDesc = CatalogTable(
+ identifier = TableIdentifier("old", Some("default")),
+ tableType = CatalogTableType.EXTERNAL,
+ storage = CatalogStorageFormat.empty.copy(
+ properties = Map("path" -> path.getAbsolutePath)
+ ),
+ schema = new StructType(),
+ properties = Map(
+ HiveExternalCatalog.DATASOURCE_PROVIDER -> "parquet"))
+ hiveClient.createTable(tableDesc, ignoreIfExists = false)
+
+ checkAnswer(spark.table("old"), Row(1, "a"))
+
+ checkAnswer(sql("DESC old"), Row("i", "int", null) :: Row("j", "string", null) :: Nil)
+ }
+ }
+ }
}
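For reference, the `checkAnswer` assertions above translate to the following expected behavior when querying such a pre-2.1 table (a sketch of a spark-shell session; the table name matches the test):

```scala
// Expected results, per the checkAnswer calls in the test:
spark.table("old").collect()
// => Array(Row(1, "a"))
spark.sql("DESC old").collect()
// => Array(Row("i", "int", null), Row("j", "string", null))
```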