author: Shixiong Zhu <shixiong@databricks.com> 2016-05-10 23:53:55 -0700
committer: Yin Huai <yhuai@databricks.com> 2016-05-10 23:53:55 -0700
commit: 875ef764280428acd095aec1834fee0ddad08611 (patch)
tree: 82ccd9556e78d06e826403aacb366e5ba00f6c14 /sql/hive
parent: 007882c7ee06de37ba309424fced1e4c6b408572 (diff)
[SPARK-15231][SQL] Document the semantic of saveAsTable and insertInto and don't drop columns silently
## What changes were proposed in this pull request?

This PR documents the different behaviors of `insertInto` and `saveAsTable`, and throws an exception when the user tries to add too many columns using `saveAsTable` with append mode.

## How was this patch tested?

Unit tests added in this PR.

Author: Shixiong Zhu <shixiong@databricks.com>

Closes #13013 from zsxwing/SPARK-15231.
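For readers skimming the diff, here is a minimal sketch (not part of the patch; the table name `t` and the standalone object are illustrative) of the semantics the documentation and tests pin down: `saveAsTable` in append mode resolves columns by name, while `insertInto` resolves them by position. It assumes a SparkSession with Hive support.

```scala
import org.apache.spark.sql.SparkSession

// Hypothetical driver program, only to illustrate the append semantics
// described in the commit message; not code from this commit.
object AppendSemanticsSketch {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .appName("append-semantics-sketch")
      .enableHiveSupport()
      .getOrCreate()
    import spark.implicits._

    // Create a table with columns (i, j).
    Seq((1, 2)).toDF("i", "j").write.saveAsTable("t")

    // saveAsTable with append resolves columns by NAME: even though the
    // DataFrame's columns are in the order (j, i), the values land in the
    // matching table columns.
    Seq((4, 3)).toDF("j", "i").write.mode("append").saveAsTable("t")

    // insertInto resolves columns by POSITION: the same DataFrame would put
    // j's values into table column i, and i's values into column j.
    Seq((4, 3)).toDF("j", "i").write.insertInto("t")

    spark.table("t").show()
    spark.stop()
  }
}
```

The new tests in this diff exercise the name-based path: appending with a mismatched, extra, or missing column name via `saveAsTable` now fails with an `AnalysisException` instead of silently dropping or reordering data.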
Diffstat (limited to 'sql/hive')
-rw-r--r--  sql/hive/src/test/scala/org/apache/spark/sql/hive/MetastoreDataSourcesSuite.scala | 43
1 file changed, 43 insertions(+), 0 deletions(-)
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/MetastoreDataSourcesSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/MetastoreDataSourcesSuite.scala
index b2a80e70be..676fbd0a39 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/MetastoreDataSourcesSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/MetastoreDataSourcesSuite.scala
@@ -1038,6 +1038,49 @@ class MetastoreDataSourcesSuite extends QueryTest with SQLTestUtils with TestHiv
}
}
+ test("saveAsTable[append]: the column order doesn't matter") {
+ withTable("saveAsTable_column_order") {
+ Seq((1, 2)).toDF("i", "j").write.saveAsTable("saveAsTable_column_order")
+ Seq((3, 4)).toDF("j", "i").write.mode("append").saveAsTable("saveAsTable_column_order")
+ checkAnswer(
+ table("saveAsTable_column_order"),
+ Seq((1, 2), (4, 3)).toDF("i", "j"))
+ }
+ }
+
+ test("saveAsTable[append]: mismatch column names") {
+ withTable("saveAsTable_mismatch_column_names") {
+ Seq((1, 2)).toDF("i", "j").write.saveAsTable("saveAsTable_mismatch_column_names")
+ val e = intercept[AnalysisException] {
+ Seq((3, 4)).toDF("i", "k")
+ .write.mode("append").saveAsTable("saveAsTable_mismatch_column_names")
+ }
+ assert(e.getMessage.contains("cannot resolve"))
+ }
+ }
+
+ test("saveAsTable[append]: too many columns") {
+ withTable("saveAsTable_too_many_columns") {
+ Seq((1, 2)).toDF("i", "j").write.saveAsTable("saveAsTable_too_many_columns")
+ val e = intercept[AnalysisException] {
+ Seq((3, 4, 5)).toDF("i", "j", "k")
+ .write.mode("append").saveAsTable("saveAsTable_too_many_columns")
+ }
+ assert(e.getMessage.contains("doesn't match"))
+ }
+ }
+
+ test("saveAsTable[append]: less columns") {
+ withTable("saveAsTable_less_columns") {
+ Seq((1, 2)).toDF("i", "j").write.saveAsTable("saveAsTable_less_columns")
+ val e = intercept[AnalysisException] {
+ Seq((4)).toDF("j")
+ .write.mode("append").saveAsTable("saveAsTable_less_columns")
+ }
+ assert(e.getMessage.contains("doesn't match"))
+ }
+ }
+
test("SPARK-15025: create datasource table with path with select") {
withTempPath { dir =>
withTable("t") {