path: root/sql/core/src/test/scala
author	Wenchen Fan <wenchen@databricks.com>	2016-09-08 19:41:49 +0800
committer	Wenchen Fan <wenchen@databricks.com>	2016-09-08 19:41:49 +0800
commit	3ced39df32e52170d6954a2464f84e0c9f307423 (patch)
tree	94c1a6de4ed76cd3384a65293a9d0de6e8d6269c /sql/core/src/test/scala
parent	b230fb92a53375b648fa0f9e1d852270156d79e5 (diff)
download	spark-3ced39df32e52170d6954a2464f84e0c9f307423.tar.gz
	spark-3ced39df32e52170d6954a2464f84e0c9f307423.tar.bz2
	spark-3ced39df32e52170d6954a2464f84e0c9f307423.zip
[SPARK-17432][SQL] PreprocessDDL should respect case sensitivity when checking duplicated columns
## What changes were proposed in this pull request?

In `PreprocessDDL` we check whether table columns are duplicated. However, this check ignores the case sensitivity config (it is always case-sensitive), which leads to different results between `HiveExternalCatalog` and `InMemoryCatalog`: `HiveExternalCatalog` throws an exception because the Hive metastore is always case-insensitive, while `InMemoryCatalog` is fine. This PR fixes it.

## How was this patch tested?

A new test in DDLSuite.

Author: Wenchen Fan <wenchen@databricks.com>

Closes #14994 from cloud-fan/check-dup.
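For illustration, below is a minimal, self-contained sketch of a duplicate-column check that honors a case-sensitivity flag, mirroring the behavior the new test exercises. The `DuplicateColumnCheck` object and `assertNoDuplicates` method are hypothetical names for this sketch; this is not the actual `PreprocessDDL` code from the commit.

```scala
// Sketch only (assumed names, not the actual PreprocessDDL code): a duplicate-column
// check that respects a case-sensitivity flag.
object DuplicateColumnCheck {
  def assertNoDuplicates(columns: Seq[String], caseSensitive: Boolean, table: String): Unit = {
    // When analysis is case-insensitive, fold names to lower case so that
    // columns differing only in case (e.g. "a" and "A") are treated as duplicates.
    val normalized = if (caseSensitive) columns else columns.map(_.toLowerCase)
    val duplicates = normalized.groupBy(identity).collect {
      case (name, occurrences) if occurrences.size > 1 => name
    }
    if (duplicates.nonEmpty) {
      throw new IllegalArgumentException(
        s"Found duplicate column(s) in table definition of `$table`: ${duplicates.mkString(", ")}")
    }
  }
}

// Usage: passes when case-sensitive, throws when case-insensitive.
// DuplicateColumnCheck.assertNoDuplicates(Seq("a", "A"), caseSensitive = true, "tbl")   // ok
// DuplicateColumnCheck.assertNoDuplicates(Seq("a", "A"), caseSensitive = false, "tbl")  // throws
```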
Diffstat (limited to 'sql/core/src/test/scala')
-rw-r--r--	sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala	7
1 file changed, 7 insertions, 0 deletions
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
index fd35c987ca..05f826a11b 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
@@ -371,6 +371,13 @@ class DDLSuite extends QueryTest with SharedSQLContext with BeforeAndAfterEach {
sql("CREATE TABLE tbl(a int, a string) USING json")
}
assert(e.message == "Found duplicate column(s) in table definition of `tbl`: a")
+
+ withSQLConf(SQLConf.CASE_SENSITIVE.key -> "false") {
+ val e2 = intercept[AnalysisException] {
+ sql("CREATE TABLE tbl(a int, A string) USING json")
+ }
+ assert(e2.message == "Found duplicate column(s) in table definition of `tbl`: a")
+ }
}
test("create table - partition column names not in table definition") {