diff options
author | gatorsmile <gatorsmile@gmail.com> | 2016-08-26 11:13:38 -0700 |
---|---|---|
committer | Yin Huai <yhuai@databricks.com> | 2016-08-26 11:13:38 -0700 |
commit | fd4ba3f626f49d7d616a2a334d45b1c736e1db1c (patch) | |
tree | b1157e6cb57bfe55d1db6776b6f07c2e441fb5e4 /sql/core/src/test/scala | |
parent | 18832162357282ec81515b5b2ba93747be3ad18b (diff) | |
download | spark-fd4ba3f626f49d7d616a2a334d45b1c736e1db1c.tar.gz spark-fd4ba3f626f49d7d616a2a334d45b1c736e1db1c.tar.bz2 spark-fd4ba3f626f49d7d616a2a334d45b1c736e1db1c.zip |
[SPARK-17192][SQL] Issue Exception when Users Specify the Partitioning Columns without a Given Schema
### What changes were proposed in this pull request?
Address the comments by yhuai in the original PR: https://github.com/apache/spark/pull/14207
First, issue an exception instead of logging a warning when users specify the partitioning columns without a given schema.
Second, refactor the code a little.
### How was this patch tested?
Fixed the test cases.
Author: gatorsmile <gatorsmile@gmail.com>
Closes #14572 from gatorsmile/followup16552.
Diffstat (limited to 'sql/core/src/test/scala')
-rw-r--r-- | sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala | 17 |
1 file changed, 12 insertions, 5 deletions
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala index e6ae42258d..b343454b12 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala @@ -265,7 +265,7 @@ class DDLSuite extends QueryTest with SharedSQLContext with BeforeAndAfterEach { userSpecifiedPartitionCols.map(p => s"PARTITIONED BY ($p)").getOrElse("") val schemaClause = userSpecifiedSchema.map(s => s"($s)").getOrElse("") val uri = path.toURI - sql( + val sqlCreateTable = s""" |CREATE TABLE $tabName $schemaClause |USING parquet @@ -273,11 +273,18 @@ class DDLSuite extends QueryTest with SharedSQLContext with BeforeAndAfterEach { | path '$uri' |) |$partitionClause - """.stripMargin) - val tableMetadata = spark.sessionState.catalog.getTableMetadata(TableIdentifier(tabName)) + """.stripMargin + if (userSpecifiedSchema.isEmpty && userSpecifiedPartitionCols.nonEmpty) { + val e = intercept[AnalysisException](sql(sqlCreateTable)).getMessage + assert(e.contains( + "not allowed to specify partition columns when the table schema is not defined")) + } else { + sql(sqlCreateTable) + val tableMetadata = spark.sessionState.catalog.getTableMetadata(TableIdentifier(tabName)) - assert(expectedSchema == tableMetadata.schema) - assert(expectedPartitionCols == tableMetadata.partitionColumnNames) + assert(expectedSchema == tableMetadata.schema) + assert(expectedPartitionCols == tableMetadata.partitionColumnNames) + } } } |