author    Juliusz Sompolski <julek@databricks.com>    2017-04-21 22:11:24 +0800
committer Wenchen Fan <wenchen@databricks.com>        2017-04-21 22:11:24 +0800
commit    c9e6035e1fb825d280eaec3bdfc1e4d362897ffd (patch)
tree      cab9b91acc7a8c9c067fe0773c557eb9dccc08f5
parent    34767997e0c6cb28e1fac8cb650fa3511f260ca5 (diff)
[SPARK-20412] Throw ParseException from visitNonOptionalPartitionSpec instead of returning null values.
## What changes were proposed in this pull request?

If a partitionSpec is not supposed to contain optional values, a ParseException should be thrown instead of returning nulls. The nulls can later cause NullPointerExceptions in places that do not expect them.

## How was this patch tested?

A query like "SHOW PARTITIONS tbl PARTITION(col1='val1', col2)" used to throw a NullPointerException. Now it throws a ParseException.

Author: Juliusz Sompolski <julek@databricks.com>

Closes #17707 from juliuszsompolski/SPARK-20412.
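For illustration only, here is a minimal standalone sketch of the pattern the fix applies: converting a partition spec with optional values into a strict `Map[String, String]` and failing fast when a key arrives without a value. The object name, `requireValues` helper, and use of `IllegalArgumentException` (standing in for Spark's `ParseException`, which needs a parser context) are hypothetical, not part of this commit.

```scala
// Sketch of the fix's pattern: reject a missing partition value up front
// instead of letting a null flow downstream and cause an NPE later.
object PartitionSpecCheck {
  def requireValues(spec: Map[String, Option[String]]): Map[String, String] =
    spec.map {
      case (key, Some(value)) => key -> value
      case (key, None) =>
        // Spark's AstBuilder throws ParseException here; this stand-in keeps the sketch self-contained.
        throw new IllegalArgumentException(s"Found an empty partition key '$key'.")
    }

  def main(args: Array[String]): Unit = {
    // Fully specified spec converts cleanly.
    println(requireValues(Map("dt" -> Some("2008-08-08"), "country" -> Some("us"))))
    // Missing value for 'b' fails immediately with a clear message.
    try requireValues(Map("a" -> Some("1"), "b" -> None))
    catch { case e: IllegalArgumentException => println(e.getMessage) }
  }
}
```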
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala    |  5
-rw-r--r--  sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLCommandSuite.scala | 16
2 files changed, 16 insertions, 5 deletions
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
index e1db1ef5b8..2cf06d1566 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
@@ -215,7 +215,10 @@ class AstBuilder extends SqlBaseBaseVisitor[AnyRef] with Logging {
*/
protected def visitNonOptionalPartitionSpec(
ctx: PartitionSpecContext): Map[String, String] = withOrigin(ctx) {
- visitPartitionSpec(ctx).mapValues(_.orNull).map(identity)
+ visitPartitionSpec(ctx).map {
+ case (key, None) => throw new ParseException(s"Found an empty partition key '$key'.", ctx)
+ case (key, Some(value)) => key -> value
+ }
}
/**
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLCommandSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLCommandSuite.scala
index 97c61dc869..8a6bc62fec 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLCommandSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLCommandSuite.scala
@@ -530,13 +530,13 @@ class DDLCommandSuite extends PlanTest {
""".stripMargin
val sql4 =
"""
- |ALTER TABLE table_name PARTITION (test, dt='2008-08-08',
+ |ALTER TABLE table_name PARTITION (test=1, dt='2008-08-08',
|country='us') SET SERDE 'org.apache.class' WITH SERDEPROPERTIES ('columns'='foo,bar',
|'field.delim' = ',')
""".stripMargin
val sql5 =
"""
- |ALTER TABLE table_name PARTITION (test, dt='2008-08-08',
+ |ALTER TABLE table_name PARTITION (test=1, dt='2008-08-08',
|country='us') SET SERDEPROPERTIES ('columns'='foo,bar', 'field.delim' = ',')
""".stripMargin
val parsed1 = parser.parsePlan(sql1)
@@ -558,12 +558,12 @@ class DDLCommandSuite extends PlanTest {
tableIdent,
Some("org.apache.class"),
Some(Map("columns" -> "foo,bar", "field.delim" -> ",")),
- Some(Map("test" -> null, "dt" -> "2008-08-08", "country" -> "us")))
+ Some(Map("test" -> "1", "dt" -> "2008-08-08", "country" -> "us")))
val expected5 = AlterTableSerDePropertiesCommand(
tableIdent,
None,
Some(Map("columns" -> "foo,bar", "field.delim" -> ",")),
- Some(Map("test" -> null, "dt" -> "2008-08-08", "country" -> "us")))
+ Some(Map("test" -> "1", "dt" -> "2008-08-08", "country" -> "us")))
comparePlans(parsed1, expected1)
comparePlans(parsed2, expected2)
comparePlans(parsed3, expected3)
@@ -832,6 +832,14 @@ class DDLCommandSuite extends PlanTest {
assert(e.contains("Found duplicate keys 'a'"))
}
+ test("empty values in non-optional partition specs") {
+ val e = intercept[ParseException] {
+ parser.parsePlan(
+ "SHOW PARTITIONS dbx.tab1 PARTITION (a='1', b)")
+ }.getMessage
+ assert(e.contains("Found an empty partition key 'b'"))
+ }
+
test("drop table") {
val tableName1 = "db.tab"
val tableName2 = "tab"