about summary refs log tree commit diff
diff options
context:
space:
mode:
-rw-r--r-- sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/FileFormatWriter.scala | 2
-rw-r--r-- sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala | 23
-rw-r--r-- sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala | 35
3 files changed, 58 insertions, 2 deletions
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/FileFormatWriter.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/FileFormatWriter.scala
index 950e5ca0d6..30a09a9ad3 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/FileFormatWriter.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/FileFormatWriter.scala
@@ -341,7 +341,7 @@ object FileFormatWriter extends Logging {
Seq(Cast(c, StringType, Option(desc.timeZoneId))),
Seq(StringType))
val str = If(IsNull(c), Literal(ExternalCatalogUtils.DEFAULT_PARTITION_NAME), escaped)
- val partitionName = Literal(c.name + "=") :: str :: Nil
+ val partitionName = Literal(ExternalCatalogUtils.escapePathName(c.name) + "=") :: str :: Nil
if (i == 0) partitionName else Literal(Path.SEPARATOR) :: partitionName
}
}
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
index 6ffa58bcd9..b2199fdf90 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
@@ -1995,6 +1995,29 @@ class DDLSuite extends QueryTest with SharedSQLContext with BeforeAndAfterEach {
}
}
+ Seq("a b", "a:b", "a%b", "a,b").foreach { specialChars =>
+ test(s"data source table:partition column name containing $specialChars") {
+ withTable("t") {
+ withTempDir { dir =>
+ spark.sql(
+ s"""
+ |CREATE TABLE t(a string, `$specialChars` string)
+ |USING parquet
+ |PARTITIONED BY(`$specialChars`)
+ |LOCATION '$dir'
+ """.stripMargin)
+
+ assert(dir.listFiles().isEmpty)
+ spark.sql(s"INSERT INTO TABLE t PARTITION(`$specialChars`=2) SELECT 1")
+ val partEscaped = s"${ExternalCatalogUtils.escapePathName(specialChars)}=2"
+ val partFile = new File(dir, partEscaped)
+ assert(partFile.listFiles().length >= 1)
+ checkAnswer(spark.table("t"), Row("1", "2") :: Nil)
+ }
+ }
+ }
+ }
+
Seq("a b", "a:b", "a%b").foreach { specialChars =>
test(s"location uri contains $specialChars for datasource table") {
withTable("t", "t1") {
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala
index e956c9abae..df2c1cee94 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala
@@ -27,7 +27,7 @@ import org.scalatest.BeforeAndAfterEach
import org.apache.spark.SparkException
import org.apache.spark.sql.{AnalysisException, QueryTest, Row, SaveMode}
import org.apache.spark.sql.catalyst.analysis.{NoSuchPartitionException, TableAlreadyExistsException}
-import org.apache.spark.sql.catalyst.catalog.{CatalogDatabase, CatalogTable, CatalogTableType, CatalogUtils}
+import org.apache.spark.sql.catalyst.catalog.{CatalogDatabase, CatalogTable, CatalogTableType, CatalogUtils, ExternalCatalogUtils}
import org.apache.spark.sql.catalyst.TableIdentifier
import org.apache.spark.sql.execution.command.DDLUtils
import org.apache.spark.sql.hive.HiveExternalCatalog
@@ -1690,6 +1690,39 @@ class HiveDDLSuite
}
}
+ Seq("parquet", "hive").foreach { datasource =>
+ Seq("a b", "a:b", "a%b", "a,b").foreach { specialChars =>
+ test(s"partition column name of $datasource table containing $specialChars") {
+ withTable("t") {
+ withTempDir { dir =>
+ spark.sql(
+ s"""
+ |CREATE TABLE t(a string, `$specialChars` string)
+ |USING $datasource
+ |PARTITIONED BY(`$specialChars`)
+ |LOCATION '$dir'
+ """.stripMargin)
+
+ assert(dir.listFiles().isEmpty)
+ spark.sql(s"INSERT INTO TABLE t PARTITION(`$specialChars`=2) SELECT 1")
+ val partEscaped = s"${ExternalCatalogUtils.escapePathName(specialChars)}=2"
+ val partFile = new File(dir, partEscaped)
+ assert(partFile.listFiles().length >= 1)
+ checkAnswer(spark.table("t"), Row("1", "2") :: Nil)
+
+ withSQLConf("hive.exec.dynamic.partition.mode" -> "nonstrict") {
+ spark.sql(s"INSERT INTO TABLE t PARTITION(`$specialChars`) SELECT 3, 4")
+ val partEscaped1 = s"${ExternalCatalogUtils.escapePathName(specialChars)}=4"
+ val partFile1 = new File(dir, partEscaped1)
+ assert(partFile1.listFiles().length >= 1)
+ checkAnswer(spark.table("t"), Row("1", "2") :: Row("3", "4") :: Nil)
+ }
+ }
+ }
+ }
+ }
+ }
+
Seq("a b", "a:b", "a%b").foreach { specialChars =>
test(s"datasource table: location uri contains $specialChars") {
withTable("t", "t1") {