about summary refs log tree commit diff
path: root/sql/catalyst
diff options
context:
space:
mode:
author	Wenchen Fan <wenchen@databricks.com>	2016-07-25 09:28:56 +0800
committer	Wenchen Fan <wenchen@databricks.com>	2016-07-25 09:28:56 +0800
commit	1221ce04029154778ccb5453e348f6d116092cc5 (patch)
tree	2f59418055af7815af432eb27f766b9169f1b7eb /sql/catalyst
parent	23e047f4609bf39f50ea4c65f704cac15408a821 (diff)
download	spark-1221ce04029154778ccb5453e348f6d116092cc5.tar.gz
spark-1221ce04029154778ccb5453e348f6d116092cc5.tar.bz2
spark-1221ce04029154778ccb5453e348f6d116092cc5.zip
[SPARK-16645][SQL] rename CatalogStorageFormat.serdeProperties to properties
## What changes were proposed in this pull request?

We also store data source table options in this field; it's unreasonable to call it `serdeProperties`.

## How was this patch tested?

N/A

Author: Wenchen Fan <wenchen@databricks.com>

Closes #14283 from cloud-fan/minor1.
Diffstat (limited to 'sql/catalyst')
-rw-r--r--	sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/interface.scala	| 10
-rw-r--r--	sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/ExternalCatalogSuite.scala	| 6
2 files changed, 8 insertions(+), 8 deletions(-)
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/interface.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/interface.scala
index b12606e17d..b7f35b3af4 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/interface.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/interface.scala
@@ -49,12 +49,12 @@ case class CatalogStorageFormat(
outputFormat: Option[String],
serde: Option[String],
compressed: Boolean,
- serdeProperties: Map[String, String]) {
+ properties: Map[String, String]) {
override def toString: String = {
val serdePropsToString =
- if (serdeProperties.nonEmpty) {
- s"Properties: " + serdeProperties.map(p => p._1 + "=" + p._2).mkString("[", ", ", "]")
+ if (properties.nonEmpty) {
+ s"Properties: " + properties.map(p => p._1 + "=" + p._2).mkString("[", ", ", "]")
} else {
""
}
@@ -73,7 +73,7 @@ case class CatalogStorageFormat(
object CatalogStorageFormat {
/** Empty storage format for default values and copies. */
val empty = CatalogStorageFormat(locationUri = None, inputFormat = None,
- outputFormat = None, serde = None, compressed = false, serdeProperties = Map.empty)
+ outputFormat = None, serde = None, compressed = false, properties = Map.empty)
}
/**
@@ -165,7 +165,7 @@ case class CatalogTable(
outputFormat: Option[String] = storage.outputFormat,
compressed: Boolean = false,
serde: Option[String] = storage.serde,
- serdeProperties: Map[String, String] = storage.serdeProperties): CatalogTable = {
+ serdeProperties: Map[String, String] = storage.properties): CatalogTable = {
copy(storage = CatalogStorageFormat(
locationUri, inputFormat, outputFormat, serde, compressed, serdeProperties))
}
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/ExternalCatalogSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/ExternalCatalogSuite.scala
index a9268535c4..5bb50cba53 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/ExternalCatalogSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/ExternalCatalogSuite.scala
@@ -399,11 +399,11 @@ abstract class ExternalCatalogSuite extends SparkFunSuite with BeforeAndAfterEac
// alter other storage information
catalog.alterPartitions("db2", "tbl2", Seq(
oldPart1.copy(storage = storageFormat.copy(serde = Some(newSerde))),
- oldPart2.copy(storage = storageFormat.copy(serdeProperties = newSerdeProps))))
+ oldPart2.copy(storage = storageFormat.copy(properties = newSerdeProps))))
val newPart1b = catalog.getPartition("db2", "tbl2", part1.spec)
val newPart2b = catalog.getPartition("db2", "tbl2", part2.spec)
assert(newPart1b.storage.serde == Some(newSerde))
- assert(newPart2b.storage.serdeProperties == newSerdeProps)
+ assert(newPart2b.storage.properties == newSerdeProps)
// alter but change spec, should fail because new partition specs do not exist yet
val badPart1 = part1.copy(spec = Map("a" -> "v1", "b" -> "v2"))
val badPart2 = part2.copy(spec = Map("a" -> "v3", "b" -> "v4"))
@@ -634,7 +634,7 @@ abstract class CatalogTestUtils {
outputFormat = Some(tableOutputFormat),
serde = None,
compressed = false,
- serdeProperties = Map.empty)
+ properties = Map.empty)
lazy val part1 = CatalogTablePartition(Map("a" -> "1", "b" -> "2"), storageFormat)
lazy val part2 = CatalogTablePartition(Map("a" -> "3", "b" -> "4"), storageFormat)
lazy val part3 = CatalogTablePartition(Map("a" -> "5", "b" -> "6"), storageFormat)