author    Wenchen Fan <wenchen@databricks.com>    2016-07-25 09:28:56 +0800
committer Wenchen Fan <wenchen@databricks.com>    2016-07-25 09:28:56 +0800
commit    1221ce04029154778ccb5453e348f6d116092cc5 (patch)
tree      2f59418055af7815af432eb27f766b9169f1b7eb /sql/hive
parent    23e047f4609bf39f50ea4c65f704cac15408a821 (diff)
[SPARK-16645][SQL] rename CatalogStorageFormat.serdeProperties to properties
## What changes were proposed in this pull request?

We also store data source table options in this field, so it is unreasonable to keep calling it `serdeProperties`.

## How was this patch tested?

N/A

Author: Wenchen Fan <wenchen@databricks.com>

Closes #14283 from cloud-fan/minor1.
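For orientation, here is a minimal sketch of the storage descriptor after the rename. This is not the full Spark source: the field list is inferred from the constructor calls updated in the diff below, and `locationUri` is an assumed field of the surrounding catalog API that does not appear in this change.

```scala
// Minimal sketch, not the full Spark source. Fields other than `properties`
// are inferred from the constructor calls in this diff; `locationUri` is an
// assumed field, not visible in this change.
case class CatalogStorageFormat(
    locationUri: Option[String],
    inputFormat: Option[String],
    outputFormat: Option[String],
    serde: Option[String],
    compressed: Boolean,
    properties: Map[String, String]) // previously named `serdeProperties`
```

Callers now read `table.storage.properties` regardless of whether the map holds Hive SerDe parameters or data source options, which is what motivated the rename.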
Diffstat (limited to 'sql/hive')
-rw-r--r--  sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveMetastoreCatalog.scala       2
-rw-r--r--  sql/hive/src/main/scala/org/apache/spark/sql/hive/MetastoreRelation.scala          6
-rw-r--r--  sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClientImpl.scala      8
-rw-r--r--  sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveDDLCommandSuite.scala        20
-rw-r--r--  sql/hive/src/test/scala/org/apache/spark/sql/hive/MetastoreDataSourcesSuite.scala  6
-rw-r--r--  sql/hive/src/test/scala/org/apache/spark/sql/hive/MultiDatabaseSuite.scala         2
-rw-r--r--  sql/hive/src/test/scala/org/apache/spark/sql/hive/client/VersionsSuite.scala       4
-rw-r--r--  sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala     4
8 files changed, 26 insertions, 26 deletions
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveMetastoreCatalog.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveMetastoreCatalog.scala
index 2be51ed0e8..d308a31061 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveMetastoreCatalog.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveMetastoreCatalog.scala
@@ -119,7 +119,7 @@ private[hive] class HiveMetastoreCatalog(sparkSession: SparkSession) extends Log
BucketSpec(n.toInt, getColumnNames("bucket"), getColumnNames("sort"))
}
- val options = table.storage.serdeProperties
+ val options = table.storage.properties
val dataSource =
DataSource(
sparkSession,
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/MetastoreRelation.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/MetastoreRelation.scala
index 3ab1bdabb9..f3c849b9f2 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/MetastoreRelation.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/MetastoreRelation.scala
@@ -103,7 +103,7 @@ private[hive] case class MetastoreRelation(
sd.setSerdeInfo(serdeInfo)
val serdeParameters = new java.util.HashMap[String, String]()
- catalogTable.storage.serdeProperties.foreach { case (k, v) => serdeParameters.put(k, v) }
+ catalogTable.storage.properties.foreach { case (k, v) => serdeParameters.put(k, v) }
serdeInfo.setParameters(serdeParameters)
new HiveTable(tTable)
@@ -173,8 +173,8 @@ private[hive] case class MetastoreRelation(
p.storage.serde.foreach(serdeInfo.setSerializationLib)
val serdeParameters = new java.util.HashMap[String, String]()
- catalogTable.storage.serdeProperties.foreach { case (k, v) => serdeParameters.put(k, v) }
- p.storage.serdeProperties.foreach { case (k, v) => serdeParameters.put(k, v) }
+ catalogTable.storage.properties.foreach { case (k, v) => serdeParameters.put(k, v) }
+ p.storage.properties.foreach { case (k, v) => serdeParameters.put(k, v) }
serdeInfo.setParameters(serdeParameters)
new Partition(hiveQlTable, tPartition)
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClientImpl.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClientImpl.scala
index 2f102a88cc..9f5782f045 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClientImpl.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClientImpl.scala
@@ -390,7 +390,7 @@ private[hive] class HiveClientImpl(
outputFormat = Option(h.getOutputFormatClass).map(_.getName),
serde = Option(h.getSerializationLib),
compressed = h.getTTable.getSd.isCompressed,
- serdeProperties = Option(h.getTTable.getSd.getSerdeInfo.getParameters)
+ properties = Option(h.getTTable.getSd.getSerdeInfo.getParameters)
.map(_.asScala.toMap).orNull
),
properties = properties,
@@ -775,7 +775,7 @@ private[hive] class HiveClientImpl(
table.storage.outputFormat.map(toOutputFormat).foreach(hiveTable.setOutputFormatClass)
hiveTable.setSerializationLib(
table.storage.serde.getOrElse("org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe"))
- table.storage.serdeProperties.foreach { case (k, v) => hiveTable.setSerdeParam(k, v) }
+ table.storage.properties.foreach { case (k, v) => hiveTable.setSerdeParam(k, v) }
table.properties.foreach { case (k, v) => hiveTable.setProperty(k, v) }
table.comment.foreach { c => hiveTable.setProperty("comment", c) }
table.viewOriginalText.foreach { t => hiveTable.setViewOriginalText(t) }
@@ -799,7 +799,7 @@ private[hive] class HiveClientImpl(
p.storage.inputFormat.foreach(storageDesc.setInputFormat)
p.storage.outputFormat.foreach(storageDesc.setOutputFormat)
p.storage.serde.foreach(serdeInfo.setSerializationLib)
- serdeInfo.setParameters(p.storage.serdeProperties.asJava)
+ serdeInfo.setParameters(p.storage.properties.asJava)
storageDesc.setSerdeInfo(serdeInfo)
tpart.setDbName(ht.getDbName)
tpart.setTableName(ht.getTableName)
@@ -818,7 +818,7 @@ private[hive] class HiveClientImpl(
outputFormat = Option(apiPartition.getSd.getOutputFormat),
serde = Option(apiPartition.getSd.getSerdeInfo.getSerializationLib),
compressed = apiPartition.getSd.isCompressed,
- serdeProperties = Option(apiPartition.getSd.getSerdeInfo.getParameters)
+ properties = Option(apiPartition.getSd.getSerdeInfo.getParameters)
.map(_.asScala.toMap).orNull))
}
}
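The `Option(...).map(_.asScala.toMap).orNull` pattern in the hunks above guards against Hive returning a null parameter map. A self-contained sketch of the same conversion, assuming the Scala 2.11-era `JavaConverters` API used by Spark at this time:

```scala
import scala.collection.JavaConverters._

// Hive may return SerDe parameters as a null java.util.Map; wrapping it in
// Option makes the conversion safe, and orNull preserves the null case
// expected by the caller.
def toScalaProperties(params: java.util.Map[String, String]): Map[String, String] =
  Option(params).map(_.asScala.toMap).orNull
```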
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveDDLCommandSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveDDLCommandSuite.scala
index 867aadb5f5..9d99d960ac 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveDDLCommandSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveDDLCommandSuite.scala
@@ -100,7 +100,7 @@ class HiveDDLCommandSuite extends PlanTest {
assert(desc.viewText.isEmpty)
assert(desc.viewOriginalText.isEmpty)
assert(desc.partitionColumns == Seq.empty[CatalogColumn])
- assert(desc.storage.serdeProperties == Map())
+ assert(desc.storage.properties == Map())
assert(desc.storage.inputFormat == Some("parquet.hive.DeprecatedParquetInputFormat"))
assert(desc.storage.outputFormat == Some("parquet.hive.DeprecatedParquetOutputFormat"))
assert(desc.storage.serde == Some("parquet.hive.serde.ParquetHiveSerDe"))
@@ -118,7 +118,7 @@ class HiveDDLCommandSuite extends PlanTest {
assert(desc.schema == Seq.empty[CatalogColumn])
assert(desc.viewText == None) // TODO will be SQLText
assert(desc.viewOriginalText.isEmpty)
- assert(desc.storage.serdeProperties == Map())
+ assert(desc.storage.properties == Map())
assert(desc.storage.inputFormat == Some("org.apache.hadoop.mapred.TextInputFormat"))
assert(desc.storage.outputFormat ==
Some("org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat"))
@@ -154,7 +154,7 @@ class HiveDDLCommandSuite extends PlanTest {
assert(desc.schema == Seq.empty[CatalogColumn])
assert(desc.viewText == None) // TODO will be SQLText
assert(desc.viewOriginalText.isEmpty)
- assert(desc.storage.serdeProperties == Map(("serde_p1" -> "p1"), ("serde_p2" -> "p2")))
+ assert(desc.storage.properties == Map(("serde_p1" -> "p1"), ("serde_p2" -> "p2")))
assert(desc.storage.inputFormat == Some("org.apache.hadoop.hive.ql.io.RCFileInputFormat"))
assert(desc.storage.outputFormat == Some("org.apache.hadoop.hive.ql.io.RCFileOutputFormat"))
assert(desc.storage.serde == Some("org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe"))
@@ -305,7 +305,7 @@ class HiveDDLCommandSuite extends PlanTest {
assert(desc.storage.outputFormat ==
Some("org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat"))
assert(desc.storage.serde.isEmpty)
- assert(desc.storage.serdeProperties.isEmpty)
+ assert(desc.storage.properties.isEmpty)
assert(desc.properties.isEmpty)
assert(desc.comment.isEmpty)
}
@@ -391,10 +391,10 @@ class HiveDDLCommandSuite extends PlanTest {
val (desc2, _) = extractTableDesc(query2)
val (desc3, _) = extractTableDesc(query3)
assert(desc1.storage.serde == Some("org.apache.poof.serde.Baff"))
- assert(desc1.storage.serdeProperties.isEmpty)
+ assert(desc1.storage.properties.isEmpty)
assert(desc2.storage.serde == Some("org.apache.poof.serde.Baff"))
- assert(desc2.storage.serdeProperties == Map("k1" -> "v1"))
- assert(desc3.storage.serdeProperties == Map(
+ assert(desc2.storage.properties == Map("k1" -> "v1"))
+ assert(desc3.storage.properties == Map(
"field.delim" -> "x",
"escape.delim" -> "y",
"serialization.format" -> "x",
@@ -463,7 +463,7 @@ class HiveDDLCommandSuite extends PlanTest {
assert(desc.storage.inputFormat == Some("winput"))
assert(desc.storage.outputFormat == Some("wowput"))
assert(desc.storage.serde == Some("org.apache.poof.serde.Baff"))
- assert(desc.storage.serdeProperties == Map("k1" -> "v1"))
+ assert(desc.storage.properties == Map("k1" -> "v1"))
assert(desc.properties == Map("k1" -> "v1", "k2" -> "v2"))
assert(desc.comment == Some("no comment"))
}
@@ -479,7 +479,7 @@ class HiveDDLCommandSuite extends PlanTest {
assert(desc.schema == Seq.empty[CatalogColumn])
assert(desc.viewText == Option("SELECT * FROM tab1"))
assert(desc.viewOriginalText == Option("SELECT * FROM tab1"))
- assert(desc.storage.serdeProperties == Map())
+ assert(desc.storage.properties == Map())
assert(desc.storage.inputFormat.isEmpty)
assert(desc.storage.outputFormat.isEmpty)
assert(desc.storage.serde.isEmpty)
@@ -505,7 +505,7 @@ class HiveDDLCommandSuite extends PlanTest {
CatalogColumn("col3", null, nullable = true, None) :: Nil)
assert(desc.viewText == Option("SELECT * FROM tab1"))
assert(desc.viewOriginalText == Option("SELECT * FROM tab1"))
- assert(desc.storage.serdeProperties == Map())
+ assert(desc.storage.properties == Map())
assert(desc.storage.inputFormat.isEmpty)
assert(desc.storage.outputFormat.isEmpty)
assert(desc.storage.serde.isEmpty)
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/MetastoreDataSourcesSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/MetastoreDataSourcesSuite.scala
index af071f95e6..22f8c0f19c 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/MetastoreDataSourcesSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/MetastoreDataSourcesSuite.scala
@@ -733,7 +733,7 @@ class MetastoreDataSourcesSuite extends QueryTest with SQLTestUtils with TestHiv
outputFormat = None,
serde = None,
compressed = false,
- serdeProperties = Map(
+ properties = Map(
"path" -> sessionState.catalog.hiveDefaultTableFilePath(TableIdentifier(tableName)))
),
properties = Map(
@@ -1171,8 +1171,8 @@ class MetastoreDataSourcesSuite extends QueryTest with SQLTestUtils with TestHiv
checkAnswer(table("t"), Seq(Row(1, 2, 3), Row(2, 3, 4)))
val catalogTable = sharedState.externalCatalog.getTable("default", "t")
// there should not be a lowercase key 'path' now
- assert(catalogTable.storage.serdeProperties.get("path").isEmpty)
- assert(catalogTable.storage.serdeProperties.get("PATH").isDefined)
+ assert(catalogTable.storage.properties.get("path").isEmpty)
+ assert(catalogTable.storage.properties.get("PATH").isDefined)
}
}
}
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/MultiDatabaseSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/MultiDatabaseSuite.scala
index 83f1b192f7..7ba880e476 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/MultiDatabaseSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/MultiDatabaseSuite.scala
@@ -29,7 +29,7 @@ class MultiDatabaseSuite extends QueryTest with SQLTestUtils with TestHiveSingle
val expectedPath =
spark.sharedState.externalCatalog.getDatabase(dbName).locationUri + "/" + tableName
- assert(metastoreTable.storage.serdeProperties("path") === expectedPath)
+ assert(metastoreTable.storage.properties("path") === expectedPath)
}
private def getTableNames(dbName: Option[String] = None): Array[String] = {
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/client/VersionsSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/client/VersionsSuite.scala
index a972f61e25..066c3ffaba 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/client/VersionsSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/client/VersionsSuite.scala
@@ -153,7 +153,7 @@ class VersionsSuite extends SparkFunSuite with Logging {
outputFormat = Some(classOf[HiveIgnoreKeyTextOutputFormat[_, _]].getName),
serde = Some(classOf[LazySimpleSerDe].getName()),
compressed = false,
- serdeProperties = Map.empty
+ properties = Map.empty
))
}
@@ -275,7 +275,7 @@ class VersionsSuite extends SparkFunSuite with Logging {
outputFormat = None,
serde = None,
compressed = false,
- serdeProperties = Map.empty)
+ properties = Map.empty)
test(s"$version: sql create partitioned table") {
client.runSqlHive("CREATE TABLE src_part (value INT) PARTITIONED BY (key1 INT, key2 INT)")
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala
index 9228242021..fb5c9948a5 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala
@@ -356,7 +356,7 @@ class HiveDDLSuite
expectedSerdeProps.map { case (k, v) => s"'$k'='$v'" }.mkString(", ")
val oldPart = catalog.getPartition(TableIdentifier("boxes"), Map("width" -> "4"))
assume(oldPart.storage.serde != Some(expectedSerde), "bad test: serde was already set")
- assume(oldPart.storage.serdeProperties.filterKeys(expectedSerdeProps.contains) !=
+ assume(oldPart.storage.properties.filterKeys(expectedSerdeProps.contains) !=
expectedSerdeProps, "bad test: serde properties were already set")
sql(s"""ALTER TABLE boxes PARTITION (width=4)
| SET SERDE '$expectedSerde'
@@ -364,7 +364,7 @@ class HiveDDLSuite
|""".stripMargin)
val newPart = catalog.getPartition(TableIdentifier("boxes"), Map("width" -> "4"))
assert(newPart.storage.serde == Some(expectedSerde))
- assume(newPart.storage.serdeProperties.filterKeys(expectedSerdeProps.contains) ==
+ assume(newPart.storage.properties.filterKeys(expectedSerdeProps.contains) ==
expectedSerdeProps)
}