about summary refs log tree commit diff
path: root/sql/catalyst
diff options
context:
space:
mode:
authorXiao Li <gatorsmile@gmail.com>2017-02-24 23:03:59 -0800
committerWenchen Fan <wenchen@databricks.com>2017-02-24 23:03:59 -0800
commit4cb025afafe63d5871356d9dc38d58c1df0da996 (patch)
treeec55bc8784ca293e0862cdb9dc1e461aa9b7a04d /sql/catalyst
parent1b9ba258e086e2ba89a4f35a54106e2f8a38b525 (diff)
downloadspark-4cb025afafe63d5871356d9dc38d58c1df0da996.tar.gz
spark-4cb025afafe63d5871356d9dc38d58c1df0da996.tar.bz2
spark-4cb025afafe63d5871356d9dc38d58c1df0da996.zip
[SPARK-19735][SQL] Remove HOLD_DDLTIME from Catalog APIs
### What changes were proposed in this pull request?
As explained in Hive JIRA https://issues.apache.org/jira/browse/HIVE-12224, HOLD_DDLTIME was broken as soon as it landed. Hive 2.0 removes HOLD_DDLTIME from the API. In Spark SQL, we always set it to FALSE. Like Hive, we should also remove it from our Catalog APIs.
### How was this patch tested?
N/A
Author: Xiao Li <gatorsmile@gmail.com>
Closes #17063 from gatorsmile/removalHoldDDLTime.
Diffstat (limited to 'sql/catalyst')
-rw-r--r-- sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/ExternalCatalog.scala | 5
-rw-r--r-- sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/InMemoryCatalog.scala | 5
-rw-r--r-- sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala | 6
3 files changed, 4 insertions, 12 deletions
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/ExternalCatalog.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/ExternalCatalog.scala
index 5233699fac..a3a4ab37ea 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/ExternalCatalog.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/ExternalCatalog.scala
@@ -125,7 +125,6 @@ abstract class ExternalCatalog {
table: String,
loadPath: String,
isOverwrite: Boolean,
- holdDDLTime: Boolean,
isSrcLocal: Boolean): Unit
/**
@@ -140,7 +139,6 @@ abstract class ExternalCatalog {
loadPath: String,
partition: TablePartitionSpec,
isOverwrite: Boolean,
- holdDDLTime: Boolean,
inheritTableSpecs: Boolean,
isSrcLocal: Boolean): Unit
@@ -150,8 +148,7 @@ abstract class ExternalCatalog {
loadPath: String,
partition: TablePartitionSpec,
replace: Boolean,
- numDP: Int,
- holdDDLTime: Boolean): Unit
+ numDP: Int): Unit
// --------------------------------------------------------------------------
// Partitions
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/InMemoryCatalog.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/InMemoryCatalog.scala
index 15aed5f9b1..6bb2b2d4ff 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/InMemoryCatalog.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/InMemoryCatalog.scala
@@ -325,7 +325,6 @@ class InMemoryCatalog(
table: String,
loadPath: String,
isOverwrite: Boolean,
- holdDDLTime: Boolean,
isSrcLocal: Boolean): Unit = {
throw new UnsupportedOperationException("loadTable is not implemented")
}
@@ -336,7 +335,6 @@ class InMemoryCatalog(
loadPath: String,
partition: TablePartitionSpec,
isOverwrite: Boolean,
- holdDDLTime: Boolean,
inheritTableSpecs: Boolean,
isSrcLocal: Boolean): Unit = {
throw new UnsupportedOperationException("loadPartition is not implemented.")
@@ -348,8 +346,7 @@ class InMemoryCatalog(
loadPath: String,
partition: TablePartitionSpec,
replace: Boolean,
- numDP: Int,
- holdDDLTime: Boolean): Unit = {
+ numDP: Int): Unit = {
throw new UnsupportedOperationException("loadDynamicPartitions is not implemented.")
}
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala
index 73ef0e6a18..0230626a66 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala
@@ -322,13 +322,12 @@ class SessionCatalog(
name: TableIdentifier,
loadPath: String,
isOverwrite: Boolean,
- holdDDLTime: Boolean,
isSrcLocal: Boolean): Unit = {
val db = formatDatabaseName(name.database.getOrElse(getCurrentDatabase))
val table = formatTableName(name.table)
requireDbExists(db)
requireTableExists(TableIdentifier(table, Some(db)))
- externalCatalog.loadTable(db, table, loadPath, isOverwrite, holdDDLTime, isSrcLocal)
+ externalCatalog.loadTable(db, table, loadPath, isOverwrite, isSrcLocal)
}
/**
@@ -341,7 +340,6 @@ class SessionCatalog(
loadPath: String,
spec: TablePartitionSpec,
isOverwrite: Boolean,
- holdDDLTime: Boolean,
inheritTableSpecs: Boolean,
isSrcLocal: Boolean): Unit = {
val db = formatDatabaseName(name.database.getOrElse(getCurrentDatabase))
@@ -350,7 +348,7 @@ class SessionCatalog(
requireTableExists(TableIdentifier(table, Some(db)))
requireNonEmptyValueInPartitionSpec(Seq(spec))
externalCatalog.loadPartition(
- db, table, loadPath, spec, isOverwrite, holdDDLTime, inheritTableSpecs, isSrcLocal)
+ db, table, loadPath, spec, isOverwrite, inheritTableSpecs, isSrcLocal)
}
def defaultTablePath(tableIdent: TableIdentifier): String = {