about | summary | refs | log | tree | commit | diff
path: root/sql/hive
diff options
context:
space:
mode:
authorWenchen Fan <wenchen@databricks.com>2016-12-14 11:30:34 +0800
committerWenchen Fan <wenchen@databricks.com>2016-12-14 11:30:34 +0800
commit3e307b4959ecdab3f9c16484d172403357e7d09b (patch)
tree231652eec3b96d4a0472eec0b6e1c2f963729bd7 /sql/hive
parentf2ddabfa09fda26ff0391d026dd67545dab33e01 (diff)
downloadspark-3e307b4959ecdab3f9c16484d172403357e7d09b.tar.gz
spark-3e307b4959ecdab3f9c16484d172403357e7d09b.tar.bz2
spark-3e307b4959ecdab3f9c16484d172403357e7d09b.zip
[SPARK-18566][SQL] remove OverwriteOptions
## What changes were proposed in this pull request?

`OverwriteOptions` was introduced in https://github.com/apache/spark/pull/15705, to carry the information of static partitions. However, after further refactor, this information becomes duplicated and we can remove `OverwriteOptions`.

## How was this patch tested?

N/A

Author: Wenchen Fan <wenchen@databricks.com>

Closes #15995 from cloud-fan/overwrite.
Diffstat (limited to 'sql/hive')
-rw-r--r--  sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveStrategies.scala | 2
-rw-r--r--  sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/CreateHiveTableAsSelectCommand.scala | 5
2 files changed, 3 insertions, 4 deletions
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveStrategies.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveStrategies.scala
index ce1e3eb1a5..773c4a39d8 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveStrategies.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveStrategies.scala
@@ -47,7 +47,7 @@ private[hive] trait HiveStrategies {
case logical.InsertIntoTable(
table: MetastoreRelation, partition, child, overwrite, ifNotExists) =>
InsertIntoHiveTable(
- table, partition, planLater(child), overwrite.enabled, ifNotExists) :: Nil
+ table, partition, planLater(child), overwrite, ifNotExists) :: Nil
case CreateTable(tableDesc, mode, Some(query)) if tableDesc.provider.get == "hive" =>
val newTableDesc = if (tableDesc.storage.serde.isEmpty) {
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/CreateHiveTableAsSelectCommand.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/CreateHiveTableAsSelectCommand.scala
index cac43597ae..ef5a5a001f 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/CreateHiveTableAsSelectCommand.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/CreateHiveTableAsSelectCommand.scala
@@ -21,7 +21,7 @@ import scala.util.control.NonFatal
import org.apache.spark.sql.{AnalysisException, Row, SparkSession}
import org.apache.spark.sql.catalyst.catalog.CatalogTable
-import org.apache.spark.sql.catalyst.plans.logical.{InsertIntoTable, LogicalPlan, OverwriteOptions}
+import org.apache.spark.sql.catalyst.plans.logical.{InsertIntoTable, LogicalPlan}
import org.apache.spark.sql.execution.command.RunnableCommand
import org.apache.spark.sql.hive.MetastoreRelation
@@ -88,8 +88,7 @@ case class CreateHiveTableAsSelectCommand(
} else {
try {
sparkSession.sessionState.executePlan(InsertIntoTable(
- metastoreRelation, Map(), query, overwrite = OverwriteOptions(true),
- ifNotExists = false)).toRdd
+ metastoreRelation, Map(), query, overwrite = true, ifNotExists = false)).toRdd
} catch {
case NonFatal(e) =>
// drop the created table.