aboutsummaryrefslogtreecommitdiff
path: root/sql/hive/src/main
diff options
context:
space:
mode:
authorLiang-Chi Hsieh <simonh@tw.ibm.com>2016-04-22 18:26:28 +0800
committerWenchen Fan <wenchen@databricks.com>2016-04-22 18:26:28 +0800
commite09ab5da8b02da98d7b2496d549c1d53cceb8728 (patch)
treef9aa98a66a2cfdb669d751d3f17d170e99eba0f8 /sql/hive/src/main
parent284b15d2fbff7c0c3ffe8737838071d366ea5742 (diff)
downloadspark-e09ab5da8b02da98d7b2496d549c1d53cceb8728.tar.gz
spark-e09ab5da8b02da98d7b2496d549c1d53cceb8728.tar.bz2
spark-e09ab5da8b02da98d7b2496d549c1d53cceb8728.zip
[SPARK-14609][SQL] Native support for LOAD DATA DDL command
## What changes were proposed in this pull request?

Add the native support for LOAD DATA DDL command that loads data into Hive table/partition.

## How was this patch tested?

`HiveDDLCommandSuite` and `HiveQuerySuite`. Besides, few Hive tests (`WindowQuerySuite`, `HiveTableScanSuite` and `HiveSerDeSuite`) also use `LOAD DATA` command.

Author: Liang-Chi Hsieh <simonh@tw.ibm.com>

Closes #12412 from viirya/ddl-load-data.
Diffstat (limited to 'sql/hive/src/main')
-rw-r--r--sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveExternalCatalog.scala42
1 file changed, 42 insertions, 0 deletions
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveExternalCatalog.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveExternalCatalog.scala
index f627384253..a92a94cae5 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveExternalCatalog.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveExternalCatalog.scala
@@ -17,6 +17,8 @@
package org.apache.spark.sql.hive
+import java.util
+
import scala.util.control.NonFatal
import org.apache.hadoop.hive.ql.metadata.HiveException
@@ -197,6 +199,46 @@ private[spark] class HiveExternalCatalog(client: HiveClient) extends ExternalCat
client.listTables(db, pattern)
}
+ override def loadTable(
+ db: String,
+ table: String,
+ loadPath: String,
+ isOverwrite: Boolean,
+ holdDDLTime: Boolean): Unit = withClient {
+ requireTableExists(db, table)
+ client.loadTable(
+ loadPath,
+ s"$db.$table",
+ isOverwrite,
+ holdDDLTime)
+ }
+
+ override def loadPartition(
+ db: String,
+ table: String,
+ loadPath: String,
+ partition: TablePartitionSpec,
+ isOverwrite: Boolean,
+ holdDDLTime: Boolean,
+ inheritTableSpecs: Boolean,
+ isSkewedStoreAsSubdir: Boolean): Unit = withClient {
+ requireTableExists(db, table)
+
+ val orderedPartitionSpec = new util.LinkedHashMap[String, String]()
+ getTable(db, table).partitionColumnNames.foreach { colName =>
+ orderedPartitionSpec.put(colName, partition(colName))
+ }
+
+ client.loadPartition(
+ loadPath,
+ s"$db.$table",
+ orderedPartitionSpec,
+ isOverwrite,
+ holdDDLTime,
+ inheritTableSpecs,
+ isSkewedStoreAsSubdir)
+ }
+
// --------------------------------------------------------------------------
// Partitions
// --------------------------------------------------------------------------