aboutsummaryrefslogtreecommitdiff
path: root/sql/catalyst/src
diff options
context:
space:
mode:
authorMarcelo Vanzin <vanzin@cloudera.com>2016-12-12 14:19:42 -0800
committergatorsmile <gatorsmile@gmail.com>2016-12-12 14:19:42 -0800
commit476b34c23a1ece1d52654482a393003756957ad2 (patch)
treefe86b2301f21d92ccb96c08c3182749d2d0ef3cb /sql/catalyst/src
parentbf42c2db57b9a2ca642ad3d499c30be8d9ff221a (diff)
downloadspark-476b34c23a1ece1d52654482a393003756957ad2.tar.gz
spark-476b34c23a1ece1d52654482a393003756957ad2.tar.bz2
spark-476b34c23a1ece1d52654482a393003756957ad2.zip
[SPARK-18752][HIVE] "isSrcLocal" value should be set from user query.
The value of the "isSrcLocal" parameter passed to Hive's loadTable and loadPartition methods needs to be set according to the user query (e.g. "LOAD DATA LOCAL"), and not by the current code that tries to guess what it should be. For existing versions of Hive the current behavior is probably ok, but some recent changes in the Hive code changed the semantics slightly, causing code that incorrectly sets "isSrcLocal" to "true" to do the wrong thing. It would end up moving the parent directory of the files into the final location, instead of the files themselves, resulting in a table that cannot be read. I modified HiveCommandSuite so that existing "LOAD DATA" tests are run both in local and non-local mode, since the semantics are slightly different. The tests include a few new checks to make sure the semantics follow what Hive describes in its documentation. Tested with existing unit tests and also ran some Hive integration tests with a version of Hive containing the changes that surfaced the problem. Author: Marcelo Vanzin <vanzin@cloudera.com> Closes #16179 from vanzin/SPARK-18752.
Diffstat (limited to 'sql/catalyst/src')
-rw-r--r--sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/ExternalCatalog.scala6
-rw-r--r--sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/InMemoryCatalog.scala6
-rw-r--r--sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala10
3 files changed, 14 insertions, 8 deletions
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/ExternalCatalog.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/ExternalCatalog.scala
index 78897daec8..0c729648ef 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/ExternalCatalog.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/ExternalCatalog.scala
@@ -119,7 +119,8 @@ abstract class ExternalCatalog {
table: String,
loadPath: String,
isOverwrite: Boolean,
- holdDDLTime: Boolean): Unit
+ holdDDLTime: Boolean,
+ isSrcLocal: Boolean): Unit
def loadPartition(
db: String,
@@ -128,7 +129,8 @@ abstract class ExternalCatalog {
partition: TablePartitionSpec,
isOverwrite: Boolean,
holdDDLTime: Boolean,
- inheritTableSpecs: Boolean): Unit
+ inheritTableSpecs: Boolean,
+ isSrcLocal: Boolean): Unit
def loadDynamicPartitions(
db: String,
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/InMemoryCatalog.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/InMemoryCatalog.scala
index a6bebe1a39..816e4af2df 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/InMemoryCatalog.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/InMemoryCatalog.scala
@@ -312,7 +312,8 @@ class InMemoryCatalog(
table: String,
loadPath: String,
isOverwrite: Boolean,
- holdDDLTime: Boolean): Unit = {
+ holdDDLTime: Boolean,
+ isSrcLocal: Boolean): Unit = {
throw new UnsupportedOperationException("loadTable is not implemented")
}
@@ -323,7 +324,8 @@ class InMemoryCatalog(
partition: TablePartitionSpec,
isOverwrite: Boolean,
holdDDLTime: Boolean,
- inheritTableSpecs: Boolean): Unit = {
+ inheritTableSpecs: Boolean,
+ isSrcLocal: Boolean): Unit = {
throw new UnsupportedOperationException("loadPartition is not implemented.")
}
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala
index 7a3d2097a8..e996a836fe 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala
@@ -311,12 +311,13 @@ class SessionCatalog(
name: TableIdentifier,
loadPath: String,
isOverwrite: Boolean,
- holdDDLTime: Boolean): Unit = {
+ holdDDLTime: Boolean,
+ isSrcLocal: Boolean): Unit = {
val db = formatDatabaseName(name.database.getOrElse(getCurrentDatabase))
val table = formatTableName(name.table)
requireDbExists(db)
requireTableExists(TableIdentifier(table, Some(db)))
- externalCatalog.loadTable(db, table, loadPath, isOverwrite, holdDDLTime)
+ externalCatalog.loadTable(db, table, loadPath, isOverwrite, holdDDLTime, isSrcLocal)
}
/**
@@ -330,13 +331,14 @@ class SessionCatalog(
partition: TablePartitionSpec,
isOverwrite: Boolean,
holdDDLTime: Boolean,
- inheritTableSpecs: Boolean): Unit = {
+ inheritTableSpecs: Boolean,
+ isSrcLocal: Boolean): Unit = {
val db = formatDatabaseName(name.database.getOrElse(getCurrentDatabase))
val table = formatTableName(name.table)
requireDbExists(db)
requireTableExists(TableIdentifier(table, Some(db)))
externalCatalog.loadPartition(
- db, table, loadPath, partition, isOverwrite, holdDDLTime, inheritTableSpecs)
+ db, table, loadPath, partition, isOverwrite, holdDDLTime, inheritTableSpecs, isSrcLocal)
}
def defaultTablePath(tableIdent: TableIdentifier): String = {