about summary refs log tree commit diff
diff options
context:
space:
mode:
author福星 <fuxing@wacai.com>2016-11-03 12:02:01 -0700
committerReynold Xin <rxin@databricks.com>2016-11-03 12:02:01 -0700
commit16293311cdb25a62733a9aae4355659b971a3ce1 (patch)
treeb8edc2f075b23ef0dfeb4b488f62b2f7113e8bea
parentb17057c0a69b9c56e503483d97f5dc209eef0884 (diff)
downloadspark-16293311cdb25a62733a9aae4355659b971a3ce1.tar.gz
spark-16293311cdb25a62733a9aae4355659b971a3ce1.tar.bz2
spark-16293311cdb25a62733a9aae4355659b971a3ce1.zip
[SPARK-18237][HIVE] hive.exec.stagingdir have no effect
`hive.exec.stagingdir` has no effect in Spark 2.0.1. Hive confs in hive-site.xml will be loaded into `hadoopConf`, so we should use `hadoopConf` in `InsertIntoHiveTable` instead of `SessionState.conf`. Author: 福星 <fuxing@wacai.com> Closes #15744 from ClassNotFoundExp/master.
-rw-r--r-- sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/InsertIntoHiveTable.scala | 4
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/InsertIntoHiveTable.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/InsertIntoHiveTable.scala
index 15be12cfc0..e333fc7feb 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/InsertIntoHiveTable.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/InsertIntoHiveTable.scala
@@ -76,7 +76,8 @@ case class InsertIntoHiveTable(
def output: Seq[Attribute] = Seq.empty
- val stagingDir = sessionState.conf.getConfString("hive.exec.stagingdir", ".hive-staging")
+ val hadoopConf = sessionState.newHadoopConf()
+ val stagingDir = hadoopConf.get("hive.exec.stagingdir", ".hive-staging")
private def executionId: String = {
val rand: Random = new Random
@@ -163,7 +164,6 @@ case class InsertIntoHiveTable(
// instances within the closure, since Serializer is not serializable while TableDesc is.
val tableDesc = table.tableDesc
val tableLocation = table.hiveQlTable.getDataLocation
- val hadoopConf = sessionState.newHadoopConf()
val tmpLocation = getExternalTmpPath(tableLocation, hadoopConf)
val fileSinkConf = new FileSinkDesc(tmpLocation.toString, tableDesc, false)
val isCompressed = hadoopConf.get("hive.exec.compress.output", "false").toBoolean