author     Sital Kedia <skedia@fb.com>      2016-04-02 19:17:25 -0700
committer  Sean Owen <sowen@cloudera.com>   2016-04-02 19:17:25 -0700
commit     1cf70183423b938ec064925b20fd4a5b9e355991 (patch)
tree       789b7d4db27290e4000093c567177dfef27f9a2a /sql/hive
parent     03d130f9734be66e8aefc4ffaa207ee13e837629 (diff)
[SPARK-14056] Appends s3 specific configurations and spark.hadoop configurations to hive configuration

## What changes were proposed in this pull request?

Appends s3 specific configurations and spark.hadoop configurations to hive configuration.

## How was this patch tested?

Tested by running a job on cluster.

Author: Sital Kedia <skedia@fb.com>

Closes #11876 from sitalkedia/hiveConf.
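For context, the sketch below outlines what a call like SparkHadoopUtil.get.appendS3AndSparkHadoopConfigurations plausibly does, based on the commit description: copy AWS credentials from the environment into the s3/s3n/s3a filesystem keys, then copy every spark.hadoop.* entry from the SparkConf into the Hadoop configuration with the prefix stripped. This is a minimal standalone illustration, not the method's actual implementation; the exact property names vary by connector version.

import org.apache.hadoop.conf.Configuration
import org.apache.spark.SparkConf

object S3ConfSketch {
  // Sketch of the append step: environment credentials first, then
  // every spark.hadoop.* key with the prefix stripped.
  def appendS3AndSparkHadoopConfigurations(conf: SparkConf, hadoopConf: Configuration): Unit = {
    val keyId = System.getenv("AWS_ACCESS_KEY_ID")
    val secret = System.getenv("AWS_SECRET_ACCESS_KEY")
    if (keyId != null && secret != null) {
      // The older s3/s3n connectors and the newer s3a connector use
      // different credential property names.
      hadoopConf.set("fs.s3.awsAccessKeyId", keyId)
      hadoopConf.set("fs.s3n.awsAccessKeyId", keyId)
      hadoopConf.set("fs.s3a.access.key", keyId)
      hadoopConf.set("fs.s3.awsSecretAccessKey", secret)
      hadoopConf.set("fs.s3n.awsSecretAccessKey", secret)
      hadoopConf.set("fs.s3a.secret.key", secret)
    }
    // spark.hadoop.foo.bar=baz becomes foo.bar=baz in the Hadoop configuration.
    conf.getAll.foreach { case (key, value) =>
      if (key.startsWith("spark.hadoop.")) {
        hadoopConf.set(key.substring("spark.hadoop.".length), value)
      }
    }
  }
}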
Diffstat (limited to 'sql/hive')
-rw-r--r--  sql/hive/src/main/scala/org/apache/spark/sql/hive/TableReader.scala  4
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/TableReader.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/TableReader.scala
index 80b24dc989..54afe9c2a3 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/TableReader.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/TableReader.scala
@@ -34,6 +34,7 @@ import org.apache.hadoop.io.Writable
 import org.apache.hadoop.mapred.{FileInputFormat, InputFormat, JobConf}
 
 import org.apache.spark.broadcast.Broadcast
+import org.apache.spark.deploy.SparkHadoopUtil
 import org.apache.spark.internal.Logging
 import org.apache.spark.rdd.{EmptyRDD, HadoopRDD, RDD, UnionRDD}
 import org.apache.spark.sql.catalyst.InternalRow
@@ -74,8 +75,7 @@ class HadoopTableReader(
     math.max(sc.hiveconf.getInt("mapred.map.tasks", 1), sc.sparkContext.defaultMinPartitions)
   }
 
-  // TODO: set aws s3 credentials.
-
+  SparkHadoopUtil.get.appendS3AndSparkHadoopConfigurations(sc.sparkContext.conf, hiveExtraConf)
   private val _broadcastedHiveConf =
     sc.sparkContext.broadcast(new SerializableConfiguration(hiveExtraConf))
 
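As a usage sketch (the app name and credential lookups are illustrative, not taken from this commit): after this change, Hadoop options supplied through spark.hadoop.* on the SparkConf reach the configuration that HadoopTableReader broadcasts, so reads of S3-backed Hive tables pick them up without editing the cluster-wide Hadoop configuration.

import org.apache.spark.{SparkConf, SparkContext}

// Illustrative: these spark.hadoop.* entries are now appended to
// hiveExtraConf before it is broadcast to executors.
val conf = new SparkConf()
  .setAppName("hive-s3-read")
  .set("spark.hadoop.fs.s3a.access.key", sys.env.getOrElse("AWS_ACCESS_KEY_ID", ""))
  .set("spark.hadoop.fs.s3a.secret.key", sys.env.getOrElse("AWS_SECRET_ACCESS_KEY", ""))
val sc = new SparkContext(conf)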