author     uncleGen <hustyugm@gmail.com>    2017-03-03 11:49:00 +0100
committer  Sean Owen <sowen@cloudera.com>   2017-03-03 11:49:00 +0100
commit     fa50143cd33586f4658892f434c9f6c23346e1bf (patch)
tree       4eebea2a918020cc7295a8ac7d0ff921eb6ea98f /core/src/main
parent     d556b317038455dc25e193f3add723fccdc54958 (diff)
[SPARK-19739][CORE] Propagate S3 session token to cluster
## What changes were proposed in this pull request?

Propagate the S3 session token (AWS_SESSION_TOKEN) to the cluster.

## How was this patch tested?

Existing unit tests.

Author: uncleGen <hustyugm@gmail.com>

Closes #17080 from uncleGen/SPARK-19739.
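For context, temporary AWS credentials issued by STS come as a triple of access key, secret key, and session token; before this change only the first two were copied from the driver's environment into the Hadoop configuration, so s3a access with temporary credentials failed on the cluster. Below is a minimal standalone sketch of the mapping that mirrors the patched logic (the object and method names here are illustrative, not Spark API):

```scala
import org.apache.hadoop.conf.Configuration

// Illustrative sketch, not Spark API: mirrors how the patched code in
// SparkHadoopUtil copies S3 credentials from environment variables into
// the Hadoop configuration that is shipped to the cluster.
object S3CredentialSketch {
  def appendS3Credentials(hadoopConf: Configuration): Unit = {
    val keyId = System.getenv("AWS_ACCESS_KEY_ID")
    val accessKey = System.getenv("AWS_SECRET_ACCESS_KEY")
    if (keyId != null && accessKey != null) {
      hadoopConf.set("fs.s3a.access.key", keyId)
      hadoopConf.set("fs.s3a.secret.key", accessKey)
      // The fix: STS credentials are only valid together with their
      // session token, so propagate it as well when present.
      val sessionToken = System.getenv("AWS_SESSION_TOKEN")
      if (sessionToken != null) {
        hadoopConf.set("fs.s3a.session.token", sessionToken)
      }
    }
  }
}
```

Note that `fs.s3a.session.token` is only consumed by Hadoop's s3a connector when a temporary-credentials provider is in use (on Hadoop 2.8 this means setting `fs.s3a.aws.credentials.provider` to `org.apache.hadoop.fs.s3a.TemporaryAWSCredentialsProvider`; later Hadoop versions include it in the default chain).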
Diffstat (limited to 'core/src/main')
-rw-r--r--  core/src/main/scala/org/apache/spark/deploy/SparkHadoopUtil.scala | 13
1 file changed, 8 insertions(+), 5 deletions(-)
diff --git a/core/src/main/scala/org/apache/spark/deploy/SparkHadoopUtil.scala b/core/src/main/scala/org/apache/spark/deploy/SparkHadoopUtil.scala
index 941e2d13fb..f475ce8754 100644
--- a/core/src/main/scala/org/apache/spark/deploy/SparkHadoopUtil.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/SparkHadoopUtil.scala
@@ -82,17 +82,20 @@ class SparkHadoopUtil extends Logging {
     // the behavior of the old implementation of this code, for backwards compatibility.
     if (conf != null) {
       // Explicitly check for S3 environment variables
-      if (System.getenv("AWS_ACCESS_KEY_ID") != null &&
-          System.getenv("AWS_SECRET_ACCESS_KEY") != null) {
-        val keyId = System.getenv("AWS_ACCESS_KEY_ID")
-        val accessKey = System.getenv("AWS_SECRET_ACCESS_KEY")
-
+      val keyId = System.getenv("AWS_ACCESS_KEY_ID")
+      val accessKey = System.getenv("AWS_SECRET_ACCESS_KEY")
+      if (keyId != null && accessKey != null) {
         hadoopConf.set("fs.s3.awsAccessKeyId", keyId)
         hadoopConf.set("fs.s3n.awsAccessKeyId", keyId)
         hadoopConf.set("fs.s3a.access.key", keyId)
         hadoopConf.set("fs.s3.awsSecretAccessKey", accessKey)
         hadoopConf.set("fs.s3n.awsSecretAccessKey", accessKey)
         hadoopConf.set("fs.s3a.secret.key", accessKey)
+
+        val sessionToken = System.getenv("AWS_SESSION_TOKEN")
+        if (sessionToken != null) {
+          hadoopConf.set("fs.s3a.session.token", sessionToken)
+        }
       }
       // Copy any "spark.hadoop.foo=bar" system properties into conf as "foo=bar"
       conf.getAll.foreach { case (key, value) =>
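A hedged sketch of the end-to-end effect, e.g. pasted into a spark-shell on the driver. `SparkHadoopUtil.get.newConfiguration(conf)` is the entry point that runs the patched code, but `SparkHadoopUtil` is Spark-internal API and may not be accessible from user code in all versions; `SparkContext.hadoopConfiguration` exposes the same derived configuration:

```scala
import org.apache.spark.SparkConf
import org.apache.spark.deploy.SparkHadoopUtil

// Assumes the driver was launched with temporary credentials exported,
// e.g. AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY and AWS_SESSION_TOKEN
// obtained from `aws sts get-session-token`.
val sparkConf = new SparkConf()
val hadoopConf = SparkHadoopUtil.get.newConfiguration(sparkConf)

// Before this patch the session token was silently dropped; after it, the
// Hadoop configuration shipped to executors carries all three values.
assert(hadoopConf.get("fs.s3a.access.key") != null)
assert(hadoopConf.get("fs.s3a.session.token") != null)
```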