path: root/yarn/common
author     Thomas Graves <tgraves@apache.org>  2014-08-05 15:57:32 -0500
committer  Thomas Graves <tgraves@apache.org>  2014-08-05 15:57:32 -0500
commit     41e0a21b22ccd2788dc079790788e505b0d4e37d (patch)
tree       2d211afa9ebaf49101b1b4de44ce47f83033d4c5 /yarn/common
parent     74f82c71b03d265a7d0c98ce196ca8c44de002e8 (diff)
SPARK-1680: use configs for specifying environment variables on YARN
Note that this also documents spark.executorEnv.*, which to me means it's public. If we don't want that, please speak up.

Author: Thomas Graves <tgraves@apache.org>

Closes #1512 from tgravescs/SPARK-1680 and squashes the following commits:

11525df [Thomas Graves] more doc changes
553bad0 [Thomas Graves] fix documentation
152bf7c [Thomas Graves] fix docs
5382326 [Thomas Graves] try fix docs
32f86a4 [Thomas Graves] use configs for specifying environment variables on YARN
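For context, a hedged sketch of how an application could use the new config-based approach instead of SPARK_YARN_USER_ENV. The variable name and values below are placeholders, not taken from this change; only the spark.yarn.appMasterEnv.* and spark.executorEnv.* prefixes come from the patch and commit message.

    import org.apache.spark.SparkConf

    // Hypothetical usage sketch: PIPELINE_ENV and its value are placeholders.
    // spark.yarn.appMasterEnv.* sets variables in the YARN application master's
    // environment; spark.executorEnv.* sets variables in each executor's environment.
    val conf = new SparkConf()
      .setAppName("yarn-env-config-example")
      .set("spark.yarn.appMasterEnv.PIPELINE_ENV", "staging")
      .set("spark.executorEnv.PIPELINE_ENV", "staging")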
Diffstat (limited to 'yarn/common')
-rw-r--r--  yarn/common/src/main/scala/org/apache/spark/deploy/yarn/ClientBase.scala            13
-rw-r--r--  yarn/common/src/main/scala/org/apache/spark/deploy/yarn/ExecutorRunnableUtil.scala   6
2 files changed, 18 insertions(+), 1 deletion(-)
diff --git a/yarn/common/src/main/scala/org/apache/spark/deploy/yarn/ClientBase.scala b/yarn/common/src/main/scala/org/apache/spark/deploy/yarn/ClientBase.scala
index 44e025b8f6..1da0a1b675 100644
--- a/yarn/common/src/main/scala/org/apache/spark/deploy/yarn/ClientBase.scala
+++ b/yarn/common/src/main/scala/org/apache/spark/deploy/yarn/ClientBase.scala
@@ -259,6 +259,14 @@ trait ClientBase extends Logging {
     localResources
   }
 
+  /** Get all application master environment variables set on this SparkConf */
+  def getAppMasterEnv: Seq[(String, String)] = {
+    val prefix = "spark.yarn.appMasterEnv."
+    sparkConf.getAll.filter{case (k, v) => k.startsWith(prefix)}
+      .map{case (k, v) => (k.substring(prefix.length), v)}
+  }
+
+
   def setupLaunchEnv(
       localResources: HashMap[String, LocalResource],
       stagingDir: String): HashMap[String, String] = {
@@ -276,6 +284,11 @@ trait ClientBase extends Logging {
     distCacheMgr.setDistFilesEnv(env)
     distCacheMgr.setDistArchivesEnv(env)
 
+    getAppMasterEnv.foreach { case (key, value) =>
+      YarnSparkHadoopUtil.addToEnvironment(env, key, value, File.pathSeparator)
+    }
+
+    // Keep this for backwards compatibility but users should move to the config
     sys.env.get("SPARK_YARN_USER_ENV").foreach { userEnvs =>
       // Allow users to specify some environment variables.
       YarnSparkHadoopUtil.setEnvFromInputString(env, userEnvs, File.pathSeparator)
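The added getAppMasterEnv helper simply filters SparkConf entries by the spark.yarn.appMasterEnv. prefix and strips it. A minimal, self-contained sketch of the same transformation, with a plain Map standing in for SparkConf and made-up keys and values:

    // Minimal sketch of the prefix filtering done by getAppMasterEnv above.
    // A plain Map stands in for SparkConf; the sample entries are made up.
    object AppMasterEnvSketch {
      val prefix = "spark.yarn.appMasterEnv."

      def appMasterEnv(settings: Map[String, String]): Seq[(String, String)] =
        settings.toSeq
          .filter { case (k, _) => k.startsWith(prefix) }
          .map { case (k, v) => (k.stripPrefix(prefix), v) }

      def main(args: Array[String]): Unit = {
        val settings = Map(
          "spark.yarn.appMasterEnv.JAVA_HOME" -> "/usr/lib/jvm/java-7",
          "spark.app.name"                    -> "example")
        // Prints: (JAVA_HOME,/usr/lib/jvm/java-7)
        appMasterEnv(settings).foreach(println)
      }
    }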
diff --git a/yarn/common/src/main/scala/org/apache/spark/deploy/yarn/ExecutorRunnableUtil.scala b/yarn/common/src/main/scala/org/apache/spark/deploy/yarn/ExecutorRunnableUtil.scala
index 4ba7133a95..71a9e42846 100644
--- a/yarn/common/src/main/scala/org/apache/spark/deploy/yarn/ExecutorRunnableUtil.scala
+++ b/yarn/common/src/main/scala/org/apache/spark/deploy/yarn/ExecutorRunnableUtil.scala
@@ -171,7 +171,11 @@ trait ExecutorRunnableUtil extends Logging {
     val extraCp = sparkConf.getOption("spark.executor.extraClassPath")
     ClientBase.populateClasspath(null, yarnConf, sparkConf, env, extraCp)
 
-    // Allow users to specify some environment variables
+    sparkConf.getExecutorEnv.foreach { case (key, value) =>
+      YarnSparkHadoopUtil.addToEnvironment(env, key, value, File.pathSeparator)
+    }
+
+    // Keep this for backwards compatibility but users should move to the config
     YarnSparkHadoopUtil.setEnvFromInputString(env, System.getenv("SPARK_YARN_USER_ENV"),
       File.pathSeparator)
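The executor-side change mirrors the application-master one: spark.executorEnv.* entries read via sparkConf.getExecutorEnv are added to the container environment through YarnSparkHadoopUtil.addToEnvironment. Below is a simplified, stand-alone sketch of the append-with-separator idea; it is not the actual YarnSparkHadoopUtil implementation, and addToEnv plus the sample values are made up for illustration.

    import java.io.File
    import scala.collection.mutable.HashMap

    // Simplified stand-in for the append semantics used above: if the variable
    // is already set, the new value is appended with the path separator,
    // otherwise it is set directly. A sketch only, not the real utility code.
    object EnvAppendSketch {
      def addToEnv(env: HashMap[String, String], key: String, value: String,
                   sep: String = File.pathSeparator): Unit = {
        env(key) = env.get(key).map(existing => existing + sep + value).getOrElse(value)
      }

      def main(args: Array[String]): Unit = {
        val env = HashMap("CLASSPATH" -> "/opt/spark/jars/*")
        addToEnv(env, "CLASSPATH", "/etc/hadoop/conf")
        println(env("CLASSPATH")) // /opt/spark/jars/*:/etc/hadoop/conf on Unix
      }
    }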