author    WangTaoTheTonic <barneystinson@aliyun.com>    2014-10-28 08:53:10 -0500
committer Thomas Graves <tgraves@apache.org>    2014-10-28 08:53:10 -0500
commit    e8813be6539aba1cd1f8854c204b7938464403ed (patch)
tree      b2296dbb4bf0148e3228bcf778aab30906806dd8 /yarn
parent    47346cd029abc50c70582a721810a7cceb682d8a (diff)
[SPARK-4095][YARN][Minor]extract val isLaunchingDriver in ClientBase
Instead of checking whether `args.userClass` is null repeatedly, we extract it into a global val as in `ApplicationMaster`.

Author: WangTaoTheTonic <barneystinson@aliyun.com>

Closes #2954 from WangTaoTheTonic/MemUnit and squashes the following commits:

13bda20 [WangTaoTheTonic] extract val isLaunchingDriver in ClientBase
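For context, the refactor simply hoists the repeated `args.userClass != null` check into a single class-level val that every call site reuses. A minimal Scala sketch of the pattern (the class and field names below are simplified stand-ins for illustration, not the actual Spark code):

    // Simplified stand-in for ClientArguments: only the field relevant here.
    private[spark] class ClientArgumentsSketch(val userClass: String)

    private[spark] class ClientBaseSketch(args: ClientArgumentsSketch) {
      // Before: each method repeated `args.userClass != null`.
      // After: the check is computed once, as ApplicationMaster already does.
      private val isLaunchingDriver = args.userClass != null

      // Driver-specific java options are only relevant when launching a driver.
      def driverJavaOpts(conf: Map[String, String]): Seq[String] =
        if (isLaunchingDriver) conf.get("spark.driver.extraJavaOptions").toSeq else Nil

      // The --class argument is only passed when a user class (driver) exists.
      def userClassArgs: Seq[String] =
        if (isLaunchingDriver) Seq("--class", args.userClass) else Nil
    }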
Diffstat (limited to 'yarn')
-rw-r--r--  yarn/common/src/main/scala/org/apache/spark/deploy/yarn/ClientBase.scala | 5
1 file changed, 2 insertions(+), 3 deletions(-)
diff --git a/yarn/common/src/main/scala/org/apache/spark/deploy/yarn/ClientBase.scala b/yarn/common/src/main/scala/org/apache/spark/deploy/yarn/ClientBase.scala
index fb0e34bf59..0417cdd00a 100644
--- a/yarn/common/src/main/scala/org/apache/spark/deploy/yarn/ClientBase.scala
+++ b/yarn/common/src/main/scala/org/apache/spark/deploy/yarn/ClientBase.scala
@@ -55,6 +55,7 @@ private[spark] trait ClientBase extends Logging {
protected val amMemoryOverhead = args.amMemoryOverhead // MB
protected val executorMemoryOverhead = args.executorMemoryOverhead // MB
private val distCacheMgr = new ClientDistributedCacheManager()
+ private val isLaunchingDriver = args.userClass != null
/**
* Fail fast if we have requested more resources per container than is available in the cluster.
@@ -267,7 +268,6 @@ private[spark] trait ClientBase extends Logging {
// Note that to warn the user about the deprecation in cluster mode, some code from
// SparkConf#validateSettings() is duplicated here (to avoid triggering the condition
// described above).
- val isLaunchingDriver = args.userClass != null
if (isLaunchingDriver) {
sys.env.get("SPARK_JAVA_OPTS").foreach { value =>
val warning =
@@ -344,7 +344,6 @@ private[spark] trait ClientBase extends Logging {
}
// Include driver-specific java options if we are launching a driver
- val isLaunchingDriver = args.userClass != null
if (isLaunchingDriver) {
sparkConf.getOption("spark.driver.extraJavaOptions")
.orElse(sys.env.get("SPARK_JAVA_OPTS"))
@@ -357,7 +356,7 @@ private[spark] trait ClientBase extends Logging {
javaOpts += ("-Dspark.yarn.app.container.log.dir=" + ApplicationConstants.LOG_DIR_EXPANSION_VAR)
val userClass =
- if (args.userClass != null) {
+ if (isLaunchingDriver) {
Seq("--class", YarnSparkHadoopUtil.escapeForShell(args.userClass))
} else {
Nil