diff options
author | DB Tsai <dbtsai@dbtsai.com> | 2014-06-09 22:18:50 -0700 |
---|---|---|
committer | Xiangrui Meng <meng@databricks.com> | 2014-06-09 22:18:50 -0700 |
commit | 6f2db8c2f51911f88a601ec5bf1509ea0e8173ed (patch) | |
tree | 93a0b61ed7fc67a7f8266b57be20ad425a8285f6 | |
parent | a9ec033c8cf489898cc47e2043bd9e86b7df1ff8 (diff) | |
download | spark-6f2db8c2f51911f88a601ec5bf1509ea0e8173ed.tar.gz spark-6f2db8c2f51911f88a601ec5bf1509ea0e8173ed.tar.bz2 spark-6f2db8c2f51911f88a601ec5bf1509ea0e8173ed.zip |
Make sure that empty string is filtered out when we get the secondary jars from conf
Author: DB Tsai <dbtsai@dbtsai.com>
Closes #1027 from dbtsai/dbtsai-classloader and squashes the following commits:
9ac6be3 [DB Tsai] Fixed line too long
c9c7ad7 [DB Tsai] Make sure that empty string is filtered out when we get the secondary jars from conf.
-rw-r--r-- | yarn/common/src/main/scala/org/apache/spark/deploy/yarn/ClientBase.scala | 6 |
1 file changed, 4 insertions, 2 deletions
diff --git a/yarn/common/src/main/scala/org/apache/spark/deploy/yarn/ClientBase.scala b/yarn/common/src/main/scala/org/apache/spark/deploy/yarn/ClientBase.scala
index 4b5e0efdde..801e8b3815 100644
--- a/yarn/common/src/main/scala/org/apache/spark/deploy/yarn/ClientBase.scala
+++ b/yarn/common/src/main/scala/org/apache/spark/deploy/yarn/ClientBase.scala
@@ -221,7 +221,7 @@ trait ClientBase extends Logging {
       }
     }

-    var cachedSecondaryJarLinks = ListBuffer.empty[String]
+    val cachedSecondaryJarLinks = ListBuffer.empty[String]
     val fileLists = List( (args.addJars, LocalResourceType.FILE, true),
       (args.files, LocalResourceType.FILE, false),
       (args.archives, LocalResourceType.ARCHIVE, false) )
@@ -502,12 +502,14 @@ object ClientBase extends Logging {
     def addClasspathEntry(path: String) = YarnSparkHadoopUtil.addToEnvironment(env,
       Environment.CLASSPATH.name, path, File.pathSeparator)
     /** Add entry to the classpath. Interpreted as a path relative to the working directory. */
-    def addPwdClasspathEntry(entry: String) = addClasspathEntry(Environment.PWD.$() + Path.SEPARATOR + entry)
+    def addPwdClasspathEntry(entry: String) =
+      addClasspathEntry(Environment.PWD.$() + Path.SEPARATOR + entry)

     extraClassPath.foreach(addClasspathEntry)

     val cachedSecondaryJarLinks =
       sparkConf.getOption(CONF_SPARK_YARN_SECONDARY_JARS).getOrElse("").split(",")
+      .filter(_.nonEmpty)
     // Normally the users app.jar is last in case conflicts with spark jars
     if (sparkConf.get("spark.yarn.user.classpath.first", "false").toBoolean) {
       addPwdClasspathEntry(APP_JAR)