author     Patrick Wendell <pwendell@gmail.com>  2014-01-06 10:29:04 -0800
committer  Patrick Wendell <pwendell@gmail.com>  2014-01-06 10:29:04 -0800
commit     357083c29f59bc7df593911ac2c5735e4ee9de74 (patch)
tree       38e9297b56db168c985071f1d915d331570980a4 /core
parent     a2e7e0497484554f86bd71e93705eb0422b1512b (diff)
parent     25446dd931cce5916de5dddf4689b41ee6fd3148 (diff)
Merge pull request #330 from tgravescs/fix_addjars_null_handling
Fix handling of empty SPARK_EXAMPLES_JAR. Currently, if SPARK_EXAMPLES_JAR is left unset, you get a null pointer exception when running the examples (at least on Spark on YARN). The null now gets turned into the string "null" when it is put into the SparkConf, so addJar no longer properly ignores it. This fixes that so the variable can be left unset.
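As a minimal sketch of the failure mode (illustrative only; the getenv lookup and printlns here are not part of the patch), the old code path turned a null jar entry into the literal string "null" when the list was comma-joined, while the patched path drops it:

    // Sketch: assumes SPARK_EXAMPLES_JAR is unset, so getenv returns null.
    val exampleJar: String = System.getenv("SPARK_EXAMPLES_JAR")
    val jars = Seq(exampleJar)

    // Old behavior: mkString renders the null as the string "null", so
    // "spark.jars" was set to "null" and addJar could no longer ignore it.
    println(jars.mkString(","))                   // prints: null

    // Patched behavior: null entries are filtered out before joining.
    println(jars.filter(_ != null).mkString(",")) // prints an empty line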
Diffstat (limited to 'core')
-rw-r--r--  core/src/main/scala/org/apache/spark/SparkConf.scala  5
1 file changed, 3 insertions(+), 2 deletions(-)
diff --git a/core/src/main/scala/org/apache/spark/SparkConf.scala b/core/src/main/scala/org/apache/spark/SparkConf.scala
index 98343e9532..55f27033b5 100644
--- a/core/src/main/scala/org/apache/spark/SparkConf.scala
+++ b/core/src/main/scala/org/apache/spark/SparkConf.scala
@@ -24,7 +24,7 @@ import com.typesafe.config.ConfigFactory
  *
  * @param loadDefaults whether to load values from the system properties and classpath
  */
-class SparkConf(loadDefaults: Boolean) extends Serializable with Cloneable {
+class SparkConf(loadDefaults: Boolean) extends Serializable with Cloneable with Logging {
 
   /** Create a SparkConf that loads defaults from system properties and the classpath */
   def this() = this(true)
@@ -67,7 +67,8 @@ class SparkConf(loadDefaults: Boolean) extends Serializable with Cloneable {
 
   /** Set JAR files to distribute to the cluster. */
   def setJars(jars: Seq[String]): SparkConf = {
-    set("spark.jars", jars.mkString(","))
+    for (jar <- jars if (jar == null)) logWarning("null jar passed to SparkContext constructor")
+    set("spark.jars", jars.filter(_ != null).mkString(","))
   }
 
   /** Set JAR files to distribute to the cluster. (Java-friendly version.) */
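For context, a brief usage sketch of the patched setJars (the app name and jar path are made up for illustration):

    import org.apache.spark.SparkConf

    // Hypothetical usage: the null entry triggers a logWarning and is
    // filtered out instead of being serialized into "spark.jars".
    val conf = new SparkConf()
      .setAppName("null-jar-demo")
      .setJars(Seq("/path/to/app.jar", null))

    assert(conf.get("spark.jars") == "/path/to/app.jar")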