diff options
author     Matei Zaharia <matei@eecs.berkeley.edu>   2010-10-16 16:19:47 -0700
committer  Matei Zaharia <matei@eecs.berkeley.edu>   2010-10-16 16:19:47 -0700
commit     166d9f91258d1e803ac44c9faf429a3920728ec6 (patch)
tree       6f7cde5ff1d74ffcf8c56006497044da74ac94b5 /src
parent     1c082ad5fbfbcb72044a96b7c0b71329ae8e682a (diff)
download   spark-166d9f91258d1e803ac44c9faf429a3920728ec6.tar.gz
           spark-166d9f91258d1e803ac44c9faf429a3920728ec6.tar.bz2
           spark-166d9f91258d1e803ac44c9faf429a3920728ec6.zip
Removed setSparkHome method on SparkContext in favor of having an
optional constructor parameter, so that the scheduler is guaranteed that
a Spark home has been set when it first builds its executor arg.
Diffstat (limited to 'src')
 src/scala/spark/MesosScheduler.scala |  6 +++---
 src/scala/spark/SparkContext.scala   | 17 ++++----------
 2 files changed, 7 insertions(+), 16 deletions(-)
diff --git a/src/scala/spark/MesosScheduler.scala b/src/scala/spark/MesosScheduler.scala
index 0f7adb4826..c45eff64d4 100644
--- a/src/scala/spark/MesosScheduler.scala
+++ b/src/scala/spark/MesosScheduler.scala
@@ -87,9 +87,9 @@ extends MScheduler with spark.Scheduler with Logging
     val sparkHome = sc.getSparkHome match {
       case Some(path) => path
       case None =>
-        throw new SparkException("Spark home is not set; either set the " +
-          "spark.home system property or the SPARK_HOME environment variable " +
-          "or call SparkContext.setSparkHome")
+        throw new SparkException("Spark home is not set; set it through the " +
+          "spark.home system property, the SPARK_HOME environment variable " +
+          "or the SparkContext constructor")
     }
     val execScript = new File(sparkHome, "spark-executor").getCanonicalPath
     val params = new JHashMap[String, String]
diff --git a/src/scala/spark/SparkContext.scala b/src/scala/spark/SparkContext.scala
index b9870cc3b9..69c3332bb0 100644
--- a/src/scala/spark/SparkContext.scala
+++ b/src/scala/spark/SparkContext.scala
@@ -8,11 +8,9 @@ import scala.collection.mutable.ArrayBuffer
 class SparkContext(
     master: String,
     frameworkName: String,
+    val sparkHome: String = null,
     val jars: Seq[String] = Nil)
 extends Logging {
-  // Spark home directory, used to resolve executor when running on Mesos
-  private var sparkHome: Option[String] = None
-
   private[spark] var scheduler: Scheduler = {
     // Regular expression used for local[N] master format
     val LOCAL_N_REGEX = """local\[([0-9]+)\]""".r
@@ -63,19 +61,12 @@ extends Logging {
     scheduler.waitForRegister()
   }
 
-  // Set the Spark home location
-  def setSparkHome(path: String) {
-    if (path == null)
-      throw new NullPointerException("Path passed to setSparkHome was null")
-    sparkHome = Some(path)
-  }
-
-  // Get Spark's home location from either a value set through setSparkHome,
+  // Get Spark's home location from either a value set through the constructor,
   // or the spark.home Java property, or the SPARK_HOME environment variable
   // (in that order of preference). If neither of these is set, return None.
   def getSparkHome(): Option[String] = {
-    if (sparkHome != None)
-      sparkHome
+    if (sparkHome != null)
+      Some(sparkHome)
     else if (System.getProperty("spark.home") != null)
       Some(System.getProperty("spark.home"))
     else if (System.getenv("SPARK_HOME") != null)