about summary refs log tree commit diff
path: root/repl
diff options
context:
space:
mode:
authorBenjamin Hindman <benjamin.hindman@gmail.com>2013-07-23 09:33:13 -0400
committerBenjamin Hindman <benjamin.hindman@gmail.com>2013-07-29 23:32:52 -0700
commitf6f46455eb4f1199eb9a464b1a0b45d9926f7ff8 (patch)
tree78f97d0a0c00c149c06ecd734b4885bfc81e5d6d /repl
parent49be084ed3cca4778ff0602c0bee962a3515338e (diff)
downloadspark-f6f46455eb4f1199eb9a464b1a0b45d9926f7ff8.tar.gz
spark-f6f46455eb4f1199eb9a464b1a0b45d9926f7ff8.tar.bz2
spark-f6f46455eb4f1199eb9a464b1a0b45d9926f7ff8.zip
Added property 'spark.executor.uri' for launching on Mesos without
requiring Spark to be installed. Using 'make_distribution.sh' a user can put a Spark distribution at a URI supported by Mesos (e.g., 'hdfs://...') and then set that when launching their job. Also added SPARK_EXECUTOR_URI for the REPL.
Diffstat (limited to 'repl')
-rw-r--r-- repl/src/main/scala/spark/repl/SparkILoop.scala | 2
1 file changed, 2 insertions, 0 deletions
diff --git a/repl/src/main/scala/spark/repl/SparkILoop.scala b/repl/src/main/scala/spark/repl/SparkILoop.scala
index 59f9d05683..0bfe7bb743 100644
--- a/repl/src/main/scala/spark/repl/SparkILoop.scala
+++ b/repl/src/main/scala/spark/repl/SparkILoop.scala
@@ -831,6 +831,8 @@ class SparkILoop(in0: Option[BufferedReader], val out: PrintWriter, val master:
var sparkContext: SparkContext = null
def createSparkContext(): SparkContext = {
+ val uri = System.getenv("SPARK_EXECUTOR_URI")
+ if (uri != null) System.setProperty("spark.executor.uri", uri)
val master = this.master match {
case Some(m) => m
case None => {