author     Aaron Davidson <aaron@databricks.com>  2013-10-17 18:51:19 -0700
committer  Aaron Davidson <aaron@databricks.com>  2013-10-17 18:51:19 -0700
commit  74737264c4a9b2a9a99bf3aa77928f6960bad78c (patch)
tree    b1806dc4a32f16d1a4201eeb52b0bd1da1322508 /repl
parent  fc26e5b8320556b9edb93741391b759813b4079b (diff)
Spark shell exits if it cannot create SparkContext
Mainly, this occurs if you provide a malformed MASTER URL (one that doesn't match any of our regexes). Previously, we would default to Mesos, fail, and then start the shell anyway, leaving a REPL in which every Spark command failed. See the sketch below for the kind of URL matching involved.
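As a minimal, illustrative sketch of the URL matching the commit message alludes to: the regex names and patterns below are assumptions for demonstration, not Spark's actual scheduler-selection code. The point is that an unrecognized master URL used to fall through the match to a Mesos default instead of failing up front.

```scala
// Sketch only: MasterUrlSketch, LOCAL_N_REGEX, and SPARK_REGEX are
// hypothetical names; the real patterns live in Spark's scheduler setup.
object MasterUrlSketch {
  val LOCAL_N_REGEX = """local\[([0-9]+)\]""".r // e.g. local[4]
  val SPARK_REGEX   = """spark://(.*)""".r      // e.g. spark://host:7077

  def describe(master: String): String = master match {
    case "local"           => "single-threaded local mode"
    case LOCAL_N_REGEX(n)  => s"local mode with $n threads"
    case SPARK_REGEX(addr) => s"standalone cluster at $addr"
    case other             =>
      // Pre-patch behavior: an unrecognized URL fell through to the
      // Mesos code path, which failed only after the shell had started.
      s"unrecognized master URL '$other' (previously assumed Mesos)"
  }
}
```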
Diffstat (limited to 'repl')
-rw-r--r--  repl/src/main/scala/org/apache/spark/repl/SparkILoop.scala  9
1 file changed, 8 insertions(+), 1 deletion(-)
diff --git a/repl/src/main/scala/org/apache/spark/repl/SparkILoop.scala b/repl/src/main/scala/org/apache/spark/repl/SparkILoop.scala
index 36f54a22cf..48a8fa9328 100644
--- a/repl/src/main/scala/org/apache/spark/repl/SparkILoop.scala
+++ b/repl/src/main/scala/org/apache/spark/repl/SparkILoop.scala
@@ -845,7 +845,14 @@ class SparkILoop(in0: Option[BufferedReader], val out: PrintWriter, val master:
val jars = Option(System.getenv("ADD_JARS")).map(_.split(','))
.getOrElse(new Array[String](0))
.map(new java.io.File(_).getAbsolutePath)
- sparkContext = new SparkContext(master, "Spark shell", System.getenv("SPARK_HOME"), jars)
+ try {
+ sparkContext = new SparkContext(master, "Spark shell", System.getenv("SPARK_HOME"), jars)
+ } catch {
+ case e: Exception =>
+ e.printStackTrace()
+ echo("Failed to create SparkContext, exiting...")
+ sys.exit(1)
+ }
sparkContext
}
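For reference, here is a standalone sketch of the fail-fast pattern this patch adopts. `FailFastShell` and `initContext` are hypothetical stand-ins for the REPL and `new SparkContext(...)`; only the try/catch-and-exit shape mirrors the diff above.

```scala
// Hypothetical sketch: initContext stands in for new SparkContext(...)
// and throws on a bad master URL, as the real constructor can.
object FailFastShell {
  def initContext(master: String): String = {
    if (!master.startsWith("local") && !master.startsWith("spark://"))
      throw new IllegalArgumentException(s"Invalid master URL: $master")
    master // a real implementation would return a SparkContext
  }

  def main(args: Array[String]): Unit = {
    val master = args.headOption.getOrElse("local")
    val ctx =
      try initContext(master)
      catch {
        case e: Exception =>
          // Same shape as the patch: report the failure, then exit
          // instead of starting a shell where every command would fail.
          e.printStackTrace()
          Console.err.println("Failed to create SparkContext, exiting...")
          sys.exit(1)
      }
    println(s"Shell would start against master: $ctx")
  }
}
```

Catching `Exception` broadly is deliberate here: any construction failure, not just a bad master URL, aborts the shell rather than leaving a broken REPL behind.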