From 74737264c4a9b2a9a99bf3aa77928f6960bad78c Mon Sep 17 00:00:00 2001
From: Aaron Davidson
Date: Thu, 17 Oct 2013 18:51:19 -0700
Subject: Spark shell exits if it cannot create SparkContext

Mainly, this occurs if you provide a messed up MASTER url (one that doesn't
match one of our regexes). Previously, we would default to Mesos, fail, and
then start the shell anyway, except that any Spark command would fail.
---
 repl/src/main/scala/org/apache/spark/repl/SparkILoop.scala | 9 ++++++++-
 1 file changed, 8 insertions(+), 1 deletion(-)

diff --git a/repl/src/main/scala/org/apache/spark/repl/SparkILoop.scala b/repl/src/main/scala/org/apache/spark/repl/SparkILoop.scala
index 36f54a22cf..48a8fa9328 100644
--- a/repl/src/main/scala/org/apache/spark/repl/SparkILoop.scala
+++ b/repl/src/main/scala/org/apache/spark/repl/SparkILoop.scala
@@ -845,7 +845,14 @@ class SparkILoop(in0: Option[BufferedReader], val out: PrintWriter, val master:
     val jars = Option(System.getenv("ADD_JARS")).map(_.split(','))
       .getOrElse(new Array[String](0))
       .map(new java.io.File(_).getAbsolutePath)
-    sparkContext = new SparkContext(master, "Spark shell", System.getenv("SPARK_HOME"), jars)
+    try {
+      sparkContext = new SparkContext(master, "Spark shell", System.getenv("SPARK_HOME"), jars)
+    } catch {
+      case e: Exception =>
+        e.printStackTrace()
+        echo("Failed to create SparkContext, exiting...")
+        sys.exit(1)
+    }
     sparkContext
   }
--
cgit v1.2.3
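
For context on the failure mode described in the commit message, the sketch below is a simplified, hypothetical illustration of how a master URL that matches none of the known patterns can silently fall through to Mesos. The object name MasterDispatch, the helper backendFor, and the regex names are illustrative assumptions, not Spark's actual dispatch code; only the "unmatched URL defaults to Mesos" behavior comes from the commit message itself.

// Illustrative only: simplified stand-ins for the URL patterns the shell recognizes.
object MasterDispatch {
  private val LocalRegex = """local(\[\d+\])?""".r
  private val SparkRegex = """spark://(.+)""".r

  // Returns the backend a given master string would select.
  def backendFor(master: String): String = master match {
    case LocalRegex(_) => "local"
    case SparkRegex(_) => "standalone"
    // A URL matching no known pattern falls through here and is treated as a
    // Mesos master; the failure only surfaces later, when connecting to it.
    case other         => s"mesos ($other)"
  }

  def main(args: Array[String]): Unit = {
    println(backendFor("local[4]"))           // local
    println(backendFor("spark://host:7077"))  // standalone
    println(backendFor("spark//host:7077"))   // typo -> silently treated as Mesos
  }
}

With the patch above, the SparkContext constructor failure caused by such a fall-through now prints the stack trace and exits the shell instead of leaving the REPL running without a working context.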