Diffstat (limited to 'repl')
-rw-r--r--  repl/src/main/scala/org/apache/spark/repl/SparkILoop.scala | 14
-rw-r--r--  repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala  |  2
2 files changed, 4 insertions(+), 12 deletions(-)
diff --git a/repl/src/main/scala/org/apache/spark/repl/SparkILoop.scala b/repl/src/main/scala/org/apache/spark/repl/SparkILoop.scala
index 43e504c290..523fd1222d 100644
--- a/repl/src/main/scala/org/apache/spark/repl/SparkILoop.scala
+++ b/repl/src/main/scala/org/apache/spark/repl/SparkILoop.scala
@@ -940,17 +940,9 @@ class SparkILoop(in0: Option[BufferedReader], protected val out: JPrintWriter,
if (prop != null) prop else "local"
}
}
- val jars = Option(System.getenv("ADD_JARS")).map(_.split(','))
- .getOrElse(new Array[String](0))
- .map(new java.io.File(_).getAbsolutePath)
- try {
- sparkContext = new SparkContext(master, "Spark shell", System.getenv("SPARK_HOME"), jars)
- } catch {
- case e: Exception =>
- e.printStackTrace()
- echo("Failed to create SparkContext, exiting...")
- sys.exit(1)
- }
+ val jars = SparkILoop.getAddedJars.map(new java.io.File(_).getAbsolutePath)
+ sparkContext = new SparkContext(master, "Spark shell", System.getenv("SPARK_HOME"), jars)
+ echo("Created spark context..")
sparkContext
}
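
Note: the inline ADD_JARS parsing removed above appears to be folded into a helper on the SparkILoop companion object, but its implementation is not part of this diff. The following is a hedged, standalone sketch that reconstructs the removed logic; the object name is illustrative and only getAddedJars comes from the diff itself.

// Hedged sketch: a stand-in for the SparkILoop.getAddedJars helper assumed by
// the diff above. It mirrors the removed inline logic: read the ADD_JARS
// environment variable, split it on commas, and fall back to an empty array.
object AddedJarsSketch {
  def getAddedJars: Array[String] =
    Option(System.getenv("ADD_JARS"))
      .map(_.split(','))
      .getOrElse(Array.empty[String])

  def main(args: Array[String]): Unit = {
    // Mirror the call site in createSparkContext: resolve each jar to an absolute path.
    val jars = getAddedJars.map(new java.io.File(_).getAbsolutePath)
    println(jars.mkString(", "))
  }
}
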
diff --git a/repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala b/repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala
index 418c31e24b..c230a03298 100644
--- a/repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala
+++ b/repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala
@@ -78,7 +78,7 @@ class ReplSuite extends FunSuite {
System.clearProperty("spark.hostPort")
}
- test ("simple foreach with accumulator") {
+ test("simple foreach with accumulator") {
val output = runInterpreter("local", """
|val accum = sc.accumulator(0)
|sc.parallelize(1 to 10).foreach(x => accum += x)