aboutsummaryrefslogtreecommitdiff
path: root/repl/src
diff options
context:
space:
mode:
authorAaron Davidson <aaron@databricks.com>2013-11-14 22:13:09 -0800
committerAaron Davidson <aaron@databricks.com>2013-11-14 22:13:09 -0800
commitf629ba95b6a1a3508463bfdcb03efcfaa3327cb5 (patch)
tree74a4cbcd839471d8c4248c9615f5803fad28787d /repl/src
parentd4cd32330e1e4ac83b38bc922a9d3fd85f85f606 (diff)
downloadspark-f629ba95b6a1a3508463bfdcb03efcfaa3327cb5.tar.gz
spark-f629ba95b6a1a3508463bfdcb03efcfaa3327cb5.tar.bz2
spark-f629ba95b6a1a3508463bfdcb03efcfaa3327cb5.zip
Various merge corrections
I've diff'd this patch against my own -- since they were both created independently, this means that two sets of eyes have gone over all the merge conflicts that were created, so I'm feeling significantly more confident in the resulting PR. @rxin has looked at the changes to the repl and is resoundingly confident that they are correct.
Diffstat (limited to 'repl/src')
-rw-r--r-- repl/src/main/scala/org/apache/spark/repl/SparkILoop.scala | 14
-rw-r--r-- repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala | 2
2 files changed, 4 insertions, 12 deletions
diff --git a/repl/src/main/scala/org/apache/spark/repl/SparkILoop.scala b/repl/src/main/scala/org/apache/spark/repl/SparkILoop.scala
index 43e504c290..523fd1222d 100644
--- a/repl/src/main/scala/org/apache/spark/repl/SparkILoop.scala
+++ b/repl/src/main/scala/org/apache/spark/repl/SparkILoop.scala
@@ -940,17 +940,9 @@ class SparkILoop(in0: Option[BufferedReader], protected val out: JPrintWriter,
if (prop != null) prop else "local"
}
}
- val jars = Option(System.getenv("ADD_JARS")).map(_.split(','))
- .getOrElse(new Array[String](0))
- .map(new java.io.File(_).getAbsolutePath)
- try {
- sparkContext = new SparkContext(master, "Spark shell", System.getenv("SPARK_HOME"), jars)
- } catch {
- case e: Exception =>
- e.printStackTrace()
- echo("Failed to create SparkContext, exiting...")
- sys.exit(1)
- }
+ val jars = SparkILoop.getAddedJars.map(new java.io.File(_).getAbsolutePath)
+ sparkContext = new SparkContext(master, "Spark shell", System.getenv("SPARK_HOME"), jars)
+ echo("Created spark context..")
sparkContext
}
diff --git a/repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala b/repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala
index 418c31e24b..c230a03298 100644
--- a/repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala
+++ b/repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala
@@ -78,7 +78,7 @@ class ReplSuite extends FunSuite {
System.clearProperty("spark.hostPort")
}
- test ("simple foreach with accumulator") {
+ test("simple foreach with accumulator") {
val output = runInterpreter("local", """
|val accum = sc.accumulator(0)
|sc.parallelize(1 to 10).foreach(x => accum += x)