From 6e6d94ffdfa885432408d6996bad7df2a641748a Mon Sep 17 00:00:00 2001
From: Prashant Sharma
Date: Fri, 12 Jul 2013 11:55:16 +0530
Subject: Added add jars functionality to new repl, which was dropped while
 merging with old.

---
 repl/src/main/scala/spark/repl/SparkILoop.scala     | 11 +++++++++--
 repl/src/main/scala/spark/repl/SparkILoopInit.scala |  2 +-
 repl/src/test/scala/spark/repl/ReplSuiteMixin.scala |  2 +-
 3 files changed, 11 insertions(+), 4 deletions(-)

diff --git a/repl/src/main/scala/spark/repl/SparkILoop.scala b/repl/src/main/scala/spark/repl/SparkILoop.scala
index 2b6e7b68bf..cc7a63e166 100644
--- a/repl/src/main/scala/spark/repl/SparkILoop.scala
+++ b/repl/src/main/scala/spark/repl/SparkILoop.scala
@@ -30,7 +30,7 @@ import scala.tools.util._
 import scala.language.{implicitConversions, existentials}
 import scala.reflect.{ClassTag, classTag}
 import scala.tools.reflect.StdRuntimeTags._
-import scala.reflect.{ClassTag, classTag}
+
 import java.lang.{Class => jClass}
 import scala.reflect.api.{Mirror, TypeCreator, Universe => ApiUniverse}
 
@@ -641,6 +641,7 @@ class SparkILoop(in0: Option[BufferedReader], protected val out: JPrintWriter,
     reset()
   }
 
+
   def reset() {
     intp.reset()
     // unleashAndSetPhase()
@@ -921,7 +922,8 @@ class SparkILoop(in0: Option[BufferedReader], protected val out: JPrintWriter,
         if (prop != null) prop else "local"
       }
     }
-    sparkContext = new SparkContext(master, "Spark shell")
+    val jars = SparkILoop.getAddedJars.map(new java.io.File(_).getAbsolutePath)
+    sparkContext = new SparkContext(master, "Spark shell", System.getenv("SPARK_HOME"), jars)
     echo("Created spark context..")
     sparkContext
   }
@@ -948,6 +950,8 @@ object SparkILoop {
   implicit def loopToInterpreter(repl: SparkILoop): SparkIMain = repl.intp
   private def echo(msg: String) = Console println msg
 
+  def getAddedJars: Array[String] = Option(System.getenv("ADD_JARS")).map(_.split(',')).getOrElse(new Array[String](0))
+
   // Designed primarily for use by test code: take a String with a
   // bunch of code, and prints out a transcript of what it would look
   // like if you'd just typed it into the repl.
@@ -975,9 +979,12 @@ object SparkILoop {
         }
       }
       val repl = new SparkILoop(input, output)
+
       if (settings.classpath.isDefault)
         settings.classpath.value = sys.props("java.class.path")
 
+      getAddedJars.foreach(settings.classpath.append(_))
+
       repl process settings
     }
   }
diff --git a/repl/src/main/scala/spark/repl/SparkILoopInit.scala b/repl/src/main/scala/spark/repl/SparkILoopInit.scala
index 8b7da3d3c6..b89495d2f3 100644
--- a/repl/src/main/scala/spark/repl/SparkILoopInit.scala
+++ b/repl/src/main/scala/spark/repl/SparkILoopInit.scala
@@ -119,7 +119,7 @@ trait SparkILoopInit {
       command("""
         @transient val sc = spark.repl.Main.interp.createSparkContext();
         """)
-      command("import spark.SparkContext._");
+      command("import spark.SparkContext._")
     }
     echo("Spark context available as sc.")
   }
diff --git a/repl/src/test/scala/spark/repl/ReplSuiteMixin.scala b/repl/src/test/scala/spark/repl/ReplSuiteMixin.scala
index 3b5b6385ad..d88e44ad19 100644
--- a/repl/src/test/scala/spark/repl/ReplSuiteMixin.scala
+++ b/repl/src/test/scala/spark/repl/ReplSuiteMixin.scala
@@ -36,7 +36,7 @@ trait ReplSuiteMixin {
       interp.closeInterpreter()
     }
     // To avoid Akka rebinding to the same port, since it doesn't unbind immediately on shutdown
-    System.clearProperty("spark.driver.port")
+    System.clearProperty("spark.hostPort")
     return out.toString
   }
 
--
cgit v1.2.3
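For reference, below is a minimal standalone sketch of the ADD_JARS handling
this patch introduces. Only the getAddedJars body and the getAbsolutePath
mapping mirror the patch itself; the object name, the main wrapper, and the
sample jar paths are illustrative.

    // Sketch: how the REPL derives the jar list handed to SparkContext.
    object AddedJarsSketch {
      // Comma-separated jar list from the environment, empty when ADD_JARS
      // is unset (mirrors SparkILoop.getAddedJars in the patch).
      def getAddedJars: Array[String] =
        Option(System.getenv("ADD_JARS")).map(_.split(',')).getOrElse(new Array[String](0))

      def main(args: Array[String]): Unit = {
        // Resolve each entry to an absolute path, as createSparkContext()
        // does before constructing the SparkContext.
        val jars = getAddedJars.map(new java.io.File(_).getAbsolutePath)
        jars.foreach(println)
      }
    }

Running it as, e.g., `ADD_JARS=lib/a.jar,lib/b.jar scala AddedJarsSketch`
(hypothetical paths) prints the absolute paths that the patch passes to the
SparkContext constructor. Note the same list is also appended to the
interpreter classpath in SparkILoop, so classes from the added jars resolve
at compile time in the shell as well as at runtime on the cluster.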