aboutsummaryrefslogtreecommitdiff
path: root/repl
diff options
context:
space:
mode:
authorPrashant Sharma <prashant.s@imaginea.com>2013-07-12 11:55:16 +0530
committerPrashant Sharma <prashant.s@imaginea.com>2013-07-12 11:55:16 +0530
commit6e6d94ffdfa885432408d6996bad7df2a641748a (patch)
tree5a0ac3112f8f5cbaf29969cfd7358b3c04bb8305 /repl
parentca249eea50065a80cf01f62e3144a0c18cce5f18 (diff)
downloadspark-6e6d94ffdfa885432408d6996bad7df2a641748a.tar.gz
spark-6e6d94ffdfa885432408d6996bad7df2a641748a.tar.bz2
spark-6e6d94ffdfa885432408d6996bad7df2a641748a.zip
Added the add-jars functionality to the new REPL, which had been dropped while merging with the old one.
Diffstat (limited to 'repl')
-rw-r--r--repl/src/main/scala/spark/repl/SparkILoop.scala11
-rw-r--r--repl/src/main/scala/spark/repl/SparkILoopInit.scala2
-rw-r--r--repl/src/test/scala/spark/repl/ReplSuiteMixin.scala2
3 files changed, 11 insertions, 4 deletions
diff --git a/repl/src/main/scala/spark/repl/SparkILoop.scala b/repl/src/main/scala/spark/repl/SparkILoop.scala
index 2b6e7b68bf..cc7a63e166 100644
--- a/repl/src/main/scala/spark/repl/SparkILoop.scala
+++ b/repl/src/main/scala/spark/repl/SparkILoop.scala
@@ -30,7 +30,7 @@ import scala.tools.util._
import scala.language.{implicitConversions, existentials}
import scala.reflect.{ClassTag, classTag}
import scala.tools.reflect.StdRuntimeTags._
-import scala.reflect.{ClassTag, classTag}
+
import java.lang.{Class => jClass}
import scala.reflect.api.{Mirror, TypeCreator, Universe => ApiUniverse}
@@ -641,6 +641,7 @@ class SparkILoop(in0: Option[BufferedReader], protected val out: JPrintWriter,
reset()
}
+
def reset() {
intp.reset()
// unleashAndSetPhase()
@@ -921,7 +922,8 @@ class SparkILoop(in0: Option[BufferedReader], protected val out: JPrintWriter,
if (prop != null) prop else "local"
}
}
- sparkContext = new SparkContext(master, "Spark shell")
+ val jars = SparkILoop.getAddedJars.map(new java.io.File(_).getAbsolutePath)
+ sparkContext = new SparkContext(master, "Spark shell", System.getenv("SPARK_HOME"), jars)
echo("Created spark context..")
sparkContext
}
@@ -948,6 +950,8 @@ object SparkILoop {
implicit def loopToInterpreter(repl: SparkILoop): SparkIMain = repl.intp
private def echo(msg: String) = Console println msg
+ def getAddedJars: Array[String] = Option(System.getenv("ADD_JARS")).map(_.split(',')).getOrElse(new Array[String](0))
+
// Designed primarily for use by test code: take a String with a
// bunch of code, and prints out a transcript of what it would look
// like if you'd just typed it into the repl.
@@ -975,9 +979,12 @@ object SparkILoop {
}
}
val repl = new SparkILoop(input, output)
+
if (settings.classpath.isDefault)
settings.classpath.value = sys.props("java.class.path")
+ getAddedJars.foreach(settings.classpath.append(_))
+
repl process settings
}
}
diff --git a/repl/src/main/scala/spark/repl/SparkILoopInit.scala b/repl/src/main/scala/spark/repl/SparkILoopInit.scala
index 8b7da3d3c6..b89495d2f3 100644
--- a/repl/src/main/scala/spark/repl/SparkILoopInit.scala
+++ b/repl/src/main/scala/spark/repl/SparkILoopInit.scala
@@ -119,7 +119,7 @@ trait SparkILoopInit {
command("""
@transient val sc = spark.repl.Main.interp.createSparkContext();
""")
- command("import spark.SparkContext._");
+ command("import spark.SparkContext._")
}
echo("Spark context available as sc.")
}
diff --git a/repl/src/test/scala/spark/repl/ReplSuiteMixin.scala b/repl/src/test/scala/spark/repl/ReplSuiteMixin.scala
index 3b5b6385ad..d88e44ad19 100644
--- a/repl/src/test/scala/spark/repl/ReplSuiteMixin.scala
+++ b/repl/src/test/scala/spark/repl/ReplSuiteMixin.scala
@@ -36,7 +36,7 @@ trait ReplSuiteMixin {
interp.closeInterpreter()
}
// To avoid Akka rebinding to the same port, since it doesn't unbind immediately on shutdown
- System.clearProperty("spark.driver.port")
+ System.clearProperty("spark.hostPort")
return out.toString
}