author    Raymond Liu <raymond.liu@intel.com>  2013-11-12 15:14:21 +0800
committer Raymond Liu <raymond.liu@intel.com>  2013-11-13 16:55:11 +0800
commit    0f2e3c6e31d56c627ff81cdc93289a7c7cb2ec16 (patch)
tree      60f01110b170ff72347e1ae6209f898712578ed3 /repl
parent    5429d62dfa16305eb23d67dfe38172803c80db65 (diff)
parent    3d4ad84b63e440fd3f4b3edb1b120ff7c14a42d1 (diff)
Merge branch 'master' into scala-2.10
Diffstat (limited to 'repl')
-rw-r--r--  repl/src/main/scala/org/apache/spark/repl/SparkILoop.scala  36
1 file changed, 29 insertions(+), 7 deletions(-)
diff --git a/repl/src/main/scala/org/apache/spark/repl/SparkILoop.scala b/repl/src/main/scala/org/apache/spark/repl/SparkILoop.scala
index 988b624feb..43e504c290 100644
--- a/repl/src/main/scala/org/apache/spark/repl/SparkILoop.scala
+++ b/repl/src/main/scala/org/apache/spark/repl/SparkILoop.scala
@@ -675,6 +675,20 @@ class SparkILoop(in0: Option[BufferedReader], protected val out: JPrintWriter,
Result(true, shouldReplay)
}
+ def addAllClasspath(args: Seq[String]): Unit = {
+ var added = false
+ var totalClasspath = ""
+ for (arg <- args) {
+ val f = File(arg).normalize
+ if (f.exists) {
+ added = true
+ addedClasspath = ClassPath.join(addedClasspath, f.path)
+ totalClasspath = ClassPath.join(settings.classpath.value, addedClasspath)
+ }
+ }
+ if (added) replay()
+ }
+
def addClasspath(arg: String): Unit = {
val f = File(arg).normalize
if (f.exists) {
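The new addAllClasspath differs from the single-argument addClasspath shown in the trailing context: it collects every existing entry first and triggers one replay() at the end, rather than replaying per path. A minimal sketch of a caller, assuming a hypothetical command handler and the platform path separator (neither is part of this commit):

    // Hypothetical driver for addAllClasspath: split one ':'/';'-separated
    // argument into individual paths, so the session is replayed only once.
    def addAllClasspathCommand(line: String): Unit = {
      val paths = line.split(java.io.File.pathSeparator).toSeq
      addAllClasspath(paths)
    }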
@@ -915,10 +929,10 @@ class SparkILoop(in0: Option[BufferedReader], protected val out: JPrintWriter,
}
def createSparkContext(): SparkContext = {
- val uri = System.getenv("SPARK_EXECUTOR_URI")
- if (uri != null) {
- System.setProperty("spark.executor.uri", uri)
- }
+ val uri = System.getenv("SPARK_EXECUTOR_URI")
+ if (uri != null) {
+ System.setProperty("spark.executor.uri", uri)
+ }
val master = this.master match {
case Some(m) => m
case None => {
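The hunk above is an indentation-only change; the logic it reformats (mirroring SPARK_EXECUTOR_URI into the spark.executor.uri system property before the context is built) can be illustrated standalone. A minimal sketch, using only the names visible in the diff:

    // Mirror the SPARK_EXECUTOR_URI environment variable into a JVM
    // system property; when the variable is unset this is a no-op.
    object ExecutorUriBridge {
      def main(args: Array[String]): Unit = {
        val uri = System.getenv("SPARK_EXECUTOR_URI")
        if (uri != null) {
          System.setProperty("spark.executor.uri", uri)
        }
        println(Option(System.getProperty("spark.executor.uri")))
      }
    }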
@@ -926,9 +940,17 @@ class SparkILoop(in0: Option[BufferedReader], protected val out: JPrintWriter,
if (prop != null) prop else "local"
}
}
- val jars = SparkILoop.getAddedJars.map(new java.io.File(_).getAbsolutePath)
- sparkContext = new SparkContext(master, "Spark shell", System.getenv("SPARK_HOME"), jars)
- echo("Created spark context..")
+ val jars = Option(System.getenv("ADD_JARS")).map(_.split(','))
+ .getOrElse(new Array[String](0))
+ .map(new java.io.File(_).getAbsolutePath)
+ try {
+ sparkContext = new SparkContext(master, "Spark shell", System.getenv("SPARK_HOME"), jars)
+ } catch {
+ case e: Exception =>
+ e.printStackTrace()
+ echo("Failed to create SparkContext, exiting...")
+ sys.exit(1)
+ }
sparkContext
}
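The ADD_JARS handling introduced above can be exercised outside the REPL. A minimal sketch, assuming only the JDK and the comma-separated convention the diff parses (an unset variable yields an empty jar list; each entry is resolved to an absolute path):

    // Standalone illustration of the ADD_JARS parsing used in
    // createSparkContext above.
    object AddJarsDemo {
      def main(args: Array[String]): Unit = {
        val jars = Option(System.getenv("ADD_JARS"))
          .map(_.split(','))
          .getOrElse(new Array[String](0))
          .map(new java.io.File(_).getAbsolutePath)
        jars.foreach(println)
      }
    }

For example, setting ADD_JARS=/tmp/a.jar,/tmp/b.jar before launching the shell would pass both absolute paths to the SparkContext constructor shown above.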