about summary refs log tree commit diff
path: root/repl
diff options
context:
space:
mode:
authorAndrew Or <andrewor14@gmail.com>2014-05-22 20:25:41 -0700
committerTathagata Das <tathagata.das1565@gmail.com>2014-05-22 20:25:41 -0700
commit8edbee7d1b4afc192d97ba192a5526affc464205 (patch)
tree51698533bd7aadd613e5fa2274e7134fa90a25a3 /repl
parentf9f5fd5f4e81828a3e0c391892e0f28751568843 (diff)
downloadspark-8edbee7d1b4afc192d97ba192a5526affc464205.tar.gz
spark-8edbee7d1b4afc192d97ba192a5526affc464205.tar.bz2
spark-8edbee7d1b4afc192d97ba192a5526affc464205.zip
[SPARK-1897] Respect spark.jars (and --jars) in spark-shell
Spark shell currently overwrites `spark.jars` with `ADD_JARS`. In all modes except yarn-cluster, this means the `--jar` flag passed to `bin/spark-shell` is also discarded. However, in the [docs](http://people.apache.org/~pwendell/spark-1.0.0-rc7-docs/scala-programming-guide.html#initializing-spark), we explicitly tell the users to add the jars this way. Author: Andrew Or <andrewor14@gmail.com> Closes #849 from andrewor14/shell-jars and squashes the following commits: 928a7e6 [Andrew Or] ',' -> "," (minor) afc357c [Andrew Or] Handle spark.jars == "" in SparkILoop, not SparkSubmit c6da113 [Andrew Or] Do not set spark.jars to "" d8549f7 [Andrew Or] Respect spark.jars and --jars in spark-shell
Diffstat (limited to 'repl')
-rw-r--r--repl/src/main/scala/org/apache/spark/repl/SparkILoop.scala8
1 file changed, 7 insertions(+), 1 deletion(-)
diff --git a/repl/src/main/scala/org/apache/spark/repl/SparkILoop.scala b/repl/src/main/scala/org/apache/spark/repl/SparkILoop.scala
index 296da74068..55684e94bd 100644
--- a/repl/src/main/scala/org/apache/spark/repl/SparkILoop.scala
+++ b/repl/src/main/scala/org/apache/spark/repl/SparkILoop.scala
@@ -993,7 +993,13 @@ object SparkILoop {
implicit def loopToInterpreter(repl: SparkILoop): SparkIMain = repl.intp
private def echo(msg: String) = Console println msg
- def getAddedJars: Array[String] = Option(System.getenv("ADD_JARS")).map(_.split(',')).getOrElse(new Array[String](0))
+ def getAddedJars: Array[String] = {
+ val envJars = sys.env.get("ADD_JARS")
+ val propJars = sys.props.get("spark.jars").flatMap { p =>
+ if (p == "") None else Some(p)
+ }
+ propJars.orElse(envJars).map(_.split(",")).getOrElse(Array.empty)
+ }
// Designed primarily for use by test code: take a String with a
// bunch of code, and prints out a transcript of what it would look