diff options
author | Andrew Or <andrewor14@gmail.com> | 2014-09-02 10:47:05 -0700 |
---|---|---|
committer | Andrew Or <andrewor14@gmail.com> | 2014-09-09 15:28:04 -0700 |
commit | 359cd59d1517cbe32a6d6a27a1bf604b53eea08b (patch) | |
tree | 7e09c0bf4d2c957ed6877b11a72bfdd493a2ee53 /repl | |
parent | 23fd3e8b95845b956b3c90df660bc3cf0ed42d28 (diff) | |
download | spark-359cd59d1517cbe32a6d6a27a1bf604b53eea08b.tar.gz spark-359cd59d1517cbe32a6d6a27a1bf604b53eea08b.tar.bz2 spark-359cd59d1517cbe32a6d6a27a1bf604b53eea08b.zip |
[SPARK-1919] Fix Windows spark-shell --jars
We were trying to add `file:/C:/path/to/my.jar` to the class path. We should add `C:/path/to/my.jar` instead. Tested on Windows 8.1.
Author: Andrew Or <andrewor14@gmail.com>
Closes #2211 from andrewor14/windows-shell-jars and squashes the following commits:
262c6a2 [Andrew Or] Oops... Add the new code to the correct place
0d5a0c1 [Andrew Or] Format jar path only for adding to shell classpath
42bd626 [Andrew Or] Remove unnecessary code
0049f1b [Andrew Or] Remove embarrassing log messages
b1755a0 [Andrew Or] Format jar paths properly before adding them to the classpath
Diffstat (limited to 'repl')
-rw-r--r-- | repl/src/main/scala/org/apache/spark/repl/SparkILoop.scala | 11 |
1 file changed, 10 insertions, 1 deletion
diff --git a/repl/src/main/scala/org/apache/spark/repl/SparkILoop.scala b/repl/src/main/scala/org/apache/spark/repl/SparkILoop.scala
index 65788f4646..30f73645dc 100644
--- a/repl/src/main/scala/org/apache/spark/repl/SparkILoop.scala
+++ b/repl/src/main/scala/org/apache/spark/repl/SparkILoop.scala
@@ -14,6 +14,7 @@ import scala.tools.nsc.interpreter._
 import scala.tools.nsc.interpreter.{ Results => IR }
 import Predef.{ println => _, _ }
 import java.io.{ BufferedReader, FileReader }
+import java.net.URI
 import java.util.concurrent.locks.ReentrantLock
 import scala.sys.process.Process
 import scala.tools.nsc.interpreter.session._
@@ -186,8 +187,16 @@ class SparkILoop(in0: Option[BufferedReader], protected val out: JPrintWriter,
     require(settings != null)
 
     if (addedClasspath != "") settings.classpath.append(addedClasspath)
+    val addedJars =
+      if (Utils.isWindows) {
+        // Strip any URI scheme prefix so we can add the correct path to the classpath
+        // e.g. file:/C:/my/path.jar -> C:/my/path.jar
+        SparkILoop.getAddedJars.map { jar => new URI(jar).getPath.stripPrefix("/") }
+      } else {
+        SparkILoop.getAddedJars
+      }
     // work around for Scala bug
-    val totalClassPath = SparkILoop.getAddedJars.foldLeft(
+    val totalClassPath = addedJars.foldLeft(
       settings.classpath.value)((l, r) => ClassPath.join(l, r))
     this.settings.classpath.value = totalClassPath