author    Nezih Yigitbasi <nyigitbasi@netflix.com>    2016-06-16 18:19:29 -0700
committer Marcelo Vanzin <vanzin@cloudera.com>        2016-06-16 18:20:16 -0700
commit    63470afc997fb9d6b6f8a911c25964743556c9cc (patch)
tree      02588d1187e7e3ff855edf3f55f0d3a5aa10d39f /repl
parent    f1bf0d2f3a61d81686f36763e83d3be89c98435f (diff)
[SPARK-15782][YARN] Fix spark.jars and spark.yarn.dist.jars handling
When `--packages` is specified with spark-shell, the classes from those packages cannot be found, which I think is due to some of the changes in SPARK-12343. Tested manually with both Scala 2.10 and 2.11 REPLs. vanzin davies can you guys please review?

Author: Marcelo Vanzin <vanzin@cloudera.com>
Author: Nezih Yigitbasi <nyigitbasi@netflix.com>

Closes #13709 from nezihyigitbasi/SPARK-15782.
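For context, both REPLs now delegate jar discovery to `Utils.getUserJars(conf)` instead of reading `spark.jars` directly. The sketch below is a rough, hypothetical approximation of what such a helper does, assuming it merges `spark.jars` with `spark.yarn.dist.jars` when running on YARN; the real implementation lives in core's `Utils` and is not shown in this diff.

    import org.apache.spark.SparkConf

    // Hypothetical sketch, not the code from this patch: collect user jars
    // from spark.jars and, on YARN, also from spark.yarn.dist.jars.
    def getUserJars(conf: SparkConf): Seq[String] = {
      def split(opt: Option[String]): Seq[String] =
        opt.toSeq.flatMap(_.split(",")).filter(_.nonEmpty)

      val sparkJars = split(conf.getOption("spark.jars"))
      if (conf.get("spark.master", "") == "yarn") {
        // spark-submit records resolved --packages artifacts in conf, so
        // merging both settings covers either deploy mode.
        (sparkJars ++ split(conf.getOption("spark.yarn.dist.jars"))).distinct
      } else {
        sparkJars
      }
    }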
Diffstat (limited to 'repl')
-rw-r--r--  repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoop.scala | 32
-rw-r--r--  repl/scala-2.11/src/main/scala/org/apache/spark/repl/Main.scala       |  4
2 files changed, 21 insertions, 15 deletions
diff --git a/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoop.scala b/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoop.scala
index b1e95d8fdb..8fcab386ec 100644
--- a/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoop.scala
+++ b/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoop.scala
@@ -201,10 +201,10 @@ class SparkILoop(
if (Utils.isWindows) {
// Strip any URI scheme prefix so we can add the correct path to the classpath
// e.g. file:/C:/my/path.jar -> C:/my/path.jar
- SparkILoop.getAddedJars.map { jar => new URI(jar).getPath.stripPrefix("/") }
+ getAddedJars().map { jar => new URI(jar).getPath.stripPrefix("/") }
} else {
// We need new URI(jar).getPath here for the case that `jar` includes encoded white space (%20).
- SparkILoop.getAddedJars.map { jar => new URI(jar).getPath }
+ getAddedJars().map { jar => new URI(jar).getPath }
}
// work around for Scala bug
val totalClassPath = addedJars.foldLeft(
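As an aside, the Windows branch in the hunk above relies on `java.net.URI#getPath` returning a leading-slash, percent-decoded path for `file:` URIs. A minimal illustration with hypothetical values:

    import java.net.URI

    // URI.getPath decodes percent-escapes and keeps the leading slash...
    new URI("file:/C:/my/path.jar").getPath                    // "/C:/my/path.jar"
    // ...so stripPrefix("/") yields a plain Windows path.
    new URI("file:/C:/my/path.jar").getPath.stripPrefix("/")   // "C:/my/path.jar"
    // On other platforms getPath is used only for decoding, e.g. %20 -> space.
    new URI("file:/my%20libs/a.jar").getPath                   // "/my libs/a.jar"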
@@ -1005,7 +1005,7 @@ class SparkILoop(
@DeveloperApi
def createSparkSession(): SparkSession = {
val execUri = System.getenv("SPARK_EXECUTOR_URI")
- val jars = SparkILoop.getAddedJars
+ val jars = getAddedJars()
val conf = new SparkConf()
.setMaster(getMaster())
.setJars(jars)
@@ -1060,22 +1060,30 @@ class SparkILoop(
@deprecated("Use `process` instead", "2.9.0")
private def main(settings: Settings): Unit = process(settings)
-}
-object SparkILoop extends Logging {
- implicit def loopToInterpreter(repl: SparkILoop): SparkIMain = repl.intp
- private def echo(msg: String) = Console println msg
-
- def getAddedJars: Array[String] = {
+ private[repl] def getAddedJars(): Array[String] = {
+ val conf = new SparkConf().setMaster(getMaster())
val envJars = sys.env.get("ADD_JARS")
if (envJars.isDefined) {
logWarning("ADD_JARS environment variable is deprecated, use --jar spark submit argument instead")
}
- val propJars = sys.props.get("spark.jars").flatMap { p => if (p == "") None else Some(p) }
- val jars = propJars.orElse(envJars).getOrElse("")
+ val jars = {
+ val userJars = Utils.getUserJars(conf)
+ if (userJars.isEmpty) {
+ envJars.getOrElse("")
+ } else {
+ userJars.mkString(",")
+ }
+ }
Utils.resolveURIs(jars).split(",").filter(_.nonEmpty)
}
+}
+
+object SparkILoop extends Logging {
+ implicit def loopToInterpreter(repl: SparkILoop): SparkIMain = repl.intp
+ private def echo(msg: String) = Console println msg
+
// Designed primarily for use by test code: take a String with a
// bunch of code, and prints out a transcript of what it would look
// like if you'd just typed it into the repl.
@@ -1109,7 +1117,7 @@ object SparkILoop extends Logging {
if (settings.classpath.isDefault)
settings.classpath.value = sys.props("java.class.path")
- getAddedJars.map(jar => new URI(jar).getPath).foreach(settings.classpath.append(_))
+ repl.getAddedJars().map(jar => new URI(jar).getPath).foreach(settings.classpath.append(_))
repl process settings
}
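The last line of the new `getAddedJars()` normalizes the merged list via `Utils.resolveURIs`, an internal Spark helper that is not part of this diff. A standalone stand-in, assuming it simply gives scheme-less entries an absolute `file:` scheme and rejoins them:

    import java.io.File
    import java.net.URI

    // Hypothetical stand-in for Utils.resolveURIs: give each comma-separated
    // entry a file: scheme when it has none, then rejoin with commas. (The
    // real helper also handles edge cases, e.g. Windows drive letters.)
    def resolveURIs(paths: String): String =
      paths.split(",").filter(_.nonEmpty).map { p =>
        val uri = new URI(p)
        if (uri.getScheme != null) uri.toString
        else new File(p).getAbsoluteFile.toURI.toString
      }.mkString(",")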
diff --git a/repl/scala-2.11/src/main/scala/org/apache/spark/repl/Main.scala b/repl/scala-2.11/src/main/scala/org/apache/spark/repl/Main.scala
index 771670fa55..28fe84d6fe 100644
--- a/repl/scala-2.11/src/main/scala/org/apache/spark/repl/Main.scala
+++ b/repl/scala-2.11/src/main/scala/org/apache/spark/repl/Main.scala
@@ -54,9 +54,7 @@ object Main extends Logging {
// Visible for testing
private[repl] def doMain(args: Array[String], _interp: SparkILoop): Unit = {
interp = _interp
- val jars = conf.getOption("spark.jars")
- .map(_.replace(",", File.pathSeparator))
- .getOrElse("")
+ val jars = Utils.getUserJars(conf).mkString(File.pathSeparator)
val interpArguments = List(
"-Yrepl-class-based",
"-Yrepl-outdir", s"${outputDir.getAbsolutePath}",