diff options
author | Shivaram Venkataraman <shivaram@cs.berkeley.edu> | 2015-06-01 21:01:14 -0700 |
---|---|---|
committer | Shivaram Venkataraman <shivaram@cs.berkeley.edu> | 2015-06-01 21:01:14 -0700 |
commit | 6b44278ef7cd2a278dfa67e8393ef30775c72726 (patch) | |
tree | cf91cf9002bc56cc75db616beec7aa4644de81a1 | |
parent | 15d7c90aeb0d51851f7ebb4c75c9b249ad88dfeb (diff) | |
download | spark-6b44278ef7cd2a278dfa67e8393ef30775c72726.tar.gz spark-6b44278ef7cd2a278dfa67e8393ef30775c72726.tar.bz2 spark-6b44278ef7cd2a278dfa67e8393ef30775c72726.zip |
[SPARK-8028] [SPARKR] Use addJar instead of setJars in SparkR
This prevents the spark.jars from being cleared while using `--packages` or `--jars`
cc pwendell davies brkyvz
Author: Shivaram Venkataraman <shivaram@cs.berkeley.edu>
Closes #6568 from shivaram/SPARK-8028 and squashes the following commits:
3a9cf1f [Shivaram Venkataraman] Use addJar instead of setJars in SparkR This prevents the spark.jars from being cleared
-rw-r--r-- | core/src/main/scala/org/apache/spark/api/r/RRDD.scala | 7 |
1 file changed, 5 insertions(+), 2 deletions(-)
diff --git a/core/src/main/scala/org/apache/spark/api/r/RRDD.scala b/core/src/main/scala/org/apache/spark/api/r/RRDD.scala
index e020458888..4dfa732593 100644
--- a/core/src/main/scala/org/apache/spark/api/r/RRDD.scala
+++ b/core/src/main/scala/org/apache/spark/api/r/RRDD.scala
@@ -355,7 +355,6 @@ private[r] object RRDD {
     val sparkConf = new SparkConf().setAppName(appName)
                                    .setSparkHome(sparkHome)
-                                   .setJars(jars)
 
     // Override `master` if we have a user-specified value
     if (master != "") {
@@ -373,7 +372,11 @@ private[r] object RRDD {
       sparkConf.setExecutorEnv(name.asInstanceOf[String], value.asInstanceOf[String])
     }
 
-    new JavaSparkContext(sparkConf)
+    val jsc = new JavaSparkContext(sparkConf)
+    jars.foreach { jar =>
+      jsc.addJar(jar)
+    }
+    jsc
   }
 
   /**