author      Jeff Zhang <zjffdu@apache.org>          2016-11-02 11:47:45 -0700
committer   Marcelo Vanzin <vanzin@cloudera.com>    2016-11-02 11:47:45 -0700
commit      3c24299b71e23e159edbb972347b13430f92a465 (patch)
tree        af6c8aa0fe13aaf1b35fa424667f04ca1e217cf0 /core/src
parent      02f203107b8eda1f1576e36c4f12b0e3bc5e910e (diff)
[SPARK-18160][CORE][YARN] spark.files & spark.jars should not be passed to driver in yarn mode
## What changes were proposed in this pull request?

spark.files is still passed to the driver in yarn mode, so SparkContext will still handle it, which causes the error described in the JIRA.

## How was this patch tested?

Tested manually on a 5-node cluster. Since this issue only occurs in a multi-node cluster, I didn't write a test for it.

Author: Jeff Zhang <zjffdu@apache.org>

Closes #15669 from zjffdu/SPARK-18160.
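The YARN half of this change lives outside core/src, so it is not part of the diff below; conceptually, the client stops forwarding these two keys to the driver once YARN itself has distributed the files. A minimal Scala sketch of that idea, using the hypothetical name `StripYarnHandledKeys` (illustrative only, not code from this patch):

```scala
import org.apache.spark.SparkConf

// Hypothetical sketch, not the actual patch: once the YARN client has
// shipped the user's jars and files through the distributed cache, the
// keys are dropped so SparkContext on the driver does not try to
// re-distribute them itself.
object StripYarnHandledKeys {
  def apply(conf: SparkConf): SparkConf = {
    // Assumption: both keys are fully handled on the YARN client side.
    Seq("spark.jars", "spark.files").foreach(conf.remove)
    conf
  }
}
```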
Diffstat (limited to 'core/src')
-rw-r--r--  core/src/main/scala/org/apache/spark/SparkContext.scala | 29
1 file changed, 6 insertions(+), 23 deletions(-)
diff --git a/core/src/main/scala/org/apache/spark/SparkContext.scala b/core/src/main/scala/org/apache/spark/SparkContext.scala
index 4694790c72..63478c88b0 100644
--- a/core/src/main/scala/org/apache/spark/SparkContext.scala
+++ b/core/src/main/scala/org/apache/spark/SparkContext.scala
@@ -1716,29 +1716,12 @@ class SparkContext(config: SparkConf) extends Logging {
         key = uri.getScheme match {
           // A JAR file which exists only on the driver node
           case null | "file" =>
-            if (master == "yarn" && deployMode == "cluster") {
-              // In order for this to work in yarn cluster mode the user must specify the
-              // --addJars option to the client to upload the file into the distributed cache
-              // of the AM to make it show up in the current working directory.
-              val fileName = new Path(uri.getPath).getName()
-              try {
-                env.rpcEnv.fileServer.addJar(new File(fileName))
-              } catch {
-                case e: Exception =>
-                  // For now just log an error but allow to go through so spark examples work.
-                  // The spark examples don't really need the jar distributed since its also
-                  // the app jar.
-                  logError("Error adding jar (" + e + "), was the --addJars option used?")
-                  null
-              }
-            } else {
-              try {
-                env.rpcEnv.fileServer.addJar(new File(uri.getPath))
-              } catch {
-                case exc: FileNotFoundException =>
-                  logError(s"Jar not found at $path")
-                  null
-              }
+            try {
+              env.rpcEnv.fileServer.addJar(new File(uri.getPath))
+            } catch {
+              case exc: FileNotFoundException =>
+                logError(s"Jar not found at $path")
+                null
             }
           // A JAR file which exists locally on every worker node
           case "local" =>