author     Reynold Xin <rxin@apache.org>    2014-05-26 22:05:23 -0700
committer  Reynold Xin <rxin@apache.org>    2014-05-26 22:05:23 -0700
commit     90e281b55aecbfbe4431ac582311d5790fe7aad3 (patch)
tree       d7a8f3f3848ace47474a6b000fc76ed866121dc1 /core
parent     9ed37190f45fd9e6aa0f2c73b66d317732a53eb8 (diff)
SPARK-1933: Throw a more meaningful exception when a directory is passed to addJar/addFile.
https://issues.apache.org/jira/browse/SPARK-1933

Author: Reynold Xin <rxin@apache.org>

Closes #888 from rxin/addfile and squashes the following commits:

8c402a3 [Reynold Xin] Updated comment.
ff6c162 [Reynold Xin] SPARK-1933: Throw a more meaningful exception when a directory is passed to addJar/addFile.
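
Illustration (not part of the patch): a minimal sketch of the user-facing change, assuming a local SparkContext and a hypothetical temp directory. Before this change, passing a directory to addFile surfaced Guava's confusing FileNotFoundException; with it, the call fails fast with an IllegalArgumentException.

// Hypothetical repro; object name and paths are illustrative, not from the patch.
import java.io.File

import org.apache.spark.{SparkConf, SparkContext}

object AddFileDirectoryRepro {
  def main(args: Array[String]): Unit = {
    val sc = new SparkContext(new SparkConf().setMaster("local").setAppName("addfile-repro"))
    val dir = new File(System.getProperty("java.io.tmpdir"), "addfile-repro-dir")
    dir.mkdirs()  // deliberately a directory, not a file
    try {
      sc.addFile(dir.getAbsolutePath)  // with this patch: IllegalArgumentException up front
    } catch {
      case e: IllegalArgumentException => println("Rejected: " + e.getMessage)
    } finally {
      sc.stop()
    }
  }
}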
Diffstat (limited to 'core')
-rw-r--r--  core/src/main/scala/org/apache/spark/HttpFileServer.scala  7
-rw-r--r--  core/src/main/scala/org/apache/spark/SparkContext.scala    5
2 files changed, 9 insertions(+), 3 deletions(-)
diff --git a/core/src/main/scala/org/apache/spark/HttpFileServer.scala b/core/src/main/scala/org/apache/spark/HttpFileServer.scala
index a6e300d345..0e3750fdde 100644
--- a/core/src/main/scala/org/apache/spark/HttpFileServer.scala
+++ b/core/src/main/scala/org/apache/spark/HttpFileServer.scala
@@ -59,6 +59,13 @@ private[spark] class HttpFileServer(securityManager: SecurityManager) extends Logging {
   }
 
   def addFileToDir(file: File, dir: File) : String = {
+    // Check whether the file is a directory. If it is, throw a more meaningful exception.
+    // If we don't catch this, Guava throws a very confusing error message:
+    //   java.io.FileNotFoundException: [file] (No such file or directory)
+    // even though the directory ([file]) exists.
+    if (file.isDirectory) {
+      throw new IllegalArgumentException(s"$file cannot be a directory.")
+    }
     Files.copy(file, new File(dir, file.getName))
     dir + "/" + file.getName
   }
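
Illustration (not part of the patch): the guard in isolation, as a self-contained sketch with hypothetical names, showing why the directory check runs before Guava's Files.copy.

// Self-contained sketch mirroring the patched method; object name is illustrative.
import java.io.File

import com.google.common.io.Files

object DirectoryGuardSketch {
  def addFileToDir(file: File, dir: File): String = {
    // Without this check, Guava's Files.copy on a directory source fails with a
    // misleading "java.io.FileNotFoundException: [file] (No such file or directory)".
    if (file.isDirectory) {
      throw new IllegalArgumentException(s"$file cannot be a directory.")
    }
    Files.copy(file, new File(dir, file.getName))
    dir + "/" + file.getName
  }

  def main(args: Array[String]): Unit = {
    val targetDir = Files.createTempDir()
    try {
      addFileToDir(new File(System.getProperty("java.io.tmpdir")), targetDir)  // a directory on purpose
    } catch {
      case e: IllegalArgumentException => println("Clear error: " + e.getMessage)
    }
  }
}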
diff --git a/core/src/main/scala/org/apache/spark/SparkContext.scala b/core/src/main/scala/org/apache/spark/SparkContext.scala
index 49737fa4be..03ceff8bf1 100644
--- a/core/src/main/scala/org/apache/spark/SparkContext.scala
+++ b/core/src/main/scala/org/apache/spark/SparkContext.scala
@@ -794,7 +794,7 @@ class SparkContext(config: SparkConf) extends Logging {
     addedFiles(key) = System.currentTimeMillis
 
     // Fetch the file locally in case a job is executed using DAGScheduler.runLocally().
-    Utils.fetchFile(path, new File(SparkFiles.getRootDirectory), conf, env.securityManager)
+    Utils.fetchFile(path, new File(SparkFiles.getRootDirectory()), conf, env.securityManager)
 
     logInfo("Added file " + path + " at " + key + " with timestamp " + addedFiles(key))
     postEnvironmentUpdate()
@@ -932,13 +932,12 @@ class SparkContext(config: SparkConf) extends Logging {
               try {
                 env.httpFileServer.addJar(new File(fileName))
               } catch {
-                case e: Exception => {
+                case e: Exception =>
                   // For now just log an error but allow to go through so spark examples work.
                   // The spark examples don't really need the jar distributed since its also
                   // the app jar.
                   logError("Error adding jar (" + e + "), was the --addJars option used?")
                   null
-                }
               }
             } else {
               env.httpFileServer.addJar(new File(uri.getPath))
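
Illustration (not part of the patch): the second change in this hunk is purely stylistic. A standalone sketch with hypothetical names shows that a multi-statement case body in a catch block needs no surrounding braces, which is why the `{`/`}` pair around the Exception handler is dropped.

// Hypothetical names; the exception and message only mimic the real addJar code.
object CatchCaseStyleSketch {
  def main(args: Array[String]): Unit = {
    val key =
      try {
        throw new RuntimeException("simulated addJar failure")  // stand-in for env.httpFileServer.addJar(...)
      } catch {
        case e: Exception =>
          // Several statements can follow "=>" directly; no braces are required.
          println("Error adding jar (" + e + "), was the --addJars option used?")
          null
      }
    println("key = " + key)
  }
}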