-rw-r--r--  core/src/main/scala/org/apache/spark/SparkContext.scala        2
-rw-r--r--  core/src/test/scala/org/apache/spark/SparkContextSuite.scala  51
 2 files changed, 38 insertions(+), 15 deletions(-)
diff --git a/core/src/main/scala/org/apache/spark/SparkContext.scala b/core/src/main/scala/org/apache/spark/SparkContext.scala
index 8121aab3b0..4457f40286 100644
--- a/core/src/main/scala/org/apache/spark/SparkContext.scala
+++ b/core/src/main/scala/org/apache/spark/SparkContext.scala
@@ -1093,7 +1093,7 @@ class SparkContext(config: SparkConf) extends Logging with ExecutorAllocationCli
   def addFile(path: String, recursive: Boolean): Unit = {
     val uri = new URI(path)
     val schemeCorrectedPath = uri.getScheme match {
-      case null | "local" => "file:" + uri.getPath
+      case null | "local" => new File(path).getCanonicalFile.toURI.toString
       case _ => path
     }

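The one-line change above is the whole fix: for a scheme-less or "local" path, the old code built the URI by prepending "file:" to uri.getPath, which leaves a relative path unresolved and keeps any "." or ".." segments. Routing the path through java.io.File and getCanonicalFile resolves it against the current working directory and collapses those segments before the rest of addFile sees it. A minimal standalone sketch of the difference (not part of the patch; the path literal is made up):

import java.io.File
import java.net.URI

object SchemeCorrectedPathSketch {
  def main(args: Array[String]): Unit = {
    val path = "data/../data/words.txt"  // hypothetical relative input to addFile
    val uri = new URI(path)              // getScheme is null for a bare relative path

    // Old behavior: "file:data/../data/words.txt" (still relative, ".." intact).
    val oldStyle = "file:" + uri.getPath

    // New behavior: resolved against the working directory with ".." collapsed,
    // e.g. "file:/home/user/data/words.txt".
    val newStyle = new File(path).getCanonicalFile.toURI.toString

    println(s"old: $oldStyle")
    println(s"new: $newStyle")
  }
}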
diff --git a/core/src/test/scala/org/apache/spark/SparkContextSuite.scala b/core/src/test/scala/org/apache/spark/SparkContextSuite.scala
index 50f347f195..b8e3e83b5a 100644
--- a/core/src/test/scala/org/apache/spark/SparkContextSuite.scala
+++ b/core/src/test/scala/org/apache/spark/SparkContextSuite.scala
@@ -79,26 +79,49 @@ class SparkContextSuite extends FunSuite with LocalSparkContext {
     val byteArray2 = converter.convert(bytesWritable)
     assert(byteArray2.length === 0)
   }
-
+
   test("addFile works") {
-    val file = File.createTempFile("someprefix", "somesuffix")
-    val absolutePath = file.getAbsolutePath
+    val file1 = File.createTempFile("someprefix1", "somesuffix1")
+    val absolutePath1 = file1.getAbsolutePath
+
+    val pluto = Utils.createTempDir()
+    val file2 = File.createTempFile("someprefix2", "somesuffix2", pluto)
+    val relativePath = file2.getParent + "/../" + file2.getParentFile.getName + "/" + file2.getName
+    val absolutePath2 = file2.getAbsolutePath
+
     try {
-      Files.write("somewords", file, UTF_8)
-      val length = file.length()
+      Files.write("somewords1", file1, UTF_8)
+      Files.write("somewords2", file2, UTF_8)
+      val length1 = file1.length()
+      val length2 = file2.length()
+
       sc = new SparkContext(new SparkConf().setAppName("test").setMaster("local"))
-      sc.addFile(file.getAbsolutePath)
+      sc.addFile(file1.getAbsolutePath)
+      sc.addFile(relativePath)
       sc.parallelize(Array(1), 1).map(x => {
-        val gotten = new File(SparkFiles.get(file.getName))
-        if (!gotten.exists()) {
-          throw new SparkException("file doesn't exist")
+        val gotten1 = new File(SparkFiles.get(file1.getName))
+        val gotten2 = new File(SparkFiles.get(file2.getName))
+        if (!gotten1.exists()) {
+          throw new SparkException("file doesn't exist : " + absolutePath1)
+        }
+        if (!gotten2.exists()) {
+          throw new SparkException("file doesn't exist : " + absolutePath2)
         }
-        if (length != gotten.length()) {
+
+        if (length1 != gotten1.length()) {
+          throw new SparkException(
+            s"file has different length $length1 than added file ${gotten1.length()} : " + absolutePath1)
+        }
+        if (length2 != gotten2.length()) {
           throw new SparkException(
-            s"file has different length $length than added file ${gotten.length()}")
+            s"file has different length $length2 than added file ${gotten2.length()} : " + absolutePath2)
         }
-        if (absolutePath == gotten.getAbsolutePath) {
-          throw new SparkException("file should have been copied")
+
+        if (absolutePath1 == gotten1.getAbsolutePath) {
+          throw new SparkException("file should have been copied :" + absolutePath1)
+        }
+        if (absolutePath2 == gotten2.getAbsolutePath) {
+          throw new SparkException("file should have been copied : " + absolutePath2)
         }
         x
       }).count()
@@ -106,7 +129,7 @@ class SparkContextSuite extends FunSuite with LocalSparkContext {
       sc.stop()
     }
   }
-
+
   test("addFile recursive works") {
     val pluto = Utils.createTempDir()
     val neptune = Utils.createTempDir(pluto.getAbsolutePath)
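The expanded "addFile works" test above registers a second file through a path that detours through its parent directory ("<dir>/../<dir>/<file>"), which is exactly the case the canonicalization in addFile is meant to handle. A small standalone sketch of that construction (not from the patch; the directory name is made up):

import java.io.File

object DotDotPathSketch {
  def main(args: Array[String]): Unit = {
    // Stand-ins for pluto/file2 in the test; any writable temp directory works.
    val pluto = new File(System.getProperty("java.io.tmpdir"), "pluto-demo")
    pluto.mkdirs()
    val file2 = File.createTempFile("someprefix2", "somesuffix2", pluto)

    // Same construction as the test, e.g. "/tmp/pluto-demo/../pluto-demo/someprefix2...".
    val relativePath =
      file2.getParent + "/../" + file2.getParentFile.getName + "/" + file2.getName

    // getCanonicalFile, as used by the fixed addFile, collapses the ".." segment,
    // so both forms point at the same file.
    println(new File(relativePath).getCanonicalPath == file2.getCanonicalPath)  // true

    file2.delete()
    pluto.delete()
  }
}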