author     Shixiong Zhu <shixiong@databricks.com>  2015-12-17 09:55:37 -0800
committer  Shixiong Zhu <shixiong@databricks.com>  2015-12-17 09:55:37 -0800
commit     86e405f357711ae93935853a912bc13985c259db (patch)
tree       8892bb8e1986479e51daf5bfe634aa7d6285d679 /core/src/test
parent     6e0771665b3c9330fc0a5b2c7740a796b4cd712e (diff)
download   spark-86e405f357711ae93935853a912bc13985c259db.tar.gz
           spark-86e405f357711ae93935853a912bc13985c259db.tar.bz2
           spark-86e405f357711ae93935853a912bc13985c259db.zip
[SPARK-12220][CORE] Make Utils.fetchFile support files that contain special characters
This PR encodes and decodes the file name to fix the issue.

Author: Shixiong Zhu <shixiong@databricks.com>

Closes #10208 from zsxwing/uri.
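For reference, a minimal sketch of helpers with the behaviour the new UtilsSuite assertions below pin down. The method names encodeFileNameToURIRawPath and decodeFileNameInURI come from the patch; the enclosing object name and the method bodies here are illustrative assumptions and may differ from the actual org.apache.spark.util.Utils implementation.

import java.net.URI

// Sketch only: behaviour matches the new tests, not necessarily Spark's own code.
object UriFileNameSketch {
  // Percent-encode a bare file name for use as a URI path segment,
  // e.g. "abc xyz" -> "abc%20xyz", while "abc:xyz" is left untouched.
  def encodeFileNameToURIRawPath(fileName: String): String =
    // Let the URI constructor quote illegal characters, then drop the "/" we prepended.
    new URI("file", null, "/" + fileName, null).getRawPath.substring(1)

  // Recover the file name from the last path segment of a URI, decoding %XX escapes,
  // e.g. the URI "files:///abc%20xyz" -> "abc xyz".
  def decodeFileNameInURI(uri: URI): String = {
    val path = uri.getPath // getPath (unlike getRawPath) decodes percent-escapes
    path.substring(path.lastIndexOf('/') + 1)
  }
}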
Diffstat (limited to 'core/src/test')
-rw-r--r--  core/src/test/scala/org/apache/spark/rpc/RpcEnvSuite.scala |  4
-rw-r--r--  core/src/test/scala/org/apache/spark/util/UtilsSuite.scala | 11
2 files changed, 15 insertions, 0 deletions
diff --git a/core/src/test/scala/org/apache/spark/rpc/RpcEnvSuite.scala b/core/src/test/scala/org/apache/spark/rpc/RpcEnvSuite.scala
index 6d153eb04e..49e3e0191c 100644
--- a/core/src/test/scala/org/apache/spark/rpc/RpcEnvSuite.scala
+++ b/core/src/test/scala/org/apache/spark/rpc/RpcEnvSuite.scala
@@ -771,6 +771,8 @@ abstract class RpcEnvSuite extends SparkFunSuite with BeforeAndAfterAll {
val tempDir = Utils.createTempDir()
val file = new File(tempDir, "file")
Files.write(UUID.randomUUID().toString(), file, UTF_8)
+ val fileWithSpecialChars = new File(tempDir, "file name")
+ Files.write(UUID.randomUUID().toString(), fileWithSpecialChars, UTF_8)
val empty = new File(tempDir, "empty")
Files.write("", empty, UTF_8);
val jar = new File(tempDir, "jar")
@@ -787,6 +789,7 @@ abstract class RpcEnvSuite extends SparkFunSuite with BeforeAndAfterAll {
Files.write(UUID.randomUUID().toString(), subFile2, UTF_8)
val fileUri = env.fileServer.addFile(file)
+ val fileWithSpecialCharsUri = env.fileServer.addFile(fileWithSpecialChars)
val emptyUri = env.fileServer.addFile(empty)
val jarUri = env.fileServer.addJar(jar)
val dir1Uri = env.fileServer.addDirectory("/dir1", dir1)
@@ -805,6 +808,7 @@ abstract class RpcEnvSuite extends SparkFunSuite with BeforeAndAfterAll {
val files = Seq(
(file, fileUri),
+ (fileWithSpecialChars, fileWithSpecialCharsUri),
(empty, emptyUri),
(jar, jarUri),
(subFile1, dir1Uri + "/file1"),
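The RpcEnvSuite lines above add a file literally named "file name"; without encoding, such a name cannot even be parsed into a java.net.URI, which is the kind of failure this patch addresses. A minimal illustration follows (the spark://host:1234/files/ URI shape is an assumption for the example, not necessarily what the RPC file server produces):

import java.net.URI
import scala.util.Try

// An unencoded space is an illegal URI character, so parsing fails outright.
val unencoded = Try(new URI("spark://host:1234/files/file name"))
assert(unencoded.isFailure) // java.net.URISyntaxException: Illegal character in path

// With the name percent-encoded first, the URI parses fine.
val encoded = new URI("spark://host:1234/files/file%20name")
assert(encoded.getPath == "/files/file name")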
diff --git a/core/src/test/scala/org/apache/spark/util/UtilsSuite.scala b/core/src/test/scala/org/apache/spark/util/UtilsSuite.scala
index 68b0da76bc..fdb51d440e 100644
--- a/core/src/test/scala/org/apache/spark/util/UtilsSuite.scala
+++ b/core/src/test/scala/org/apache/spark/util/UtilsSuite.scala
@@ -734,4 +734,15 @@ class UtilsSuite extends SparkFunSuite with ResetSystemProperties with Logging {
conf.set("spark.executor.instances", "0")) === true)
}
+ test("encodeFileNameToURIRawPath") {
+ assert(Utils.encodeFileNameToURIRawPath("abc") === "abc")
+ assert(Utils.encodeFileNameToURIRawPath("abc xyz") === "abc%20xyz")
+ assert(Utils.encodeFileNameToURIRawPath("abc:xyz") === "abc:xyz")
+ }
+
+ test("decodeFileNameInURI") {
+ assert(Utils.decodeFileNameInURI(new URI("files:///abc/xyz")) === "xyz")
+ assert(Utils.decodeFileNameInURI(new URI("files:///abc")) === "abc")
+ assert(Utils.decodeFileNameInURI(new URI("files:///abc%20xyz")) === "abc xyz")
+ }
}
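Taken together, the two helpers tested above form a round trip: encode the file name when building the file-server URI, decode it again when fetching. A short usage sketch using the illustrative UriFileNameSketch object from earlier (again, the URI shape is assumed for the example):

import java.net.URI

val raw = UriFileNameSketch.encodeFileNameToURIRawPath("file name") // "file%20name"
val uri = new URI("spark://host:1234/files/" + raw)
assert(UriFileNameSketch.decodeFileNameInURI(uri) == "file name")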