author     Burak Yavuz <brkyvz@gmail.com>    2016-09-25 22:57:31 -0700
committer  Shixiong Zhu <shixiong@databricks.com>    2016-09-25 22:57:31 -0700
commit     59d87d24079bc633e63ce032f0a5ddd18a3b02cb (patch)
tree       aa630bb12d2aa88ea56e13370e92784834127434 /core/src/test/scala/org/apache
parent     de333d121da4cb80d45819cbcf8b4246e48ec4d0 (diff)
[SPARK-17650] malformed url's throw exceptions before bricking Executors
## What changes were proposed in this pull request?

When a malformed URL is sent to Executors through `sc.addJar` or `sc.addFile`, the executors become unusable because they constantly throw `MalformedURLException`s and can never acknowledge that the file or jar is simply bad input. This PR fixes that problem by making sure malformed URLs can never be submitted through `sc.addJar` or `sc.addFile`. Another solution would be to blacklist bad files and jars on Executors: fail the first time, then ignore subsequent attempts (but print a warning message).

## How was this patch tested?

Unit tests in SparkContextSuite.

Author: Burak Yavuz <brkyvz@gmail.com>

Closes #15224 from brkyvz/SPARK-17650.
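For context: this page only shows the test file, but the production-side change described above boils down to validating remote URLs eagerly on the driver, inside `sc.addFile`/`sc.addJar`, so that bad input fails fast instead of repeatedly failing on every executor. Below is a minimal sketch of that idea; the object and method names (`UrlValidation`, `validateRemoteUrl`) are illustrative, not the actual Spark API.

```scala
import java.net.{MalformedURLException, URI}

// Sketch only: validate a remote path on the driver before it is shipped to
// executors. In Spark the equivalent check lives inside addFile/addJar.
object UrlValidation {
  def validateRemoteUrl(path: String): Unit = {
    val uri = new URI(path)
    uri.getScheme match {
      case "http" | "https" | "ftp" =>
        try {
          // URL construction rejects inputs such as "http://user:pwd/path",
          // whose would-be port ("pwd") is not numeric.
          uri.toURL
        } catch {
          case e: MalformedURLException =>
            // Re-throw with a message that names the offending URL, so the
            // caller sees which input was bad.
            val ex = new MalformedURLException(s"URI ($path) is not a valid URL.")
            ex.initCause(e)
            throw ex
        }
      case _ => // local paths and other schemes are handled elsewhere
    }
  }
}
```

With a check of this kind in place, `sc.addFile("http://user:pwd/path")` fails immediately on the driver with a message containing the offending URL, which is exactly what the new test in the diff below asserts.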
Diffstat (limited to 'core/src/test/scala/org/apache')
-rw-r--r--  core/src/test/scala/org/apache/spark/SparkContextSuite.scala  22
1 file changed, 22 insertions, 0 deletions
diff --git a/core/src/test/scala/org/apache/spark/SparkContextSuite.scala b/core/src/test/scala/org/apache/spark/SparkContextSuite.scala
index f8d143dc61..c451c596b0 100644
--- a/core/src/test/scala/org/apache/spark/SparkContextSuite.scala
+++ b/core/src/test/scala/org/apache/spark/SparkContextSuite.scala
@@ -18,6 +18,7 @@
package org.apache.spark
import java.io.File
+import java.net.MalformedURLException
import java.nio.charset.StandardCharsets
import java.util.concurrent.TimeUnit
@@ -173,6 +174,27 @@ class SparkContextSuite extends SparkFunSuite with LocalSparkContext {
}
}
+ test("SPARK-17650: malformed url's throw exceptions before bricking Executors") {
+ try {
+ sc = new SparkContext(new SparkConf().setAppName("test").setMaster("local"))
+ Seq("http", "https", "ftp").foreach { scheme =>
+ val badURL = s"$scheme://user:pwd/path"
+ val e1 = intercept[MalformedURLException] {
+ sc.addFile(badURL)
+ }
+ assert(e1.getMessage.contains(badURL))
+ val e2 = intercept[MalformedURLException] {
+ sc.addJar(badURL)
+ }
+ assert(e2.getMessage.contains(badURL))
+ assert(sc.addedFiles.isEmpty)
+ assert(sc.addedJars.isEmpty)
+ }
+ } finally {
+ sc.stop()
+ }
+ }
+
test("addFile recursive works") {
val pluto = Utils.createTempDir()
val neptune = Utils.createTempDir(pluto.getAbsolutePath)