about summary refs log tree commit diff
path: root/core
diff options
context:
space:
mode:
authorpetermaxlee <petermaxlee@gmail.com>2016-07-06 10:46:22 -0700
committerReynold Xin <rxin@databricks.com>2016-07-06 10:46:22 -0700
commit480357cc6d71c682fe703611c71c1e6a36e6ce9a (patch)
tree76e8159ba07e929d2373dfefdd8c398e19854f43 /core
parent4e14199ff740ea186eb2cec2e5cf901b58c5f90e (diff)
downloadspark-480357cc6d71c682fe703611c71c1e6a36e6ce9a.tar.gz
spark-480357cc6d71c682fe703611c71c1e6a36e6ce9a.tar.bz2
spark-480357cc6d71c682fe703611c71c1e6a36e6ce9a.zip
[SPARK-16304] LinkageError should not crash Spark executor
## What changes were proposed in this pull request?

This patch updates the failure handling logic so Spark executor does not crash when seeing LinkageError.

## How was this patch tested?

Added an end-to-end test in FailureSuite.

Author: petermaxlee <petermaxlee@gmail.com>

Closes #13982 from petermaxlee/SPARK-16304.
Diffstat (limited to 'core')
-rw-r--r--core/src/main/scala/org/apache/spark/util/Utils.scala6
-rw-r--r--core/src/test/scala/org/apache/spark/FailureSuite.scala9
2 files changed, 14 insertions, 1 deletion
diff --git a/core/src/main/scala/org/apache/spark/util/Utils.scala b/core/src/main/scala/org/apache/spark/util/Utils.scala
index 156cf1748b..298e6243aa 100644
--- a/core/src/main/scala/org/apache/spark/util/Utils.scala
+++ b/core/src/main/scala/org/apache/spark/util/Utils.scala
@@ -1881,7 +1881,11 @@ private[spark] object Utils extends Logging {
/** Returns true if the given exception was fatal. See docs for scala.util.control.NonFatal. */
def isFatalError(e: Throwable): Boolean = {
e match {
- case NonFatal(_) | _: InterruptedException | _: NotImplementedError | _: ControlThrowable =>
+ case NonFatal(_) |
+ _: InterruptedException |
+ _: NotImplementedError |
+ _: ControlThrowable |
+ _: LinkageError =>
false
case _ =>
true
diff --git a/core/src/test/scala/org/apache/spark/FailureSuite.scala b/core/src/test/scala/org/apache/spark/FailureSuite.scala
index 132f6361e4..d805c67714 100644
--- a/core/src/test/scala/org/apache/spark/FailureSuite.scala
+++ b/core/src/test/scala/org/apache/spark/FailureSuite.scala
@@ -253,6 +253,15 @@ class FailureSuite extends SparkFunSuite with LocalSparkContext {
rdd.count()
}
+ test("SPARK-16304: Link error should not crash executor") {
+ sc = new SparkContext("local[1,2]", "test")
+ intercept[SparkException] {
+ sc.parallelize(1 to 2).foreach { i =>
+ throw new LinkageError()
+ }
+ }
+ }
+
// TODO: Need to add tests with shuffle fetch failures.
}