diff options
author | Kashish Jain <kashish.jain@guavus.com> | 2015-02-06 13:47:23 -0800 |
---|---|---|
committer | Andrew Or <andrew@databricks.com> | 2015-02-06 13:59:11 -0800 |
commit | ca66159a4f30d65fa4cd32dbf3ff23978cb7f99b (patch) | |
tree | 048e5fad86c19f59f064adbee328c33ad82e8f35 /yarn | |
parent | b3872e00d155939e40366debda635fc3fb12cc73 (diff) | |
download | spark-ca66159a4f30d65fa4cd32dbf3ff23978cb7f99b.tar.gz spark-ca66159a4f30d65fa4cd32dbf3ff23978cb7f99b.tar.bz2 spark-ca66159a4f30d65fa4cd32dbf3ff23978cb7f99b.zip |
SPARK-5613: Catch the ApplicationNotFoundException exception to prevent the thread from being killed on YARN restart.
[SPARK-5613] Added a catch block to catch the ApplicationNotFoundException. Without this catch block, the thread gets killed when this exception occurs. The exception occurs when YARN restarts and tries to find an application ID for a Spark job that was interrupted by YARN being stopped.
See the stack trace in the bug report for more details.
Author: Kashish Jain <kashish.jain@guavus.com>
Closes #4392 from kasjain/branch-1.2 and squashes the following commits:
4831000 [Kashish Jain] SPARK-5613: Catch the ApplicationNotFoundException exception to avoid thread from getting killed on yarn restart.
Diffstat (limited to 'yarn')
-rw-r--r-- | yarn/src/main/scala/org/apache/spark/scheduler/cluster/YarnClientSchedulerBackend.scala | 11 |
1 file changed, 9 insertions, 2 deletions
diff --git a/yarn/src/main/scala/org/apache/spark/scheduler/cluster/YarnClientSchedulerBackend.scala b/yarn/src/main/scala/org/apache/spark/scheduler/cluster/YarnClientSchedulerBackend.scala index 690f927e93..f1b5aafac4 100644 --- a/yarn/src/main/scala/org/apache/spark/scheduler/cluster/YarnClientSchedulerBackend.scala +++ b/yarn/src/main/scala/org/apache/spark/scheduler/cluster/YarnClientSchedulerBackend.scala @@ -20,6 +20,7 @@ package org.apache.spark.scheduler.cluster import scala.collection.mutable.ArrayBuffer import org.apache.hadoop.yarn.api.records.{ApplicationId, YarnApplicationState} +import org.apache.hadoop.yarn.exceptions.ApplicationNotFoundException import org.apache.spark.{SparkException, Logging, SparkContext} import org.apache.spark.deploy.yarn.{Client, ClientArguments} @@ -133,8 +134,14 @@ private[spark] class YarnClientSchedulerBackend( val t = new Thread { override def run() { while (!stopping) { - val report = client.getApplicationReport(appId) - val state = report.getYarnApplicationState() + var state: YarnApplicationState = null + try { + val report = client.getApplicationReport(appId) + state = report.getYarnApplicationState() + } catch { + case e: ApplicationNotFoundException => + state = YarnApplicationState.KILLED + } if (state == YarnApplicationState.FINISHED || state == YarnApplicationState.KILLED || state == YarnApplicationState.FAILED) { |