aboutsummaryrefslogtreecommitdiff
path: root/yarn/src
diff options
context:
space:
mode:
authorjerryshao <sshao@hortonworks.com>2015-10-12 18:17:28 -0700
committerMarcelo Vanzin <vanzin@cloudera.com>2015-10-12 18:18:19 -0700
commitf97e9323b526b3d0b0fee0ca03f4276f37bb5750 (patch)
treebdeb2b7b2c07f84c71c5cc1f6ce7f988ca7acd70 /yarn/src
parent091c2c3ecd69803d78c2b15a1487046701059d38 (diff)
downloadspark-f97e9323b526b3d0b0fee0ca03f4276f37bb5750.tar.gz
spark-f97e9323b526b3d0b0fee0ca03f4276f37bb5750.tar.bz2
spark-f97e9323b526b3d0b0fee0ca03f4276f37bb5750.zip
[SPARK-10739] [YARN] Add application attempt window for Spark on Yarn
Add application attempt window for Spark on Yarn to ignore old out of window failures, this is useful for long running applications to recover from failures. Author: jerryshao <sshao@hortonworks.com> Closes #8857 from jerryshao/SPARK-10739 and squashes the following commits: 36eabdc [jerryshao] change the doc 7f9b77d [jerryshao] Style change 1c9afd0 [jerryshao] Address the comments caca695 [jerryshao] Add application attempt window for Spark on Yarn
Diffstat (limited to 'yarn/src')
-rw-r--r--yarn/src/main/scala/org/apache/spark/deploy/yarn/Client.scala14
1 file changed, 14 insertions, 0 deletions
diff --git a/yarn/src/main/scala/org/apache/spark/deploy/yarn/Client.scala b/yarn/src/main/scala/org/apache/spark/deploy/yarn/Client.scala
index 1fbd18aa46..d25d830fd4 100644
--- a/yarn/src/main/scala/org/apache/spark/deploy/yarn/Client.scala
+++ b/yarn/src/main/scala/org/apache/spark/deploy/yarn/Client.scala
@@ -208,6 +208,20 @@ private[spark] class Client(
case None => logDebug("spark.yarn.maxAppAttempts is not set. " +
"Cluster's default value will be used.")
}
+
+ if (sparkConf.contains("spark.yarn.am.attemptFailuresValidityInterval")) {
+ try {
+ val interval = sparkConf.getTimeAsMs("spark.yarn.am.attemptFailuresValidityInterval")
+ val method = appContext.getClass().getMethod(
+ "setAttemptFailuresValidityInterval", classOf[Long])
+ method.invoke(appContext, interval: java.lang.Long)
+ } catch {
+ case e: NoSuchMethodException =>
+ logWarning("Ignoring spark.yarn.am.attemptFailuresValidityInterval because the version " +
+ "of YARN does not support it")
+ }
+ }
+
val capability = Records.newRecord(classOf[Resource])
capability.setMemory(args.amMemory + amMemoryOverhead)
capability.setVirtualCores(args.amCores)