From 496d2a2b403ac83b390a90519217dd310b0013a4 Mon Sep 17 00:00:00 2001 From: Carson Wang Date: Wed, 16 Mar 2016 10:56:01 +0000 Subject: [SPARK-13889][YARN] Fix integer overflow when calculating the max number of executor failure ## What changes were proposed in this pull request? The max number of executor failures before failing the application defaults to twice the maximum number of executors if dynamic allocation is enabled. The default value for "spark.dynamicAllocation.maxExecutors" is Int.MaxValue. So this causes an integer overflow and a wrong result. The calculated value of the default max number of executor failures is 3. This PR adds a check to avoid the overflow. ## How was this patch tested? It tests if the value is greater than Int.MaxValue / 2 to avoid the overflow when it multiplies 2. Author: Carson Wang Closes #11713 from carsonwang/IntOverflow. --- .../main/scala/org/apache/spark/deploy/yarn/ApplicationMaster.scala | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) (limited to 'yarn') diff --git a/yarn/src/main/scala/org/apache/spark/deploy/yarn/ApplicationMaster.scala b/yarn/src/main/scala/org/apache/spark/deploy/yarn/ApplicationMaster.scala index cd179cf328..a06e677a04 100644 --- a/yarn/src/main/scala/org/apache/spark/deploy/yarn/ApplicationMaster.scala +++ b/yarn/src/main/scala/org/apache/spark/deploy/yarn/ApplicationMaster.scala @@ -73,7 +73,10 @@ private[spark] class ApplicationMaster( } else { sparkConf.get(EXECUTOR_INSTANCES).getOrElse(0) } - val defaultMaxNumExecutorFailures = math.max(3, 2 * effectiveNumExecutors) + // By default, effectiveNumExecutors is Int.MaxValue if dynamic allocation is enabled. We need + // avoid the integer overflow here. + val defaultMaxNumExecutorFailures = math.max(3, + if (effectiveNumExecutors > Int.MaxValue / 2) Int.MaxValue else (2 * effectiveNumExecutors)) sparkConf.get(MAX_EXECUTOR_FAILURES).getOrElse(defaultMaxNumExecutorFailures) } -- cgit v1.2.3