aboutsummaryrefslogtreecommitdiff
path: root/yarn/common/src
diff options
context:
space:
mode:
authorwitgo <witgo@qq.com>2014-06-16 14:27:31 -0500
committerThomas Graves <tgraves@apache.org>2014-06-16 14:27:31 -0500
commitcdf2b04570871848442ca9f9e2316a37e4aaaae0 (patch)
treeef908cd3a8d0d5ad7adea9e180255453e1674e03 /yarn/common/src
parent4fdb491775bb9c4afa40477dc0069ff6fcadfe25 (diff)
downloadspark-cdf2b04570871848442ca9f9e2316a37e4aaaae0.tar.gz
spark-cdf2b04570871848442ca9f9e2316a37e4aaaae0.tar.bz2
spark-cdf2b04570871848442ca9f9e2316a37e4aaaae0.zip
[SPARK-1930] The Container is running beyond physical memory limits, so as to be killed
Author: witgo <witgo@qq.com> Closes #894 from witgo/SPARK-1930 and squashes the following commits: 564307e [witgo] Update the running-on-yarn.md 3747515 [witgo] Merge branch 'master' of https://github.com/apache/spark into SPARK-1930 172647b [witgo] add memoryOverhead docs a0ff545 [witgo] leaving only two configs a17bda2 [witgo] Merge branch 'master' of https://github.com/apache/spark into SPARK-1930 478ca15 [witgo] Merge branch 'master' into SPARK-1930 d1244a1 [witgo] Merge branch 'master' into SPARK-1930 8b967ae [witgo] Merge branch 'master' into SPARK-1930 655a820 [witgo] review commit 71859a7 [witgo] Merge branch 'master' of https://github.com/apache/spark into SPARK-1930 e3c531d [witgo] review commit e16f190 [witgo] different memoryOverhead ffa7569 [witgo] review commit 5c9581f [witgo] Merge branch 'master' into SPARK-1930 9a6bcf2 [witgo] review commit 8fae45a [witgo] fix NullPointerException e0dcc16 [witgo] Adding configuration items b6a989c [witgo] Fix container memory beyond limit, were killed
Diffstat (limited to 'yarn/common/src')
-rw-r--r--yarn/common/src/main/scala/org/apache/spark/deploy/yarn/ClientBase.scala14
1 file changed, 9 insertions, 5 deletions
diff --git a/yarn/common/src/main/scala/org/apache/spark/deploy/yarn/ClientBase.scala b/yarn/common/src/main/scala/org/apache/spark/deploy/yarn/ClientBase.scala
index 6861b50300..858bcaa95b 100644
--- a/yarn/common/src/main/scala/org/apache/spark/deploy/yarn/ClientBase.scala
+++ b/yarn/common/src/main/scala/org/apache/spark/deploy/yarn/ClientBase.scala
@@ -65,6 +65,10 @@ trait ClientBase extends Logging {
val APP_FILE_PERMISSION: FsPermission =
FsPermission.createImmutable(Integer.parseInt("644", 8).toShort)
+ // Additional memory overhead - in mb.
+ protected def memoryOverhead: Int = sparkConf.getInt("spark.yarn.driver.memoryOverhead",
+ YarnAllocationHandler.MEMORY_OVERHEAD)
+
// TODO(harvey): This could just go in ClientArguments.
def validateArgs() = {
Map(
@@ -72,10 +76,10 @@ trait ClientBase extends Logging {
"Error: You must specify a user jar when running in standalone mode!"),
(args.userClass == null) -> "Error: You must specify a user class!",
(args.numExecutors <= 0) -> "Error: You must specify at least 1 executor!",
- (args.amMemory <= YarnAllocationHandler.MEMORY_OVERHEAD) -> ("Error: AM memory size must be" +
- "greater than: " + YarnAllocationHandler.MEMORY_OVERHEAD),
- (args.executorMemory <= YarnAllocationHandler.MEMORY_OVERHEAD) -> ("Error: Executor memory size" +
- "must be greater than: " + YarnAllocationHandler.MEMORY_OVERHEAD.toString)
+ (args.amMemory <= memoryOverhead) -> ("Error: AM memory size must be" +
+ "greater than: " + memoryOverhead),
+ (args.executorMemory <= memoryOverhead) -> ("Error: Executor memory size" +
+ "must be greater than: " + memoryOverhead.toString)
).foreach { case(cond, errStr) =>
if (cond) {
logError(errStr)
@@ -101,7 +105,7 @@ trait ClientBase extends Logging {
logError(errorMessage)
throw new IllegalArgumentException(errorMessage)
}
- val amMem = args.amMemory + YarnAllocationHandler.MEMORY_OVERHEAD
+ val amMem = args.amMemory + memoryOverhead
if (amMem > maxMem) {
val errorMessage = "Required AM memory (%d) is above the max threshold (%d) of this cluster."