aboutsummaryrefslogtreecommitdiff
path: root/core
diff options
context:
space:
mode:
authorJongyoul Lee <jongyoul@gmail.com>2015-03-20 19:14:35 +0000
committerSean Owen <sowen@cloudera.com>2015-03-20 19:14:35 +0000
commit49a01c7ea2c48feee7ab4551c4fa03fd1cdb1a32 (patch)
treea3f87c2e82480db362539920d911c3212096df22 /core
parent6b36470c66bd6140c45e45d3f1d51b0082c3fd97 (diff)
downloadspark-49a01c7ea2c48feee7ab4551c4fa03fd1cdb1a32.tar.gz
spark-49a01c7ea2c48feee7ab4551c4fa03fd1cdb1a32.tar.bz2
spark-49a01c7ea2c48feee7ab4551c4fa03fd1cdb1a32.zip
[SPARK-6423][Mesos] MemoryUtils should use memoryOverhead if it's set
- Fixed calculateTotalMemory to use spark.mesos.executor.memoryOverhead - Added testCase Author: Jongyoul Lee <jongyoul@gmail.com> Closes #5099 from jongyoul/SPARK-6423 and squashes the following commits: 6747fce [Jongyoul Lee] [SPARK-6423][Mesos] MemoryUtils should use memoryOverhead if it's set - Changed a description of spark.mesos.executor.memoryOverhead 475a7c8 [Jongyoul Lee] [SPARK-6423][Mesos] MemoryUtils should use memoryOverhead if it's set - Fit the import rules 453c5a2 [Jongyoul Lee] [SPARK-6423][Mesos] MemoryUtils should use memoryOverhead if it's set - Fixed calculateTotalMemory to use spark.mesos.executor.memoryOverhead - Added testCase
Diffstat (limited to 'core')
-rw-r--r-- core/src/main/scala/org/apache/spark/scheduler/cluster/mesos/MemoryUtils.scala | 10
-rw-r--r-- core/src/test/scala/org/apache/spark/scheduler/cluster/mesos/MemoryUtilsSuite.scala | 47
2 files changed, 50 insertions(+), 7 deletions(-)
diff --git a/core/src/main/scala/org/apache/spark/scheduler/cluster/mesos/MemoryUtils.scala b/core/src/main/scala/org/apache/spark/scheduler/cluster/mesos/MemoryUtils.scala
index 705116cb13..aa3ec0f8cf 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/cluster/mesos/MemoryUtils.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/cluster/mesos/MemoryUtils.scala
@@ -21,15 +21,11 @@ import org.apache.spark.SparkContext
private[spark] object MemoryUtils {
// These defaults copied from YARN
- val OVERHEAD_FRACTION = 1.10
+ val OVERHEAD_FRACTION = 0.10
val OVERHEAD_MINIMUM = 384
def calculateTotalMemory(sc: SparkContext) = {
- math.max(
- sc.conf.getOption("spark.mesos.executor.memoryOverhead")
- .getOrElse(OVERHEAD_MINIMUM.toString)
- .toInt + sc.executorMemory,
- OVERHEAD_FRACTION * sc.executorMemory
- )
+ sc.conf.getInt("spark.mesos.executor.memoryOverhead",
+ math.max(OVERHEAD_FRACTION * sc.executorMemory, OVERHEAD_MINIMUM).toInt) + sc.executorMemory
}
}
diff --git a/core/src/test/scala/org/apache/spark/scheduler/cluster/mesos/MemoryUtilsSuite.scala b/core/src/test/scala/org/apache/spark/scheduler/cluster/mesos/MemoryUtilsSuite.scala
new file mode 100644
index 0000000000..3fa0115e68
--- /dev/null
+++ b/core/src/test/scala/org/apache/spark/scheduler/cluster/mesos/MemoryUtilsSuite.scala
@@ -0,0 +1,47 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.scheduler.cluster.mesos
+
+import org.mockito.Mockito._
+import org.scalatest.FunSuite
+import org.scalatest.mock.MockitoSugar
+
+import org.apache.spark.{SparkConf, SparkContext}
+
+class MemoryUtilsSuite extends FunSuite with MockitoSugar {
+ test("MesosMemoryUtils should always override memoryOverhead when it's set") {
+ val sparkConf = new SparkConf
+
+ val sc = mock[SparkContext]
+ when(sc.conf).thenReturn(sparkConf)
+
+ // 384 > sc.executorMemory * 0.1 => 512 + 384 = 896
+ when(sc.executorMemory).thenReturn(512)
+ assert(MemoryUtils.calculateTotalMemory(sc) === 896)
+
+ // 384 < sc.executorMemory * 0.1 => 4096 + (4096 * 0.1) = 4505.6
+ when(sc.executorMemory).thenReturn(4096)
+ assert(MemoryUtils.calculateTotalMemory(sc) === 4505)
+
+ // set memoryOverhead
+ sparkConf.set("spark.mesos.executor.memoryOverhead", "100")
+ assert(MemoryUtils.calculateTotalMemory(sc) === 4196)
+ sparkConf.set("spark.mesos.executor.memoryOverhead", "400")
+ assert(MemoryUtils.calculateTotalMemory(sc) === 4496)
+ }
+}