From f5ea7fe53974a7e8cbfc222b9a6f47669b53ccfd Mon Sep 17 00:00:00 2001 From: jerryshao Date: Mon, 25 Jul 2016 15:17:06 -0700 Subject: [SPARK-16166][CORE] Also take off-heap memory usage into consideration in log and webui display ## What changes were proposed in this pull request? Currently in the log and UI display, only on-heap storage memory is calculated and displayed, ``` 16/06/27 13:41:52 INFO MemoryStore: Block rdd_5_0 stored as values in memory (estimated size 17.8 KB, free 665.9 MB) ``` untitled With [SPARK-13992](https://issues.apache.org/jira/browse/SPARK-13992) off-heap memory is supported for data persistence, so here change to also take off-heap storage memory into consideration. ## How was this patch tested? Unit test and local verification. Author: jerryshao Closes #13920 from jerryshao/SPARK-16166. --- .../test/scala/org/apache/spark/memory/TestMemoryManager.scala | 2 ++ .../test/scala/org/apache/spark/storage/BlockManagerSuite.scala | 8 ++++---- 2 files changed, 6 insertions(+), 4 deletions(-) (limited to 'core/src/test/scala/org/apache') diff --git a/core/src/test/scala/org/apache/spark/memory/TestMemoryManager.scala b/core/src/test/scala/org/apache/spark/memory/TestMemoryManager.scala index 6a4f409e8e..5f699df821 100644 --- a/core/src/test/scala/org/apache/spark/memory/TestMemoryManager.scala +++ b/core/src/test/scala/org/apache/spark/memory/TestMemoryManager.scala @@ -56,6 +56,8 @@ class TestMemoryManager(conf: SparkConf) } override def maxOnHeapStorageMemory: Long = Long.MaxValue + override def maxOffHeapStorageMemory: Long = 0L + private var oomOnce = false private var available = Long.MaxValue diff --git a/core/src/test/scala/org/apache/spark/storage/BlockManagerSuite.scala b/core/src/test/scala/org/apache/spark/storage/BlockManagerSuite.scala index 6821582254..8077a1b941 100644 --- a/core/src/test/scala/org/apache/spark/storage/BlockManagerSuite.scala +++ b/core/src/test/scala/org/apache/spark/storage/BlockManagerSuite.scala @@ -239,8 
+239,8 @@ class BlockManagerSuite extends SparkFunSuite with Matchers with BeforeAndAfterE // Checking whether blocks are in memory and memory size val memStatus = master.getMemoryStatus.head._2 - assert(memStatus._1 == 20000L, "total memory " + memStatus._1 + " should equal 20000") - assert(memStatus._2 <= 12000L, "remaining memory " + memStatus._2 + " should <= 12000") + assert(memStatus._1 == 40000L, "total memory " + memStatus._1 + " should equal 40000") + assert(memStatus._2 <= 32000L, "remaining memory " + memStatus._2 + " should <= 32000") assert(store.getSingleAndReleaseLock("a1-to-remove").isDefined, "a1 was not in store") assert(store.getSingleAndReleaseLock("a2-to-remove").isDefined, "a2 was not in store") assert(store.getSingleAndReleaseLock("a3-to-remove").isDefined, "a3 was not in store") @@ -269,8 +269,8 @@ class BlockManagerSuite extends SparkFunSuite with Matchers with BeforeAndAfterE } eventually(timeout(1000 milliseconds), interval(10 milliseconds)) { val memStatus = master.getMemoryStatus.head._2 - memStatus._1 should equal (20000L) - memStatus._2 should equal (20000L) + memStatus._1 should equal (40000L) + memStatus._2 should equal (40000L) } } -- cgit v1.2.3