diff options
author:    jerryshao <sshao@hortonworks.com>  (2016-07-25 15:17:06 -0700)
committer: Josh Rosen <joshrosen@databricks.com>  (2016-07-25 15:17:06 -0700)
commit:    f5ea7fe53974a7e8cbfc222b9a6f47669b53ccfd (patch)
tree:      2f5d5bb48568f2dfcac15516a746293775542a71 /core/src/test/scala
parent:    cda4603de340d533c49feac1b244ddfd291f9bcf (diff)
download:  spark-f5ea7fe53974a7e8cbfc222b9a6f47669b53ccfd.tar.gz
           spark-f5ea7fe53974a7e8cbfc222b9a6f47669b53ccfd.tar.bz2
           spark-f5ea7fe53974a7e8cbfc222b9a6f47669b53ccfd.zip
[SPARK-16166][CORE] Also take off-heap memory usage into consideration in log and webui display
## What changes were proposed in this pull request?
Currently in the log and UI display, only on-heap storage memory is calculated and displayed,
```
16/06/27 13:41:52 INFO MemoryStore: Block rdd_5_0 stored as values in memory (estimated size 17.8 KB, free 665.9 MB)
```
<img width="1232" alt="untitled" src="https://cloud.githubusercontent.com/assets/850797/16369960/53fb614e-3c6e-11e6-8fa3-7ffe65abcb49.png">
With [SPARK-13992](https://issues.apache.org/jira/browse/SPARK-13992) off-heap memory is supported for data persistence, so here change to also take off-heap storage memory into consideration.
## How was this patch tested?
Unit test and local verification.
Author: jerryshao <sshao@hortonworks.com>
Closes #13920 from jerryshao/SPARK-16166.
Diffstat (limited to 'core/src/test/scala')
-rw-r--r--  core/src/test/scala/org/apache/spark/memory/TestMemoryManager.scala  | 2
-rw-r--r--  core/src/test/scala/org/apache/spark/storage/BlockManagerSuite.scala | 8
2 files changed, 6 insertions, 4 deletions
diff --git a/core/src/test/scala/org/apache/spark/memory/TestMemoryManager.scala b/core/src/test/scala/org/apache/spark/memory/TestMemoryManager.scala
index 6a4f409e8e..5f699df821 100644
--- a/core/src/test/scala/org/apache/spark/memory/TestMemoryManager.scala
+++ b/core/src/test/scala/org/apache/spark/memory/TestMemoryManager.scala
@@ -56,6 +56,8 @@ class TestMemoryManager(conf: SparkConf)
   }

   override def maxOnHeapStorageMemory: Long = Long.MaxValue

+  override def maxOffHeapStorageMemory: Long = 0L
+
   private var oomOnce = false
   private var available = Long.MaxValue

diff --git a/core/src/test/scala/org/apache/spark/storage/BlockManagerSuite.scala b/core/src/test/scala/org/apache/spark/storage/BlockManagerSuite.scala
index 6821582254..8077a1b941 100644
--- a/core/src/test/scala/org/apache/spark/storage/BlockManagerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/storage/BlockManagerSuite.scala
@@ -239,8 +239,8 @@ class BlockManagerSuite extends SparkFunSuite with Matchers with BeforeAndAfterE
     // Checking whether blocks are in memory and memory size
     val memStatus = master.getMemoryStatus.head._2
-    assert(memStatus._1 == 20000L, "total memory " + memStatus._1 + " should equal 20000")
-    assert(memStatus._2 <= 12000L, "remaining memory " + memStatus._2 + " should <= 12000")
+    assert(memStatus._1 == 40000L, "total memory " + memStatus._1 + " should equal 40000")
+    assert(memStatus._2 <= 32000L, "remaining memory " + memStatus._2 + " should <= 32000")
     assert(store.getSingleAndReleaseLock("a1-to-remove").isDefined, "a1 was not in store")
     assert(store.getSingleAndReleaseLock("a2-to-remove").isDefined, "a2 was not in store")
     assert(store.getSingleAndReleaseLock("a3-to-remove").isDefined, "a3 was not in store")
@@ -269,8 +269,8 @@ class BlockManagerSuite extends SparkFunSuite with Matchers with BeforeAndAfterE
     }
     eventually(timeout(1000 milliseconds), interval(10 milliseconds)) {
       val memStatus = master.getMemoryStatus.head._2
-      memStatus._1 should equal (20000L)
-      memStatus._2 should equal (20000L)
+      memStatus._1 should equal (40000L)
+      memStatus._2 should equal (40000L)
     }
   }