about summary refs log tree commit diff
path: root/core/src/main/scala/org/apache/spark/status
diff options
context:
space:
mode:
Diffstat (limited to 'core/src/main/scala/org/apache/spark/status')
-rw-r--r-- core/src/main/scala/org/apache/spark/status/api/v1/AllRDDResource.scala | 8
-rw-r--r-- core/src/main/scala/org/apache/spark/status/api/v1/api.scala | 12
2 files changed, 17 insertions(+), 3 deletions(-)
diff --git a/core/src/main/scala/org/apache/spark/status/api/v1/AllRDDResource.scala b/core/src/main/scala/org/apache/spark/status/api/v1/AllRDDResource.scala
index 5c03609e5e..1279b281ad 100644
--- a/core/src/main/scala/org/apache/spark/status/api/v1/AllRDDResource.scala
+++ b/core/src/main/scala/org/apache/spark/status/api/v1/AllRDDResource.scala
@@ -70,7 +70,13 @@ private[spark] object AllRDDResource {
address = status.blockManagerId.hostPort,
memoryUsed = status.memUsedByRdd(rddId),
memoryRemaining = status.memRemaining,
- diskUsed = status.diskUsedByRdd(rddId)
+ diskUsed = status.diskUsedByRdd(rddId),
+ onHeapMemoryUsed = Some(
+ if (!rddInfo.storageLevel.useOffHeap) status.memUsedByRdd(rddId) else 0L),
+ offHeapMemoryUsed = Some(
+ if (rddInfo.storageLevel.useOffHeap) status.memUsedByRdd(rddId) else 0L),
+ onHeapMemoryRemaining = status.onHeapMemRemaining,
+ offHeapMemoryRemaining = status.offHeapMemRemaining
) } )
} else {
None
diff --git a/core/src/main/scala/org/apache/spark/status/api/v1/api.scala b/core/src/main/scala/org/apache/spark/status/api/v1/api.scala
index 5b9227350e..d159b9450e 100644
--- a/core/src/main/scala/org/apache/spark/status/api/v1/api.scala
+++ b/core/src/main/scala/org/apache/spark/status/api/v1/api.scala
@@ -75,7 +75,11 @@ class ExecutorSummary private[spark](
val totalShuffleWrite: Long,
val isBlacklisted: Boolean,
val maxMemory: Long,
- val executorLogs: Map[String, String])
+ val executorLogs: Map[String, String],
+ val onHeapMemoryUsed: Option[Long],
+ val offHeapMemoryUsed: Option[Long],
+ val maxOnHeapMemory: Option[Long],
+ val maxOffHeapMemory: Option[Long])
class JobData private[spark](
val jobId: Int,
@@ -111,7 +115,11 @@ class RDDDataDistribution private[spark](
val address: String,
val memoryUsed: Long,
val memoryRemaining: Long,
- val diskUsed: Long)
+ val diskUsed: Long,
+ val onHeapMemoryUsed: Option[Long],
+ val offHeapMemoryUsed: Option[Long],
+ val onHeapMemoryRemaining: Option[Long],
+ val offHeapMemoryRemaining: Option[Long])
class RDDPartitionInfo private[spark](
val blockName: String,