author    CrazyJvm <crazyjvm@gmail.com>  2014-06-15 23:23:26 -0700
committer Patrick Wendell <pwendell@gmail.com>  2014-06-15 23:24:32 -0700
commit    5e85f87ace799bc66ec8d5f639591964be9843c2 (patch)
tree      56d8233f4efd515c5d7f893e7406462e991a8a12
parent    609e5ff20dc5f9eefbe1e6de8d21096de78ff8bd (diff)
download  spark-5e85f87ace799bc66ec8d5f639591964be9843c2.tar.gz
          spark-5e85f87ace799bc66ec8d5f639591964be9843c2.tar.bz2
          spark-5e85f87ace799bc66ec8d5f639591964be9843c2.zip
SPARK-1999: StorageLevel in storage tab and RDD Storage Info never changes
StorageLevel in 'storage tab' and 'RDD Storage Info' never changes, even if you call rdd.unpersist() and then give the rdd a different storage level.

Author: CrazyJvm <crazyjvm@gmail.com>

Closes #968 from CrazyJvm/ui-storagelevel and squashes the following commits:

62555fa [CrazyJvm] change RDDInfo constructor param 'storageLevel' to var, so there's no need to add another variable _storageLevel.
9f1571e [CrazyJvm] JIRA https://issues.apache.org/jira/browse/SPARK-1999 UI: StorageLevel in storage tab and RDD Storage Info never changes
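A minimal, hypothetical driver-side sketch of the reported scenario (assumes an existing SparkContext `sc`; the comments describe what the storage tab shows before and after this patch):

    import org.apache.spark.storage.StorageLevel

    val rdd = sc.parallelize(1 to 1000)        // `sc` is an existing SparkContext (assumption)
    rdd.persist(StorageLevel.MEMORY_ONLY)
    rdd.count()                                // storage tab reports MEMORY_ONLY

    rdd.unpersist(blocking = true)
    rdd.persist(StorageLevel.DISK_ONLY)
    rdd.count()                                // should now report DISK_ONLY; before this patch
                                               // the tab kept showing the original MEMORY_ONLY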
-rw-r--r--  core/src/main/scala/org/apache/spark/storage/RDDInfo.scala       6
-rw-r--r--  core/src/main/scala/org/apache/spark/storage/StorageUtils.scala  3
2 files changed, 6 insertions, 3 deletions
diff --git a/core/src/main/scala/org/apache/spark/storage/RDDInfo.scala b/core/src/main/scala/org/apache/spark/storage/RDDInfo.scala
index 023fd6e4d8..5a72e21687 100644
--- a/core/src/main/scala/org/apache/spark/storage/RDDInfo.scala
+++ b/core/src/main/scala/org/apache/spark/storage/RDDInfo.scala
@@ -26,7 +26,7 @@ class RDDInfo(
val id: Int,
val name: String,
val numPartitions: Int,
- val storageLevel: StorageLevel)
+ var storageLevel: StorageLevel)
extends Ordered[RDDInfo] {
var numCachedPartitions = 0
@@ -36,8 +36,8 @@ class RDDInfo(
override def toString = {
import Utils.bytesToString
- ("RDD \"%s\" (%d) Storage: %s; CachedPartitions: %d; TotalPartitions: %d; MemorySize: %s; " +
- "TachyonSize: %s; DiskSize: %s").format(
+ ("RDD \"%s\" (%d) StorageLevel: %s; CachedPartitions: %d; TotalPartitions: %d; " +
+ "MemorySize: %s; TachyonSize: %s; DiskSize: %s").format(
name, id, storageLevel.toString, numCachedPartitions, numPartitions,
bytesToString(memSize), bytesToString(tachyonSize), bytesToString(diskSize))
}
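
The effect of the val-to-var change, as a brief sketch (the constructor arguments are made up for illustration, and the snippet assumes RDDInfo is visible from the calling code):

    import org.apache.spark.storage.{RDDInfo, StorageLevel}

    val info = new RDDInfo(0, "example", 4, StorageLevel.MEMORY_ONLY)
    info.storageLevel = StorageLevel.DISK_ONLY   // compiles only now that storageLevel is a var
    println(info)                                // toString now reports the updated level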
diff --git a/core/src/main/scala/org/apache/spark/storage/StorageUtils.scala b/core/src/main/scala/org/apache/spark/storage/StorageUtils.scala
index 6f3252a2f6..f3bde1df45 100644
--- a/core/src/main/scala/org/apache/spark/storage/StorageUtils.scala
+++ b/core/src/main/scala/org/apache/spark/storage/StorageUtils.scala
@@ -89,10 +89,13 @@ private[spark] object StorageUtils {
// Add up memory, disk and Tachyon sizes
val persistedBlocks =
blocks.filter { status => status.memSize + status.diskSize + status.tachyonSize > 0 }
+ val _storageLevel =
+ if (persistedBlocks.length > 0) persistedBlocks(0).storageLevel else StorageLevel.NONE
val memSize = persistedBlocks.map(_.memSize).reduceOption(_ + _).getOrElse(0L)
val diskSize = persistedBlocks.map(_.diskSize).reduceOption(_ + _).getOrElse(0L)
val tachyonSize = persistedBlocks.map(_.tachyonSize).reduceOption(_ + _).getOrElse(0L)
rddInfoMap.get(rddId).map { rddInfo =>
+ rddInfo.storageLevel = _storageLevel
rddInfo.numCachedPartitions = persistedBlocks.length
rddInfo.memSize = memSize
rddInfo.diskSize = diskSize
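
The level-selection rule added above, restated as a small self-contained sketch (simplified stand-in types, not the real Spark classes):

    case class Level(name: String)
    val NONE = Level("NONE")
    case class BlockStatus(storageLevel: Level, memSize: Long, diskSize: Long, tachyonSize: Long)

    // Report the level of the first still-persisted block, or NONE when nothing is cached.
    def currentLevel(blocks: Seq[BlockStatus]): Level = {
      val persisted = blocks.filter(b => b.memSize + b.diskSize + b.tachyonSize > 0)
      if (persisted.nonEmpty) persisted.head.storageLevel else NONE
    }

    currentLevel(Seq(BlockStatus(Level("DISK_ONLY"), 0L, 1024L, 0L)))  // => Level("DISK_ONLY")
    currentLevel(Seq.empty)                                            // => Level("NONE")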