diff options
author | CrazyJvm <crazyjvm@gmail.com> | 2014-06-15 23:23:26 -0700 |
---|---|---|
committer | Patrick Wendell <pwendell@gmail.com> | 2014-06-15 23:23:26 -0700 |
commit | a63aa1adb2dfb19c8189167932ee8569840f96a0 (patch) | |
tree | 5262b1cd555599cae64d582c910c196cd7a508f6 | |
parent | ca5d9d43b93abd279079b3be8a06fdd78c595510 (diff) | |
download | spark-a63aa1adb2dfb19c8189167932ee8569840f96a0.tar.gz spark-a63aa1adb2dfb19c8189167932ee8569840f96a0.tar.bz2 spark-a63aa1adb2dfb19c8189167932ee8569840f96a0.zip |
SPARK-1999: StorageLevel in storage tab and RDD Storage Info never changes
StorageLevel in 'storage tab' and 'RDD Storage Info' never changes even if you call rdd.unpersist() and then you give the rdd another different storage level.
Author: CrazyJvm <crazyjvm@gmail.com>
Closes #968 from CrazyJvm/ui-storagelevel and squashes the following commits:
62555fa [CrazyJvm] change RDDInfo constructor param 'storageLevel' to var, so there's no need to add another variable _storageLevel.
9f1571e [CrazyJvm] JIRA https://issues.apache.org/jira/browse/SPARK-1999 UI : StorageLevel in storage tab and RDD Storage Info never changes
-rw-r--r-- | core/src/main/scala/org/apache/spark/storage/RDDInfo.scala      | 6 | +++---
-rw-r--r-- | core/src/main/scala/org/apache/spark/storage/StorageUtils.scala | 3 | +++
2 files changed, 6 insertions, 3 deletions
diff --git a/core/src/main/scala/org/apache/spark/storage/RDDInfo.scala b/core/src/main/scala/org/apache/spark/storage/RDDInfo.scala
index 023fd6e4d8..5a72e21687 100644
--- a/core/src/main/scala/org/apache/spark/storage/RDDInfo.scala
+++ b/core/src/main/scala/org/apache/spark/storage/RDDInfo.scala
@@ -26,7 +26,7 @@ class RDDInfo(
     val id: Int,
     val name: String,
     val numPartitions: Int,
-    val storageLevel: StorageLevel)
+    var storageLevel: StorageLevel)
   extends Ordered[RDDInfo] {

   var numCachedPartitions = 0
@@ -36,8 +36,8 @@ class RDDInfo(

   override def toString = {
     import Utils.bytesToString
-    ("RDD \"%s\" (%d) Storage: %s; CachedPartitions: %d; TotalPartitions: %d; MemorySize: %s; " +
-      "TachyonSize: %s; DiskSize: %s").format(
+    ("RDD \"%s\" (%d) StorageLevel: %s; CachedPartitions: %d; TotalPartitions: %d; " +
+      "MemorySize: %s; TachyonSize: %s; DiskSize: %s").format(
       name, id, storageLevel.toString, numCachedPartitions, numPartitions,
       bytesToString(memSize), bytesToString(tachyonSize), bytesToString(diskSize))
   }
diff --git a/core/src/main/scala/org/apache/spark/storage/StorageUtils.scala b/core/src/main/scala/org/apache/spark/storage/StorageUtils.scala
index 6f3252a2f6..f3bde1df45 100644
--- a/core/src/main/scala/org/apache/spark/storage/StorageUtils.scala
+++ b/core/src/main/scala/org/apache/spark/storage/StorageUtils.scala
@@ -89,10 +89,13 @@ private[spark] object StorageUtils {
       // Add up memory, disk and Tachyon sizes
       val persistedBlocks =
         blocks.filter { status => status.memSize + status.diskSize + status.tachyonSize > 0 }
+      val _storageLevel =
+        if (persistedBlocks.length > 0) persistedBlocks(0).storageLevel else StorageLevel.NONE
       val memSize = persistedBlocks.map(_.memSize).reduceOption(_ + _).getOrElse(0L)
       val diskSize = persistedBlocks.map(_.diskSize).reduceOption(_ + _).getOrElse(0L)
       val tachyonSize = persistedBlocks.map(_.tachyonSize).reduceOption(_ + _).getOrElse(0L)
       rddInfoMap.get(rddId).map { rddInfo =>
+        rddInfo.storageLevel = _storageLevel
         rddInfo.numCachedPartitions = persistedBlocks.length
         rddInfo.memSize = memSize
         rddInfo.diskSize = diskSize