author    Imran Rashid <imran@quantifind.com>  2013-02-01 00:23:38 -0800
committer Imran Rashid <imran@quantifind.com>  2013-02-01 00:23:38 -0800
commit    8a0a5ed53353ad6aa5656eb729d55ca7af2ab096 (patch)
tree      1e79a7e6e6b11d886a5714f1b5c51e517f139268 /core
parent    f127f2ae76692b189d86b5a47293579d5657c6d5 (diff)
track total partitions, in addition to cached partitions; use scala string formatting
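
The formatting change replaces Java's String.format, whose Object varargs force explicit boxing of a Scala Int, with Scala's own StringOps.format, which boxes primitives automatically. A minimal sketch (not part of the patch; identifiers and values are illustrative) contrasting the two calls:

object FormatSketch {
  def main(args: Array[String]): Unit = {
    val id = 3
    // Java's String.format takes Object varargs, so a Scala Int must be boxed by hand,
    // which is why the old code imported java.lang.Integer and used asInstanceOf:
    val javaStyle = String.format("RDD (%d)", id.asInstanceOf[java.lang.Integer])
    // Scala's StringOps.format boxes the Int automatically:
    val scalaStyle = "RDD (%d)".format(id)
    println(javaStyle == scalaStyle)  // true
  }
}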
Diffstat (limited to 'core')
 core/src/main/scala/spark/storage/StorageUtils.scala   | 10
 core/src/main/twirl/spark/storage/rdd.scala.html        |  6
 core/src/main/twirl/spark/storage/rdd_table.scala.html  |  6
 3 files changed, 13 insertions(+), 9 deletions(-)
diff --git a/core/src/main/scala/spark/storage/StorageUtils.scala b/core/src/main/scala/spark/storage/StorageUtils.scala
index ce7c067eea..5367b74bb6 100644
--- a/core/src/main/scala/spark/storage/StorageUtils.scala
+++ b/core/src/main/scala/spark/storage/StorageUtils.scala
@@ -22,12 +22,11 @@ case class StorageStatus(blockManagerId: BlockManagerId, maxMem: Long,
}
case class RDDInfo(id: Int, name: String, storageLevel: StorageLevel,
- numPartitions: Int, memSize: Long, diskSize: Long) {
+ numCachedPartitions: Int, numPartitions: Int, memSize: Long, diskSize: Long) {
override def toString = {
import Utils.memoryBytesToString
- import java.lang.{Integer => JInt}
- String.format("RDD \"%s\" (%d) Storage: %s; Partitions: %d; MemorySize: %s; DiskSize: %s", name, id.asInstanceOf[JInt],
- storageLevel.toString, numPartitions.asInstanceOf[JInt], memoryBytesToString(memSize), memoryBytesToString(diskSize))
+ "RDD \"%s\" (%d) Storage: %s; CachedPartitions: %d; TotalPartitions: %d; MemorySize: %s; DiskSize: %s".format(name, id,
+ storageLevel.toString, numCachedPartitions, numPartitions, memoryBytesToString(memSize), memoryBytesToString(diskSize))
}
}
@@ -65,9 +64,8 @@ object StorageUtils {
val rdd = sc.persistentRdds(rddId)
val rddName = Option(rdd.name).getOrElse(rddKey)
val rddStorageLevel = rdd.getStorageLevel
- //TODO get total number of partitions in rdd
- RDDInfo(rddId, rddName, rddStorageLevel, rddBlocks.length, memSize, diskSize)
+ RDDInfo(rddId, rddName, rddStorageLevel, rddBlocks.length, rdd.splits.size, memSize, diskSize)
}.toArray
}
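
A self-contained sketch (a simplified stand-in for the patched RDDInfo, with the storage level as a plain String and byte counts printed raw instead of via Utils.memoryBytesToString) showing the new summary line with both cached and total partition counts:

case class RDDInfoSketch(id: Int, name: String, storageLevel: String,
    numCachedPartitions: Int, numPartitions: Int, memSize: Long, diskSize: Long) {
  override def toString =
    "RDD \"%s\" (%d) Storage: %s; CachedPartitions: %d; TotalPartitions: %d; MemorySize: %s; DiskSize: %s"
      .format(name, id, storageLevel, numCachedPartitions, numPartitions, memSize, diskSize)
}

object RDDInfoSketchDemo extends App {
  // e.g. an RDD with 4 of its 8 partitions cached in memory
  println(RDDInfoSketch(3, "lines", "MEMORY_ONLY", 4, 8, 1024L, 0L))
}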
diff --git a/core/src/main/twirl/spark/storage/rdd.scala.html b/core/src/main/twirl/spark/storage/rdd.scala.html
index ac7f8c981f..d85addeb17 100644
--- a/core/src/main/twirl/spark/storage/rdd.scala.html
+++ b/core/src/main/twirl/spark/storage/rdd.scala.html
@@ -11,7 +11,11 @@
<strong>Storage Level:</strong>
@(rddInfo.storageLevel.description)
<li>
- <strong>Partitions:</strong>
+ <strong>Cached Partitions:</strong>
+ @(rddInfo.numCachedPartitions)
+ </li>
+ <li>
+ <strong>Total Partitions:</strong>
@(rddInfo.numPartitions)
</li>
<li>
diff --git a/core/src/main/twirl/spark/storage/rdd_table.scala.html b/core/src/main/twirl/spark/storage/rdd_table.scala.html
index af801cf229..a51e64aed0 100644
--- a/core/src/main/twirl/spark/storage/rdd_table.scala.html
+++ b/core/src/main/twirl/spark/storage/rdd_table.scala.html
@@ -6,7 +6,8 @@
<tr>
<th>RDD Name</th>
<th>Storage Level</th>
- <th>Partitions</th>
+ <th>Cached Partitions</th>
+ <th>Fraction Partitions Cached</th>
<th>Size in Memory</th>
<th>Size on Disk</th>
</tr>
@@ -21,7 +22,8 @@
</td>
<td>@(rdd.storageLevel.description)
</td>
- <td>@rdd.numPartitions</td>
+ <td>@rdd.numCachedPartitions</td>
+ <td>@(rdd.numCachedPartitions / rdd.numPartitions.toDouble)</td>
<td>@{Utils.memoryBytesToString(rdd.memSize)}</td>
<td>@{Utils.memoryBytesToString(rdd.diskSize)}</td>
</tr>
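
In the new fraction column, one operand is converted with .toDouble so the division is floating-point; both numCachedPartitions and numPartitions are Ints, and dividing them directly would truncate every partial fraction to 0. A minimal sketch (plain Scala outside the Twirl template, values illustrative):

object FractionSketch extends App {
  val cached = 4
  val total  = 8
  println(cached / total)           // 0   -- Int / Int truncates
  println(cached / total.toDouble)  // 0.5 -- what the table column shows
}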