author     Denny <dennybritz@gmail.com>    2012-11-12 10:56:35 -0800
committer  Denny <dennybritz@gmail.com>    2012-11-12 10:56:35 -0800
commit     4a1be7e0dbf0031d85b91dc1132fe101d87ba097 (patch)
tree       f6d16c6dc153a1274af044a4bd83a4b4eacb5353 /core/src/main/twirl
parent     68e0a8828283debc9e92a96a93fe32c19af12811 (diff)
Refactor BlockManager UI and add worker details.
Diffstat (limited to 'core/src/main/twirl')
-rw-r--r--  core/src/main/twirl/spark/storage/index.scala.html         22
-rw-r--r--  core/src/main/twirl/spark/storage/rdd.scala.html           35
-rw-r--r--  core/src/main/twirl/spark/storage/rdd_row.scala.html       18
-rw-r--r--  core/src/main/twirl/spark/storage/rdd_table.scala.html     16
-rw-r--r--  core/src/main/twirl/spark/storage/worker_table.scala.html  24
5 files changed, 76 insertions, 39 deletions
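
For orientation before the diffs: the storage templates now take the raw Array[spark.storage.StorageStatus] alongside pre-aggregated totals, so the same per-worker data can feed both the RDD tables and the new worker_table. A minimal Scala sketch of how a caller might assemble the arguments for the refactored index template follows; the aggregation, the maxMem field, and the Twirl-generated spark.storage.html.index name are assumptions, not part of this diff.

    import spark.storage.{RDDInfo, StorageStatus}

    // Hedged sketch only: one way the new index.scala.html signature could be fed.
    // memRemaining and the blocks/diskSize usage match what the templates below
    // reference; a per-worker maxMem field and the spark.storage.html.index object
    // name (Twirl's output for spark/storage/index.scala.html) are assumptions.
    def renderStorageIndex(rdds: Array[RDDInfo],
                           storageStatusList: Array[StorageStatus]) = {
      val maxMem        = storageStatusList.map(_.maxMem).sum
      val remainingMem  = storageStatusList.map(_.memRemaining).sum
      val diskSpaceUsed = storageStatusList.flatMap(_.blocks.values).map(_.diskSize).sum
      spark.storage.html.index(maxMem, remainingMem, diskSpaceUsed, rdds, storageStatusList)
    }
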
diff --git a/core/src/main/twirl/spark/storage/index.scala.html b/core/src/main/twirl/spark/storage/index.scala.html
index fa7dad51ee..2b337f6133 100644
--- a/core/src/main/twirl/spark/storage/index.scala.html
+++ b/core/src/main/twirl/spark/storage/index.scala.html
@@ -1,4 +1,5 @@
-@(maxMem: Long, remainingMem: Long, diskSpaceUsed: Long, rdds: List[spark.storage.RDDInfo])
+@(maxMem: Long, remainingMem: Long, diskSpaceUsed: Long, rdds: Array[spark.storage.RDDInfo], storageStatusList: Array[spark.storage.StorageStatus])
+@import spark.Utils
@spark.common.html.layout(title = "Storage Dashboard") {
@@ -7,16 +8,16 @@
<div class="span12">
<ul class="unstyled">
<li><strong>Memory:</strong>
- @{spark.Utils.memoryBytesToString(maxMem - remainingMem)} Used
- (@{spark.Utils.memoryBytesToString(remainingMem)} Available) </li>
- <li><strong>Disk:</strong> @{spark.Utils.memoryBytesToString(diskSpaceUsed)} Used </li>
+ @{Utils.memoryBytesToString(maxMem - remainingMem)} Used
+ (@{Utils.memoryBytesToString(remainingMem)} Available) </li>
+ <li><strong>Disk:</strong> @{Utils.memoryBytesToString(diskSpaceUsed)} Used </li>
</ul>
</div>
</div>
<hr/>
- <!-- RDD Summary (Running) -->
+ <!-- RDD Summary -->
<div class="row">
<div class="span12">
<h3> RDD Summary </h3>
@@ -25,4 +26,15 @@
</div>
</div>
+ <hr/>
+
+ <!-- Worker Summary -->
+ <div class="row">
+ <div class="span12">
+ <h3> Worker Summary </h3>
+ <br/>
+ @worker_table(storageStatusList)
+ </div>
+ </div>
+
}
\ No newline at end of file
diff --git a/core/src/main/twirl/spark/storage/rdd.scala.html b/core/src/main/twirl/spark/storage/rdd.scala.html
index 075289c826..ac7f8c981f 100644
--- a/core/src/main/twirl/spark/storage/rdd.scala.html
+++ b/core/src/main/twirl/spark/storage/rdd.scala.html
@@ -1,4 +1,5 @@
-@(rddInfo: spark.storage.RDDInfo, blocks: Map[String, spark.storage.BlockStatus])
+@(rddInfo: spark.storage.RDDInfo, storageStatusList: Array[spark.storage.StorageStatus])
+@import spark.Utils
@spark.common.html.layout(title = "RDD Info ") {
@@ -8,21 +9,18 @@
<ul class="unstyled">
<li>
<strong>Storage Level:</strong>
- @(if (rddInfo.storageLevel.useDisk) "Disk" else "")
- @(if (rddInfo.storageLevel.useMemory) "Memory" else "")
- @(if (rddInfo.storageLevel.deserialized) "Deserialized" else "")
- @(rddInfo.storageLevel.replication)x Replicated
+ @(rddInfo.storageLevel.description)
<li>
<strong>Partitions:</strong>
@(rddInfo.numPartitions)
</li>
<li>
<strong>Memory Size:</strong>
- @{spark.Utils.memoryBytesToString(rddInfo.memSize)}
+ @{Utils.memoryBytesToString(rddInfo.memSize)}
</li>
<li>
<strong>Disk Size:</strong>
- @{spark.Utils.memoryBytesToString(rddInfo.diskSize)}
+ @{Utils.memoryBytesToString(rddInfo.diskSize)}
</li>
</ul>
</div>
@@ -36,6 +34,7 @@
<h3> RDD Summary </h3>
<br/>
+
<!-- Block Table Summary -->
<table class="table table-bordered table-striped table-condensed sortable">
<thead>
@@ -47,17 +46,14 @@
</tr>
</thead>
<tbody>
- @blocks.map { case (k,v) =>
+ @storageStatusList.flatMap(_.blocks).toArray.sortWith(_._1 < _._1).map { case (k,v) =>
<tr>
<td>@k</td>
<td>
- @(if (v.storageLevel.useDisk) "Disk" else "")
- @(if (v.storageLevel.useMemory) "Memory" else "")
- @(if (v.storageLevel.deserialized) "Deserialized" else "")
- @(v.storageLevel.replication)x Replicated
+ @(v.storageLevel.description)
</td>
- <td>@{spark.Utils.memoryBytesToString(v.memSize)}</td>
- <td>@{spark.Utils.memoryBytesToString(v.diskSize)}</td>
+ <td>@{Utils.memoryBytesToString(v.memSize)}</td>
+ <td>@{Utils.memoryBytesToString(v.diskSize)}</td>
</tr>
}
</tbody>
@@ -67,4 +63,15 @@
</div>
</div>
+ <hr/>
+
+ <!-- Worker Table -->
+ <div class="row">
+ <div class="span12">
+ <h3> Worker Summary </h3>
+ <br/>
+ @worker_table(storageStatusList, "rdd_" + rddInfo.id )
+ </div>
+ </div>
+
}
\ No newline at end of file
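
Note on the block table above: it renders whatever blocks are present in the storageStatusList it receives, and the worker table is scoped with the "rdd_" + rddInfo.id prefix, so the caller presumably narrows each worker's block map to the RDD in question before rendering. A hedged sketch of such a filtering step (filterByPrefix is hypothetical and not part of this commit):

    import spark.storage.StorageStatus

    // Hypothetical helper (not in this diff): keeps only the blocks of one RDD,
    // matching the "rdd_<id>" naming convention passed to worker_table above.
    // Assumes StorageStatus is a case class so copy(...) is available.
    def filterByPrefix(statusList: Array[StorageStatus], prefix: String): Array[StorageStatus] =
      statusList.map { status =>
        status.copy(blocks = status.blocks.filter { case (name, _) => name.startsWith(prefix) })
      }

    // e.g. filterByPrefix(storageStatusList, "rdd_" + rddInfo.id)
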
diff --git a/core/src/main/twirl/spark/storage/rdd_row.scala.html b/core/src/main/twirl/spark/storage/rdd_row.scala.html
deleted file mode 100644
index 3dd9944e3b..0000000000
--- a/core/src/main/twirl/spark/storage/rdd_row.scala.html
+++ /dev/null
@@ -1,18 +0,0 @@
-@(rdd: spark.storage.RDDInfo)
-
-<tr>
- <td>
- <a href="rdd?id=@(rdd.id)">
- @rdd.name
- </a>
- </td>
- <td>
- @(if (rdd.storageLevel.useDisk) "Disk" else "")
- @(if (rdd.storageLevel.useMemory) "Memory" else "")
- @(if (rdd.storageLevel.deserialized) "Deserialized" else "")
- @(rdd.storageLevel.replication)x Replicated
- </td>
- <td>@rdd.numPartitions</td>
- <td>@{spark.Utils.memoryBytesToString(rdd.memSize)}</td>
- <td>@{spark.Utils.memoryBytesToString(rdd.diskSize)}</td>
-</tr>
\ No newline at end of file
diff --git a/core/src/main/twirl/spark/storage/rdd_table.scala.html b/core/src/main/twirl/spark/storage/rdd_table.scala.html
index 24f55ccefb..af801cf229 100644
--- a/core/src/main/twirl/spark/storage/rdd_table.scala.html
+++ b/core/src/main/twirl/spark/storage/rdd_table.scala.html
@@ -1,4 +1,5 @@
-@(rdds: List[spark.storage.RDDInfo])
+@(rdds: Array[spark.storage.RDDInfo])
+@import spark.Utils
<table class="table table-bordered table-striped table-condensed sortable">
<thead>
@@ -12,7 +13,18 @@
</thead>
<tbody>
@for(rdd <- rdds) {
- @rdd_row(rdd)
+ <tr>
+ <td>
+ <a href="rdd?id=@(rdd.id)">
+ @rdd.name
+ </a>
+ </td>
+ <td>@(rdd.storageLevel.description)
+ </td>
+ <td>@rdd.numPartitions</td>
+ <td>@{Utils.memoryBytesToString(rdd.memSize)}</td>
+ <td>@{Utils.memoryBytesToString(rdd.diskSize)}</td>
+ </tr>
}
</tbody>
</table>
\ No newline at end of file
diff --git a/core/src/main/twirl/spark/storage/worker_table.scala.html b/core/src/main/twirl/spark/storage/worker_table.scala.html
new file mode 100644
index 0000000000..d54b8de4cc
--- /dev/null
+++ b/core/src/main/twirl/spark/storage/worker_table.scala.html
@@ -0,0 +1,24 @@
+@(workersStatusList: Array[spark.storage.StorageStatus], prefix: String = "")
+@import spark.Utils
+
+<table class="table table-bordered table-striped table-condensed sortable">
+ <thead>
+ <tr>
+ <th>Host</th>
+ <th>Memory Usage</th>
+ <th>Disk Usage</th>
+ </tr>
+ </thead>
+ <tbody>
+ @for(status <- workersStatusList) {
+ <tr>
+ <td>@(status.blockManagerId.ip + ":" + status.blockManagerId.port)</td>
+ <td>
+ @(Utils.memoryBytesToString(status.memUsed(prefix)))
+ (@(Utils.memoryBytesToString(status.memRemaining)) Total Available)
+ </td>
+ <td>@(Utils.memoryBytesToString(status.diskUsed(prefix)))</td>
+ </tr>
+ }
+ </tbody>
+</table>
\ No newline at end of file
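
The new worker_table assumes StorageStatus exposes memUsed(prefix) and diskUsed(prefix), letting the same table show either worker totals (empty prefix) or a single RDD's footprint. A hedged sketch of what such helpers could look like; only their call sites appear in this diff, so the class below is illustrative rather than the actual implementation.

    import spark.storage.BlockStatus

    // Hedged sketch of the prefix-scoped helpers worker_table relies on; the
    // bodies below are an assumption, only the method names come from this diff.
    class StorageStatusSketch(val blocks: Map[String, BlockStatus]) {
      private def matching(prefix: String): Iterable[BlockStatus] =
        blocks.collect { case (name, b) if name.startsWith(prefix) => b }

      def memUsed(prefix: String = ""): Long  = matching(prefix).map(_.memSize).sum
      def diskUsed(prefix: String = ""): Long = matching(prefix).map(_.diskSize).sum
    }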