author     Kay Ousterhout <kayo@yahoo-inc.com>  2013-08-13 23:44:58 -0700
committer  Kay Ousterhout <kayo@yahoo-inc.com>  2013-08-13 23:44:58 -0700
commit     a88aa5e6ed98d212b25a534566e417401da9cc7d (patch)
tree       4d98b7a8f08b18f61a72475efe44766b2e3d520a /core
parent     3f14cbab05f732f129b2da4efb19c746949ad0ab (diff)
Fixed 2 bugs in executor UI.
1) The UI crashed if the executor page was loaded before any tasks started. 2) The total task count was reported incorrectly because string (rather than int) arithmetic was used.
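
The first fix swaps reduce for fold: before any executors report storage status, storageStatusList is empty and reduce(_+_) throws, while fold(0L)(_+_) simply returns the zero element. A minimal sketch of the difference (standalone values, not taken from the patch):

    // Sketch only: reduce on an empty collection throws, fold with a zero element does not.
    val empty: Seq[Long] = Seq.empty

    // empty.reduce(_ + _)               // throws java.lang.UnsupportedOperationException: empty.reduce
    val total = empty.fold(0L)(_ + _)    // returns 0L, so the page can render before anything is reported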
Diffstat (limited to 'core')
-rw-r--r--  core/src/main/scala/spark/ui/exec/ExecutorsUI.scala | 22
1 file changed, 10 insertions(+), 12 deletions(-)
diff --git a/core/src/main/scala/spark/ui/exec/ExecutorsUI.scala b/core/src/main/scala/spark/ui/exec/ExecutorsUI.scala
index 6ec48f70a4..43e0c20b19 100644
--- a/core/src/main/scala/spark/ui/exec/ExecutorsUI.scala
+++ b/core/src/main/scala/spark/ui/exec/ExecutorsUI.scala
@@ -38,10 +38,9 @@ private[spark] class ExecutorsUI(val sc: SparkContext) {
   def render(request: HttpServletRequest): Seq[Node] = {
     val storageStatusList = sc.getExecutorStorageStatus
-    val maxMem = storageStatusList.map(_.maxMem).reduce(_+_)
-    val memUsed = storageStatusList.map(_.memUsed()).reduce(_+_)
-    val diskSpaceUsed = storageStatusList.flatMap(_.blocks.values.map(_.diskSize))
-      .reduceOption(_+_).getOrElse(0L)
+    val maxMem = storageStatusList.map(_.maxMem).fold(0L)(_+_)
+    val memUsed = storageStatusList.map(_.memUsed()).fold(0L)(_+_)
+    val diskSpaceUsed = storageStatusList.flatMap(_.blocks.values.map(_.diskSize)).fold(0L)(_+_)

     val execHead = Seq("Executor ID", "Address", "RDD blocks", "Memory used", "Disk used",
       "Active tasks", "Failed tasks", "Complete tasks", "Total tasks")
@@ -93,10 +92,9 @@ private[spark] class ExecutorsUI(val sc: SparkContext) {
     val memUsed = sc.getExecutorStorageStatus(a).memUsed().toString
     val maxMem = sc.getExecutorStorageStatus(a).maxMem.toString
     val diskUsed = sc.getExecutorStorageStatus(a).diskUsed().toString
-    val activeTasks = listener.executorToTasksActive.get(a.toString).map(l => l.size)
-      .getOrElse(0).toString
-    val failedTasks = listener.executorToTasksFailed.getOrElse(a.toString, 0).toString
-    val completedTasks = listener.executorToTasksComplete.getOrElse(a.toString, 0).toString
+    val activeTasks = listener.executorToTasksActive.get(a.toString).map(l => l.size).getOrElse(0)
+    val failedTasks = listener.executorToTasksFailed.getOrElse(a.toString, 0)
+    val completedTasks = listener.executorToTasksComplete.getOrElse(a.toString, 0)
     val totalTasks = activeTasks + failedTasks + completedTasks

     Seq(
@@ -106,10 +104,10 @@ private[spark] class ExecutorsUI(val sc: SparkContext) {
       memUsed,
       maxMem,
       diskUsed,
-      activeTasks,
-      failedTasks,
-      completedTasks,
-      totalTasks
+      activeTasks.toString,
+      failedTasks.toString,
+      completedTasks.toString,
+      totalTasks.toString
     )
   }
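
The second fix addresses the total-task count: the per-executor counts were converted to Strings before being summed, so + concatenated the digits instead of adding them; the patch keeps them as Ints and calls toString only when building the table row. A minimal sketch of the failure mode (hypothetical counts, not taken from the patch):

    // Sketch only: with String counts, "+" concatenates rather than adds.
    val activeTasks = "2"
    val failedTasks = "0"
    val completedTasks = "13"
    println(activeTasks + failedTasks + completedTasks)   // "2013", not "15"

    // Keeping the counts as Ints and converting only for display gives the right total.
    val totalTasks = 2 + 0 + 13                            // 15
    println(totalTasks.toString)                           // "15"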