From a88aa5e6ed98d212b25a534566e417401da9cc7d Mon Sep 17 00:00:00 2001
From: Kay Ousterhout <kayo@yahoo-inc.com>
Date: Tue, 13 Aug 2013 23:44:58 -0700
Subject: [PATCH] Fixed 2 bugs in executor UI.

1) UI crashed if the executor UI was loaded before any tasks started.
2) The total task count was incorrectly reported due to using string
   (rather than int) arithmetic.
---
 .../scala/spark/ui/exec/ExecutorsUI.scala | 22 ++++++++++------------
 1 file changed, 10 insertions(+), 12 deletions(-)

diff --git a/core/src/main/scala/spark/ui/exec/ExecutorsUI.scala b/core/src/main/scala/spark/ui/exec/ExecutorsUI.scala
index 6ec48f70a4..43e0c20b19 100644
--- a/core/src/main/scala/spark/ui/exec/ExecutorsUI.scala
+++ b/core/src/main/scala/spark/ui/exec/ExecutorsUI.scala
@@ -38,10 +38,9 @@ private[spark] class ExecutorsUI(val sc: SparkContext) {
 
   def render(request: HttpServletRequest): Seq[Node] = {
     val storageStatusList = sc.getExecutorStorageStatus
-    val maxMem = storageStatusList.map(_.maxMem).reduce(_+_)
-    val memUsed = storageStatusList.map(_.memUsed()).reduce(_+_)
-    val diskSpaceUsed = storageStatusList.flatMap(_.blocks.values.map(_.diskSize))
-      .reduceOption(_+_).getOrElse(0L)
+    val maxMem = storageStatusList.map(_.maxMem).fold(0L)(_+_)
+    val memUsed = storageStatusList.map(_.memUsed()).fold(0L)(_+_)
+    val diskSpaceUsed = storageStatusList.flatMap(_.blocks.values.map(_.diskSize)).fold(0L)(_+_)
 
     val execHead = Seq("Executor ID", "Address", "RDD blocks", "Memory used", "Disk used",
       "Active tasks", "Failed tasks", "Complete tasks", "Total tasks")
@@ -93,10 +92,9 @@ private[spark] class ExecutorsUI(val sc: SparkContext) {
     val memUsed = sc.getExecutorStorageStatus(a).memUsed().toString
     val maxMem = sc.getExecutorStorageStatus(a).maxMem.toString
     val diskUsed = sc.getExecutorStorageStatus(a).diskUsed().toString
-    val activeTasks = listener.executorToTasksActive.get(a.toString).map(l => l.size)
-      .getOrElse(0).toString
-    val failedTasks = listener.executorToTasksFailed.getOrElse(a.toString, 0).toString
-    val completedTasks = listener.executorToTasksComplete.getOrElse(a.toString, 0).toString
+    val activeTasks = listener.executorToTasksActive.get(a.toString).map(l => l.size).getOrElse(0)
+    val failedTasks = listener.executorToTasksFailed.getOrElse(a.toString, 0)
+    val completedTasks = listener.executorToTasksComplete.getOrElse(a.toString, 0)
     val totalTasks = activeTasks + failedTasks + completedTasks
 
     Seq(
@@ -106,10 +104,10 @@ private[spark] class ExecutorsUI(val sc: SparkContext) {
       memUsed,
       maxMem,
       diskUsed,
-      activeTasks,
-      failedTasks,
-      completedTasks,
-      totalTasks
+      activeTasks.toString,
+      failedTasks.toString,
+      completedTasks.toString,
+      totalTasks.toString
     )
   }
 
-- 
GitLab
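
For context, below is a minimal, self-contained Scala sketch (not part of the commit; the object name and the sample count values are made up purely for illustration) of the two failure modes the commit message describes: reduce throwing on an empty collection, and string "+" concatenating task counts instead of adding them.

object ExecutorsUIBugSketch {
  def main(args: Array[String]): Unit = {
    // Bug 1: reduce on an empty collection throws UnsupportedOperationException,
    // which is why the page crashed when loaded before any executor had
    // reported storage status or tasks.
    val noExecutors = Seq.empty[Long]
    // noExecutors.reduce(_ + _)                 // would throw on an empty Seq
    val maxMem = noExecutors.fold(0L)(_ + _)     // 0L, like the patch's fold(0L)(_+_)

    // Bug 2: the per-executor counts were converted to String before summing,
    // so "+" concatenated digits instead of adding numbers.
    val activeTasks = "1"
    val failedTasks = "2"
    val completedTasks = "3"
    val wrongTotal = activeTasks + failedTasks + completedTasks                    // "123"
    val rightTotal = activeTasks.toInt + failedTasks.toInt + completedTasks.toInt  // 6

    println(s"maxMem=$maxMem, wrongTotal=$wrongTotal, rightTotal=$rightTotal")
  }
}

The patch takes the same approach: the counts stay Ints so that "+" is numeric addition, and .toString is applied only when the values are placed into the rendered table row.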