diff --git a/core/src/main/scala/spark/executor/Executor.scala b/core/src/main/scala/spark/executor/Executor.scala
index c5202d94b024a748928cd9e1edf0bcad66565f83..05a960d7c506d0df274b7ae7424c90b1d4e3e4e1 100644
--- a/core/src/main/scala/spark/executor/Executor.scala
+++ b/core/src/main/scala/spark/executor/Executor.scala
@@ -130,7 +130,7 @@ private[spark] class Executor(executorId: String, slaveHostname: String, propert
         taskStart = System.currentTimeMillis()
         val value = task.run(taskId.toInt)
         val taskFinish = System.currentTimeMillis()
-        task.metrics.foreach{ m =>
+        for (m <- task.metrics) {
           m.hostname = Utils.localHostName
           m.executorDeserializeTime = (taskStart - startTime).toInt
           m.executorRunTime = (taskFinish - taskStart).toInt
@@ -158,7 +158,7 @@ private[spark] class Executor(executorId: String, slaveHostname: String, propert
         case t: Throwable => {
           val serviceTime = (System.currentTimeMillis() - taskStart).toInt
           val metrics = attemptedTask.flatMap(t => t.metrics)
-          metrics.foreach {m =>
+          for (m <- metrics) {
             m.executorRunTime = serviceTime
             m.jvmGCTime = getTotalGCTime - startGCTime
           }
diff --git a/core/src/main/scala/spark/scheduler/local/LocalScheduler.scala b/core/src/main/scala/spark/scheduler/local/LocalScheduler.scala
index 33e7a10ea471166956354bbd25b721e8d801fb0c..6c43928bc855ea7b16fa429c28310a22292fcb6b 100644
--- a/core/src/main/scala/spark/scheduler/local/LocalScheduler.scala
+++ b/core/src/main/scala/spark/scheduler/local/LocalScheduler.scala
@@ -34,7 +34,6 @@ import spark.scheduler._
 import spark.scheduler.cluster._
 import spark.scheduler.cluster.SchedulingMode.SchedulingMode
 import akka.actor._
-import management.ManagementFactory
 
 /**
  * A FIFO or Fair TaskScheduler implementation that runs tasks locally in a thread pool. Optionally
@@ -218,7 +217,7 @@ private[spark] class LocalScheduler(threads: Int, val maxFailures: Int, val sc:
         case t: Throwable => {
           val serviceTime = System.currentTimeMillis() - taskStart
           val metrics = attemptedTask.flatMap(t => t.metrics)
-          metrics.foreach {m =>
+          for (m <- metrics) {
             m.executorRunTime = serviceTime.toInt
             m.jvmGCTime = getTotalGCTime - startGCTime
           }
diff --git a/core/src/main/scala/spark/ui/jobs/StagePage.scala b/core/src/main/scala/spark/ui/jobs/StagePage.scala
index 28a6d7b179baa5b02937684e5095611c821c08d6..f91a415e370c9ea3e64fa1d215686b78921127c7 100644
--- a/core/src/main/scala/spark/ui/jobs/StagePage.scala
+++ b/core/src/main/scala/spark/ui/jobs/StagePage.scala
@@ -166,7 +166,7 @@ private[spark] class StagePage(parent: JobProgressUI) {
           Utils.memoryBytesToString(s.shuffleBytesWritten)}.getOrElse("")}</td>
       }}
       <td sorttable_customkey={gcTime.toString}>
-        {if (gcTime > 0) parent.formatDuration(gcTime) else ""}
+        {if (gcTime > 0) parent.formatDuration(gcTime) else ""}
       </td>
       <td>{exception.map(e =>
         <span>
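
Note (outside the patch itself): the core change in Executor.scala and LocalScheduler.scala replaces a call to Option.foreach with a for-comprehension over the Option; the two forms desugar to the same foreach call, so behavior is unchanged and only style differs. A minimal standalone sketch of the pattern, using a hypothetical Metrics class rather than Spark's TaskMetrics:

    // Hypothetical stand-in for Spark's TaskMetrics, used only to illustrate the pattern.
    case class Metrics(var executorRunTime: Int = 0)

    object ForOverOptionExample extends App {
      val metrics: Option[Metrics] = Some(Metrics())

      // Original style: run the block only if the Option is defined.
      metrics.foreach { m => m.executorRunTime = 42 }

      // Style adopted in the patch; desugars to the same foreach call.
      for (m <- metrics) {
        m.executorRunTime = 42
      }

      println(metrics) // Some(Metrics(42))
    }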