From c2c1af39f593cd00d29368efe2dbb8c0444f624d Mon Sep 17 00:00:00 2001
From: Kay Ousterhout <kayousterhout@gmail.com>
Date: Sun, 29 Dec 2013 21:10:08 -0800
Subject: [PATCH] Updated code style according to Patrick's comments

---
 .../scala/org/apache/spark/scheduler/SparkListener.scala | 6 ++----
 1 file changed, 2 insertions(+), 4 deletions(-)

diff --git a/core/src/main/scala/org/apache/spark/scheduler/SparkListener.scala b/core/src/main/scala/org/apache/spark/scheduler/SparkListener.scala
index 3becb4f068..627995c826 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/SparkListener.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/SparkListener.scala
@@ -119,8 +119,7 @@ object StatsReportListener extends Logging {
   val probabilities = percentiles.map{_ / 100.0}
   val percentilesHeader = "\t" + percentiles.mkString("%\t") + "%"
 
-  def extractDoubleDistribution(
-      stage:SparkListenerStageCompleted,
+  def extractDoubleDistribution(stage: SparkListenerStageCompleted,
       getMetric: (TaskInfo,TaskMetrics) => Option[Double])
     : Option[Distribution] = {
     Distribution(stage.stage.taskInfos.flatMap {
@@ -128,8 +127,7 @@ object StatsReportListener extends Logging {
   }
 
   //is there some way to setup the types that I can get rid of this completely?
-  def extractLongDistribution(
-      stage:SparkListenerStageCompleted,
+  def extractLongDistribution(stage: SparkListenerStageCompleted,
       getMetric: (TaskInfo,TaskMetrics) => Option[Long])
     : Option[Distribution] = {
     extractDoubleDistribution(stage, (info, metric) => getMetric(info,metric).map{_.toDouble})
-- 
GitLab
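
For readers skimming the patch, the two helpers it reformats follow a simple delegation pattern: the Long-valued extractor widens its metric to Double and forwards to the Double-valued one. The sketch below illustrates that pattern with hypothetical stand-in types (Info, Metrics, Distribution, DistributionHelpers); it is not the real Spark code, which builds its Distribution from stage.stage.taskInfos as shown in the hunk context above.

// A minimal, self-contained sketch of the delegation pattern in the patch.
// Info, Metrics and Distribution are hypothetical stand-ins for Spark's
// TaskInfo, TaskMetrics and Distribution; they are not the real Spark types.
case class Info(taskId: Long)
case class Metrics(runTimeMs: Option[Long], fetchWaitMs: Option[Double])
case class Distribution(values: Seq[Double])

object DistributionHelpers {
  // Collect whatever Double-valued metric the caller selects into a Distribution.
  def extractDoubleDistribution(
      tasks: Seq[(Info, Metrics)],
      getMetric: (Info, Metrics) => Option[Double]): Option[Distribution] = {
    val values = tasks.flatMap { case (info, metrics) => getMetric(info, metrics) }
    if (values.nonEmpty) Some(Distribution(values)) else None
  }

  // The Long variant widens each metric to Double and reuses the helper above,
  // mirroring the extractLongDistribution -> extractDoubleDistribution call in the diff.
  def extractLongDistribution(
      tasks: Seq[(Info, Metrics)],
      getMetric: (Info, Metrics) => Option[Long]): Option[Distribution] =
    extractDoubleDistribution(tasks, (info, metrics) => getMetric(info, metrics).map(_.toDouble))
}

object DistributionHelpersDemo extends App {
  val tasks = Seq(
    (Info(1), Metrics(Some(120L), Some(3.5))),
    (Info(2), Metrics(Some(95L), None)))
  // Both calls produce Option[Distribution]; the Long metric is widened to Double.
  println(DistributionHelpers.extractLongDistribution(tasks, (_, m) => m.runTimeMs))
  println(DistributionHelpers.extractDoubleDistribution(tasks, (_, m) => m.fetchWaitMs))
}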