diff --git a/core/src/main/resources/spark/ui/static/webui.css b/core/src/main/resources/spark/ui/static/webui.css
index b83f4109c037c3b13a7f0ae2386bc41c24766088..9914a8ad2a39c1ce3ce849bd13a7c94ad88c4209 100644
--- a/core/src/main/resources/spark/ui/static/webui.css
+++ b/core/src/main/resources/spark/ui/static/webui.css
@@ -5,10 +5,6 @@
   padding: 0;
 }
 
-body {
-  font-size: 15px !important;
-}
-
 .version {
   line-height: 30px;
   vertical-align: bottom;
@@ -53,6 +49,10 @@ body {
   line-height: 15px !important;
 }
 
+.table-fixed {
+  table-layout: fixed;
+}
+
 .table td {
   vertical-align: middle !important;
 }
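
The new .table-fixed rule turns on the fixed table layout algorithm: column widths are taken from the header cells' width attributes rather than computed from cell content, so long values wrap inside their column instead of stretching it. A minimal sketch of markup that would exercise the rule, with hypothetical values:

    val table =
      <table class="table table-bordered table-fixed">
        <thead><th width="50%">Name</th><th width="50%">Value</th></thead>
        <tbody><tr><td>spark.master</td><td>spark://some-long-host-name:7077</td></tr></tbody>
      </table>
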
diff --git a/core/src/main/scala/spark/deploy/master/ui/IndexPage.scala b/core/src/main/scala/spark/deploy/master/ui/IndexPage.scala
index 4443d880560ec92eb57f65d606e02e3a6b776140..47936e2bad6f16fedb7f6bd6d8d1d8288dfc01c3 100644
--- a/core/src/main/scala/spark/deploy/master/ui/IndexPage.scala
+++ b/core/src/main/scala/spark/deploy/master/ui/IndexPage.scala
@@ -53,7 +53,7 @@ private[spark] class IndexPage(parent: MasterWebUI) {
     val workers = state.workers.sortBy(_.id)
     val workerTable = UIUtils.listingTable(workerHeaders, workerRow, workers)
 
-    val appHeaders = Seq("ID", "Description", "Cores", "Memory per Node", "Submit Time", "User",
+    val appHeaders = Seq("ID", "Name", "Cores", "Memory per Node", "Submitted Time", "User",
       "State", "Duration")
     val activeApps = state.activeApps.sortBy(_.startTime).reverse
     val activeAppsTable = UIUtils.listingTable(appHeaders, appRow, activeApps)
diff --git a/core/src/main/scala/spark/ui/JettyUtils.scala b/core/src/main/scala/spark/ui/JettyUtils.scala
index 1cc85124d3ac6f3a785b99331527b550f3270150..f66fe39905228e3167dabf33a1ab87740e7c01f5 100644
--- a/core/src/main/scala/spark/ui/JettyUtils.scala
+++ b/core/src/main/scala/spark/ui/JettyUtils.scala
@@ -17,21 +17,21 @@
 
 package spark.ui
 
-import annotation.tailrec
-
 import javax.servlet.http.{HttpServletResponse, HttpServletRequest}
 
+import scala.annotation.tailrec
+import scala.util.{Try, Success, Failure}
+import scala.xml.Node
+
 import net.liftweb.json.{JValue, pretty, render}
 
 import org.eclipse.jetty.server.{Server, Request, Handler}
 import org.eclipse.jetty.server.handler.{ResourceHandler, HandlerList, ContextHandler, AbstractHandler}
 import org.eclipse.jetty.util.thread.QueuedThreadPool
 
-import scala.util.{Try, Success, Failure}
-import scala.xml.Node
-
 import spark.Logging
 
+
 /** Utilities for launching a web server using Jetty's HTTP Server class */
 private[spark] object JettyUtils extends Logging {
   // Base type for a function that returns something based on an HTTP request. Allows for
diff --git a/core/src/main/scala/spark/ui/UIUtils.scala b/core/src/main/scala/spark/ui/UIUtils.scala
index ee7a8b482ed57a4d56a68777df1c8ff2cb3537b0..fe2afc11299e47d8210566762491bf682c36ef7b 100644
--- a/core/src/main/scala/spark/ui/UIUtils.scala
+++ b/core/src/main/scala/spark/ui/UIUtils.scala
@@ -125,9 +125,21 @@ private[spark] object UIUtils {
   }
 
   /** Returns an HTML table constructed by generating a row for each object in a sequence. */
-  def listingTable[T](headers: Seq[String], makeRow: T => Seq[Node], rows: Seq[T]): Seq[Node] = {
-    <table class="table table-bordered table-striped table-condensed sortable">
-      <thead>{headers.map(h => <th>{h}</th>)}</thead>
+  def listingTable[T](
+      headers: Seq[String],
+      makeRow: T => Seq[Node],
+      rows: Seq[T],
+      fixedWidth: Boolean = false): Seq[Node] = {
+
+    val colWidth = 100.0 / headers.size
+    val colWidthAttr = if (fixedWidth) colWidth + "%" else ""
+    var tableClass = "table table-bordered table-striped table-condensed sortable"
+    if (fixedWidth) {
+      tableClass += " table-fixed"
+    }
+
+    <table class={tableClass}>
+      <thead>{headers.map(h => <th width={colWidthAttr}>{h}</th>)}</thead>
       <tbody>
         {rows.map(r => makeRow(r))}
       </tbody>
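
A hedged usage sketch of the extended listingTable (the call site and data below are hypothetical): with fixedWidth = true, each of the two headers is emitted as <th width="50%"> and the table gains the table-fixed class; existing callers compile unchanged via the default argument.

    def row(kv: (String, String)) = <tr><td>{kv._1}</td><td>{kv._2}</td></tr>
    val table = UIUtils.listingTable(
      Seq("Name", "Value"), row, Seq(("spark.master", "local[4]")), fixedWidth = true)
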
diff --git a/core/src/main/scala/spark/ui/UIWorkloadGenerator.scala b/core/src/main/scala/spark/ui/UIWorkloadGenerator.scala
index 0dfb1a064ccd697f5baed9d01353f25bff46e637..f96419520fb8bf19a0ebfab5680f2310ceaf77e2 100644
--- a/core/src/main/scala/spark/ui/UIWorkloadGenerator.scala
+++ b/core/src/main/scala/spark/ui/UIWorkloadGenerator.scala
@@ -22,7 +22,8 @@ import scala.util.Random
 import spark.SparkContext
 import spark.SparkContext._
 import spark.scheduler.cluster.SchedulingMode
-import spark.scheduler.cluster.SchedulingMode.SchedulingMode
+
+
 /**
  * Continuously generates jobs that expose various features of the WebUI (internal testing tool).
  *
diff --git a/core/src/main/scala/spark/ui/env/EnvironmentUI.scala b/core/src/main/scala/spark/ui/env/EnvironmentUI.scala
index dc39b91648e9b4cf35562c80c6a9140adc7dd1bb..b3e28ce317b0b4cffd10ff7c014c936b86f51184 100644
--- a/core/src/main/scala/spark/ui/env/EnvironmentUI.scala
+++ b/core/src/main/scala/spark/ui/env/EnvironmentUI.scala
@@ -19,18 +19,17 @@ package spark.ui.env
 
 import javax.servlet.http.HttpServletRequest
 
-import org.eclipse.jetty.server.Handler
-
 import scala.collection.JavaConversions._
 import scala.util.Properties
+import scala.xml.Node
+
+import org.eclipse.jetty.server.Handler
 
 import spark.ui.JettyUtils._
-import spark.ui.UIUtils.headerSparkPage
+import spark.ui.UIUtils
 import spark.ui.Page.Environment
 import spark.SparkContext
-import spark.ui.UIUtils
 
-import scala.xml.Node
 
 private[spark] class EnvironmentUI(sc: SparkContext) {
 
@@ -46,20 +45,22 @@ private[spark] class EnvironmentUI(sc: SparkContext) {
       ("Scala Home", Properties.scalaHome)
     ).sorted
     def jvmRow(kv: (String, String)) = <tr><td>{kv._1}</td><td>{kv._2}</td></tr>
-    def jvmTable = UIUtils.listingTable(Seq("Name", "Value"), jvmRow, jvmInformation)
+    def jvmTable =
+      UIUtils.listingTable(Seq("Name", "Value"), jvmRow, jvmInformation, fixedWidth = true)
 
     val properties = System.getProperties.iterator.toSeq
-    val classPathProperty = properties
-        .filter{case (k, v) => k.contains("java.class.path")}
-        .headOption
-        .getOrElse("", "")
+    val classPathProperty = properties.find { case (k, v) =>
+      k.contains("java.class.path")
+    }.getOrElse(("", ""))
     val sparkProperties = properties.filter(_._1.startsWith("spark")).sorted
     val otherProperties = properties.diff(sparkProperties :+ classPathProperty).sorted
 
     val propertyHeaders = Seq("Name", "Value")
     def propertyRow(kv: (String, String)) = <tr><td>{kv._1}</td><td>{kv._2}</td></tr>
-    val sparkPropertyTable = UIUtils.listingTable(propertyHeaders, propertyRow, sparkProperties)
-    val otherPropertyTable = UIUtils.listingTable(propertyHeaders, propertyRow, otherProperties)
+    val sparkPropertyTable =
+      UIUtils.listingTable(propertyHeaders, propertyRow, sparkProperties, fixedWidth = true)
+    val otherPropertyTable =
+      UIUtils.listingTable(propertyHeaders, propertyRow, otherProperties, fixedWidth = true)
 
     val classPathEntries = classPathProperty._2
         .split(System.getProperty("path.separator", ":"))
@@ -71,16 +72,23 @@ private[spark] class EnvironmentUI(sc: SparkContext) {
 
     val classPathHeaders = Seq("Resource", "Source")
     def classPathRow(data: (String, String)) = <tr><td>{data._1}</td><td>{data._2}</td></tr>
-    val classPathTable = UIUtils.listingTable(classPathHeaders, classPathRow, classPath)
+    val classPathTable =
+      UIUtils.listingTable(classPathHeaders, classPathRow, classPath, fixedWidth = true)
 
     val content =
       <span>
         <h4>Runtime Information</h4> {jvmTable}
-        <h4>Spark Properties</h4> {sparkPropertyTable}
-        <h4>System Properties</h4> {otherPropertyTable}
-        <h4>Classpath Entries</h4> {classPathTable}
+        <hr/>
+        <h4>{sparkProperties.size} Spark Properties</h4>
+        {sparkPropertyTable}
+        <hr/>
+        <h4>{otherProperties.size} System Properties</h4>
+        {otherPropertyTable}
+        <hr/>
+        <h4>{classPath.size} Classpath Entries</h4>
+        {classPathTable}
       </span>
 
-    headerSparkPage(content, sc, "Environment", Environment)
+    UIUtils.headerSparkPage(content, sc, "Environment", Environment)
   }
 }
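
The classPathProperty rewrite is behavior-preserving: find short-circuits at the first matching key, where filter(...).headOption built an intermediate sequence first, and getOrElse(("", "")) still supplies the empty-pair default. A self-contained sketch, with made-up sample data:

    val props = Seq(("java.class.path", "/opt/jars/*"), ("spark.master", "local"))
    val viaFilter = props.filter { case (k, _) => k.contains("java.class.path") }
      .headOption.getOrElse(("", ""))
    val viaFind = props.find { case (k, _) => k.contains("java.class.path") }
      .getOrElse(("", ""))
    assert(viaFilter == viaFind)
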
diff --git a/core/src/main/scala/spark/ui/exec/ExecutorsUI.scala b/core/src/main/scala/spark/ui/exec/ExecutorsUI.scala
index 43e0c20b19a9e0d95a302ae1225588f37918af39..28f6b3211c6a7e2a3c281aca0e7592784bca7cea 100644
--- a/core/src/main/scala/spark/ui/exec/ExecutorsUI.scala
+++ b/core/src/main/scala/spark/ui/exec/ExecutorsUI.scala
@@ -1,25 +1,20 @@
 package spark.ui.exec
 
-
 import javax.servlet.http.HttpServletRequest
 
-import org.eclipse.jetty.server.Handler
+import scala.collection.mutable.{HashMap, HashSet}
+import scala.xml.Node
 
-import scala.collection.mutable.{ArrayBuffer, HashMap, HashSet}
-import scala.util.Properties
+import org.eclipse.jetty.server.Handler
 
-import spark.{ExceptionFailure, Logging, SparkContext, Success, Utils}
+import spark.{ExceptionFailure, Logging, Utils, SparkContext}
 import spark.executor.TaskMetrics
 import spark.scheduler.cluster.TaskInfo
-import spark.scheduler._
-import spark.SparkContext
-import spark.storage.{StorageStatus, StorageUtils}
+import spark.scheduler.{SparkListenerTaskStart, SparkListenerTaskEnd, SparkListener}
 import spark.ui.JettyUtils._
 import spark.ui.Page.Executors
-import spark.ui.UIUtils.headerSparkPage
 import spark.ui.UIUtils
 
-import scala.xml.{Node, XML}
 
 private[spark] class ExecutorsUI(val sc: SparkContext) {
 
@@ -44,7 +39,8 @@ private[spark] class ExecutorsUI(val sc: SparkContext) {
 
     val execHead = Seq("Executor ID", "Address", "RDD blocks", "Memory used", "Disk used",
       "Active tasks", "Failed tasks", "Complete tasks", "Total tasks")
-    def execRow(kv: Seq[String]) =
+
+    def execRow(kv: Seq[String]) = {
       <tr>
         <td>{kv(0)}</td>
         <td>{kv(1)}</td>
@@ -60,9 +56,9 @@ private[spark] class ExecutorsUI(val sc: SparkContext) {
         <td>{kv(8)}</td>
         <td>{kv(9)}</td>
       </tr>
-    val execInfo =
-      for (b <- 0 until storageStatusList.size)
-        yield getExecInfo(b)
+    }
+
+    val execInfo = for (b <- 0 until storageStatusList.size) yield getExecInfo(b)
     val execTable = UIUtils.listingTable(execHead, execRow, execInfo)
 
     val content =
@@ -82,7 +78,7 @@ private[spark] class ExecutorsUI(val sc: SparkContext) {
         </div>
       </div>;
 
-    headerSparkPage(content, sc, "Executors", Executors)
+    UIUtils.headerSparkPage(content, sc, execInfo.size + " Executors", Executors)
   }
 
   def getExecInfo(a: Int): Seq[String] = {
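
The reshaped execInfo comprehension is equivalent to mapping getExecInfo over the storage-status indices; a sketch with a stand-in getExecInfo:

    def getExecInfo(i: Int): Seq[String] = Seq(i.toString)  // stand-in for the real lookup
    val execInfo = for (b <- 0 until 3) yield getExecInfo(b)
    assert(execInfo == (0 until 3).map(getExecInfo))
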
diff --git a/core/src/main/scala/spark/ui/jobs/IndexPage.scala b/core/src/main/scala/spark/ui/jobs/IndexPage.scala
index 9724671a03225f0d27d0323f7dc1dbb0c45171c6..cda6addd22a241a77e7a6a0b2644311c791c96fa 100644
--- a/core/src/main/scala/spark/ui/jobs/IndexPage.scala
+++ b/core/src/main/scala/spark/ui/jobs/IndexPage.scala
@@ -24,7 +24,7 @@ import scala.xml.{NodeSeq, Node}
 import spark.scheduler.cluster.SchedulingMode
 import spark.ui.Page._
 import spark.ui.UIUtils._
-import spark.Utils
+
 
 /** Page showing list of all ongoing and recently finished stages and pools*/
 private[spark] class IndexPage(parent: JobProgressUI) {
@@ -46,7 +46,8 @@ private[spark] class IndexPage(parent: JobProgressUI) {
       val completedStagesTable = new StageTable(completedStages.sortBy(_.submissionTime).reverse, parent)
       val failedStagesTable = new StageTable(failedStages.sortBy(_.submissionTime).reverse, parent)
 
-      val poolTable = new PoolTable(listener.sc.getAllPools, listener)
+      val pools = listener.sc.getAllPools
+      val poolTable = new PoolTable(pools, listener)
       val summary: NodeSeq =
        <div>
          <ul class="unstyled">
@@ -76,15 +77,15 @@ private[spark] class IndexPage(parent: JobProgressUI) {
 
       val content = summary ++
         {if (listener.sc.getSchedulingMode == SchedulingMode.FAIR) {
-           <h4>Pools</h4> ++ poolTable.toNodeSeq
+           <hr/><h4>{pools.size} Fair Scheduler Pools</h4> ++ poolTable.toNodeSeq
         } else {
           Seq()
         }} ++
-        <h4 id="active">Active Stages: {activeStages.size}</h4> ++
+        <hr/><h4 id="active">{activeStages.size} Active Stages</h4> ++
         activeStagesTable.toNodeSeq++
-        <h4 id="completed">Completed Stages: {completedStages.size}</h4> ++
+        <hr/><h4 id="completed">{completedStages.size} Completed Stages</h4> ++
         completedStagesTable.toNodeSeq++
-        <h4 id ="failed">Failed Stages: {failedStages.size}</h4> ++
+        <hr/><h4 id ="failed">{failedStages.size} Failed Stages</h4> ++
         failedStagesTable.toNodeSeq
 
       headerSparkPage(content, parent.sc, "Spark Stages", Jobs)
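
The page body is built by concatenating Seq[Node] fragments with ++, which is what lets the pools section collapse to Seq() when the scheduler is not in FAIR mode. A minimal sketch of the pattern, with illustrative names and counts:

    import scala.xml.Node

    val fair = true
    val poolSection: Seq[Node] =
      if (fair) <hr/> ++ <h4>2 Fair Scheduler Pools</h4> else Seq()
    val content = poolSection ++ <hr/> ++ <h4 id="active">3 Active Stages</h4>
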
diff --git a/core/src/main/scala/spark/ui/jobs/PoolPage.scala b/core/src/main/scala/spark/ui/jobs/PoolPage.scala
index 04ef35c8008a507e6478aa29b0d9be9f9daf5d04..e8f80ebfce7bed6626b7642b734026b5501e3c1b 100644
--- a/core/src/main/scala/spark/ui/jobs/PoolPage.scala
+++ b/core/src/main/scala/spark/ui/jobs/PoolPage.scala
@@ -23,10 +23,11 @@ private[spark] class PoolPage(parent: JobProgressUI) {
       val pool = listener.sc.getPoolForName(poolName).get
       val poolTable = new PoolTable(Seq(pool), listener)
 
-      val content = <h3>Pool </h3> ++ poolTable.toNodeSeq() ++
-                    <h3>Active Stages : {activeStages.size}</h3> ++ activeStagesTable.toNodeSeq()
+      val content = <h4>Summary</h4> ++ poolTable.toNodeSeq() ++
+                    <hr/>
+                    <h4>{activeStages.size} Active Stages</h4> ++ activeStagesTable.toNodeSeq()
 
-      headerSparkPage(content, parent.sc, "Spark Pool Details", Jobs)
+      headerSparkPage(content, parent.sc, "Fair Scheduler Pool: " + poolName, Jobs)
     }
   }
 }
diff --git a/core/src/main/scala/spark/ui/jobs/PoolTable.scala b/core/src/main/scala/spark/ui/jobs/PoolTable.scala
index 21ebcef63aa4f2af91f725ba2fefcb3508786054..621828f9c3ea44899b0fee7b6bc6ceba9cc1837c 100644
--- a/core/src/main/scala/spark/ui/jobs/PoolTable.scala
+++ b/core/src/main/scala/spark/ui/jobs/PoolTable.scala
@@ -1,8 +1,8 @@
 package spark.ui.jobs
 
-import scala.xml.Node
 import scala.collection.mutable.HashMap
 import scala.collection.mutable.HashSet
+import scala.xml.Node
 
 import spark.scheduler.Stage
 import spark.scheduler.cluster.Schedulable
@@ -21,14 +21,14 @@ private[spark] class PoolTable(pools: Seq[Schedulable], listener: JobProgressLis
   private def poolTable(makeRow: (Schedulable, HashMap[String, HashSet[Stage]]) => Seq[Node],
     rows: Seq[Schedulable]
     ): Seq[Node] = {
-    <table class="table table-bordered table-striped table-condensed sortable">
+    <table class="table table-bordered table-striped table-condensed sortable table-fixed">
       <thead>
         <th>Pool Name</th>
         <th>Minimum Share</th>
         <th>Pool Weight</th>
-        <td>Active Stages</td>
-        <td>Running Tasks</td>
-        <td>SchedulingMode</td>
+        <th>Active Stages</th>
+        <th>Running Tasks</th>
+        <th>SchedulingMode</th>
       </thead>
       <tbody>
         {rows.map(r => makeRow(r, poolToActiveStages))}
@@ -36,7 +36,8 @@ private[spark] class PoolTable(pools: Seq[Schedulable], listener: JobProgressLis
     </table>
   }
 
-  private def poolRow(p: Schedulable, poolToActiveStages: HashMap[String, HashSet[Stage]]): Seq[Node] = {
+  private def poolRow(p: Schedulable, poolToActiveStages: HashMap[String, HashSet[Stage]])
+    : Seq[Node] = {
     val activeStages = poolToActiveStages.get(p.name) match {
       case Some(stages) => stages.size
       case None => 0
diff --git a/core/src/main/scala/spark/ui/jobs/StagePage.scala b/core/src/main/scala/spark/ui/jobs/StagePage.scala
index f91a415e370c9ea3e64fa1d215686b78921127c7..f2a6f4f3032d684fa94281f068254d8e225ce581 100644
--- a/core/src/main/scala/spark/ui/jobs/StagePage.scala
+++ b/core/src/main/scala/spark/ui/jobs/StagePage.scala
@@ -46,11 +46,12 @@ private[spark] class StagePage(parent: JobProgressUI) {
             <h4>Summary Metrics</h4> No tasks have started yet
             <h4>Tasks</h4> No tasks have started yet
           </div>
-        return headerSparkPage(content, parent.sc, "Stage Details: %s".format(stageId), Jobs)
+        return headerSparkPage(content, parent.sc, "Details for Stage %s".format(stageId), Jobs)
       }
 
       val tasks = listener.stageToTaskInfos(stageId).toSeq.sortBy(_._1.launchTime)
 
+      val numCompleted = tasks.count(_._1.finished)
       val shuffleReadBytes = listener.stageToShuffleRead.getOrElse(stageId, 0L)
       val hasShuffleRead = shuffleReadBytes > 0
       val shuffleWriteBytes = listener.stageToShuffleWrite.getOrElse(stageId, 0L)
@@ -82,11 +83,11 @@ private[spark] class StagePage(parent: JobProgressUI) {
         </div>
 
       val taskHeaders: Seq[String] =
-        Seq("Task ID", "Status", "Duration", "Locality Level", "Worker", "Launch Time") ++
-          {if (hasShuffleRead) Seq("Shuffle Read")  else Nil} ++
-          {if (hasShuffleWrite) Seq("Shuffle Write") else Nil} ++
+        Seq("Task ID", "Status", "Locality Level", "Executor", "Launch Time", "Duration") ++
         Seq("GC Time") ++
-        Seq("Details")
+        {if (hasShuffleRead) Seq("Shuffle Read")  else Nil} ++
+        {if (hasShuffleWrite) Seq("Shuffle Write") else Nil} ++
+        Seq("Errors")
 
       val taskTable = listingTable(taskHeaders, taskRow(hasShuffleRead, hasShuffleWrite), tasks)
 
@@ -122,16 +123,19 @@ private[spark] class StagePage(parent: JobProgressUI) {
             if (hasShuffleRead) shuffleReadQuantiles else Nil,
             if (hasShuffleWrite) shuffleWriteQuantiles else Nil)
 
-          val quantileHeaders = Seq("Metric", "Min", "25%", "50%", "75%", "Max")
+          val quantileHeaders = Seq("Metric", "Min", "25th percentile",
+            "Median", "75th percentile", "Max")
           def quantileRow(data: Seq[String]): Seq[Node] = <tr> {data.map(d => <td>{d}</td>)} </tr>
-          Some(listingTable(quantileHeaders, quantileRow, listings))
+          Some(listingTable(quantileHeaders, quantileRow, listings, fixedWidth = true))
         }
 
       val content =
-        summary ++ <h2>Summary Metrics</h2> ++ summaryTable.getOrElse(Nil) ++
-          <h2>Tasks</h2> ++ taskTable;
+        summary ++
+        <h4>Summary Metrics for {numCompleted} Completed Tasks</h4> ++
+        <div>{summaryTable.getOrElse("No tasks have reported metrics yet.")}</div> ++
+        <hr/><h4>Tasks</h4> ++ taskTable;
 
-      headerSparkPage(content, parent.sc, "Stage Details: %s".format(stageId), Jobs)
+      headerSparkPage(content, parent.sc, "Details for Stage %d".format(stageId), Jobs)
     }
   }
 
@@ -151,12 +155,15 @@ private[spark] class StagePage(parent: JobProgressUI) {
     <tr>
       <td>{info.taskId}</td>
       <td>{info.status}</td>
-      <td sorttable_customkey={duration.toString}>
-        {formatDuration}
-      </td>
       <td>{info.taskLocality}</td>
       <td>{info.hostPort}</td>
       <td>{dateFmt.format(new Date(info.launchTime))}</td>
+      <td sorttable_customkey={duration.toString}>
+        {formatDuration}
+      </td>
+      <td sorttable_customkey={gcTime.toString}>
+        {if (gcTime > 0) parent.formatDuration(gcTime) else ""}
+      </td>
       {if (shuffleRead) {
         <td>{metrics.flatMap{m => m.shuffleReadMetrics}.map{s =>
           Utils.memoryBytesToString(s.remoteBytesRead)}.getOrElse("")}</td>
@@ -165,9 +172,6 @@ private[spark] class StagePage(parent: JobProgressUI) {
         <td>{metrics.flatMap{m => m.shuffleWriteMetrics}.map{s =>
           Utils.memoryBytesToString(s.shuffleBytesWritten)}.getOrElse("")}</td>
       }}
-      <td sorttable_customkey={gcTime.toString}>
-        {if (gcTime > 0) parent.formatDuration(gcTime) else ""}
-      </td>
       <td>{exception.map(e =>
         <span>
           {e.className} ({e.description})<br/>
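
The relocated Duration and GC Time cells keep the sorttable_customkey attribute, which the sorttable script uses as the comparison key while the cell shows the formatted string. A self-contained sketch; formatDuration here is a stand-in for the parent page's helper:

    def formatDuration(ms: Long): String = "%.1f min".format(ms / 60000.0)  // stand-in
    val gcTime = 61000L
    val cell =
      <td sorttable_customkey={gcTime.toString}>
        {if (gcTime > 0) formatDuration(gcTime) else ""}
      </td>
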
diff --git a/core/src/main/scala/spark/ui/jobs/StageTable.scala b/core/src/main/scala/spark/ui/jobs/StageTable.scala
index 19b07ccedac21dfbf9aac148a594edb524bfdc49..96bcc62480cab07e04dcd6295663c4c8931531a3 100644
--- a/core/src/main/scala/spark/ui/jobs/StageTable.scala
+++ b/core/src/main/scala/spark/ui/jobs/StageTable.scala
@@ -1,21 +1,14 @@
 package spark.ui.jobs
 
 import java.util.Date
-import java.text.SimpleDateFormat
 
-import javax.servlet.http.HttpServletRequest
-
-import scala.Some
-import scala.xml.{NodeSeq, Node}
-import scala.collection.mutable.HashMap
+import scala.xml.Node
 import scala.collection.mutable.HashSet
 
+import spark.Utils
 import spark.scheduler.cluster.{SchedulingMode, TaskInfo}
 import spark.scheduler.Stage
-import spark.ui.UIUtils._
-import spark.ui.Page._
-import spark.Utils
-import spark.storage.StorageLevel
+
 
 /** Page showing list of all ongoing and recently finished stages */
 private[spark] class StageTable(val stages: Seq[Stage], val parent: JobProgressUI) {
@@ -38,10 +31,10 @@ private[spark] class StageTable(val stages: Seq[Stage], val parent: JobProgressU
         {if (isFairScheduler) {<th>Pool Name</th>} else {}}
         <th>Description</th>
         <th>Submitted</th>
-        <td>Duration</td>
-        <td>Tasks: Succeeded/Total</td>
-        <td>Shuffle Read</td>
-        <td>Shuffle Write</td>
+        <th>Duration</th>
+        <th>Tasks: Succeeded/Total</th>
+        <th>Shuffle Read</th>
+        <th>Shuffle Write</th>
       </thead>
       <tbody>
         {rows.map(r => makeRow(r))}
diff --git a/core/src/main/scala/spark/ui/storage/RDDPage.scala b/core/src/main/scala/spark/ui/storage/RDDPage.scala
index 40f94b42a6f5c2f425e69f2674642ebfc3ea568b..5fce1ea59be26a9cab02a2a9c714b9ba63b70b06 100644
--- a/core/src/main/scala/spark/ui/storage/RDDPage.scala
+++ b/core/src/main/scala/spark/ui/storage/RDDPage.scala
@@ -21,12 +21,13 @@ import javax.servlet.http.HttpServletRequest
 
 import scala.xml.Node
 
-import spark.storage.{StorageStatus, StorageUtils}
-import spark.ui.UIUtils._
 import spark.Utils
+import spark.storage.{StorageStatus, StorageUtils}
 import spark.storage.BlockManagerMasterActor.BlockStatus
+import spark.ui.UIUtils._
 import spark.ui.Page._
 
+
 /** Page showing storage details for a given RDD */
 private[spark] class RDDPage(parent: BlockManagerUI) {
   val sc = parent.sc
@@ -44,7 +45,7 @@ private[spark] class RDDPage(parent: BlockManagerUI) {
     val workerTable = listingTable(workerHeaders, workerRow, workers)
 
     val blockHeaders = Seq("Block Name", "Storage Level", "Size in Memory", "Size on Disk",
-      "Locations")
+      "Executors")
 
     val blockStatuses = filteredStorageStatusList.flatMap(_.blocks).toArray.sortWith(_._1 < _._1)
     val blockLocations = StorageUtils.blockLocationsFromStorageStatus(filteredStorageStatusList)
@@ -83,19 +84,19 @@ private[spark] class RDDPage(parent: BlockManagerUI) {
       <hr/>
       <div class="row">
         <div class="span12">
-          <h3> Data Distribution Summary </h3>
+          <h4> Data Distribution on {workers.size} Executors </h4>
           {workerTable}
         </div>
       </div>
       <hr/>
       <div class="row">
         <div class="span12">
-          <h4> Partitions </h4>
+          <h4> {blocks.size} Partitions </h4>
           {blockTable}
         </div>
       </div>;
 
-    headerSparkPage(content, parent.sc, "RDD Info: " + rddInfo.name, Storage)
+    headerSparkPage(content, parent.sc, "RDD Storage Info for " + rddInfo.name, Storage)
   }
 
   def blockRow(row: (String, BlockStatus, Seq[String])): Seq[Node] = {