From 94c5139607ec876782e594012a108ebf55fa97db Mon Sep 17 00:00:00 2001
From: Huajian Mao <huajianmao@gmail.com>
Date: Thu, 15 May 2014 18:20:16 -0700
Subject: [PATCH] Fix typos in Spark

Author: Huajian Mao <huajianmao@gmail.com>

Closes #798 from huajianmao/patch-1 and squashes the following commits:

208a454 [Huajian Mao] A typo in Task
1b515af [Huajian Mao] A typo in the message
---
 core/src/main/scala/org/apache/spark/scheduler/Task.scala       | 2 +-
 .../scala/org/apache/spark/scheduler/TaskSchedulerImpl.scala    | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/core/src/main/scala/org/apache/spark/scheduler/Task.scala b/core/src/main/scala/org/apache/spark/scheduler/Task.scala
index 2ca3479c80..5871edeb85 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/Task.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/Task.scala
@@ -33,7 +33,7 @@ import org.apache.spark.util.ByteBufferInputStream
  * - [[org.apache.spark.scheduler.ResultTask]]
  *
  * A Spark job consists of one or more stages. The very last stage in a job consists of multiple
- * ResultTask's, while earlier stages consist of ShuffleMapTasks. A ResultTask executes the task
+ * ResultTasks, while earlier stages consist of ShuffleMapTasks. A ResultTask executes the task
  * and sends the task output back to the driver application. A ShuffleMapTask executes the task
  * and divides the task output to multiple buckets (based on the task's partitioner).
  *
diff --git a/core/src/main/scala/org/apache/spark/scheduler/TaskSchedulerImpl.scala b/core/src/main/scala/org/apache/spark/scheduler/TaskSchedulerImpl.scala
index 649eed213e..17292b4c15 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/TaskSchedulerImpl.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/TaskSchedulerImpl.scala
@@ -105,7 +105,7 @@ private[spark] class TaskSchedulerImpl(
     SchedulingMode.withName(schedulingModeConf.toUpperCase)
   } catch {
     case e: java.util.NoSuchElementException =>
-      throw new SparkException(s"Urecognized spark.scheduler.mode: $schedulingModeConf")
+      throw new SparkException(s"Unrecognized spark.scheduler.mode: $schedulingModeConf")
   }
 
   // This is a var so that we can reset it for testing purposes.
-- 
GitLab