Skip to content
Snippets Groups Projects
Commit 164ef4c7 authored by Ismael Juma's avatar Ismael Juma
Browse files

Use explicit asInstanceOf instead of misleading unchecked pattern matching.

Also enable -unchecked warnings in SBT build file.
parent cfbe2da1
No related branches found
No related tags found
No related merge requests found
@@ -96,7 +96,7 @@ class CacheTracker(isMaster: Boolean, theCache: Cache) extends Logging {
   // Get a snapshot of the currently known locations
   def getLocationsSnapshot(): HashMap[Int, Array[List[String]]] = {
     (trackerActor !? GetCacheLocations) match {
-      case h: HashMap[Int, Array[List[String]]] => h
+      case h: HashMap[_, _] => h.asInstanceOf[HashMap[Int, Array[List[String]]]]
       case _ => throw new SparkException(
        "Internal error: CacheTrackerActor did not reply with a HashMap")
     }
...
@@ -75,8 +75,8 @@ extends RDD[(K, Seq[Seq[_]])](rdds.head.context) with Logging {
     for ((dep, depNum) <- split.deps.zipWithIndex) dep match {
       case NarrowCoGroupSplitDep(rdd, itsSplit) => {
         // Read them from the parent
-        for ((k: K, v) <- rdd.iterator(itsSplit)) {
-          getSeq(k)(depNum) += v
+        for ((k, v) <- rdd.iterator(itsSplit)) {
+          getSeq(k.asInstanceOf[K])(depNum) += v
         }
       }
       case ShuffleCoGroupSplitDep(shuffleId) => {
...
@@ -6,27 +6,24 @@ import assembly._
 import de.element34.sbteclipsify._

-class SparkProject(info: ProjectInfo)
-extends ParentProject(info) with IdeaProject
-{
+class SparkProject(info: ProjectInfo) extends ParentProject(info) with IdeaProject {
   lazy val core = project("core", "Spark Core", new CoreProject(_))
-  lazy val examples =
-    project("examples", "Spark Examples", new ExamplesProject(_), core)
+  lazy val examples = project("examples", "Spark Examples", new ExamplesProject(_), core)
   lazy val bagel = project("bagel", "Bagel", new BagelProject(_), core)

-  class CoreProject(info: ProjectInfo)
-  extends DefaultProject(info) with Eclipsify with IdeaProject with DepJar with XmlTestReport
-  {}
+  trait BaseProject extends BasicScalaProject with ScalaPaths with Eclipsify with IdeaProject {
+    override def compileOptions = super.compileOptions ++ Seq(Unchecked)
+  }
+
+  class CoreProject(info: ProjectInfo) extends DefaultProject(info) with BaseProject with DepJar with XmlTestReport

-  class ExamplesProject(info: ProjectInfo)
-  extends DefaultProject(info) with Eclipsify with IdeaProject
-  {}
+  class ExamplesProject(info: ProjectInfo) extends DefaultProject(info) with BaseProject

-  class BagelProject(info: ProjectInfo)
-  extends DefaultProject(info) with DepJar with XmlTestReport
-  {}
+  class BagelProject(info: ProjectInfo) extends DefaultProject(info) with BaseProject with DepJar with XmlTestReport
 }
...
Loading…
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment