diff --git a/core/src/main/scala/spark/CacheManager.scala b/core/src/main/scala/spark/CacheManager.scala
index c7b379a3fbe2d8da307693ec12f20de00832e339..f7a2b7e8027ef16c608b9a55e87db4ad9d1139c2 100644
--- a/core/src/main/scala/spark/CacheManager.scala
+++ b/core/src/main/scala/spark/CacheManager.scala
@@ -27,7 +27,7 @@ private[spark] class CacheManager(blockManager: BlockManager) extends Logging {
           if (loading.contains(key)) {
             logInfo("Loading contains " + key + ", waiting...")
             while (loading.contains(key)) {
-              try {loading.wait()} catch {case _ =>}
+              try {loading.wait()} catch {case _ : Throwable =>}
             }
             logInfo("Loading no longer contains " + key + ", so returning cached result")
             // See whether someone else has successfully loaded it. The main way this would fail
diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala
index d44bf3b5e393755197a82d87e93cea725d084c88..5f378b2398c5b22514345752d8e919aba48d96ba 100644
--- a/project/SparkBuild.scala
+++ b/project/SparkBuild.scala
@@ -37,7 +37,7 @@ object SparkBuild extends Build {
     organization := "org.spark-project",
     version := "0.7.1-SNAPSHOT",
     scalaVersion := "2.9.2",
-    scalacOptions := Seq(/*"-deprecation",*/ "-unchecked", "-optimize"), // -deprecation is too noisy due to usage of old Hadoop API, enable it once that's no longer an issue
+    scalacOptions := Seq("-unchecked", "-optimize", "-deprecation"),
     unmanagedJars in Compile <<= baseDirectory map { base => (base / "lib" ** "*.jar").classpath },
     retrieveManaged := true,
     retrievePattern := "[type]s/[artifact](-[revision])(-[classifier]).[ext]",
diff --git a/streaming/src/main/scala/spark/streaming/dstream/QueueInputDStream.scala b/streaming/src/main/scala/spark/streaming/dstream/QueueInputDStream.scala
index 6b310bc0b611c7a6260042e3ddc555a59bed28a2..da224ad6f718cbb8a29e1e6f976923f1bfacf6d9 100644
--- a/streaming/src/main/scala/spark/streaming/dstream/QueueInputDStream.scala
+++ b/streaming/src/main/scala/spark/streaming/dstream/QueueInputDStream.scala
@@ -28,7 +28,7 @@ class QueueInputDStream[T: ClassManifest](
     }
     if (buffer.size > 0) {
       if (oneAtATime) {
-        Some(buffer.first)
+        Some(buffer.head)
      } else {
        Some(new UnionRDD(ssc.sc, buffer.toSeq))
      }