diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala
index 985b03bd1ebdefc1cffc37c8f965127097cc4421..e781519be9a90fb7989c6c5dccd9b7ec9b1594fe 100644
--- a/project/SparkBuild.scala
+++ b/project/SparkBuild.scala
@@ -76,7 +76,7 @@ object SparkBuild extends Build {
   def sharedSettings = Defaults.defaultSettings ++ Seq(
     organization       := "org.apache.spark",
     version            := "0.8.0-SNAPSHOT",
-    scalaVersion       := "2.10.1",
+    scalaVersion       := "2.10.2",
     scalacOptions      := Seq("-unchecked", "-optimize", "-deprecation"),
     unmanagedJars in Compile <<= baseDirectory map { base => (base / "lib" ** "*.jar").classpath },
     retrieveManaged := true,
@@ -208,10 +208,10 @@ object SparkBuild extends Build {
         "com.codahale.metrics"     % "metrics-core"     % "3.0.0",
         "com.codahale.metrics"     % "metrics-jvm"      % "3.0.0",
         "com.codahale.metrics"     % "metrics-json"     % "3.0.0",
-        "com.twitter"              % "chill_2.9.3"      % "0.3.1",
+        "com.twitter"             %% "chill"            % "0.3.1",
         "com.twitter"              % "chill-java"       % "0.3.1",
-        "org.scala-lang"           % "jline"            % "2.10.1",
-        "org.scala-lang"           % "scala-reflect"    % "2.10.1"
+        "org.scala-lang"           % "jline"            % "2.10.2",
+        "org.scala-lang"           % "scala-reflect"    % "2.10.2"
       ) 
   )
 
@@ -222,7 +222,7 @@ object SparkBuild extends Build {
  def replSettings = sharedSettings ++ Seq(
     name := "spark-repl",
     libraryDependencies <+= scalaVersion("org.scala-lang" % "scala-compiler" % _)
-  ) ++ assemblySettings ++ extraAssemblySettings
+  )
 
   
   def examplesSettings = sharedSettings ++ Seq(
diff --git a/repl/src/main/scala/org/apache/spark/repl/SparkILoopInit.scala b/repl/src/main/scala/org/apache/spark/repl/SparkILoopInit.scala
index 0405a9dd80c413cd6c8dc92e4a5f09deea365a63..31596cc02fa29ee18a6cb1df26ff727161dc7bf3 100644
--- a/repl/src/main/scala/org/apache/spark/repl/SparkILoopInit.scala
+++ b/repl/src/main/scala/org/apache/spark/repl/SparkILoopInit.scala
@@ -119,7 +119,7 @@ trait SparkILoopInit {
       command("""
          @transient val sc = org.apache.spark.repl.Main.interp.createSparkContext();
         """)
-      command("import spark.SparkContext._")
+      command("import org.apache.spark.SparkContext._")
     }
    echo("Spark context available as sc.")
   }
diff --git a/repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala b/repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala
index b06999a42c365337a58ba9f529e19c4a67018fe3..fccb6e652c47e52badf88b05f87b042705105ca1 100644
--- a/repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala
+++ b/repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala
@@ -51,7 +51,7 @@ class ReplSuite extends FunSuite {
                                            |val accum = sc.accumulator(0)
                                            |sc.parallelize(1 to 10).foreach(x => accum += x)
                                            |accum.value
-                                         """)
+                                         """.stripMargin)
     assertDoesNotContain("error:", output)
     assertDoesNotContain("Exception", output)
     assertContains("res1: Int = 55", output)
@@ -63,7 +63,7 @@ class ReplSuite extends FunSuite {
                                            |sc.parallelize(1 to 10).map(x => v).collect.reduceLeft(_+_)
                                            |v = 10
                                            |sc.parallelize(1 to 10).map(x => v).collect.reduceLeft(_+_)
-                                         """)
+                                         """.stripMargin)
     assertDoesNotContain("error:", output)
     assertDoesNotContain("Exception", output)
     assertContains("res0: Int = 70", output)
@@ -76,7 +76,7 @@ class ReplSuite extends FunSuite {
                                            |def foo = 5
                                            |}
                                            |sc.parallelize(1 to 10).map(x => (new C).foo).collect.reduceLeft(_+_)
-                                         """)
+                                         """.stripMargin)
     assertDoesNotContain("error:", output)
     assertDoesNotContain("Exception", output)
     assertContains("res0: Int = 50", output)
@@ -86,7 +86,7 @@ class ReplSuite extends FunSuite {
     val output = runInterpreter("local", """
                                            |def double(x: Int) = x + x
                                            |sc.parallelize(1 to 10).map(x => double(x)).collect.reduceLeft(_+_)
-                                         """)
+                                         """.stripMargin)
     assertDoesNotContain("error:", output)
     assertDoesNotContain("Exception", output)
     assertContains("res0: Int = 110", output)
@@ -99,7 +99,7 @@ class ReplSuite extends FunSuite {
                                            |sc.parallelize(1 to 10).map(x => getV()).collect.reduceLeft(_+_)
                                            |v = 10
                                            |sc.parallelize(1 to 10).map(x => getV()).collect.reduceLeft(_+_)
-                                         """)
+                                         """.stripMargin)
     assertDoesNotContain("error:", output)
     assertDoesNotContain("Exception", output)
     assertContains("res0: Int = 70", output)
@@ -177,7 +177,7 @@ class ReplSuite extends FunSuite {
                                                   |sc.parallelize(0 to 4).map(x => broadcastArray.value(x)).collect
                                                   |array(0) = 5
                                                   |sc.parallelize(0 to 4).map(x => broadcastArray.value(x)).collect
-                                                """)
+                                                """.stripMargin)
       assertDoesNotContain("error:", output)
       assertDoesNotContain("Exception", output)
       assertContains("res0: Int = 70", output)
diff --git a/spark-class b/spark-class
index 0a5aae9151b8d49fd2813ee42805d326995466a2..1b2388e8c43b8b82aa04d4d95e2c257f0de413d4 100755
--- a/spark-class
+++ b/spark-class
@@ -17,7 +17,7 @@
 # limitations under the License.
 #
 
-SCALA_VERSION=2.9.3
+SCALA_VERSION=2.10
 
 # Figure out where the Scala framework is installed
 FWDIR="$(cd `dirname $0`; pwd)"
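
A minimal sketch of the sbt cross-version convention this patch adopts for chill (illustrative only, not taken from the Spark build; the artifact names and versions simply mirror the SparkBuild.scala hunk above): the %% operator appends the Scala binary version to the artifact name, so with scalaVersion := "2.10.2" the dependency resolves to chill_2.10 rather than the hand-pinned chill_2.9.3 used before.

    // Hypothetical build.sbt fragment contrasting % (verbatim artifact name) with %% (cross-versioned)
    scalaVersion := "2.10.2"

    libraryDependencies ++= Seq(
      "com.twitter"  % "chill_2.9.3" % "0.3.1",  // old style: Scala version hard-coded in the artifact name
      "com.twitter" %% "chill"       % "0.3.1"   // new style: resolves com.twitter:chill_2.10:0.3.1
    )

Likewise, the .stripMargin calls added in ReplSuite.scala strip the leading whitespace up to and including the | margin character on each line of the triple-quoted test snippets before they are fed to the interpreter.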