diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala
new file mode 100644
index 0000000000000000000000000000000000000000..b6191561a0d1d5db2ba69a1302b8bcd100973974
--- /dev/null
+++ b/project/SparkBuild.scala
@@ -0,0 +1,101 @@
+import sbt._
+import Keys._
+
+object SparkBuild extends Build {
+
+  lazy val root = Project("root", file("."), settings = sharedSettings) aggregate(core, repl, examples, bagel)
+
+  lazy val core = Project("core", file("core"), settings = coreSettings)
+
+  lazy val repl = Project("repl", file("repl"), settings = replSettings) dependsOn (core)
+
+  lazy val examples = Project("examples", file("examples"), settings = examplesSettings) dependsOn (core)
+
+  lazy val bagel = Project("bagel", file("bagel"), settings = bagelSettings) dependsOn (core)
+
+  def sharedSettings = Defaults.defaultSettings ++ Seq(
+    organization := "org.spark-project",
+    version := "version=0.4-SNAPSHOT",
+    scalaVersion := "2.9.0-1",
+    scalacOptions := Seq(/*"-deprecation",*/ "-unchecked"), // TODO Enable -deprecation and fix all warnings
+    unmanagedJars in Compile <<= baseDirectory map { base => (base ** "*.jar").classpath },
+    retrieveManaged := true,
+    transitiveClassifiers in Scope.GlobalScope := Seq("sources"),
+    libraryDependencies ++= Seq(
+      "org.eclipse.jetty" % "jetty-server" % "7.4.2.v20110526",
+      "org.scalatest" % "scalatest_2.9.0" % "1.4.1" % "test",
+      "org.scala-tools.testing" % "scalacheck_2.9.0" % "1.9" % "test"
+    )
+  )
+
+  val slf4jVersion = "1.6.1"
+
+  //FIXME DepJar and XmlTestReport
+  def coreSettings = sharedSettings ++ Seq(libraryDependencies ++= Seq(
+    "com.google.guava" % "guava" % "r09",
+    "log4j" % "log4j" % "1.2.16",
+    "org.slf4j" % "slf4j-api" % slf4jVersion,
+    "org.slf4j" % "slf4j-log4j12" % slf4jVersion,
+    "com.ning" % "compress-lzf" % "0.7.0",
+    "org.apache.hadoop" % "hadoop-core" % "0.20.2",
+    "asm" % "asm-all" % "3.3.1"
+  ))
+
+  //FIXME DepJar and XmlTestReport
+  def replSettings = sharedSettings ++ Seq(libraryDependencies <+= scalaVersion("org.scala-lang" % "scala-compiler" % _))
+
+  def examplesSettings = sharedSettings ++ Seq(libraryDependencies += "colt" % "colt" % "1.2.0")
+
+  //FIXME DepJar and XmlTestReport
+  def bagelSettings = sharedSettings
+}
+
+// Project mixin for an XML-based ScalaTest report. Unfortunately
+// there is currently no way to call this directly from SBT without
+// executing a subprocess.
+//trait XmlTestReport extends BasicScalaProject {
+//  def testReportDir = outputPath / "test-report"
+//
+//  lazy val testReport = task {
+//    log.info("Creating " + testReportDir + "...")
+//    if (!testReportDir.exists) {
+//      testReportDir.asFile.mkdirs()
+//    }
+//    log.info("Executing org.scalatest.tools.Runner...")
+//    val command = ("scala -classpath " + testClasspath.absString +
+//                   " org.scalatest.tools.Runner -o " +
+//                   " -u " + testReportDir.absolutePath +
+//                   " -p " + (outputPath / "test-classes").absolutePath)
+//    Process(command, path("."), "JAVA_OPTS" -> "-Xmx500m") !
+//
+//    None
+//  }.dependsOn(compile, testCompile).describedAs("Generate XML test report.")
+//}
+
+// Project mixin for creating a JAR with a project's dependencies. This is based
+// on the AssemblyBuilder plugin, but because this plugin attempts to package Scala
+// and our project too, we leave that out using our own exclude filter (depJarExclude).
+//trait DepJar extends AssemblyBuilder {
+//  def depJarExclude(base: PathFinder) = {
+//    (base / "scala" ** "*") +++ // exclude scala library
+//    (base / "spark" ** "*") +++ // exclude Spark classes
+//    ((base / "META-INF" ** "*") --- // generally ignore the hell out of META-INF
+//     (base / "META-INF" / "services" ** "*") --- // include all service providers
+//     (base / "META-INF" / "maven" ** "*")) // include all Maven POMs and such
+//  }
+//
+//  def depJarTempDir = outputPath / "dep-classes"
+//
+//  def depJarOutputPath =
+//    outputPath / (name.toLowerCase.replace(" ", "-") + "-dep-" + version.toString + ".jar")
+//
+//  lazy val depJar = {
+//    packageTask(
+//      Path.lazyPathFinder(assemblyPaths(depJarTempDir,
+//                                        assemblyClasspath,
+//                                        assemblyExtraJars,
+//                                        depJarExclude)),
+//      depJarOutputPath,
+//      packageOptions)
+//  }.dependsOn(compile).describedAs("Bundle project's dependencies into a JAR.")
+//}
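As a rough sketch of how the XmlTestReport behaviour above might eventually be expressed as an SBT 0.10 task (the key name, setting wiring, and memory option below are assumptions carried over from the old trait, not something this patch adds), something along these lines could be appended to a project's settings:

    import sbt._
    import Keys._

    // Hypothetical SBT 0.10 port of the old XmlTestReport trait; key and task names are assumptions.
    object TestReport {
      val testReport = TaskKey[Unit]("test-report", "Generate an XML test report via org.scalatest.tools.Runner.")

      def settings = Seq(
        // fullClasspath in Test also pulls in the compiled test classes, standing in
        // for the old dependsOn(compile, testCompile).
        testReport <<= (fullClasspath in Test, classDirectory in Test, target, streams) map {
          (cp, testClasses, out, s) =>
            val reportDir = out / "test-report"
            IO.createDirectory(reportDir)
            s.log.info("Executing org.scalatest.tools.Runner...")
            // Mirrors the old trait: run the ScalaTest Runner in a subprocess.
            val command = "scala -classpath " + cp.map(_.data.getAbsolutePath).mkString(":") +
                          " org.scalatest.tools.Runner -o" +
                          " -u " + reportDir.getAbsolutePath +
                          " -p " + testClasses.getAbsolutePath
            val exitCode = Process(command, None, "JAVA_OPTS" -> "-Xmx500m").!
            if (exitCode != 0) sys.error("ScalaTest runner exited with code " + exitCode)
        }
      )
    }

A project would then use, e.g., coreSettings ++ TestReport.settings and run test-report from the sbt shell. The DepJar behaviour, by contrast, would most likely be replaced by the sbt-assembly plugin once a build for SBT 0.10.1 is published (see the FIXME in project/plugins/build.sbt below).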
diff --git a/project/build.properties b/project/build.properties
index 9cef4f0d442a6c861fb87eedb4300c56ae681610..f47a3009ec840a2bf3bb76206ac36d885a539654 100644
--- a/project/build.properties
+++ b/project/build.properties
@@ -1,8 +1 @@
-#Project properties
-#Sat Nov 13 21:57:32 PST 2010
-project.organization=org.spark-project
-project.name=spark
-sbt.version=0.7.7
-project.version=0.4-SNAPSHOT
-build.scala.versions=2.9.0-1
-project.initialize=false
+sbt.version=0.10.1
diff --git a/project/build/SparkProject.scala b/project/build/SparkProject.scala
deleted file mode 100644
index 1ffcc3ef3451d7f6670fd237e651c4dc4e75867d..0000000000000000000000000000000000000000
--- a/project/build/SparkProject.scala
+++ /dev/null
@@ -1,105 +0,0 @@
-import sbt._
-import sbt.Process._
-
-import assembly._
-
-import de.element34.sbteclipsify._
-
-
-class SparkProject(info: ProjectInfo) extends ParentProject(info) with IdeaProject {
-
-  lazy val core = project("core", "Spark Core", new CoreProject(_))
-
-  lazy val repl = project("repl", "Spark REPL", new ReplProject(_), core)
-
-  lazy val examples = project("examples", "Spark Examples", new ExamplesProject(_), core)
-
-  lazy val bagel = project("bagel", "Bagel", new BagelProject(_), core)
-
-  trait BaseProject extends BasicScalaProject with ScalaPaths with BasicPackagePaths with Eclipsify with IdeaProject {
-    override def compileOptions = super.compileOptions ++ Seq(Unchecked)
-
-    lazy val jettyServer = "org.eclipse.jetty" % "jetty-server" % "7.4.2.v20110526"
-    
-    override def packageDocsJar = defaultJarPath("-javadoc.jar")
-    override def packageSrcJar= defaultJarPath("-sources.jar")
-    lazy val sourceArtifact = Artifact.sources(artifactID)
-    lazy val docsArtifact = Artifact.javadoc(artifactID)
-    override def packageToPublishActions = super.packageToPublishActions ++ Seq(packageDocs, packageSrc)
-  }
-
-  class CoreProject(info: ProjectInfo) extends DefaultProject(info) with BaseProject with DepJar with XmlTestReport {
-    val guava = "com.google.guava" % "guava" % "r09"
-    val log4j = "log4j" % "log4j" % "1.2.16"
-    val slf4jVersion = "1.6.1"
-    val slf4jApi = "org.slf4j" % "slf4j-api" % slf4jVersion
-    val slf4jLog4j = "org.slf4j" % "slf4j-log4j12" % slf4jVersion
-    val compressLzf = "com.ning" % "compress-lzf" % "0.7.0"
-    val hadoop = "org.apache.hadoop" % "hadoop-core" % "0.20.2"
-    val asm = "asm" % "asm-all" % "3.3.1"
-    val scalaTest = "org.scalatest" % "scalatest_2.9.0" % "1.4.1" % "test"
-    val scalaCheck = "org.scala-tools.testing" % "scalacheck_2.9.0" % "1.9" % "test"
-  }
-
-  class ReplProject(info: ProjectInfo) extends DefaultProject(info) with BaseProject with DepJar with XmlTestReport
-
-  class ExamplesProject(info: ProjectInfo) extends DefaultProject(info) with BaseProject {
-    val colt = "colt" % "colt" % "1.2.0"
-  }
-
-  class BagelProject(info: ProjectInfo) extends DefaultProject(info) with BaseProject with DepJar with XmlTestReport
-
-  override def managedStyle = ManagedStyle.Maven
-}
-
-
-// Project mixin for an XML-based ScalaTest report. Unfortunately
-// there is currently no way to call this directly from SBT without
-// executing a subprocess.
-trait XmlTestReport extends BasicScalaProject {
-  def testReportDir = outputPath / "test-report"
-
-  lazy val testReport = task {
-    log.info("Creating " + testReportDir + "...")
-    if (!testReportDir.exists) {
-      testReportDir.asFile.mkdirs()
-    }
-    log.info("Executing org.scalatest.tools.Runner...")
-    val command = ("scala -classpath " + testClasspath.absString +
-                   " org.scalatest.tools.Runner -o " +
-                   " -u " + testReportDir.absolutePath +
-                   " -p " + (outputPath / "test-classes").absolutePath)
-    Process(command, path("."), "JAVA_OPTS" -> "-Xmx500m") !
-
-    None
-  }.dependsOn(compile, testCompile).describedAs("Generate XML test report.")
-}
-
-
-// Project mixin for creating a JAR with  a project's dependencies. This is based
-// on the AssemblyBuilder plugin, but because this plugin attempts to package Scala
-// and our project too, we leave that out using our own exclude filter (depJarExclude).
-trait DepJar extends AssemblyBuilder {
-  def depJarExclude(base: PathFinder) = {
-    (base / "scala" ** "*") +++ // exclude scala library
-    (base / "spark" ** "*") +++ // exclude Spark classes
-    ((base / "META-INF" ** "*") --- // generally ignore the hell out of META-INF
-     (base / "META-INF" / "services" ** "*") --- // include all service providers
-     (base / "META-INF" / "maven" ** "*")) // include all Maven POMs and such
-  }
-
-  def depJarTempDir = outputPath / "dep-classes"
-
-  def depJarOutputPath =
-    outputPath / (name.toLowerCase.replace(" ", "-") + "-dep-" + version.toString + ".jar")
-
-  lazy val depJar = {
-    packageTask(
-      Path.lazyPathFinder(assemblyPaths(depJarTempDir,
-                                        assemblyClasspath,
-                                        assemblyExtraJars,
-                                        depJarExclude)),
-      depJarOutputPath,
-      packageOptions)
-  }.dependsOn(compile).describedAs("Bundle project's dependencies into a JAR.")
-}
diff --git a/project/plugins/SparkProjectPlugins.scala b/project/plugins/SparkProjectPlugins.scala
deleted file mode 100644
index 565f16082926ca35c7cce3968bacc82d6eb524b4..0000000000000000000000000000000000000000
--- a/project/plugins/SparkProjectPlugins.scala
+++ /dev/null
@@ -1,11 +0,0 @@
-import sbt._
-
-class SparkProjectPlugins(info: ProjectInfo) extends PluginDefinition(info) {
-  val eclipse = "de.element34" % "sbt-eclipsify" % "0.7.0"
-
-  val sbtIdeaRepo = "sbt-idea-repo" at "http://mpeltonen.github.com/maven/"
-  val sbtIdea = "com.github.mpeltonen" % "sbt-idea-plugin" % "0.4.0"
-
-  val codaRepo = "Coda Hale's Repository" at "http://repo.codahale.com/"
-  val assemblySBT = "com.codahale" % "assembly-sbt" % "0.1.1"
-}
diff --git a/project/plugins/build.sbt b/project/plugins/build.sbt
new file mode 100644
index 0000000000000000000000000000000000000000..c13449ae0315d53209bb2a360f6ed8d0f2efe9c2
--- /dev/null
+++ b/project/plugins/build.sbt
@@ -0,0 +1,16 @@
+resolvers += {
+  val typesafeRepoUrl = new java.net.URL("http://repo.typesafe.com/typesafe/releases")
+  val pattern = Patterns(false, "[organisation]/[module]/[sbtversion]/[revision]/[type]s/[module](-[classifier])-[revision].[ext]")
+  Resolver.url("Typesafe Repository", typesafeRepoUrl)(pattern)
+}
+
+resolvers += "sbt-idea-repo" at "http://mpeltonen.github.com/maven/"
+
+libraryDependencies ++= Seq(
+  "com.github.mpeltonen" %% "sbt-idea" % "0.10.0-SNAPSHOT"
+  // FIXME Add "com.eed3si9n" %% "sbt-assembly" % "0.2" once a version for SBT 0.10.1 is available
+)
+
+libraryDependencies <<= (libraryDependencies, sbtVersion) { (deps, version) =>
+  deps :+ ("com.typesafe.sbteclipse" %% "sbteclipse" % "1.2" extra("sbtversion" -> version))
+}
\ No newline at end of file
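For reference, a usage sketch rather than part of this patch: with these plugin dependencies resolved, the IDE project files would be generated from the sbt shell via the plugins' own commands (assumed here to be their documented ones), e.g. gen-idea for sbt-idea and eclipse for sbteclipse. The sbteclipse entry is appended through the (libraryDependencies, sbtVersion) transformation rather than a plain += because its artifacts live under an sbt-version-specific path in the Typesafe repository, matching the [sbtversion] segment in the resolver pattern above.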
diff --git a/run b/run
index 2ea4cac9a7864dd0a19d29217c2ec77621c2aa3f..78ed51110ffbe1c91c2b19f59ace5d3bce0d4706 100755
--- a/run
+++ b/run
@@ -1,6 +1,6 @@
 #!/bin/bash
 
-SCALA_VERSION=2.9.0-1
+SCALA_VERSION=2.9.0.1
 
 # Figure out where the Scala framework is installed
 FWDIR="$(cd `dirname $0`; pwd)"
@@ -41,23 +41,23 @@ EXAMPLES_DIR=$FWDIR/examples
 BAGEL_DIR=$FWDIR/bagel
 
 # Build up classpath
-CLASSPATH="$SPARK_CLASSPATH:$CORE_DIR/target/scala_$SCALA_VERSION/classes:$MESOS_CLASSPATH"
+CLASSPATH="$SPARK_CLASSPATH:$CORE_DIR/target/scala-$SCALA_VERSION/classes:$MESOS_CLASSPATH"
 CLASSPATH+=:$FWDIR/conf
-CLASSPATH+=:$REPL_DIR/target/scala_$SCALA_VERSION/classes
-CLASSPATH+=:$EXAMPLES_DIR/target/scala_$SCALA_VERSION/classes
+CLASSPATH+=:$REPL_DIR/target/scala-$SCALA_VERSION/classes
+CLASSPATH+=:$EXAMPLES_DIR/target/scala-$SCALA_VERSION/classes
 for jar in `find $CORE_DIR/lib -name '*jar'`; do
   CLASSPATH+=:$jar
 done
-for jar in $CORE_DIR/lib_managed/scala_$SCALA_VERSION/compile/*.jar; do
+for jar in `find $FWDIR/lib_managed/jars -name '*jar'`; do
   CLASSPATH+=:$jar
 done
-for jar in `find $REPL_DIR/lib -name '*jar'`; do
+for jar in `find $FWDIR/lib_managed/bundles -name '*jar'`; do
   CLASSPATH+=:$jar
 done
-for jar in $REPL_DIR/lib_managed/scala_$SCALA_VERSION/compile/*.jar; do
+for jar in `find $REPL_DIR/lib -name '*jar'`; do
   CLASSPATH+=:$jar
 done
-CLASSPATH+=:$BAGEL_DIR/target/scala_$SCALA_VERSION/classes
+CLASSPATH+=:$BAGEL_DIR/target/scala-$SCALA_VERSION/classes
 export CLASSPATH # Needed for spark-shell
 
 if [ -n "$SCALA_HOME" ]; then
diff --git a/sbt/sbt-launch-0.10.1.jar b/sbt/sbt-launch-0.10.1.jar
new file mode 100644
index 0000000000000000000000000000000000000000..673495f78af54022f173c03e06daab70e9f1ee56
Binary files /dev/null and b/sbt/sbt-launch-0.10.1.jar differ
diff --git a/sbt/sbt-launch-0.7.5.jar b/sbt/sbt-launch-0.7.5.jar
deleted file mode 100644
index 052c1e1e56547440edb39572b106eeed3b4678c0..0000000000000000000000000000000000000000
Binary files a/sbt/sbt-launch-0.7.5.jar and /dev/null differ