diff --git a/project/MimaBuild.scala b/project/MimaBuild.scala
index e147be7ddaa61fbbe620849472543db106d7cc09..182ca7615de67c3293a1fa334c5dba82b1392420 100644
--- a/project/MimaBuild.scala
+++ b/project/MimaBuild.scala
@@ -31,7 +31,7 @@ object MimaBuild {
     // Read package-private excludes from file
     val excludeFilePath = (base.getAbsolutePath + "/.mima-excludes")
     val excludeFile = file(excludeFilePath)
-    val packagePrivateList: Seq[String] =
+    val ignoredClasses: Seq[String] =
       if (!excludeFile.exists()) {
         Seq()
       } else {
@@ -60,35 +60,9 @@ object MimaBuild {
       excludePackage("org.apache.spark." + packageName)
     }
 
-    val packagePrivateExcludes = packagePrivateList.flatMap(excludeClass)
+    val externalExcludeFileClasses = ignoredClasses.flatMap(excludeClass)
 
-    /* Excludes specific to a given version of Spark. When comparing the given version against
-       its immediate predecessor, the excludes listed here will be applied. */
-    val versionExcludes =
-      SparkBuild.SPARK_VERSION match {
-        case v if v.startsWith("1.0") =>
-          Seq(
-            excludeSparkPackage("api.java"),
-            excludeSparkPackage("mllib"),
-            excludeSparkPackage("streaming")
-          ) ++
-          excludeSparkClass("rdd.ClassTags") ++
-          excludeSparkClass("util.XORShiftRandom") ++
-          excludeSparkClass("graphx.EdgeRDD") ++
-          excludeSparkClass("graphx.VertexRDD") ++
-          excludeSparkClass("graphx.impl.GraphImpl") ++
-          excludeSparkClass("graphx.impl.RoutingTable") ++
-          excludeSparkClass("graphx.util.collection.PrimitiveKeyOpenHashMap") ++
-          excludeSparkClass("graphx.util.collection.GraphXPrimitiveKeyOpenHashMap") ++
-          excludeSparkClass("mllib.recommendation.MFDataGenerator") ++
-          excludeSparkClass("mllib.optimization.SquaredGradient") ++
-          excludeSparkClass("mllib.regression.RidgeRegressionWithSGD") ++
-          excludeSparkClass("mllib.regression.LassoWithSGD") ++
-          excludeSparkClass("mllib.regression.LinearRegressionWithSGD")
-        case _ => Seq()
-      }
-
-    defaultExcludes ++ packagePrivateExcludes ++ versionExcludes
+    defaultExcludes ++ externalExcludeFileClasses
   }
 
   def mimaSettings(sparkHome: File) = mimaDefaultSettings ++ Seq(
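
For context, the .mima-excludes file read above holds one fully qualified class name per line, and each name is flatMapped through excludeClass into a set of MiMa problem filters. The helper's body is not part of this hunk, so the following is only a rough sketch of what such an expansion can look like with MiMa's filter API; the exact set of filters is an assumption.

import com.typesafe.tools.mima.core._

// Rough sketch (not the actual MimaBuild helper): expand one entry from
// .mima-excludes into MiMa problem filters for the class and its companion.
def excludeClass(className: String): Seq[ProblemFilter] = Seq(
  ProblemFilters.exclude[MissingClassProblem](className),
  ProblemFilters.exclude[MissingTypesProblem](className),
  // Scala objects compile to an extra "$"-suffixed class, so cover it as well.
  ProblemFilters.exclude[MissingClassProblem](className + "$"),
  ProblemFilters.exclude[MissingTypesProblem](className + "$")
)

// A .mima-excludes entry is then just a line such as:
//   org.apache.spark.util.XORShiftRandom
// and the build flatMaps every such line through excludeClass.
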
diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala
index 8ef1e91f609fb462b1d593a9c61726ce3dcd83eb..9833411c90b7bcb1afe2781eaed98ff4e023634a 100644
--- a/project/SparkBuild.scala
+++ b/project/SparkBuild.scala
@@ -32,7 +32,7 @@ import scala.collection.JavaConversions._
 // import com.jsuereth.pgp.sbtplugin.PgpKeys._
 
 object SparkBuild extends Build {
-  val SPARK_VERSION = "1.0.0-SNAPSHOT"
+  val SPARK_VERSION = "1.1.0-SNAPSHOT"
   val SPARK_VERSION_SHORT = SPARK_VERSION.replaceAll("-SNAPSHOT", "")
 
   // Hadoop version to build against. For example, "1.0.4" for Apache releases, or
@@ -321,7 +321,7 @@ object SparkBuild extends Build {
   val excludeServletApi = ExclusionRule(organization = "javax.servlet", artifact = "servlet-api")
 
   def sparkPreviousArtifact(id: String, organization: String = "org.apache.spark",
-      version: String = "0.9.0-incubating", crossVersion: String = "2.10"): Option[sbt.ModuleID] = {
+      version: String = "1.0.0", crossVersion: String = "2.10"): Option[sbt.ModuleID] = {
     val fullId = if (crossVersion.isEmpty) id else id + "_" + crossVersion
     Some(organization % fullId % version) // the artifact to compare binary compatibility with
   }
@@ -363,7 +363,8 @@ object SparkBuild extends Build {
         "org.spark-project"          % "pyrolite"         % "2.0.1",
         "net.sf.py4j"                % "py4j"             % "0.8.1"
       ),
-    libraryDependencies ++= maybeAvro
+    libraryDependencies ++= maybeAvro,
+    previousArtifact := sparkPreviousArtifact("spark-core")
   )
 
  // Create a colon-separated package list adding "org.apache.spark" in front of all of them,
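
Because sparkPreviousArtifact now defaults to the published 1.0.0 release, other modules can opt into the same MiMa check with a one-line setting. A minimal sketch, assuming a sharedSettings base like the one core uses; the module name here is illustrative and not part of this change, which only wires up spark-core.

// Illustrative only: a hypothetical subproject enabling the binary-compatibility
// check against its own 1.0.0 artifact. sharedSettings and "spark-streaming"
// are assumptions; this diff configures previousArtifact for spark-core alone.
def streamingSettings = sharedSettings ++ Seq(
  name := "spark-streaming",
  previousArtifact := sparkPreviousArtifact("spark-streaming")
)
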
diff --git a/project/project/SparkPluginBuild.scala b/project/project/SparkPluginBuild.scala
index 0142256e90fb78210162d033e38bbf2f92ed179d..e9fba641eb8a1f617f0b214e3b6080ab1aa4374c 100644
--- a/project/project/SparkPluginBuild.scala
+++ b/project/project/SparkPluginBuild.scala
@@ -26,12 +26,10 @@ import sbt.Keys._
 object SparkPluginDef extends Build {
   lazy val root = Project("plugins", file(".")) dependsOn(sparkStyle)
   lazy val sparkStyle = Project("spark-style", file("spark-style"), settings = styleSettings)
-  val sparkVersion = "1.0.0-SNAPSHOT"
   // There is actually no need to publish this artifact.
   def styleSettings = Defaults.defaultSettings ++ Seq (
     name                 :=  "spark-style",
     organization         :=  "org.apache.spark",
-    version              :=  sparkVersion,
     scalaVersion         :=  "2.10.4",
     scalacOptions        :=  Seq("-unchecked", "-deprecation"),
     libraryDependencies  ++= Dependencies.scalaStyle