diff --git a/.gitignore b/.gitignore
index b54a3058de6590ef7f857373313e910cc85816bf..4f177c82ae5e0f540490aba8e7a3df0bd85e31cd 100644
--- a/.gitignore
+++ b/.gitignore
@@ -7,7 +7,7 @@
 sbt/*.jar
 .settings
 .cache
-.generated-mima-excludes
+.generated-mima*
 /build/
 work/
 out/
diff --git a/dev/mima b/dev/mima
index ab6bd4469b0e8743309771854bddea2a63423be0..b68800d6d01730d00455c474a0af0eaf4bd3d7f0 100755
--- a/dev/mima
+++ b/dev/mima
@@ -23,6 +23,11 @@ set -o pipefail
 FWDIR="$(cd `dirname $0`/..; pwd)"
 cd $FWDIR
 
+echo -e "q\n" | sbt/sbt oldDeps/update
+
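+# The oldDeps/update above resolves the previous Spark release's jars into lib_managed/;
+# put them on the classpath so GenerateMIMAIgnore below can also inspect the previous API.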
+export SPARK_CLASSPATH=`find lib_managed \( -name '*spark*jar' -a -type f \) -printf "%p:" `
 ./bin/spark-class org.apache.spark.tools.GenerateMIMAIgnore
 echo -e "q\n" | sbt/sbt mima-report-binary-issues | grep -v -e "info.*Resolving"
 ret_val=$?
@@ -31,5 +36,5 @@ if [ $ret_val != 0 ]; then
   echo "NOTE: Exceptions to binary compatibility can be added in project/MimaExcludes.scala"
 fi
 
-rm -f .generated-mima-excludes
+rm -f .generated-mima*
 exit $ret_val
diff --git a/project/MimaBuild.scala b/project/MimaBuild.scala
index 1477809943573129de4fb30447c83463da082a84..bb2d73741c3bfe8d6076fa325e97be593d0292d8 100644
--- a/project/MimaBuild.scala
+++ b/project/MimaBuild.scala
@@ -15,16 +15,26 @@
  * limitations under the License.
  */
 
-import com.typesafe.tools.mima.core.{MissingTypesProblem, MissingClassProblem, ProblemFilters}
+import com.typesafe.tools.mima.core._
 import com.typesafe.tools.mima.core.ProblemFilters._
 import com.typesafe.tools.mima.plugin.MimaKeys.{binaryIssueFilters, previousArtifact}
 import com.typesafe.tools.mima.plugin.MimaPlugin.mimaDefaultSettings
 import sbt._
 
 object MimaBuild {
+
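+  // Exclude a single member (method or field) by its fully qualified name. Covers missing
+  // members and incompatible method signature, result type, and field type changes.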
+  def excludeMember(fullName: String) = Seq(
+      ProblemFilters.exclude[MissingMethodProblem](fullName),
+      ProblemFilters.exclude[MissingFieldProblem](fullName),
+      ProblemFilters.exclude[IncompatibleResultTypeProblem](fullName),
+      ProblemFilters.exclude[IncompatibleMethTypeProblem](fullName),
+      ProblemFilters.exclude[IncompatibleFieldTypeProblem](fullName)
+    )
+
   // Exclude a single class and its corresponding object
-  def excludeClass(className: String) = {
-    Seq(
+  def excludeClass(className: String) = Seq(
       excludePackage(className),
       ProblemFilters.exclude[MissingClassProblem](className),
       ProblemFilters.exclude[MissingTypesProblem](className),
@@ -32,7 +42,7 @@ object MimaBuild {
       ProblemFilters.exclude[MissingClassProblem](className + "$"),
       ProblemFilters.exclude[MissingTypesProblem](className + "$")
     )
-  }
+
   // Exclude a Spark class, that is in the package org.apache.spark
   def excludeSparkClass(className: String) = {
     excludeClass("org.apache.spark." + className)
@@ -49,20 +59,25 @@ object MimaBuild {
     val defaultExcludes = Seq()
 
     // Read package-private excludes from file
-    val excludeFilePath = (base.getAbsolutePath + "/.generated-mima-excludes")
-    val excludeFile = file(excludeFilePath)
+    val classExcludeFilePath = file(base.getAbsolutePath + "/.generated-mima-class-excludes")
+    val memberExcludeFilePath = file(base.getAbsolutePath + "/.generated-mima-member-excludes")
+
     val ignoredClasses: Seq[String] =
-      if (!excludeFile.exists()) {
+      if (!classExcludeFilePath.exists()) {
         Seq()
       } else {
-        IO.read(excludeFile).split("\n")
+        IO.read(classExcludeFilePath).split("\n")
       }
 
+    val ignoredMembers: Seq[String] =
+      if (!memberExcludeFilePath.exists()) {
+        Seq()
+      } else {
+        IO.read(memberExcludeFilePath).split("\n")
+      }
 
-
-    val externalExcludeFileClasses = ignoredClasses.flatMap(excludeClass)
-
-    defaultExcludes ++ externalExcludeFileClasses ++ MimaExcludes.excludes
+    defaultExcludes ++ ignoredClasses.flatMap(excludeClass) ++
+    ignoredMembers.flatMap(excludeMember) ++ MimaExcludes.excludes
   }
 
   def mimaSettings(sparkHome: File) = mimaDefaultSettings ++ Seq(
diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala
index 069913dbaac568015bb10f1352cf097746134b37..ecd9d7068068ddd13d813c94910ba943ba0a700b 100644
--- a/project/SparkBuild.scala
+++ b/project/SparkBuild.scala
@@ -59,6 +59,10 @@ object SparkBuild extends Build {
 
   lazy val core = Project("core", file("core"), settings = coreSettings)
 
+  /** This project exists only to pull in previous artifacts of Spark, which are used when
+    generating the MiMa excludes. For more information see: SPARK-2071. */
+  lazy val oldDeps = Project("oldDeps", file("dev"), settings = oldDepsSettings)
+
   def replDependencies = Seq[ProjectReference](core, graphx, bagel, mllib, sql) ++ maybeHiveRef
 
   lazy val repl = Project("repl", file("repl"), settings = replSettings)
@@ -598,6 +602,19 @@ object SparkBuild extends Build {
     }
   )
 
+  def oldDepsSettings() = Defaults.defaultSettings ++ Seq(
+    name := "old-deps",
+    scalaVersion := "2.10.4",
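+    // retrieveManaged copies the resolved jars under lib_managed/ (e.g. lib_managed/jars/),
+    // which is where dev/mima expects to find them when building the classpath.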
+    retrieveManaged := true,
+    retrievePattern := "[type]s/[artifact](-[revision])(-[classifier]).[ext]",
+    libraryDependencies := Seq("spark-streaming-mqtt", "spark-streaming-zeromq",
+      "spark-streaming-flume", "spark-streaming-kafka", "spark-streaming-twitter",
+      "spark-streaming", "spark-mllib", "spark-bagel", "spark-graphx",
+      "spark-core").map(sparkPreviousArtifact(_).get.intransitive())
+  )
+
   def twitterSettings() = sharedSettings ++ Seq(
     name := "spark-streaming-twitter",
     previousArtifact := sparkPreviousArtifact("spark-streaming-twitter"),
diff --git a/tools/src/main/scala/org/apache/spark/tools/GenerateMIMAIgnore.scala b/tools/src/main/scala/org/apache/spark/tools/GenerateMIMAIgnore.scala
index 6a261e19a35cd9b1e3e975f98292d8b1cdb006fa..03a73f92b275e6ee93c074ff6574ee8ce8871b22 100644
--- a/tools/src/main/scala/org/apache/spark/tools/GenerateMIMAIgnore.scala
+++ b/tools/src/main/scala/org/apache/spark/tools/GenerateMIMAIgnore.scala
@@ -40,74 +40,78 @@ object GenerateMIMAIgnore {
   private val classLoader = Thread.currentThread().getContextClassLoader
   private val mirror = runtimeMirror(classLoader)
 
-  private def classesPrivateWithin(packageName: String): Set[String] = {
+  private def isDeveloperApi(sym: unv.Symbol) =
+    sym.annotations.exists(_.tpe =:= unv.typeOf[org.apache.spark.annotation.DeveloperApi])
+
+  private def isExperimental(sym: unv.Symbol) =
+    sym.annotations.exists(_.tpe =:= unv.typeOf[org.apache.spark.annotation.Experimental])
+
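+  /* A symbol's privateWithin is NoSymbol (whose fullName is "<none>") unless the symbol is
+     qualified private or protected, e.g. private[spark]; we treat those as package private. */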
+  private def isPackagePrivate(sym: unv.Symbol) =
+    !sym.privateWithin.fullName.startsWith("<none>")
+
+  private def isPackagePrivateModule(moduleSymbol: unv.ModuleSymbol) =
+    !moduleSymbol.privateWithin.fullName.startsWith("<none>")
+
+  /**
+   * For every class, checks via Scala reflection whether the class itself or any of its
+   * members carry the DeveloperApi or Experimental annotation, or are package private.
+   * Returns a tuple of the fully qualified names of such classes and such members.
+   */
+  private def privateWithin(packageName: String): (Set[String], Set[String]) = {
 
     val classes = getClasses(packageName)
     val ignoredClasses = mutable.HashSet[String]()
+    val ignoredMembers = mutable.HashSet[String]()
 
-    def isPackagePrivate(className: String) = {
+    for (className <- classes) {
       try {
-        /* Couldn't figure out if it's possible to determine a-priori whether a given symbol
-           is a module or class. */
-
-        val privateAsClass = mirror
-          .classSymbol(Class.forName(className, false, classLoader))
-          .privateWithin
-          .fullName
-          .startsWith(packageName)
-
-        val privateAsModule = mirror
-          .staticModule(className)
-          .privateWithin
-          .fullName
-          .startsWith(packageName)
-
-        privateAsClass || privateAsModule
-      } catch {
-        case _: Throwable => {
-          println("Error determining visibility: " + className)
-          false
+        val classSymbol = mirror.classSymbol(Class.forName(className, false, classLoader))
+        val moduleSymbol = mirror.staticModule(className) // TODO: see if it is necessary.
+        val directlyPrivateSpark =
+          isPackagePrivate(classSymbol) || isPackagePrivateModule(moduleSymbol)
+        val developerApi = isDeveloperApi(classSymbol)
+        val experimental = isExperimental(classSymbol)
+
+        /* Inner classes defined within a private[spark] class or object are effectively
+         invisible, so we account for them as package private. */
+        lazy val indirectlyPrivateSpark = {
+          val maybeOuter = className.takeWhile(_ != '$')
+          if (maybeOuter != className) {
+            isPackagePrivate(mirror.classSymbol(Class.forName(maybeOuter, false, classLoader))) ||
+              isPackagePrivateModule(mirror.staticModule(maybeOuter))
+          } else {
+            false
+          }
+        }
+        if (directlyPrivateSpark || indirectlyPrivateSpark || developerApi || experimental) {
+          ignoredClasses += className
+        } else {
+          // check if this class has package-private/annotated members.
+          ignoredMembers ++= getAnnotatedOrPackagePrivateMembers(classSymbol)
         }
-      }
-    }
 
-    def isDeveloperApi(className: String) = {
-      try {
-        val clazz = mirror.classSymbol(Class.forName(className, false, classLoader))
-        clazz.annotations.exists(_.tpe =:= unv.typeOf[org.apache.spark.annotation.DeveloperApi])
       } catch {
-        case _: Throwable => {
-          println("Error determining Annotations: " + className)
-          false
-        }
+        case _: Throwable => println("Error inspecting class: " + className)
       }
     }
+    (ignoredClasses.flatMap(c => Seq(c, c.replace("$", "#"))).toSet, ignoredMembers.toSet)
+  }
 
-    for (className <- classes) {
-      val directlyPrivateSpark = isPackagePrivate(className)
-      val developerApi = isDeveloperApi(className)
-
-      /* Inner classes defined within a private[spark] class or object are effectively
-         invisible, so we account for them as package private. */
-      val indirectlyPrivateSpark = {
-        val maybeOuter = className.toString.takeWhile(_ != '$')
-        if (maybeOuter != className) {
-          isPackagePrivate(maybeOuter)
-        } else {
-          false
-        }
-      }
-      if (directlyPrivateSpark || indirectlyPrivateSpark || developerApi) {
-        ignoredClasses += className
-      }
-    }
-    ignoredClasses.flatMap(c => Seq(c, c.replace("$", "#"))).toSet
+  private def getAnnotatedOrPackagePrivateMembers(classSymbol: unv.ClassSymbol) = {
+    classSymbol.typeSignature.members
+      .filter(x => isPackagePrivate(x) || isDeveloperApi(x) || isExperimental(x)).map(_.fullName)
   }
 
   def main(args: Array[String]) {
-    scala.tools.nsc.io.File(".generated-mima-excludes").
-      writeAll(classesPrivateWithin("org.apache.spark").mkString("\n"))
-    println("Created : .generated-mima-excludes in current directory.")
+    val (privateClasses, privateMembers) = privateWithin("org.apache.spark")
+    scala.tools.nsc.io.File(".generated-mima-class-excludes").
+      writeAll(privateClasses.mkString("\n"))
+    println("Created: .generated-mima-class-excludes in current directory.")
+    scala.tools.nsc.io.File(".generated-mima-member-excludes").
+      writeAll(privateMembers.mkString("\n"))
+    println("Created: .generated-mima-member-excludes in current directory.")
   }
 
 
@@ -140,10 +144,17 @@ object GenerateMIMAIgnore {
    * Get all classes in a package from a jar file.
    */
   private def getClassesFromJar(jarPath: String, packageName: String) = {
     val jar = new JarFile(new File(jarPath))
     val enums = jar.entries().map(_.getName).filter(_.startsWith(packageName))
-    val classes = for (entry <- enums if entry.endsWith(".class"))
-      yield Class.forName(entry.replace('/', '.').stripSuffix(".class"), false, classLoader)
+    val classes = mutable.HashSet[Class[_]]()
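+    // Skip classes that fail to load, e.g. because their dependencies are not on the classpath.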
+    for (entry <- enums if entry.endsWith(".class")) {
+      try {
+        classes += Class.forName(entry.replace('/', '.').stripSuffix(".class"), false, classLoader)
+      } catch {
+        case _: Throwable => println("Unable to load: " + entry)
+      }
+    }
     classes
   }
 }