Skip to content
Snippets Groups Projects
Commit 79fa8fd4 authored by Prashant Sharma's avatar Prashant Sharma Committed by Patrick Wendell
Browse files

[SPARK-1971] Update MIMA to compare against Spark 1.0.0

Author: Prashant Sharma <prashant.s@imaginea.com>

Closes #910 from ScrapCodes/enable-mima/spark-core and squashes the following commits:

79f3687 [Prashant Sharma] updated Mima to check against version 1.0
1e8969c [Prashant Sharma] Spark core missed out on Mima settings. So in effect we never tested spark core for mima related errors.
parent c8bf4131
No related branches found
No related tags found
No related merge requests found
......@@ -31,7 +31,7 @@ object MimaBuild {
// Read package-private excludes from file
val excludeFilePath = (base.getAbsolutePath + "/.mima-excludes")
val excludeFile = file(excludeFilePath)
val packagePrivateList: Seq[String] =
val ignoredClasses: Seq[String] =
if (!excludeFile.exists()) {
Seq()
} else {
......@@ -60,35 +60,9 @@ object MimaBuild {
excludePackage("org.apache.spark." + packageName)
}
val packagePrivateExcludes = packagePrivateList.flatMap(excludeClass)
val externalExcludeFileClasses = ignoredClasses.flatMap(excludeClass)
/* Excludes specific to a given version of Spark. When comparing the given version against
its immediate predecessor, the excludes listed here will be applied. */
val versionExcludes =
SparkBuild.SPARK_VERSION match {
case v if v.startsWith("1.0") =>
Seq(
excludeSparkPackage("api.java"),
excludeSparkPackage("mllib"),
excludeSparkPackage("streaming")
) ++
excludeSparkClass("rdd.ClassTags") ++
excludeSparkClass("util.XORShiftRandom") ++
excludeSparkClass("graphx.EdgeRDD") ++
excludeSparkClass("graphx.VertexRDD") ++
excludeSparkClass("graphx.impl.GraphImpl") ++
excludeSparkClass("graphx.impl.RoutingTable") ++
excludeSparkClass("graphx.util.collection.PrimitiveKeyOpenHashMap") ++
excludeSparkClass("graphx.util.collection.GraphXPrimitiveKeyOpenHashMap") ++
excludeSparkClass("mllib.recommendation.MFDataGenerator") ++
excludeSparkClass("mllib.optimization.SquaredGradient") ++
excludeSparkClass("mllib.regression.RidgeRegressionWithSGD") ++
excludeSparkClass("mllib.regression.LassoWithSGD") ++
excludeSparkClass("mllib.regression.LinearRegressionWithSGD")
case _ => Seq()
}
defaultExcludes ++ packagePrivateExcludes ++ versionExcludes
defaultExcludes ++ externalExcludeFileClasses
}
def mimaSettings(sparkHome: File) = mimaDefaultSettings ++ Seq(
......
......@@ -32,7 +32,7 @@ import scala.collection.JavaConversions._
// import com.jsuereth.pgp.sbtplugin.PgpKeys._
object SparkBuild extends Build {
val SPARK_VERSION = "1.0.0-SNAPSHOT"
val SPARK_VERSION = "1.1.0-SNAPSHOT"
val SPARK_VERSION_SHORT = SPARK_VERSION.replaceAll("-SNAPSHOT", "")
// Hadoop version to build against. For example, "1.0.4" for Apache releases, or
......@@ -321,7 +321,7 @@ object SparkBuild extends Build {
val excludeServletApi = ExclusionRule(organization = "javax.servlet", artifact = "servlet-api")
def sparkPreviousArtifact(id: String, organization: String = "org.apache.spark",
version: String = "0.9.0-incubating", crossVersion: String = "2.10"): Option[sbt.ModuleID] = {
version: String = "1.0.0", crossVersion: String = "2.10"): Option[sbt.ModuleID] = {
val fullId = if (crossVersion.isEmpty) id else id + "_" + crossVersion
Some(organization % fullId % version) // the artifact to compare binary compatibility with
}
......@@ -363,7 +363,8 @@ object SparkBuild extends Build {
"org.spark-project" % "pyrolite" % "2.0.1",
"net.sf.py4j" % "py4j" % "0.8.1"
),
libraryDependencies ++= maybeAvro
libraryDependencies ++= maybeAvro,
previousArtifact := sparkPreviousArtifact("spark-core")
)
// Create a colon-separate package list adding "org.apache.spark" in front of all of them,
......
......@@ -26,12 +26,10 @@ import sbt.Keys._
object SparkPluginDef extends Build {
lazy val root = Project("plugins", file(".")) dependsOn(sparkStyle)
lazy val sparkStyle = Project("spark-style", file("spark-style"), settings = styleSettings)
val sparkVersion = "1.0.0-SNAPSHOT"
// There is actually no need to publish this artifact.
def styleSettings = Defaults.defaultSettings ++ Seq (
name := "spark-style",
organization := "org.apache.spark",
version := sparkVersion,
scalaVersion := "2.10.4",
scalacOptions := Seq("-unchecked", "-deprecation"),
libraryDependencies ++= Dependencies.scalaStyle
......
Loading… If the page fails to load, please try again.
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment