author:    Prashant Sharma <prashant.s@imaginea.com>  2014-05-30 01:13:51 -0700
committer: Patrick Wendell <pwendell@gmail.com>       2014-05-30 01:13:51 -0700
commit:    79fa8fd4b198904ca9ed78a821fd924bd1a82902 (patch)
tree:      0390533c6de2a6d6d95660fecbae6bacb618a858 /project
parent:    c8bf4131bc2a2e147e977159fc90e94b85738830 (diff)
[SPARK-1971] Update MIMA to compare against Spark 1.0.0
Author: Prashant Sharma <prashant.s@imaginea.com>

Closes #910 from ScrapCodes/enable-mima/spark-core and squashes the following commits:

79f3687 [Prashant Sharma] updated Mima to check against version 1.0
1e8969c [Prashant Sharma] Spark core missed out on Mima settings. So in effect we never tested spark core for mima related errors.
Diffstat (limited to 'project')
-rw-r--r--  project/MimaBuild.scala                  32
-rw-r--r--  project/SparkBuild.scala                  7
-rw-r--r--  project/project/SparkPluginBuild.scala    2
3 files changed, 7 insertions, 34 deletions
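
For context, a minimal sketch of how an sbt project of this era wires in MiMa binary-compatibility checking against a previously released artifact. The mimaDefaultSettings and previousArtifact names come from the sbt-mima-plugin; the module coordinates below are illustrative, not Spark's exact build code.

import sbt._
import com.typesafe.tools.mima.plugin.MimaPlugin.mimaDefaultSettings
import com.typesafe.tools.mima.plugin.MimaKeys.previousArtifact

// Point MiMa at the last released artifact so its report task can flag
// binary-incompatible changes in the current snapshot build.
lazy val exampleMimaSettings = mimaDefaultSettings ++ Seq(
  previousArtifact := Some("org.apache.spark" % "spark-core_2.10" % "1.0.0")
)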
diff --git a/project/MimaBuild.scala b/project/MimaBuild.scala
index e147be7dda..182ca7615d 100644
--- a/project/MimaBuild.scala
+++ b/project/MimaBuild.scala
@@ -31,7 +31,7 @@ object MimaBuild {
// Read package-private excludes from file
val excludeFilePath = (base.getAbsolutePath + "/.mima-excludes")
val excludeFile = file(excludeFilePath)
- val packagePrivateList: Seq[String] =
+ val ignoredClasses: Seq[String] =
if (!excludeFile.exists()) {
Seq()
} else {
@@ -60,35 +60,9 @@ object MimaBuild {
excludePackage("org.apache.spark." + packageName)
}
- val packagePrivateExcludes = packagePrivateList.flatMap(excludeClass)
+ val externalExcludeFileClasses = ignoredClasses.flatMap(excludeClass)
- /* Excludes specific to a given version of Spark. When comparing the given version against
- its immediate predecessor, the excludes listed here will be applied. */
- val versionExcludes =
- SparkBuild.SPARK_VERSION match {
- case v if v.startsWith("1.0") =>
- Seq(
- excludeSparkPackage("api.java"),
- excludeSparkPackage("mllib"),
- excludeSparkPackage("streaming")
- ) ++
- excludeSparkClass("rdd.ClassTags") ++
- excludeSparkClass("util.XORShiftRandom") ++
- excludeSparkClass("graphx.EdgeRDD") ++
- excludeSparkClass("graphx.VertexRDD") ++
- excludeSparkClass("graphx.impl.GraphImpl") ++
- excludeSparkClass("graphx.impl.RoutingTable") ++
- excludeSparkClass("graphx.util.collection.PrimitiveKeyOpenHashMap") ++
- excludeSparkClass("graphx.util.collection.GraphXPrimitiveKeyOpenHashMap") ++
- excludeSparkClass("mllib.recommendation.MFDataGenerator") ++
- excludeSparkClass("mllib.optimization.SquaredGradient") ++
- excludeSparkClass("mllib.regression.RidgeRegressionWithSGD") ++
- excludeSparkClass("mllib.regression.LassoWithSGD") ++
- excludeSparkClass("mllib.regression.LinearRegressionWithSGD")
- case _ => Seq()
- }
-
- defaultExcludes ++ packagePrivateExcludes ++ versionExcludes
+ defaultExcludes ++ externalExcludeFileClasses
}
def mimaSettings(sparkHome: File) = mimaDefaultSettings ++ Seq(
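
A minimal sketch of how the external excludes file referenced above could be read into ignoredClasses. The one-fully-qualified-class-name-per-line format (with '#' comments) is an assumption for illustration, not something this patch specifies.

import java.io.File
import scala.io.Source

def readIgnoredClasses(base: File): Seq[String] = {
  val excludeFile = new File(base, ".mima-excludes")
  if (!excludeFile.exists()) {
    Seq.empty
  } else {
    // Assumed format: one fully qualified class name per line;
    // blank lines and '#' comments are skipped.
    Source.fromFile(excludeFile).getLines()
      .map(_.trim)
      .filter(line => line.nonEmpty && !line.startsWith("#"))
      .toSeq
  }
}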
diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala
index 8ef1e91f60..9833411c90 100644
--- a/project/SparkBuild.scala
+++ b/project/SparkBuild.scala
@@ -32,7 +32,7 @@ import scala.collection.JavaConversions._
// import com.jsuereth.pgp.sbtplugin.PgpKeys._
object SparkBuild extends Build {
- val SPARK_VERSION = "1.0.0-SNAPSHOT"
+ val SPARK_VERSION = "1.1.0-SNAPSHOT"
val SPARK_VERSION_SHORT = SPARK_VERSION.replaceAll("-SNAPSHOT", "")
// Hadoop version to build against. For example, "1.0.4" for Apache releases, or
@@ -321,7 +321,7 @@ object SparkBuild extends Build {
val excludeServletApi = ExclusionRule(organization = "javax.servlet", artifact = "servlet-api")
def sparkPreviousArtifact(id: String, organization: String = "org.apache.spark",
- version: String = "0.9.0-incubating", crossVersion: String = "2.10"): Option[sbt.ModuleID] = {
+ version: String = "1.0.0", crossVersion: String = "2.10"): Option[sbt.ModuleID] = {
val fullId = if (crossVersion.isEmpty) id else id + "_" + crossVersion
Some(organization % fullId % version) // the artifact to compare binary compatibility with
}
@@ -363,7 +363,8 @@ object SparkBuild extends Build {
"org.spark-project" % "pyrolite" % "2.0.1",
"net.sf.py4j" % "py4j" % "0.8.1"
),
- libraryDependencies ++= maybeAvro
+ libraryDependencies ++= maybeAvro,
+ previousArtifact := sparkPreviousArtifact("spark-core")
)
// Create a colon-separate package list adding "org.apache.spark" in front of all of them,
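
With the new default version, spark-core's previousArtifact resolves to the published 1.0.0 release. A hedged illustration of the resulting module coordinates (the % operators assume import sbt._):

import sbt._

// sparkPreviousArtifact("spark-core") with the defaults above is equivalent to:
val sparkCorePrevious: Option[ModuleID] =
  Some("org.apache.spark" % "spark-core_2.10" % "1.0.0")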
diff --git a/project/project/SparkPluginBuild.scala b/project/project/SparkPluginBuild.scala
index 0142256e90..e9fba641eb 100644
--- a/project/project/SparkPluginBuild.scala
+++ b/project/project/SparkPluginBuild.scala
@@ -26,12 +26,10 @@ import sbt.Keys._
object SparkPluginDef extends Build {
lazy val root = Project("plugins", file(".")) dependsOn(sparkStyle)
lazy val sparkStyle = Project("spark-style", file("spark-style"), settings = styleSettings)
- val sparkVersion = "1.0.0-SNAPSHOT"
// There is actually no need to publish this artifact.
def styleSettings = Defaults.defaultSettings ++ Seq (
name := "spark-style",
organization := "org.apache.spark",
- version := sparkVersion,
scalaVersion := "2.10.4",
scalacOptions := Seq("-unchecked", "-deprecation"),
libraryDependencies ++= Dependencies.scalaStyle