author     Prashant Sharma <prashant.s@imaginea.com>    2014-05-30 01:13:51 -0700
committer  Patrick Wendell <pwendell@gmail.com>          2014-05-30 01:13:51 -0700
commit     79fa8fd4b198904ca9ed78a821fd924bd1a82902 (patch)
tree       0390533c6de2a6d6d95660fecbae6bacb618a858 /project/SparkBuild.scala
parent     c8bf4131bc2a2e147e977159fc90e94b85738830 (diff)
[SPARK-1971] Update MIMA to compare against Spark 1.0.0
Author: Prashant Sharma <prashant.s@imaginea.com>

Closes #910 from ScrapCodes/enable-mima/spark-core and squashes the following commits:

79f3687 [Prashant Sharma] updated Mima to check against version 1.0
1e8969c [Prashant Sharma] Spark core missed out on Mima settings. So in effect we never tested spark core for mima related errors.
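For context, the setting this patch adds tells the sbt-mima-plugin which previously released artifact to compare the freshly compiled classes against. Below is a minimal sketch of that wiring for an sbt 0.13 Build-trait project, assuming the MimaKeys/MimaPlugin API of the sbt-mima-plugin generation Spark used at the time; ExampleBuild, previousRelease, and the single "core" project are illustrative stand-ins, not Spark's actual build definition.

import sbt._
import Keys._
import com.typesafe.tools.mima.plugin.MimaPlugin.mimaDefaultSettings
import com.typesafe.tools.mima.plugin.MimaKeys.previousArtifact

object ExampleBuild extends Build {
  // Same shape as sparkPreviousArtifact in the diff below: cross-version the
  // module name and point at the last released version as the MiMa baseline.
  def previousRelease(id: String, version: String = "1.0.0",
      scalaBinaryVersion: String = "2.10"): Option[ModuleID] =
    Some("org.apache.spark" % (id + "_" + scalaBinaryVersion) % version)

  lazy val core = Project("core", file("core"),
    settings = Defaults.defaultSettings ++ mimaDefaultSettings ++ Seq(
      // Without a previousArtifact baseline, MiMa skips the module entirely;
      // that is the gap in spark-core this commit closes.
      previousArtifact := previousRelease("spark-core")
    ))
}

With such a baseline in place, the plugin's binary-issue report task (mima-report-binary-issues in this plugin generation) fails the build when the compiled classes break binary compatibility with the released 1.0.0 jar.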
Diffstat (limited to 'project/SparkBuild.scala')
-rw-r--r--  project/SparkBuild.scala  |  7
1 files changed, 4 insertions, 3 deletions
diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala
index 8ef1e91f60..9833411c90 100644
--- a/project/SparkBuild.scala
+++ b/project/SparkBuild.scala
@@ -32,7 +32,7 @@ import scala.collection.JavaConversions._
 // import com.jsuereth.pgp.sbtplugin.PgpKeys._
 object SparkBuild extends Build {
-  val SPARK_VERSION = "1.0.0-SNAPSHOT"
+  val SPARK_VERSION = "1.1.0-SNAPSHOT"
   val SPARK_VERSION_SHORT = SPARK_VERSION.replaceAll("-SNAPSHOT", "")
   // Hadoop version to build against. For example, "1.0.4" for Apache releases, or
@@ -321,7 +321,7 @@ object SparkBuild extends Build {
   val excludeServletApi = ExclusionRule(organization = "javax.servlet", artifact = "servlet-api")
   def sparkPreviousArtifact(id: String, organization: String = "org.apache.spark",
-      version: String = "0.9.0-incubating", crossVersion: String = "2.10"): Option[sbt.ModuleID] = {
+      version: String = "1.0.0", crossVersion: String = "2.10"): Option[sbt.ModuleID] = {
     val fullId = if (crossVersion.isEmpty) id else id + "_" + crossVersion
     Some(organization % fullId % version) // the artifact to compare binary compatibility with
   }
@@ -363,7 +363,8 @@ object SparkBuild extends Build {
         "org.spark-project" % "pyrolite" % "2.0.1",
         "net.sf.py4j" % "py4j" % "0.8.1"
       ),
-    libraryDependencies ++= maybeAvro
+    libraryDependencies ++= maybeAvro,
+    previousArtifact := sparkPreviousArtifact("spark-core")
   )
   // Create a colon-separate package list adding "org.apache.spark" in front of all of them,
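The new baseline also interacts with MiMa's exclusion mechanism: once spark-core is compared against 1.0.0, any deliberate incompatibility has to be whitelisted through problem filters rather than ignored. As a hedged illustration (the excluded class and method names below are invented, and binaryIssueFilters is the key name from the same sbt-mima-plugin generation), such an exclusion looks roughly like this settings fragment appended to a module's settings:

import sbt._
import com.typesafe.tools.mima.core._
import com.typesafe.tools.mima.plugin.MimaKeys.binaryIssueFilters

// Settings fragment: silence one deliberate removal that MiMa would otherwise
// report as a binary incompatibility against the previousArtifact baseline.
val exampleMimaExclusions = Seq(
  binaryIssueFilters ++= Seq(
    ProblemFilters.exclude[MissingMethodProblem]("org.apache.spark.SomeClass.someRemovedMethod")
  )
)

Spark's real exclusions were maintained elsewhere in the project build rather than inline, so the fragment above only shows the general mechanism.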