author    Josh Rosen <joshrosen@databricks.com>  2016-01-30 00:20:28 -0800
committer Reynold Xin <rxin@databricks.com>      2016-01-30 00:20:28 -0800
commit    289373b28cd2546165187de2e6a9185a1257b1e7 (patch)
tree      b541a6e52a4ff20604689efafbfa0df7ad0901f5 /project
parent    dab246f7e4664d36073ec49d9df8a11c5e998cdb (diff)
[SPARK-6363][BUILD] Make Scala 2.11 the default Scala version
This patch changes Spark's build to make Scala 2.11 the default Scala version. To be clear, this does not mean that Spark will stop supporting Scala 2.10: users will still be able to compile Spark for Scala 2.10 by following the instructions on the "Building Spark" page; however, it does mean that Scala 2.11 will be the default Scala version used by our CI builds (including pull request builds).

The Scala 2.11 compiler is faster than 2.10, so I think we'll be able to look forward to a slight speedup in our CI builds (it looks like it's about 2X faster for the Maven compile-only builds, for instance).

After this patch is merged, I'll update Jenkins to add new compile-only jobs to ensure that Scala 2.10 compilation doesn't break.

Author: Josh Rosen <joshrosen@databricks.com>

Closes #10608 from JoshRosen/SPARK-6363.
Diffstat (limited to 'project')
-rw-r--r--  project/MimaBuild.scala    |  2 +-
-rw-r--r--  project/MimaExcludes.scala |  6 ++++++
-rw-r--r--  project/SparkBuild.scala   | 12 ++++++------
3 files changed, 13 insertions(+), 7 deletions(-)
diff --git a/project/MimaBuild.scala b/project/MimaBuild.scala
index 41856443af..4adf64a5a0 100644
--- a/project/MimaBuild.scala
+++ b/project/MimaBuild.scala
@@ -95,7 +95,7 @@ object MimaBuild {
     // because spark-streaming-mqtt(1.6.0) depends on it.
     // Remove the setting on updating previousSparkVersion.
     val previousSparkVersion = "1.6.0"
-    val fullId = "spark-" + projectRef.project + "_2.10"
+    val fullId = "spark-" + projectRef.project + "_2.11"
     mimaDefaultSettings ++
       Seq(previousArtifact := Some(organization % fullId % previousSparkVersion),
         binaryIssueFilters ++= ignoredABIProblems(sparkHome, version.value),
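
The one-line change above swaps the Scala binary-version suffix on the artifact ID that MiMa resolves for comparison. A minimal standalone sketch (hypothetical names, not Spark code) of how such cross-versioned artifact IDs are formed:

// Scala libraries publish one artifact per Scala binary version, with the
// version appended as a suffix; MiMa must resolve the matching build of
// the previous release (spark-core_2.11 after this patch).
object CrossVersionSketch {
  def fullId(project: String, scalaBinaryVersion: String): String =
    "spark-" + project + "_" + scalaBinaryVersion

  def main(args: Array[String]): Unit = {
    println(fullId("core", "2.11")) // prints: spark-core_2.11
  }
}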
diff --git a/project/MimaExcludes.scala b/project/MimaExcludes.scala
index a3ae4d2b73..3748e07f88 100644
--- a/project/MimaExcludes.scala
+++ b/project/MimaExcludes.scala
@@ -220,6 +220,12 @@ object MimaExcludes {
       // SPARK-11622 Make LibSVMRelation extends HadoopFsRelation and Add LibSVMOutputWriter
       ProblemFilters.exclude[MissingTypesProblem]("org.apache.spark.ml.source.libsvm.DefaultSource"),
       ProblemFilters.exclude[MissingMethodProblem]("org.apache.spark.ml.source.libsvm.DefaultSource.createRelation")
+    ) ++ Seq(
+      // SPARK-6363 Make Scala 2.11 the default Scala version
+      ProblemFilters.exclude[MissingMethodProblem]("org.apache.spark.SparkContext.cleanup"),
+      ProblemFilters.exclude[MissingMethodProblem]("org.apache.spark.SparkContext.metadataCleaner"),
+      ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.scheduler.cluster.YarnSchedulerBackend$YarnDriverEndpoint"),
+      ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.scheduler.cluster.YarnSchedulerBackend$YarnSchedulerEndpoint")
     )
     case v if v.startsWith("1.6") =>
       Seq(
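
The added exclusions follow MiMa's standard filter pattern. A minimal sketch, assuming MiMa's com.typesafe.tools.mima.core API is on the classpath (as it is for this build file), of how a version-scoped filter list like the one above is assembled:

// Each exclude names a known, intentional binary incompatibility so that
// MiMa does not fail the build over it.
import com.typesafe.tools.mima.core._

object ExcludesSketch {
  def excludes(version: String) = version match {
    case v if v.startsWith("2.0") => Seq(
      ProblemFilters.exclude[MissingMethodProblem]("org.apache.spark.SparkContext.cleanup")
    )
    case _ => Seq()
  }
}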
diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala
index 4224a65a82..550b5bad8a 100644
--- a/project/SparkBuild.scala
+++ b/project/SparkBuild.scala
@@ -119,11 +119,11 @@ object SparkBuild extends PomBuild {
       v.split("(\\s+|,)").filterNot(_.isEmpty).map(_.trim.replaceAll("-P", "")).toSeq
     }

-    if (System.getProperty("scala-2.11") == "") {
-      // To activate the scala-2.11 profile, replace the empty property value with a non-empty one,
+    if (System.getProperty("scala-2.10") == "") {
+      // To activate the scala-2.10 profile, replace the empty property value with a non-empty one,
       // in the same way as Maven, which treats -Dname as -Dname=true before running the build.
       // see: https://github.com/apache/maven/blob/maven-3.0.4/maven-embedder/src/main/java/org/apache/maven/cli/MavenCli.java#L1082
-      System.setProperty("scala-2.11", "true")
+      System.setProperty("scala-2.10", "true")
     }
     profiles
   }
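
The hunk above only swaps which flag receives the Maven-style normalization. A self-contained, runnable sketch (hypothetical object name, not Spark code) of that normalization: a bare -Dscala-2.10 on the JVM command line makes System.getProperty return the empty string, which is rewritten to "true" so later checks can compare against "true", just as Maven treats -Dname as -Dname=true.

object ProfileFlagSketch {
  def normalize(name: String): Unit =
    if (System.getProperty(name) == "") System.setProperty(name, "true")

  def main(args: Array[String]): Unit = {
    System.setProperty("scala-2.10", "")      // simulates a bare -Dscala-2.10
    normalize("scala-2.10")
    println(System.getProperty("scala-2.10")) // prints: true
  }
}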
@@ -382,7 +382,7 @@ object OldDeps {
   lazy val project = Project("oldDeps", file("dev"), settings = oldDepsSettings)

   def versionArtifact(id: String): Option[sbt.ModuleID] = {
-    val fullId = id + "_2.10"
+    val fullId = id + "_2.11"
     Some("org.apache.spark" % fullId % "1.2.0")
   }
@@ -390,7 +390,7 @@ object OldDeps {
     name := "old-deps",
     scalaVersion := "2.10.5",
     libraryDependencies := Seq("spark-streaming-mqtt", "spark-streaming-zeromq",
-      "spark-streaming-flume", "spark-streaming-kafka", "spark-streaming-twitter",
+      "spark-streaming-flume", "spark-streaming-twitter",
       "spark-streaming", "spark-mllib", "spark-graphx",
       "spark-core").map(versionArtifact(_).get intransitive())
   )
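
Both OldDeps hunks keep the old-deps project consistent with the new default: the previously released 1.2.0 artifacts are now resolved with the _2.11 suffix, and spark-streaming-kafka drops out of the list. A sketch (assumes sbt's API on the classpath; hypothetical object name, not Spark code) of what versionArtifact produces:

import sbt._

object OldDepsSketch {
  // "org" % "name" % "rev" builds an sbt ModuleID; intransitive() keeps
  // each module's own dependencies out of the resolution.
  def versionArtifact(id: String): ModuleID =
    "org.apache.spark" % (id + "_2.11") % "1.2.0"

  val deps: Seq[ModuleID] =
    Seq("spark-core", "spark-streaming").map(versionArtifact(_).intransitive())
}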
@@ -704,7 +704,7 @@ object Java8TestSettings {
   lazy val settings = Seq(
     javacJVMVersion := "1.8",
     // Targeting Java 8 bytecode is only supported in Scala 2.11.4 and higher:
-    scalacJVMVersion := (if (System.getProperty("scala-2.11") == "true") "1.8" else "1.7")
+    scalacJVMVersion := (if (System.getProperty("scala-2.10") == "true") "1.7" else "1.8")
   )
 }
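
With 2.11 as the default, the conditional on scalacJVMVersion inverts: Java 8 bytecode becomes the default target, and 1.7 applies only when the scala-2.10 profile is explicitly active. The same logic in isolation, as a runnable sketch (hypothetical object name, not Spark code):

object TargetVersionSketch {
  def scalacJVMVersion: String =
    if (System.getProperty("scala-2.10") == "true") "1.7" else "1.8"

  def main(args: Array[String]): Unit = {
    println(scalacJVMVersion) // prints "1.8" unless -Dscala-2.10 was passed
  }
}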