Diffstat (limited to 'project')
-rw-r--r--  project/MimaBuild.scala    |  2 +-
-rw-r--r--  project/MimaExcludes.scala |  6 ++++++
-rw-r--r--  project/SparkBuild.scala   | 12 ++++++------
3 files changed, 13 insertions(+), 7 deletions(-)
diff --git a/project/MimaBuild.scala b/project/MimaBuild.scala
index 41856443af..4adf64a5a0 100644
--- a/project/MimaBuild.scala
+++ b/project/MimaBuild.scala
@@ -95,7 +95,7 @@ object MimaBuild {
// because spark-streaming-mqtt(1.6.0) depends on it.
// Remove the setting on updating previousSparkVersion.
val previousSparkVersion = "1.6.0"
- val fullId = "spark-" + projectRef.project + "_2.10"
+ val fullId = "spark-" + projectRef.project + "_2.11"
mimaDefaultSettings ++
Seq(previousArtifact := Some(organization % fullId % previousSparkVersion),
binaryIssueFilters ++= ignoredABIProblems(sparkHome, version.value),
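
The hunk above moves MiMa's comparison baseline from the Scala 2.10 artifacts to the Scala 2.11 ones: cross-built Scala libraries are published with a binary-version suffix on the artifact id, so previousArtifact now resolves, for example, spark-core_2.11 instead of spark-core_2.10 for the 1.6.0 baseline. A minimal sketch of the naming scheme (plain Scala; the helper name is hypothetical):

    // Cross-built artifacts carry the Scala binary version as a suffix.
    def mimaArtifactId(project: String, scalaBinaryVersion: String): String =
      s"spark-${project}_$scalaBinaryVersion"

    // Before this commit MiMa fetched spark-core_2.10:1.6.0;
    // after it, spark-core_2.11:1.6.0.
    assert(mimaArtifactId("core", "2.11") == "spark-core_2.11")
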
diff --git a/project/MimaExcludes.scala b/project/MimaExcludes.scala
index a3ae4d2b73..3748e07f88 100644
--- a/project/MimaExcludes.scala
+++ b/project/MimaExcludes.scala
@@ -220,6 +220,12 @@ object MimaExcludes {
// SPARK-11622 Make LibSVMRelation extends HadoopFsRelation and Add LibSVMOutputWriter
ProblemFilters.exclude[MissingTypesProblem]("org.apache.spark.ml.source.libsvm.DefaultSource"),
ProblemFilters.exclude[MissingMethodProblem]("org.apache.spark.ml.source.libsvm.DefaultSource.createRelation")
+ ) ++ Seq(
+ // SPARK-6363 Make Scala 2.11 the default Scala version
+ ProblemFilters.exclude[MissingMethodProblem]("org.apache.spark.SparkContext.cleanup"),
+ ProblemFilters.exclude[MissingMethodProblem]("org.apache.spark.SparkContext.metadataCleaner"),
+ ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.scheduler.cluster.YarnSchedulerBackend$YarnDriverEndpoint"),
+ ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.scheduler.cluster.YarnSchedulerBackend$YarnSchedulerEndpoint")
)
case v if v.startsWith("1.6") =>
Seq(
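
The six added exclusions suppress compatibility reports that are expected fallout of making 2.11 the default rather than genuine API breaks. Each MiMa filter pairs a problem type with the fully qualified name to ignore; a minimal sketch of how one is constructed (assuming the usual com.typesafe.tools.mima.core._ import that this file relies on):

    import com.typesafe.tools.mima.core._

    // Ignore the report that SparkContext.cleanup is missing from the new
    // binaries relative to the 1.6.0 baseline; the filter matches on the
    // problem type plus the fully qualified member name.
    val filter = ProblemFilters.exclude[MissingMethodProblem](
      "org.apache.spark.SparkContext.cleanup")
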
diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala
index 4224a65a82..550b5bad8a 100644
--- a/project/SparkBuild.scala
+++ b/project/SparkBuild.scala
@@ -119,11 +119,11 @@ object SparkBuild extends PomBuild {
v.split("(\\s+|,)").filterNot(_.isEmpty).map(_.trim.replaceAll("-P", "")).toSeq
}
- if (System.getProperty("scala-2.11") == "") {
- // To activate scala-2.11 profile, replace empty property value to non-empty value
+ if (System.getProperty("scala-2.10") == "") {
+ // To activate scala-2.10 profile, replace empty property value to non-empty value
// in the same way as Maven which handles -Dname as -Dname=true before executes build process.
// see: https://github.com/apache/maven/blob/maven-3.0.4/maven-embedder/src/main/java/org/apache/maven/cli/MavenCli.java#L1082
- System.setProperty("scala-2.11", "true")
+ System.setProperty("scala-2.10", "true")
}
profiles
}
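
This hunk flips the opt-in flag: with 2.11 now the default, it is the Scala 2.10 build that must be requested via a system property. The comment's point about Maven is that a bare -Dname on the command line arrives as an empty-string property, which Maven rewrites to "true"; the sbt build mimics that so a plain -Dscala-2.10 activates the profile. A self-contained illustration (hypothetical Demo object, not part of the build):

    // Run as: scala -Dscala-2.10 Demo
    object Demo extends App {
      // A bare -Dscala-2.10 yields the empty string, not "true".
      if (System.getProperty("scala-2.10") == "") {
        System.setProperty("scala-2.10", "true")
      }
      println(System.getProperty("scala-2.10")) // prints "true"
    }
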
@@ -382,7 +382,7 @@ object OldDeps {
lazy val project = Project("oldDeps", file("dev"), settings = oldDepsSettings)
def versionArtifact(id: String): Option[sbt.ModuleID] = {
- val fullId = id + "_2.10"
+ val fullId = id + "_2.11"
Some("org.apache.spark" % fullId % "1.2.0")
}
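
versionArtifact builds the coordinates for the old-release artifacts (marked intransitive at the call site below), now under the 2.11 suffix. A sketch of the module ID it yields for "spark-core" (sbt DSL, so it needs sbt's implicits in scope):

    import sbt._

    // versionArtifact("spark-core") now resolves to this coordinate:
    val dep: ModuleID = "org.apache.spark" % "spark-core_2.11" % "1.2.0"
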
@@ -390,7 +390,7 @@ object OldDeps {
name := "old-deps",
scalaVersion := "2.10.5",
libraryDependencies := Seq("spark-streaming-mqtt", "spark-streaming-zeromq",
- "spark-streaming-flume", "spark-streaming-kafka", "spark-streaming-twitter",
+ "spark-streaming-flume", "spark-streaming-twitter",
"spark-streaming", "spark-mllib", "spark-graphx",
"spark-core").map(versionArtifact(_).get intransitive())
)
@@ -704,7 +704,7 @@ object Java8TestSettings {
lazy val settings = Seq(
javacJVMVersion := "1.8",
// Targeting Java 8 bytecode is only supported in Scala 2.11.4 and higher:
- scalacJVMVersion := (if (System.getProperty("scala-2.11") == "true") "1.8" else "1.7")
+ scalacJVMVersion := (if (System.getProperty("scala-2.10") == "true") "1.7" else "1.8")
)
}
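
The final hunk inverts the conditional so Java 8 bytecode becomes the default target, with "1.7" kept only when the scala-2.10 property is set, since per the comment only Scala 2.11.4 and higher can target Java 8. A minimal sketch of the resulting behavior, assuming the property normalization shown earlier has already run:

    // No flag: target jvm-1.8. With -Dscala-2.10: target jvm-1.7.
    val scalacJVMVersion =
      if (System.getProperty("scala-2.10") == "true") "1.7" else "1.8"
    println(s"-target:jvm-$scalacJVMVersion")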