Diffstat (limited to 'project')
-rw-r--r--  project/MimaExcludes.scala |  8
-rw-r--r--  project/SparkBuild.scala   | 10
2 files changed, 13 insertions(+), 5 deletions(-)
diff --git a/project/MimaExcludes.scala b/project/MimaExcludes.scala
index 69161e0d61..1a02f660fd 100644
--- a/project/MimaExcludes.scala
+++ b/project/MimaExcludes.scala
@@ -716,6 +716,14 @@ object MimaExcludes {
ProblemFilters.exclude[IncompatibleResultTypeProblem]("org.apache.spark.executor.ShuffleReadMetrics.localBlocksFetched"),
ProblemFilters.exclude[IncompatibleResultTypeProblem]("org.apache.spark.status.api.v1.ShuffleReadMetrics.remoteBlocksFetched"),
ProblemFilters.exclude[IncompatibleResultTypeProblem]("org.apache.spark.status.api.v1.ShuffleReadMetrics.localBlocksFetched")
+ ) ++ Seq(
+ // [SPARK-15290] Move annotations, like @Since / @DeveloperApi, into spark-tags
+ ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.annotation.package$"),
+ ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.annotation.package"),
+ ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.annotation.Private"),
+ ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.annotation.AlphaComponent"),
+ ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.annotation.Experimental"),
+ ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.annotation.DeveloperApi")
)
case v if v.startsWith("1.6") =>
Seq(
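Note on the exclusions added above: they follow the sbt-mima-plugin filter API, where each ProblemFilters.exclude suppresses one reported binary incompatibility and MissingClassProblem is the report raised when a class is no longer present in the checked artifact (here because the annotations moved into spark-tags). A minimal illustrative sketch of such a filter list; the class names below are placeholders, not part of this patch:

// Illustrative only: a MiMa exclusion list for classes that moved to another module.
import com.typesafe.tools.mima.core._
import com.typesafe.tools.mima.core.ProblemFilters._

object ExampleExcludes {
  // Each entry silences a single binary-compatibility report; MissingClassProblem
  // matches classes that have disappeared from the artifact being checked.
  val movedAnnotationFilters: Seq[ProblemFilter] = Seq(
    exclude[MissingClassProblem]("org.example.annotation.DeveloperApi"),
    exclude[MissingClassProblem]("org.example.annotation.Experimental")
  )
}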
diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala
index d83afa03f5..3ad9873f43 100644
--- a/project/SparkBuild.scala
+++ b/project/SparkBuild.scala
@@ -50,10 +50,10 @@ object BuildCommons {
).map(ProjectRef(buildLocation, _))
val allProjects@Seq(
- core, graphx, mllib, mllibLocal, repl, networkCommon, networkShuffle, launcher, unsafe, testTags, sketch, _*
+ core, graphx, mllib, mllibLocal, repl, networkCommon, networkShuffle, launcher, unsafe, tags, sketch, _*
) = Seq(
"core", "graphx", "mllib", "mllib-local", "repl", "network-common", "network-shuffle", "launcher", "unsafe",
- "test-tags", "sketch"
+ "tags", "sketch"
).map(ProjectRef(buildLocation, _)) ++ sqlProjects ++ streamingProjects
val optionallyEnabledProjects@Seq(yarn, java8Tests, sparkGangliaLgpl,
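As an aside, the allProjects line changed above relies on Scala's pattern-matching val definition: the name @ Seq(...) binder keeps the whole sequence while also naming individual ProjectRef elements, and the trailing _* absorbs the rest. A small sketch of the same idiom; the project names here are invented for illustration:

// Illustrative only: bind the full project list and selected members in one definition.
import sbt._

object ExampleCommons {
  val buildLocation = file(".").getAbsoluteFile.getParentFile

  // exampleProjects keeps every ProjectRef; core and tags name the first two,
  // and _* matches whatever remains.
  val exampleProjects @ Seq(core, tags, _*) =
    Seq("core", "tags", "sketch").map(ProjectRef(buildLocation, _))
}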
@@ -340,7 +340,7 @@ object SparkBuild extends PomBuild {
val mimaProjects = allProjects.filterNot { x =>
Seq(
spark, hive, hiveThriftServer, hiveCompatibility, catalyst, repl, networkCommon, networkShuffle, networkYarn,
- unsafe, testTags, sketch, mllibLocal
+ unsafe, tags, sketch, mllibLocal
).contains(x)
}
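The mimaProjects computation above amounts to set subtraction over project references: every project is checked for binary compatibility except the ones listed, which are either non-public modules or, like the renamed tags module and mllib-local, too new to have a previous release to compare against. A tiny illustrative helper expressing the same idea; the names are assumed:

// Illustrative only: keep every project that is not explicitly exempted from MiMa checks.
import sbt._

object ExampleMimaSelection {
  def mimaCheckedProjects(all: Seq[ProjectRef], exempt: Seq[ProjectRef]): Seq[ProjectRef] =
    all.filterNot(exempt.contains)
}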
@@ -685,9 +685,9 @@ object Unidoc {
publish := {},
unidocProjectFilter in(ScalaUnidoc, unidoc) :=
- inAnyProject -- inProjects(OldDeps.project, repl, examples, tools, streamingFlumeSink, yarn, testTags),
+ inAnyProject -- inProjects(OldDeps.project, repl, examples, tools, streamingFlumeSink, yarn, tags),
unidocProjectFilter in(JavaUnidoc, unidoc) :=
- inAnyProject -- inProjects(OldDeps.project, repl, examples, tools, streamingFlumeSink, yarn, testTags),
+ inAnyProject -- inProjects(OldDeps.project, repl, examples, tools, streamingFlumeSink, yarn, tags),
// Skip actual catalyst, but include the subproject.
// Catalyst is not public API and contains quasiquotes which break scaladoc.
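Finally, the Unidoc settings touched above use sbt's ScopeFilter DSL: inAnyProject selects every project in the build, and the -- operator subtracts the ones that should not contribute to the aggregated scaladoc/javadoc (the renamed tags module has no public API to document). A hedged sketch of the same setting shape, assuming the pre-1.0 sbt-unidoc plugin API (sbtunidoc.Plugin, UnidocKeys) that this build imports:

// Illustrative only: exclude selected projects from the aggregated unidoc output.
import sbt._
import sbtunidoc.Plugin._
import sbtunidoc.Plugin.UnidocKeys._

object ExampleUnidoc {
  // Subtract the given project references from the set of documented projects.
  def excludeFromScaladoc(excluded: ProjectReference*): Setting[_] =
    unidocProjectFilter in (ScalaUnidoc, unidoc) :=
      inAnyProject -- inProjects(excluded: _*)
}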