diff options
author | Pravin Gadakh <prgadakh@in.ibm.com> | 2016-04-28 15:59:18 -0700 |
---|---|---|
committer | DB Tsai <dbt@netflix.com> | 2016-04-28 15:59:18 -0700 |
commit | dae538a4d7c36191c1feb02ba87ffc624ab960dc (patch) | |
tree | 7e40905215019e4bb5f6f927315d71e3c6fdc96f /project | |
parent | 78c8aaf849aadbb065730959e7c1b70bb58d69c9 (diff) | |
download | spark-dae538a4d7c36191c1feb02ba87ffc624ab960dc.tar.gz spark-dae538a4d7c36191c1feb02ba87ffc624ab960dc.tar.bz2 spark-dae538a4d7c36191c1feb02ba87ffc624ab960dc.zip |
[SPARK-14613][ML] Add @Since into the matrix and vector classes in spark-mllib-local
## What changes were proposed in this pull request?
This PR adds the `@Since` annotation to the matrix and vector classes in spark-mllib-local.
## How was this patch tested?
Scala-style checks passed.
Author: Pravin Gadakh <prgadakh@in.ibm.com>
Closes #12416 from pravingadakh/SPARK-14613.
Diffstat (limited to 'project')
-rw-r--r-- | project/MimaExcludes.scala | 4
-rw-r--r-- | project/SparkBuild.scala | 10
2 files changed, 9 insertions, 5 deletions
diff --git a/project/MimaExcludes.scala b/project/MimaExcludes.scala
index 6fc49a08fe..26a3760bc3 100644
--- a/project/MimaExcludes.scala
+++ b/project/MimaExcludes.scala
@@ -700,6 +700,10 @@ object MimaExcludes {
             ProblemFilters.exclude[IncompatibleResultTypeProblem]("org.apache.spark.executor.ShuffleReadMetrics.localBlocksFetched"),
             ProblemFilters.exclude[IncompatibleResultTypeProblem]("org.apache.spark.status.api.v1.ShuffleReadMetrics.remoteBlocksFetched"),
             ProblemFilters.exclude[IncompatibleResultTypeProblem]("org.apache.spark.status.api.v1.ShuffleReadMetrics.localBlocksFetched")
+          ) ++ Seq(
+            // [SPARK-14613] Add @Since into the matrix and vector classes in spark-mllib-local
+            ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.annotation.package$"),
+            ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.annotation.package")
           )
         case v if v.startsWith("1.6") =>
           Seq(
diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala
index ffbca25e46..ecd08defd2 100644
--- a/project/SparkBuild.scala
+++ b/project/SparkBuild.scala
@@ -50,10 +50,10 @@ object BuildCommons {
   ).map(ProjectRef(buildLocation, _))

   val allProjects@Seq(
-    core, graphx, mllib, mllibLocal, repl, networkCommon, networkShuffle, launcher, unsafe, testTags, sketch, _*
+    core, graphx, mllib, mllibLocal, repl, networkCommon, networkShuffle, launcher, unsafe, tags, sketch, _*
   ) = Seq(
     "core", "graphx", "mllib", "mllib-local", "repl", "network-common", "network-shuffle", "launcher", "unsafe",
-    "test-tags", "sketch"
+    "tags", "sketch"
   ).map(ProjectRef(buildLocation, _)) ++ sqlProjects ++ streamingProjects

   val optionallyEnabledProjects@Seq(yarn, java8Tests, sparkGangliaLgpl,
@@ -336,7 +336,7 @@ object SparkBuild extends PomBuild {
   val mimaProjects = allProjects.filterNot { x =>
     Seq(
       spark, hive, hiveThriftServer, hiveCompatibility, catalyst, repl, networkCommon, networkShuffle, networkYarn,
-      unsafe, testTags, sketch, mllibLocal
+      unsafe, tags, sketch, mllibLocal
     ).contains(x)
   }
@@ -680,9 +680,9 @@ object Unidoc {
     publish := {},

     unidocProjectFilter in(ScalaUnidoc, unidoc) :=
-      inAnyProject -- inProjects(OldDeps.project, repl, examples, tools, streamingFlumeSink, yarn, testTags),
+      inAnyProject -- inProjects(OldDeps.project, repl, examples, tools, streamingFlumeSink, yarn, tags),
     unidocProjectFilter in(JavaUnidoc, unidoc) :=
-      inAnyProject -- inProjects(OldDeps.project, repl, examples, tools, streamingFlumeSink, yarn, testTags),
+      inAnyProject -- inProjects(OldDeps.project, repl, examples, tools, streamingFlumeSink, yarn, tags),

     // Skip actual catalyst, but include the subproject.
     // Catalyst is not public API and contains quasiquotes which break scaladoc.