path: root/project/MimaExcludes.scala
author     Sean Owen <sowen@cloudera.com>    2016-01-26 11:55:28 +0000
committer  Sean Owen <sowen@cloudera.com>    2016-01-26 11:55:28 +0000
commit     649e9d0f5b2d5fc13f2dd5be675331510525927f (patch)
tree       cc500b373fda20ef42243c199ecfb6f381310abb  /project/MimaExcludes.scala
parent     5936bf9fa85ccf7f0216145356140161c2801682 (diff)
download   spark-649e9d0f5b2d5fc13f2dd5be675331510525927f.tar.gz
           spark-649e9d0f5b2d5fc13f2dd5be675331510525927f.tar.bz2
           spark-649e9d0f5b2d5fc13f2dd5be675331510525927f.zip
[SPARK-3369][CORE][STREAMING] Java mapPartitions Iterator->Iterable is inconsistent with Scala's Iterator->Iterator
Fix the Java function API methods for flatMap and mapPartitions to require producing only an Iterator, not an Iterable. Also fix DStream.flatMap to require a function producing TraversableOnce only, not Traversable.

CC rxin pwendell for the API change; tdas since it also touches streaming.

Author: Sean Owen <sowen@cloudera.com>

Closes #10413 from srowen/SPARK-3369.
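For reference, a minimal sketch of what the changed contract looks like when implemented from Scala, assuming a Spark build that includes this change; the SplitWords class name and the word-splitting logic are hypothetical:

import java.util.{Arrays, Iterator => JIterator}
import org.apache.spark.api.java.function.FlatMapFunction

// Post-SPARK-3369, FlatMapFunction.call must return a java.util.Iterator
// rather than an Iterable; the same shift applies to the other flatMap /
// mapPartitions function interfaces excluded in the hunk below.
class SplitWords extends FlatMapFunction[String, String] {
  override def call(line: String): JIterator[String] =
    Arrays.asList(line.split(" "): _*).iterator()
}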
Diffstat (limited to 'project/MimaExcludes.scala')
-rw-r--r--  project/MimaExcludes.scala  31
1 file changed, 31 insertions(+), 0 deletions(-)
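Each changed call method is registered with MiMa as a pair of filters: one for the changed result type and one for the now-missing old method. A rough standalone sketch of that pattern, with the imports assumed to come from the surrounding MimaExcludes.scala file:

import com.typesafe.tools.mima.core.ProblemFilters
import com.typesafe.tools.mima.core.{IncompatibleResultTypeProblem, MissingMethodProblem}

// Hypothetical excerpt: the same two-filter pattern is repeated for every
// Java function interface whose call signature changed in SPARK-3369.
val flatMapFunctionExcludes = Seq(
  ProblemFilters.exclude[IncompatibleResultTypeProblem](
    "org.apache.spark.api.java.function.FlatMapFunction.call"),
  ProblemFilters.exclude[MissingMethodProblem](
    "org.apache.spark.api.java.function.FlatMapFunction.call")
)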
diff --git a/project/MimaExcludes.scala b/project/MimaExcludes.scala
index 501456b043..643bee6969 100644
--- a/project/MimaExcludes.scala
+++ b/project/MimaExcludes.scala
@@ -61,6 +61,37 @@ object MimaExcludes {
ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.broadcast.HttpBroadcastFactory")
) ++
Seq(
+ // SPARK-3369 Fix Iterable/Iterator in Java API
+ ProblemFilters.exclude[IncompatibleResultTypeProblem](
+ "org.apache.spark.api.java.function.FlatMapFunction.call"),
+ ProblemFilters.exclude[MissingMethodProblem](
+ "org.apache.spark.api.java.function.FlatMapFunction.call"),
+ ProblemFilters.exclude[IncompatibleResultTypeProblem](
+ "org.apache.spark.api.java.function.DoubleFlatMapFunction.call"),
+ ProblemFilters.exclude[MissingMethodProblem](
+ "org.apache.spark.api.java.function.DoubleFlatMapFunction.call"),
+ ProblemFilters.exclude[IncompatibleResultTypeProblem](
+ "org.apache.spark.api.java.function.FlatMapFunction2.call"),
+ ProblemFilters.exclude[MissingMethodProblem](
+ "org.apache.spark.api.java.function.FlatMapFunction2.call"),
+ ProblemFilters.exclude[IncompatibleResultTypeProblem](
+ "org.apache.spark.api.java.function.PairFlatMapFunction.call"),
+ ProblemFilters.exclude[MissingMethodProblem](
+ "org.apache.spark.api.java.function.PairFlatMapFunction.call"),
+ ProblemFilters.exclude[IncompatibleResultTypeProblem](
+ "org.apache.spark.api.java.function.CoGroupFunction.call"),
+ ProblemFilters.exclude[MissingMethodProblem](
+ "org.apache.spark.api.java.function.CoGroupFunction.call"),
+ ProblemFilters.exclude[IncompatibleResultTypeProblem](
+ "org.apache.spark.api.java.function.MapPartitionsFunction.call"),
+ ProblemFilters.exclude[MissingMethodProblem](
+ "org.apache.spark.api.java.function.MapPartitionsFunction.call"),
+ ProblemFilters.exclude[IncompatibleResultTypeProblem](
+ "org.apache.spark.api.java.function.FlatMapGroupsFunction.call"),
+ ProblemFilters.exclude[MissingMethodProblem](
+ "org.apache.spark.api.java.function.FlatMapGroupsFunction.call")
+ ) ++
+ Seq(
// SPARK-4819 replace Guava Optional
ProblemFilters.exclude[IncompatibleResultTypeProblem]("org.apache.spark.api.java.JavaSparkContext.getCheckpointDir"),
ProblemFilters.exclude[IncompatibleResultTypeProblem]("org.apache.spark.api.java.JavaSparkContext.getSparkHome"),