path: root/project/MimaExcludes.scala
author     Josh Rosen <joshrosen@databricks.com>  2016-10-21 11:25:01 -0700
committer  Josh Rosen <joshrosen@databricks.com>  2016-10-21 11:25:01 -0700
commit     b3b4b9542223de3495a7a7e0dd27634ddb9f929d (patch)
tree       16058cc243d5b4074818e70e999bb4fd1d773b6f /project/MimaExcludes.scala
parent     3a237512b162d192b5503c08d121134a2dac6ff1 (diff)
[SPARK-18034] Upgrade to MiMa 0.1.11 to fix flakiness
We should upgrade to the latest release of MiMa (0.1.11) in order to include a fix for a bug which led to flakiness in the MiMa checks (https://github.com/typesafehub/migration-manager/issues/115).

Author: Josh Rosen <joshrosen@databricks.com>

Closes #15571 from JoshRosen/SPARK-18034.
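The plugin version bump itself lands in project/plugins.sbt, which is outside this diffstat view (limited to project/MimaExcludes.scala). As a rough sketch, assuming the usual sbt-mima-plugin coordinates of that era, the bump would look like:

    // project/plugins.sbt -- sketch of the version bump, not part of the diff shown below
    addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "0.1.11")

The extra excludes added below appear to suppress InheritedNewAbstractMethodProblem reports that the newer MiMa surfaces for the HasAggregationDepth trait.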
Diffstat (limited to 'project/MimaExcludes.scala')
-rw-r--r--  project/MimaExcludes.scala | 7 ++++++-
1 file changed, 6 insertions(+), 1 deletion(-)
diff --git a/project/MimaExcludes.scala b/project/MimaExcludes.scala
index facf034ea7..350b144f82 100644
--- a/project/MimaExcludes.scala
+++ b/project/MimaExcludes.scala
@@ -81,7 +81,12 @@ object MimaExcludes {
       // [SPARK-17338][SQL] add global temp view
       ProblemFilters.exclude[ReversedMissingMethodProblem]("org.apache.spark.sql.catalog.Catalog.dropGlobalTempView"),
       ProblemFilters.exclude[IncompatibleResultTypeProblem]("org.apache.spark.sql.catalog.Catalog.dropTempView"),
-      ProblemFilters.exclude[ReversedMissingMethodProblem]("org.apache.spark.sql.catalog.Catalog.dropTempView")
+      ProblemFilters.exclude[ReversedMissingMethodProblem]("org.apache.spark.sql.catalog.Catalog.dropTempView"),
+
+      // [SPARK-18034] Upgrade to MiMa 0.1.11 to fix flakiness.
+      ProblemFilters.exclude[InheritedNewAbstractMethodProblem]("org.apache.spark.ml.param.shared.HasAggregationDepth.aggregationDepth"),
+      ProblemFilters.exclude[InheritedNewAbstractMethodProblem]("org.apache.spark.ml.param.shared.HasAggregationDepth.getAggregationDepth"),
+      ProblemFilters.exclude[InheritedNewAbstractMethodProblem]("org.apache.spark.ml.param.shared.HasAggregationDepth.org$apache$spark$ml$param$shared$HasAggregationDepth$_setter_$aggregationDepth_=")
     )
   }
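For context on where these entries end up: Spark feeds MimaExcludes into its own build definition (project/MimaBuild.scala), but in a plain sbt project the same ProblemFilters values would be wired into sbt-mima-plugin directly. A minimal, hypothetical build.sbt sketch (the example organization, artifact, and member names are made up; the setting keys are assumed to match the 0.1.x sbt-mima-plugin):

    import com.typesafe.tools.mima.core._
    import com.typesafe.tools.mima.plugin.MimaKeys.{mimaBinaryIssueFilters, mimaPreviousArtifacts}

    // Compare the current build against a previously released artifact.
    mimaPreviousArtifacts := Set("org.example" %% "example-lib" % "1.0.0")

    // Suppress a specific reported incompatibility, mirroring the entries added in this commit.
    mimaBinaryIssueFilters ++= Seq(
      ProblemFilters.exclude[InheritedNewAbstractMethodProblem]("org.example.SomeTrait.someMethod")
    )

With this in place, the plugin's mimaReportBinaryIssues task reports binary incompatibilities against the previous artifact, minus anything matched by the filters.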