From d17d221487fa7a3af6f4af2217f1d4889ceb084d Mon Sep 17 00:00:00 2001
From: Patrick Wendell
Date: Sun, 1 Jun 2014 17:27:05 -0700
Subject: Better explanation for how to use MIMA excludes.

This patch does a few things:
1. We have a file MimaExcludes.scala exclusively for excludes.
2. The test runner tells users about that file if a test fails.
3. I've added back the excludes used from 0.9->1.0. We should keep these in
   the project as an official audit trail of times where we decided to make
   exceptions.

Author: Patrick Wendell

Closes #937 from pwendell/mima and squashes the following commits:

7ee0db2 [Patrick Wendell] Better explanation for how to use MIMA excludes.
---
 project/MimaBuild.scala | 48 ++++++++++++++++++++++++------------------------
 1 file changed, 24 insertions(+), 24 deletions(-)

(limited to 'project/MimaBuild.scala')

diff --git a/project/MimaBuild.scala b/project/MimaBuild.scala
index 182ca7615d..1477809943 100644
--- a/project/MimaBuild.scala
+++ b/project/MimaBuild.scala
@@ -15,21 +15,41 @@
  * limitations under the License.
  */
 
+import com.typesafe.tools.mima.core.{MissingTypesProblem, MissingClassProblem, ProblemFilters}
+import com.typesafe.tools.mima.core.ProblemFilters._
 import com.typesafe.tools.mima.plugin.MimaKeys.{binaryIssueFilters, previousArtifact}
 import com.typesafe.tools.mima.plugin.MimaPlugin.mimaDefaultSettings
 import sbt._
 
 object MimaBuild {
 
+  // Exclude a single class and its corresponding object
+  def excludeClass(className: String) = {
+    Seq(
+      excludePackage(className),
+      ProblemFilters.exclude[MissingClassProblem](className),
+      ProblemFilters.exclude[MissingTypesProblem](className),
+      excludePackage(className + "$"),
+      ProblemFilters.exclude[MissingClassProblem](className + "$"),
+      ProblemFilters.exclude[MissingTypesProblem](className + "$")
+    )
+  }
+
+  // Exclude a Spark class, that is in the package org.apache.spark
+  def excludeSparkClass(className: String) = {
+    excludeClass("org.apache.spark." + className)
+  }
+
+  // Exclude a Spark package, that is in the package org.apache.spark
+  def excludeSparkPackage(packageName: String) = {
+    excludePackage("org.apache.spark." + packageName)
+  }
+
   def ignoredABIProblems(base: File) = {
-    import com.typesafe.tools.mima.core._
-    import com.typesafe.tools.mima.core.ProblemFilters._
 
     // Excludes placed here will be used for all Spark versions
     val defaultExcludes = Seq()
 
     // Read package-private excludes from file
-    val excludeFilePath = (base.getAbsolutePath + "/.mima-excludes")
+    val excludeFilePath = (base.getAbsolutePath + "/.generated-mima-excludes")
     val excludeFile = file(excludeFilePath)
     val ignoredClasses: Seq[String] =
       if (!excludeFile.exists()) {
@@ -38,31 +58,11 @@ object MimaBuild {
         IO.read(excludeFile).split("\n")
       }
 
-    // Exclude a single class and its corresponding object
-    def excludeClass(className: String) = {
-      Seq(
-        excludePackage(className),
-        ProblemFilters.exclude[MissingClassProblem](className),
-        ProblemFilters.exclude[MissingTypesProblem](className),
-        excludePackage(className + "$"),
-        ProblemFilters.exclude[MissingClassProblem](className + "$"),
-        ProblemFilters.exclude[MissingTypesProblem](className + "$")
-      )
-    }
-
-    // Exclude a Spark class, that is in the package org.apache.spark
-    def excludeSparkClass(className: String) = {
-      excludeClass("org.apache.spark." + className)
-    }
-
-    // Exclude a Spark package, that is in the package org.apache.spark
-    def excludeSparkPackage(packageName: String) = {
-      excludePackage("org.apache.spark." + packageName)
-    }
 
     val externalExcludeFileClasses = ignoredClasses.flatMap(excludeClass)
 
-    defaultExcludes ++ externalExcludeFileClasses
+    defaultExcludes ++ externalExcludeFileClasses ++ MimaExcludes.excludes
   }
 
   def mimaSettings(sparkHome: File) = mimaDefaultSettings ++ Seq(
-- 
cgit v1.2.3
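
Note: the MimaExcludes.scala file referenced in the commit message is not part of this
diff (the view above is limited to project/MimaBuild.scala). As a rough sketch of how a
file could feed the MimaExcludes.excludes sequence that MimaBuild now appends, using the
helpers defined above -- the class and method names here are hypothetical placeholders,
not the actual Spark excludes:

    // Sketch only: not the real project/MimaExcludes.scala; names are illustrative.
    import com.typesafe.tools.mima.core.MissingMethodProblem
    import com.typesafe.tools.mima.core.ProblemFilters

    object MimaExcludes {
      // Each entry records a deliberate binary-compatibility exception; MimaBuild
      // appends this Seq to its default and generated excludes.
      val excludes = Seq(
        // Exclude a single removed or renamed member (hypothetical name):
        ProblemFilters.exclude[MissingMethodProblem](
          "org.apache.spark.SomeClass.someRemovedMethod")
      ) ++
        // Exclude an entire class plus its companion object via the MimaBuild
        // helper shown in the diff (hypothetical class name):
        MimaBuild.excludeSparkClass("util.SomeRemovedUtilClass")
    }

Because MimaBuild concatenates these entries with the generated and default excludes,
every exception stays visible in one reviewed file, which is the audit trail the commit
message describes.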