aboutsummaryrefslogtreecommitdiff
path: root/tools
diff options
context:
space:
mode:
authorDongjoon Hyun <dongjoon@apache.org>2016-03-15 23:25:31 -0700
committerReynold Xin <rxin@databricks.com>2016-03-15 23:25:31 -0700
commit3c578c594ebe124eb1a1b21e41a64e7661b04de9 (patch)
tree3a62f4130baf09512dfcaa633261e30b49ed88b4 /tools
parent3665294d4e1c6ea13ee66e71cf802f1a961ab15c (diff)
downloadspark-3c578c594ebe124eb1a1b21e41a64e7661b04de9.tar.gz
spark-3c578c594ebe124eb1a1b21e41a64e7661b04de9.tar.bz2
spark-3c578c594ebe124eb1a1b21e41a64e7661b04de9.zip
[SPARK-13920][BUILD] MIMA checks should apply to @Experimental and @DeveloperAPI APIs
## What changes were proposed in this pull request? We are able to change `Experimental` and `DeveloperAPI` API freely but also should monitor and manage those API carefully. This PR for [SPARK-13920](https://issues.apache.org/jira/browse/SPARK-13920) enables MiMa check and adds filters for them. ## How was this patch tested? Pass the Jenkins tests (including MiMa). Author: Dongjoon Hyun <dongjoon@apache.org> Closes #11751 from dongjoon-hyun/SPARK-13920.
Diffstat (limited to 'tools')
-rw-r--r--tools/src/main/scala/org/apache/spark/tools/GenerateMIMAIgnore.scala20
1 file changed, 3 insertions, 17 deletions
diff --git a/tools/src/main/scala/org/apache/spark/tools/GenerateMIMAIgnore.scala b/tools/src/main/scala/org/apache/spark/tools/GenerateMIMAIgnore.scala
index 738bd2150a..0df3c501de 100644
--- a/tools/src/main/scala/org/apache/spark/tools/GenerateMIMAIgnore.scala
+++ b/tools/src/main/scala/org/apache/spark/tools/GenerateMIMAIgnore.scala
@@ -40,15 +40,6 @@ object GenerateMIMAIgnore {
private val classLoader = Thread.currentThread().getContextClassLoader
private val mirror = runtimeMirror(classLoader)
- private def isDeveloperApi(sym: unv.Symbol) = sym.annotations.exists {
- _.tpe =:= mirror.staticClass("org.apache.spark.annotation.DeveloperApi").toType
- }
-
- private def isExperimental(sym: unv.Symbol) = sym.annotations.exists {
- _.tpe =:= mirror.staticClass("org.apache.spark.annotation.Experimental").toType
- }
-
-
private def isPackagePrivate(sym: unv.Symbol) =
!sym.privateWithin.fullName.startsWith("<none>")
@@ -57,7 +48,7 @@ object GenerateMIMAIgnore {
/**
* For every class checks via scala reflection if the class itself or contained members
- * have DeveloperApi or Experimental annotations or they are package private.
+ * are package private.
* Returns the tuple of such classes and members.
*/
private def privateWithin(packageName: String): (Set[String], Set[String]) = {
@@ -74,8 +65,6 @@ object GenerateMIMAIgnore {
isPackagePrivate(classSymbol) ||
isPackagePrivateModule(moduleSymbol) ||
classSymbol.isPrivate
- val developerApi = isDeveloperApi(classSymbol) || isDeveloperApi(moduleSymbol)
- val experimental = isExperimental(classSymbol) || isExperimental(moduleSymbol)
/* Inner classes defined within a private[spark] class or object are effectively
invisible, so we account for them as package private. */
lazy val indirectlyPrivateSpark = {
@@ -87,7 +76,7 @@ object GenerateMIMAIgnore {
false
}
}
- if (directlyPrivateSpark || indirectlyPrivateSpark || developerApi || experimental) {
+ if (directlyPrivateSpark || indirectlyPrivateSpark) {
ignoredClasses += className
}
// check if this class has package-private/annotated members.
@@ -122,9 +111,7 @@ object GenerateMIMAIgnore {
private def getAnnotatedOrPackagePrivateMembers(classSymbol: unv.ClassSymbol) = {
classSymbol.typeSignature.members.filterNot(x =>
x.fullName.startsWith("java") || x.fullName.startsWith("scala")
- ).filter(x =>
- isPackagePrivate(x) || isDeveloperApi(x) || isExperimental(x)
- ).map(_.fullName) ++ getInnerFunctions(classSymbol)
+ ).filter(x => isPackagePrivate(x)).map(_.fullName) ++ getInnerFunctions(classSymbol)
}
def main(args: Array[String]) {
@@ -144,7 +131,6 @@ object GenerateMIMAIgnore {
// scalastyle:on println
}
-
private def shouldExclude(name: String) = {
// Heuristic to remove JVM classes that do not correspond to user-facing classes in Scala
name.contains("anon") ||