aboutsummaryrefslogtreecommitdiff
path: root/tools/src/main/scala/org/apache
diff options
context:
space:
mode:
Diffstat (limited to 'tools/src/main/scala/org/apache')
-rw-r--r-- tools/src/main/scala/org/apache/spark/tools/GenerateMIMAIgnore.scala | 8
-rw-r--r-- tools/src/main/scala/org/apache/spark/tools/JavaAPICompletenessChecker.scala | 4
-rw-r--r-- tools/src/main/scala/org/apache/spark/tools/StoragePerfTester.scala | 4
3 files changed, 16 insertions, 0 deletions
diff --git a/tools/src/main/scala/org/apache/spark/tools/GenerateMIMAIgnore.scala b/tools/src/main/scala/org/apache/spark/tools/GenerateMIMAIgnore.scala
index 595ded6ae6..9483d2b692 100644
--- a/tools/src/main/scala/org/apache/spark/tools/GenerateMIMAIgnore.scala
+++ b/tools/src/main/scala/org/apache/spark/tools/GenerateMIMAIgnore.scala
@@ -92,7 +92,9 @@ object GenerateMIMAIgnore {
ignoredMembers ++= getAnnotatedOrPackagePrivateMembers(classSymbol)
} catch {
+ // scalastyle:off println
case _: Throwable => println("Error instrumenting class:" + className)
+ // scalastyle:on println
}
}
(ignoredClasses.flatMap(c => Seq(c, c.replace("$", "#"))).toSet, ignoredMembers.toSet)
@@ -108,7 +110,9 @@ object GenerateMIMAIgnore {
.filter(_.contains("$$")).map(classSymbol.fullName + "." + _)
} catch {
case t: Throwable =>
+ // scalastyle:off println
println("[WARN] Unable to detect inner functions for class:" + classSymbol.fullName)
+ // scalastyle:on println
Seq.empty[String]
}
}
@@ -128,12 +132,14 @@ object GenerateMIMAIgnore {
getOrElse(Iterator.empty).mkString("\n")
File(".generated-mima-class-excludes")
.writeAll(previousContents + privateClasses.mkString("\n"))
+ // scalastyle:off println
println("Created : .generated-mima-class-excludes in current directory.")
val previousMembersContents = Try(File(".generated-mima-member-excludes").lines)
.getOrElse(Iterator.empty).mkString("\n")
File(".generated-mima-member-excludes").writeAll(previousMembersContents +
privateMembers.mkString("\n"))
println("Created : .generated-mima-member-excludes in current directory.")
+ // scalastyle:on println
}
@@ -174,7 +180,9 @@ object GenerateMIMAIgnore {
try {
classes += Class.forName(entry.replace('/', '.').stripSuffix(".class"), false, classLoader)
} catch {
+ // scalastyle:off println
case _: Throwable => println("Unable to load:" + entry)
+ // scalastyle:on println
}
}
classes
diff --git a/tools/src/main/scala/org/apache/spark/tools/JavaAPICompletenessChecker.scala b/tools/src/main/scala/org/apache/spark/tools/JavaAPICompletenessChecker.scala
index 583823c90c..856ea177a9 100644
--- a/tools/src/main/scala/org/apache/spark/tools/JavaAPICompletenessChecker.scala
+++ b/tools/src/main/scala/org/apache/spark/tools/JavaAPICompletenessChecker.scala
@@ -323,11 +323,14 @@ object JavaAPICompletenessChecker {
val missingMethods = javaEquivalents -- javaMethods
for (method <- missingMethods) {
+ // scalastyle:off println
println(method)
+ // scalastyle:on println
}
}
def main(args: Array[String]) {
+ // scalastyle:off println
println("Missing RDD methods")
printMissingMethods(classOf[RDD[_]], classOf[JavaRDD[_]])
println()
@@ -359,5 +362,6 @@ object JavaAPICompletenessChecker {
println("Missing PairDStream methods")
printMissingMethods(classOf[PairDStreamFunctions[_, _]], classOf[JavaPairDStream[_, _]])
println()
+ // scalastyle:on println
}
}
diff --git a/tools/src/main/scala/org/apache/spark/tools/StoragePerfTester.scala b/tools/src/main/scala/org/apache/spark/tools/StoragePerfTester.scala
index baa97616ea..0dc2861253 100644
--- a/tools/src/main/scala/org/apache/spark/tools/StoragePerfTester.scala
+++ b/tools/src/main/scala/org/apache/spark/tools/StoragePerfTester.scala
@@ -85,7 +85,9 @@ object StoragePerfTester {
latch.countDown()
} catch {
case e: Exception =>
+ // scalastyle:off println
println("Exception in child thread: " + e + " " + e.getMessage)
+ // scalastyle:on println
System.exit(1)
}
}
@@ -97,9 +99,11 @@ object StoragePerfTester {
val bytesPerSecond = totalBytes.get() / time
val bytesPerFile = (totalBytes.get() / (numOutputSplits * numMaps.toDouble)).toLong
+ // scalastyle:off println
System.err.println("files_total\t\t%s".format(numMaps * numOutputSplits))
System.err.println("bytes_per_file\t\t%s".format(Utils.bytesToString(bytesPerFile)))
System.err.println("agg_throughput\t\t%s/s".format(Utils.bytesToString(bytesPerSecond.toLong)))
+ // scalastyle:on println
executor.shutdown()
sc.stop()