From d34d6503786bbe429c10ddb1879519cc9bd709b6 Mon Sep 17 00:00:00 2001
From: Dongjoon Hyun
Date: Sun, 24 Apr 2016 20:40:03 -0700
Subject: [SPARK-14868][BUILD] Enable NewLineAtEofChecker in checkstyle and
 fix lint-java errors

## What changes were proposed in this pull request?

Spark enforces the `NewLineAtEofChecker` rule for Scala via ScalaStyle, and most Java code already complies with it. This PR explicitly enforces the equivalent rule, `NewlineAtEndOfFile`, via Checkstyle. It also fixes the lint-java errors accumulated since SPARK-14465:

- Adds a missing newline at the end of 19 files
- Fixes 25 lint-java errors (12 RedundantModifier, 6 **ArrayTypeStyle**, 2 LineLength, 2 UnusedImports, 2 RegexpSingleline, 1 ModifierOrder)

## How was this patch tested?

After the Jenkins test succeeds, `dev/lint-java` should pass. (Currently, Jenkins does not run lint-java.)

```bash
$ dev/lint-java
Using `mvn` from path: /usr/local/bin/mvn
Checkstyle checks passed.
```

Author: Dongjoon Hyun

Closes #12632 from dongjoon-hyun/SPARK-14868.
---
 .../spark/api/java/function/package-info.java      |  2 +-
 .../spark/shuffle/sort/ShuffleExternalSorter.java  |  4 ++--
 .../collection/unsafe/sort/PrefixComparators.java  | 27 +++++++++++-----------
 .../util/collection/unsafe/sort/RadixSort.java     |  4 ++--
 .../unsafe/sort/UnsafeExternalSorter.java          |  4 ++--
 .../org/apache/spark/annotation/package-info.java  |  2 +-
 .../org/apache/spark/api/java/package-info.java    |  2 +-
 .../org/apache/spark/broadcast/package-info.java   |  2 +-
 .../org/apache/spark/executor/package-info.java    |  2 +-
 .../scala/org/apache/spark/io/package-info.java    |  2 +-
 .../scala/org/apache/spark/rdd/package-info.java   |  2 +-
 .../org/apache/spark/scheduler/package-info.java   |  2 +-
 .../scala/org/apache/spark/util/package-info.java  |  2 +-
 .../org/apache/spark/util/random/package-info.java |  2 +-
 dev/checkstyle.xml                                 |  2 ++
 .../apache/spark/streaming/flume/package-info.java |  2 +-
 .../apache/spark/streaming/kafka/package-info.java |  2 +-
 .../src/main/resources/log4j.properties            |  2 +-
 .../org/apache/spark/graphx/package-info.java      |  2 +-
 .../org/apache/spark/graphx/util/package-info.java |  2 +-
 .../scala/org/apache/spark/mllib/package-info.java |  2 +-
 .../JavaRandomForestClassifierSuite.java           |  6 ++---
 .../regression/JavaRandomForestRegressorSuite.java |  6 ++---
 .../parquet/SpecificParquetRecordReaderBase.java   |  1 -
 .../spark/streaming/api/java/package-info.java     |  2 +-
 .../spark/streaming/dstream/package-info.java      |  2 +-
 26 files changed, 45 insertions(+), 45 deletions(-)

diff --git a/core/src/main/java/org/apache/spark/api/java/function/package-info.java b/core/src/main/java/org/apache/spark/api/java/function/package-info.java
index 463a42f233..eefb29aca9 100644
--- a/core/src/main/java/org/apache/spark/api/java/function/package-info.java
+++ b/core/src/main/java/org/apache/spark/api/java/function/package-info.java
@@ -20,4 +20,4 @@
 * these interfaces to pass functions to various Java API methods for Spark. Please visit Spark's
 * Java programming guide for more details.
 */
-package org.apache.spark.api.java.function;
\ No newline at end of file
+package org.apache.spark.api.java.function;
diff --git a/core/src/main/java/org/apache/spark/shuffle/sort/ShuffleExternalSorter.java b/core/src/main/java/org/apache/spark/shuffle/sort/ShuffleExternalSorter.java
index c4041a97e8..2be5a16b2d 100644
--- a/core/src/main/java/org/apache/spark/shuffle/sort/ShuffleExternalSorter.java
+++ b/core/src/main/java/org/apache/spark/shuffle/sort/ShuffleExternalSorter.java
@@ -256,8 +256,8 @@ final class ShuffleExternalSorter extends MemoryConsumer {
     final long spillSize = freeMemory();
     inMemSorter.reset();
     // Reset the in-memory sorter's pointer array only after freeing up the memory pages holding the
-    // records. Otherwise, if the task is over allocated memory, then without freeing the memory pages,
-    // we might not be able to get memory for the pointer array.
+    // records. Otherwise, if the task is over allocated memory, then without freeing the memory
+    // pages, we might not be able to get memory for the pointer array.
     taskContext.taskMetrics().incMemoryBytesSpilled(spillSize);
     return spillSize;
   }
diff --git a/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/PrefixComparators.java b/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/PrefixComparators.java
index 21f2fde79d..c44630fbbc 100644
--- a/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/PrefixComparators.java
+++ b/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/PrefixComparators.java
@@ -22,7 +22,6 @@ import com.google.common.primitives.UnsignedLongs;
 import org.apache.spark.annotation.Private;
 import org.apache.spark.unsafe.types.ByteArray;
 import org.apache.spark.unsafe.types.UTF8String;
-import org.apache.spark.util.Utils;
 
 @Private
 public class PrefixComparators {
@@ -69,7 +68,7 @@ public class PrefixComparators {
    * Provides radix sort parameters. Comparators implementing this also are indicating that the
    * ordering they define is compatible with radix sort.
    */
-  public static abstract class RadixSortSupport extends PrefixComparator {
+  public abstract static class RadixSortSupport extends PrefixComparator {
     /** @return Whether the sort should be descending in binary sort order. */
     public abstract boolean sortDescending();
@@ -82,37 +81,37 @@ public class PrefixComparators {
   //
   public static final class UnsignedPrefixComparator extends RadixSortSupport {
-    @Override public final boolean sortDescending() { return false; }
-    @Override public final boolean sortSigned() { return false; }
+    @Override public boolean sortDescending() { return false; }
+    @Override public boolean sortSigned() { return false; }
     @Override
-    public final int compare(long aPrefix, long bPrefix) {
+    public int compare(long aPrefix, long bPrefix) {
       return UnsignedLongs.compare(aPrefix, bPrefix);
     }
   }
 
   public static final class UnsignedPrefixComparatorDesc extends RadixSortSupport {
-    @Override public final boolean sortDescending() { return true; }
-    @Override public final boolean sortSigned() { return false; }
+    @Override public boolean sortDescending() { return true; }
+    @Override public boolean sortSigned() { return false; }
     @Override
-    public final int compare(long bPrefix, long aPrefix) {
+    public int compare(long bPrefix, long aPrefix) {
       return UnsignedLongs.compare(aPrefix, bPrefix);
     }
   }
 
   public static final class SignedPrefixComparator extends RadixSortSupport {
-    @Override public final boolean sortDescending() { return false; }
-    @Override public final boolean sortSigned() { return true; }
+    @Override public boolean sortDescending() { return false; }
+    @Override public boolean sortSigned() { return true; }
     @Override
-    public final int compare(long a, long b) {
+    public int compare(long a, long b) {
       return (a < b) ? -1 : (a > b) ? 1 : 0;
     }
   }
 
   public static final class SignedPrefixComparatorDesc extends RadixSortSupport {
-    @Override public final boolean sortDescending() { return true; }
-    @Override public final boolean sortSigned() { return true; }
+    @Override public boolean sortDescending() { return true; }
+    @Override public boolean sortSigned() { return true; }
     @Override
-    public final int compare(long b, long a) {
+    public int compare(long b, long a) {
       return (a < b) ? -1 : (a > b) ? 1 : 0;
     }
   }
diff --git a/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/RadixSort.java b/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/RadixSort.java
index 3357b8e474..4f3f0de7b8 100644
--- a/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/RadixSort.java
+++ b/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/RadixSort.java
@@ -16,7 +16,7 @@
  */
 
 package org.apache.spark.util.collection.unsafe.sort;
- 
+
 import org.apache.spark.unsafe.Platform;
 import org.apache.spark.unsafe.array.LongArray;
@@ -227,7 +227,7 @@
     }
     return counts;
   }
- 
+
   /**
    * Specialization of sortAtByte() for key-prefix arrays.
   */
diff --git a/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeExternalSorter.java b/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeExternalSorter.java
index 3c1cd39dc2..8b6c96a4c4 100644
--- a/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeExternalSorter.java
+++ b/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeExternalSorter.java
@@ -212,8 +212,8 @@ public final class UnsafeExternalSorter extends MemoryConsumer {
     // written to disk. This also counts the space needed to store the sorter's pointer array.
     inMemSorter.reset();
     // Reset the in-memory sorter's pointer array only after freeing up the memory pages holding the
-    // records. Otherwise, if the task is over allocated memory, then without freeing the memory pages,
-    // we might not be able to get memory for the pointer array.
+    // records. Otherwise, if the task is over allocated memory, then without freeing the memory
+    // pages, we might not be able to get memory for the pointer array.
     taskContext.taskMetrics().incMemoryBytesSpilled(spillSize);
     totalSpillBytes += spillSize;
diff --git a/core/src/main/scala/org/apache/spark/annotation/package-info.java b/core/src/main/scala/org/apache/spark/annotation/package-info.java
index 12c7afe6f1..9efdccf6b0 100644
--- a/core/src/main/scala/org/apache/spark/annotation/package-info.java
+++ b/core/src/main/scala/org/apache/spark/annotation/package-info.java
@@ -20,4 +20,4 @@
 * This package consist of these annotations, which are used project wide and are reflected in
 * Scala and Java docs.
 */
-package org.apache.spark.annotation;
\ No newline at end of file
+package org.apache.spark.annotation;
diff --git a/core/src/main/scala/org/apache/spark/api/java/package-info.java b/core/src/main/scala/org/apache/spark/api/java/package-info.java
index 10a480fc78..699181cafa 100644
--- a/core/src/main/scala/org/apache/spark/api/java/package-info.java
+++ b/core/src/main/scala/org/apache/spark/api/java/package-info.java
@@ -18,4 +18,4 @@
 /**
 * Spark Java programming APIs.
 */
-package org.apache.spark.api.java;
\ No newline at end of file
+package org.apache.spark.api.java;
diff --git a/core/src/main/scala/org/apache/spark/broadcast/package-info.java b/core/src/main/scala/org/apache/spark/broadcast/package-info.java
index 1510e6e84c..bbf4a684a1 100644
--- a/core/src/main/scala/org/apache/spark/broadcast/package-info.java
+++ b/core/src/main/scala/org/apache/spark/broadcast/package-info.java
@@ -18,4 +18,4 @@
 /**
 * Spark's broadcast variables, used to broadcast immutable datasets to all nodes.
 */
-package org.apache.spark.broadcast;
\ No newline at end of file
+package org.apache.spark.broadcast;
diff --git a/core/src/main/scala/org/apache/spark/executor/package-info.java b/core/src/main/scala/org/apache/spark/executor/package-info.java
index dd3b6815fb..fb280964c4 100644
--- a/core/src/main/scala/org/apache/spark/executor/package-info.java
+++ b/core/src/main/scala/org/apache/spark/executor/package-info.java
@@ -18,4 +18,4 @@
 /**
 * Package for executor components used with various cluster managers.
 */
-package org.apache.spark.executor;
\ No newline at end of file
+package org.apache.spark.executor;
diff --git a/core/src/main/scala/org/apache/spark/io/package-info.java b/core/src/main/scala/org/apache/spark/io/package-info.java
index bea1bfdb63..1a46660280 100644
--- a/core/src/main/scala/org/apache/spark/io/package-info.java
+++ b/core/src/main/scala/org/apache/spark/io/package-info.java
@@ -18,4 +18,4 @@
 /**
 * IO codecs used for compression.
 */
-package org.apache.spark.io;
\ No newline at end of file
+package org.apache.spark.io;
diff --git a/core/src/main/scala/org/apache/spark/rdd/package-info.java b/core/src/main/scala/org/apache/spark/rdd/package-info.java
index 176cc58179..d9aa9bebe5 100644
--- a/core/src/main/scala/org/apache/spark/rdd/package-info.java
+++ b/core/src/main/scala/org/apache/spark/rdd/package-info.java
@@ -18,4 +18,4 @@
 /**
 * Provides implementation's of various RDDs.
 */
-package org.apache.spark.rdd;
\ No newline at end of file
+package org.apache.spark.rdd;
diff --git a/core/src/main/scala/org/apache/spark/scheduler/package-info.java b/core/src/main/scala/org/apache/spark/scheduler/package-info.java
index 5b4a628d3c..90fc65251e 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/package-info.java
+++ b/core/src/main/scala/org/apache/spark/scheduler/package-info.java
@@ -18,4 +18,4 @@
 /**
 * Spark's DAG scheduler.
 */
-package org.apache.spark.scheduler;
\ No newline at end of file
+package org.apache.spark.scheduler;
diff --git a/core/src/main/scala/org/apache/spark/util/package-info.java b/core/src/main/scala/org/apache/spark/util/package-info.java
index 819f54ee41..4c5d33d88d 100644
--- a/core/src/main/scala/org/apache/spark/util/package-info.java
+++ b/core/src/main/scala/org/apache/spark/util/package-info.java
@@ -18,4 +18,4 @@
 /**
 * Spark utilities.
 */
-package org.apache.spark.util;
\ No newline at end of file
+package org.apache.spark.util;
diff --git a/core/src/main/scala/org/apache/spark/util/random/package-info.java b/core/src/main/scala/org/apache/spark/util/random/package-info.java
index 62c3762dd1..e4f0c0febb 100644
--- a/core/src/main/scala/org/apache/spark/util/random/package-info.java
+++ b/core/src/main/scala/org/apache/spark/util/random/package-info.java
@@ -18,4 +18,4 @@
 /**
 * Utilities for random number generation.
 */
-package org.apache.spark.util.random;
\ No newline at end of file
+package org.apache.spark.util.random;
diff --git a/dev/checkstyle.xml b/dev/checkstyle.xml
index b66dca9041..3de6aa91dc 100644
--- a/dev/checkstyle.xml
+++ b/dev/checkstyle.xml
@@ -64,6 +64,8 @@
+    <module name="NewlineAtEndOfFile"/>
+
diff --git a/external/flume/src/main/scala/org/apache/spark/streaming/flume/package-info.java b/external/flume/src/main/scala/org/apache/spark/streaming/flume/package-info.java
index d31aa5f5c0..4a5da226ad 100644
--- a/external/flume/src/main/scala/org/apache/spark/streaming/flume/package-info.java
+++ b/external/flume/src/main/scala/org/apache/spark/streaming/flume/package-info.java
@@ -18,4 +18,4 @@
 /**
 * Spark streaming receiver for Flume.
 */
-package org.apache.spark.streaming.flume;
\ No newline at end of file
+package org.apache.spark.streaming.flume;
diff --git a/external/kafka/src/main/scala/org/apache/spark/streaming/kafka/package-info.java b/external/kafka/src/main/scala/org/apache/spark/streaming/kafka/package-info.java
index 947bae115a..2e5ab0fb3b 100644
--- a/external/kafka/src/main/scala/org/apache/spark/streaming/kafka/package-info.java
+++ b/external/kafka/src/main/scala/org/apache/spark/streaming/kafka/package-info.java
@@ -18,4 +18,4 @@
 /**
 * Kafka receiver for spark streaming.
 */
-package org.apache.spark.streaming.kafka;
\ No newline at end of file
+package org.apache.spark.streaming.kafka;
diff --git a/external/kinesis-asl/src/main/resources/log4j.properties b/external/kinesis-asl/src/main/resources/log4j.properties
index 8118d12c5d..4f5ea7bafe 100644
--- a/external/kinesis-asl/src/main/resources/log4j.properties
+++ b/external/kinesis-asl/src/main/resources/log4j.properties
@@ -34,4 +34,4 @@ log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{1}:
 log4j.logger.org.spark_project.jetty=WARN
 log4j.logger.org.spark_project.jetty.util.component.AbstractLifeCycle=ERROR
 log4j.logger.org.apache.spark.repl.SparkIMain$exprTyper=INFO
-log4j.logger.org.apache.spark.repl.SparkILoop$SparkILoopInterpreter=INFO
\ No newline at end of file
+log4j.logger.org.apache.spark.repl.SparkILoop$SparkILoopInterpreter=INFO
diff --git a/graphx/src/main/scala/org/apache/spark/graphx/package-info.java b/graphx/src/main/scala/org/apache/spark/graphx/package-info.java
index f659cc518e..7c63447070 100644
--- a/graphx/src/main/scala/org/apache/spark/graphx/package-info.java
+++ b/graphx/src/main/scala/org/apache/spark/graphx/package-info.java
@@ -19,4 +19,4 @@
 * ALPHA COMPONENT
 * GraphX is a graph processing framework built on top of Spark.
 */
-package org.apache.spark.graphx;
\ No newline at end of file
+package org.apache.spark.graphx;
diff --git a/graphx/src/main/scala/org/apache/spark/graphx/util/package-info.java b/graphx/src/main/scala/org/apache/spark/graphx/util/package-info.java
index 90cd1d46db..86b427e31d 100644
--- a/graphx/src/main/scala/org/apache/spark/graphx/util/package-info.java
+++ b/graphx/src/main/scala/org/apache/spark/graphx/util/package-info.java
@@ -18,4 +18,4 @@
 /**
 * Collections of utilities used by graphx.
 */
-package org.apache.spark.graphx.util;
\ No newline at end of file
+package org.apache.spark.graphx.util;
diff --git a/mllib/src/main/scala/org/apache/spark/mllib/package-info.java b/mllib/src/main/scala/org/apache/spark/mllib/package-info.java
index 4991bc9e97..5962efa96b 100644
--- a/mllib/src/main/scala/org/apache/spark/mllib/package-info.java
+++ b/mllib/src/main/scala/org/apache/spark/mllib/package-info.java
@@ -18,4 +18,4 @@
 /**
 * Spark's machine learning library.
 */
-package org.apache.spark.mllib;
\ No newline at end of file
+package org.apache.spark.mllib;
diff --git a/mllib/src/test/java/org/apache/spark/ml/classification/JavaRandomForestClassifierSuite.java b/mllib/src/test/java/org/apache/spark/ml/classification/JavaRandomForestClassifierSuite.java
index 5aec52ac72..4f40fd65b9 100644
--- a/mllib/src/test/java/org/apache/spark/ml/classification/JavaRandomForestClassifierSuite.java
+++ b/mllib/src/test/java/org/apache/spark/ml/classification/JavaRandomForestClassifierSuite.java
@@ -81,15 +81,15 @@ public class JavaRandomForestClassifierSuite implements Serializable {
     for (String featureSubsetStrategy: RandomForestClassifier.supportedFeatureSubsetStrategies()) {
       rf.setFeatureSubsetStrategy(featureSubsetStrategy);
     }
-    String realStrategies[] = {".1", ".10", "0.10", "0.1", "0.9", "1.0"};
+    String[] realStrategies = {".1", ".10", "0.10", "0.1", "0.9", "1.0"};
     for (String strategy: realStrategies) {
       rf.setFeatureSubsetStrategy(strategy);
     }
-    String integerStrategies[] = {"1", "10", "100", "1000", "10000"};
+    String[] integerStrategies = {"1", "10", "100", "1000", "10000"};
     for (String strategy: integerStrategies) {
       rf.setFeatureSubsetStrategy(strategy);
     }
-    String invalidStrategies[] = {"-.1", "-.10", "-0.10", ".0", "0.0", "1.1", "0"};
+    String[] invalidStrategies = {"-.1", "-.10", "-0.10", ".0", "0.0", "1.1", "0"};
     for (String strategy: invalidStrategies) {
       try {
         rf.setFeatureSubsetStrategy(strategy);
diff --git a/mllib/src/test/java/org/apache/spark/ml/regression/JavaRandomForestRegressorSuite.java b/mllib/src/test/java/org/apache/spark/ml/regression/JavaRandomForestRegressorSuite.java
index a8736669f7..38b895f1fd 100644
--- a/mllib/src/test/java/org/apache/spark/ml/regression/JavaRandomForestRegressorSuite.java
+++ b/mllib/src/test/java/org/apache/spark/ml/regression/JavaRandomForestRegressorSuite.java
@@ -81,15 +81,15 @@ public class JavaRandomForestRegressorSuite implements Serializable {
     for (String featureSubsetStrategy: RandomForestRegressor.supportedFeatureSubsetStrategies()) {
       rf.setFeatureSubsetStrategy(featureSubsetStrategy);
     }
-    String realStrategies[] = {".1", ".10", "0.10", "0.1", "0.9", "1.0"};
+    String[] realStrategies = {".1", ".10", "0.10", "0.1", "0.9", "1.0"};
     for (String strategy: realStrategies) {
       rf.setFeatureSubsetStrategy(strategy);
     }
-    String integerStrategies[] = {"1", "10", "100", "1000", "10000"};
+    String[] integerStrategies = {"1", "10", "100", "1000", "10000"};
     for (String strategy: integerStrategies) {
       rf.setFeatureSubsetStrategy(strategy);
     }
-    String invalidStrategies[] = {"-.1", "-.10", "-0.10", ".0", "0.0", "1.1", "0"};
+    String[] invalidStrategies = {"-.1", "-.10", "-0.10", ".0", "0.0", "1.1", "0"};
     for (String strategy: invalidStrategies) {
       try {
         rf.setFeatureSubsetStrategy(strategy);
diff --git a/sql/core/src/main/java/org/apache/spark/sql/execution/datasources/parquet/SpecificParquetRecordReaderBase.java b/sql/core/src/main/java/org/apache/spark/sql/execution/datasources/parquet/SpecificParquetRecordReaderBase.java
index b224a86845..cbe8f78164 100644
--- a/sql/core/src/main/java/org/apache/spark/sql/execution/datasources/parquet/SpecificParquetRecordReaderBase.java
+++ b/sql/core/src/main/java/org/apache/spark/sql/execution/datasources/parquet/SpecificParquetRecordReaderBase.java
@@ -38,7 +38,6 @@ import static org.apache.parquet.hadoop.ParquetFileReader.readFooter;
 import static org.apache.parquet.hadoop.ParquetInputFormat.getFilter;
 
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.mapreduce.InputSplit;
 import org.apache.hadoop.mapreduce.RecordReader;
diff --git a/streaming/src/main/scala/org/apache/spark/streaming/api/java/package-info.java b/streaming/src/main/scala/org/apache/spark/streaming/api/java/package-info.java
index d43d949d76..348d21d49a 100644
--- a/streaming/src/main/scala/org/apache/spark/streaming/api/java/package-info.java
+++ b/streaming/src/main/scala/org/apache/spark/streaming/api/java/package-info.java
@@ -18,4 +18,4 @@
 /**
 * Java APIs for spark streaming.
 */
-package org.apache.spark.streaming.api.java;
\ No newline at end of file
+package org.apache.spark.streaming.api.java;
diff --git a/streaming/src/main/scala/org/apache/spark/streaming/dstream/package-info.java b/streaming/src/main/scala/org/apache/spark/streaming/dstream/package-info.java
index 05ca2ddffd..4d08afcbfe 100644
--- a/streaming/src/main/scala/org/apache/spark/streaming/dstream/package-info.java
+++ b/streaming/src/main/scala/org/apache/spark/streaming/dstream/package-info.java
@@ -18,4 +18,4 @@
 /**
 * Various implementations of DStreams.
 */
-package org.apache.spark.streaming.dstream;
\ No newline at end of file
+package org.apache.spark.streaming.dstream;
-- 
cgit v1.2.3
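For readers unfamiliar with Checkstyle configuration, the sketch below shows how a `NewlineAtEndOfFile` rule is typically wired into a Checkstyle config file. It is a minimal illustration, not the verbatim contents of Spark's `dev/checkstyle.xml`: the DOCTYPE, module placement, the 100-column limit, and the companion TreeWalker checks are assumptions based on standard Checkstyle usage and on the error categories listed in the commit message.

```xml
<?xml version="1.0"?>
<!DOCTYPE module PUBLIC
    "-//Puppy Crawl//DTD Check Configuration 1.3//EN"
    "http://www.puppycrawl.com/dtds/configuration_1_3.dtd">

<module name="Checker">
    <!-- NewlineAtEndOfFile is a file-set check, so it is registered
         directly under the root Checker module, not inside TreeWalker. -->
    <module name="NewlineAtEndOfFile"/>

    <!-- Token-level checks, such as the categories fixed in this patch,
         are children of TreeWalker. -->
    <module name="TreeWalker">
        <module name="RedundantModifier"/>
        <module name="ModifierOrder"/>
        <module name="ArrayTypeStyle"/>
        <module name="UnusedImports"/>
        <!-- The reflowed comments in this patch suggest a 100-column limit. -->
        <module name="LineLength">
            <property name="max" value="100"/>
        </module>
    </module>
</module>
```

With such a module registered, running `dev/lint-java` (which, as the test output above shows, resolves `mvn` from the path) reports any checked file that does not end with a line separator as a Checkstyle violation.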