aboutsummaryrefslogtreecommitdiff
path: root/common/network-common
diff options
context:
space:
mode:
authorDongjoon Hyun <dongjoon@apache.org>2016-03-21 07:58:57 +0000
committerSean Owen <sowen@cloudera.com>2016-03-21 07:58:57 +0000
commit20fd254101553cb5a4c932c8d03064899112bee6 (patch)
tree25b1dd840cd2ec27fd875c3b52987d502e6423f5 /common/network-common
parente474088144cdd2632cf2fef6b2cf10b3cd191c23 (diff)
downloadspark-20fd254101553cb5a4c932c8d03064899112bee6.tar.gz
spark-20fd254101553cb5a4c932c8d03064899112bee6.tar.bz2
spark-20fd254101553cb5a4c932c8d03064899112bee6.zip
[SPARK-14011][CORE][SQL] Enable `LineLength` Java checkstyle rule
## What changes were proposed in this pull request? [Spark Coding Style Guide](https://cwiki.apache.org/confluence/display/SPARK/Spark+Code+Style+Guide) has a 100-character limit on lines, but it has been disabled for Java since 11/09/15. This PR enables the **LineLength** checkstyle rule again. To help with that, this also introduces the **RedundantImport** and **RedundantModifier** rules. The following is the diff on `checkstyle.xml`. ```xml - <!-- TODO: 11/09/15 disabled - the lengths are currently > 100 in many places --> - <!-- <module name="LineLength"> <property name="max" value="100"/> <property name="ignorePattern" value="^package.*|^import.*|a href|href|http://|https://|ftp://"/> </module> - --> <module name="NoLineWrap"/> <module name="EmptyBlock"> <property name="option" value="TEXT"/> -167,5 +164,7 </module> <module name="CommentsIndentation"/> <module name="UnusedImports"/> + <module name="RedundantImport"/> + <module name="RedundantModifier"/> ``` ## How was this patch tested? Currently, `lint-java` is disabled in Jenkins, so it needs a manual test. After passing the Jenkins tests, `dev/lint-java` should pass locally. Author: Dongjoon Hyun <dongjoon@apache.org> Closes #11831 from dongjoon-hyun/SPARK-14011.
Diffstat (limited to 'common/network-common')
-rw-r--r--common/network-common/src/main/java/org/apache/spark/network/TransportContext.java3
-rw-r--r--common/network-common/src/main/java/org/apache/spark/network/client/StreamCallback.java6
-rw-r--r--common/network-common/src/main/java/org/apache/spark/network/client/TransportClientFactory.java2
-rw-r--r--common/network-common/src/main/java/org/apache/spark/network/protocol/Message.java4
-rw-r--r--common/network-common/src/main/java/org/apache/spark/network/protocol/RequestMessage.java2
-rw-r--r--common/network-common/src/main/java/org/apache/spark/network/protocol/ResponseMessage.java2
-rw-r--r--common/network-common/src/main/java/org/apache/spark/network/sasl/SaslMessage.java4
-rw-r--r--common/network-common/src/main/java/org/apache/spark/network/server/OneForOneStreamManager.java4
-rw-r--r--common/network-common/src/main/java/org/apache/spark/network/server/TransportChannelHandler.java4
-rw-r--r--common/network-common/src/main/java/org/apache/spark/network/util/ByteUnit.java2
-rw-r--r--common/network-common/src/main/java/org/apache/spark/network/util/SystemPropertyConfigProvider.java2
-rw-r--r--common/network-common/src/main/java/org/apache/spark/network/util/TransportFrameDecoder.java2
12 files changed, 16 insertions, 21 deletions
diff --git a/common/network-common/src/main/java/org/apache/spark/network/TransportContext.java b/common/network-common/src/main/java/org/apache/spark/network/TransportContext.java
index 238710d172..5320b28bc0 100644
--- a/common/network-common/src/main/java/org/apache/spark/network/TransportContext.java
+++ b/common/network-common/src/main/java/org/apache/spark/network/TransportContext.java
@@ -43,7 +43,8 @@ import org.apache.spark.network.util.TransportFrameDecoder;
/**
* Contains the context to create a {@link TransportServer}, {@link TransportClientFactory}, and to
- * setup Netty Channel pipelines with a {@link org.apache.spark.network.server.TransportChannelHandler}.
+ * setup Netty Channel pipelines with a
+ * {@link org.apache.spark.network.server.TransportChannelHandler}.
*
* There are two communication protocols that the TransportClient provides, control-plane RPCs and
* data-plane "chunk fetching". The handling of the RPCs is performed outside of the scope of the
diff --git a/common/network-common/src/main/java/org/apache/spark/network/client/StreamCallback.java b/common/network-common/src/main/java/org/apache/spark/network/client/StreamCallback.java
index 29e6a30dc1..d322aec287 100644
--- a/common/network-common/src/main/java/org/apache/spark/network/client/StreamCallback.java
+++ b/common/network-common/src/main/java/org/apache/spark/network/client/StreamCallback.java
@@ -21,9 +21,9 @@ import java.io.IOException;
import java.nio.ByteBuffer;
/**
- * Callback for streaming data. Stream data will be offered to the {@link #onData(String, ByteBuffer)}
- * method as it arrives. Once all the stream data is received, {@link #onComplete(String)} will be
- * called.
+ * Callback for streaming data. Stream data will be offered to the
+ * {@link #onData(String, ByteBuffer)} method as it arrives. Once all the stream data is received,
+ * {@link #onComplete(String)} will be called.
* <p>
* The network library guarantees that a single thread will call these methods at a time, but
* different call may be made by different threads.
diff --git a/common/network-common/src/main/java/org/apache/spark/network/client/TransportClientFactory.java b/common/network-common/src/main/java/org/apache/spark/network/client/TransportClientFactory.java
index 1008c67de3..f179bad1f4 100644
--- a/common/network-common/src/main/java/org/apache/spark/network/client/TransportClientFactory.java
+++ b/common/network-common/src/main/java/org/apache/spark/network/client/TransportClientFactory.java
@@ -64,7 +64,7 @@ public class TransportClientFactory implements Closeable {
TransportClient[] clients;
Object[] locks;
- public ClientPool(int size) {
+ ClientPool(int size) {
clients = new TransportClient[size];
locks = new Object[size];
for (int i = 0; i < size; i++) {
diff --git a/common/network-common/src/main/java/org/apache/spark/network/protocol/Message.java b/common/network-common/src/main/java/org/apache/spark/network/protocol/Message.java
index 66f5b8b3a5..434935a8ef 100644
--- a/common/network-common/src/main/java/org/apache/spark/network/protocol/Message.java
+++ b/common/network-common/src/main/java/org/apache/spark/network/protocol/Message.java
@@ -33,7 +33,7 @@ public interface Message extends Encodable {
boolean isBodyInFrame();
/** Preceding every serialized Message is its type, which allows us to deserialize it. */
- public static enum Type implements Encodable {
+ enum Type implements Encodable {
ChunkFetchRequest(0), ChunkFetchSuccess(1), ChunkFetchFailure(2),
RpcRequest(3), RpcResponse(4), RpcFailure(5),
StreamRequest(6), StreamResponse(7), StreamFailure(8),
@@ -41,7 +41,7 @@ public interface Message extends Encodable {
private final byte id;
- private Type(int id) {
+ Type(int id) {
assert id < 128 : "Cannot have more than 128 message types";
this.id = (byte) id;
}
diff --git a/common/network-common/src/main/java/org/apache/spark/network/protocol/RequestMessage.java b/common/network-common/src/main/java/org/apache/spark/network/protocol/RequestMessage.java
index 31b15bb17a..b85171ed6f 100644
--- a/common/network-common/src/main/java/org/apache/spark/network/protocol/RequestMessage.java
+++ b/common/network-common/src/main/java/org/apache/spark/network/protocol/RequestMessage.java
@@ -17,8 +17,6 @@
package org.apache.spark.network.protocol;
-import org.apache.spark.network.protocol.Message;
-
/** Messages from the client to the server. */
public interface RequestMessage extends Message {
// token interface
diff --git a/common/network-common/src/main/java/org/apache/spark/network/protocol/ResponseMessage.java b/common/network-common/src/main/java/org/apache/spark/network/protocol/ResponseMessage.java
index 6edffd11cf..194e6d9aa2 100644
--- a/common/network-common/src/main/java/org/apache/spark/network/protocol/ResponseMessage.java
+++ b/common/network-common/src/main/java/org/apache/spark/network/protocol/ResponseMessage.java
@@ -17,8 +17,6 @@
package org.apache.spark.network.protocol;
-import org.apache.spark.network.protocol.Message;
-
/** Messages from the server to the client. */
public interface ResponseMessage extends Message {
// token interface
diff --git a/common/network-common/src/main/java/org/apache/spark/network/sasl/SaslMessage.java b/common/network-common/src/main/java/org/apache/spark/network/sasl/SaslMessage.java
index e52b526f09..7331c2b481 100644
--- a/common/network-common/src/main/java/org/apache/spark/network/sasl/SaslMessage.java
+++ b/common/network-common/src/main/java/org/apache/spark/network/sasl/SaslMessage.java
@@ -36,11 +36,11 @@ class SaslMessage extends AbstractMessage {
public final String appId;
- public SaslMessage(String appId, byte[] message) {
+ SaslMessage(String appId, byte[] message) {
this(appId, Unpooled.wrappedBuffer(message));
}
- public SaslMessage(String appId, ByteBuf message) {
+ SaslMessage(String appId, ByteBuf message) {
super(new NettyManagedBuffer(message), true);
this.appId = appId;
}
diff --git a/common/network-common/src/main/java/org/apache/spark/network/server/OneForOneStreamManager.java b/common/network-common/src/main/java/org/apache/spark/network/server/OneForOneStreamManager.java
index ea9e735e0a..e2222ae085 100644
--- a/common/network-common/src/main/java/org/apache/spark/network/server/OneForOneStreamManager.java
+++ b/common/network-common/src/main/java/org/apache/spark/network/server/OneForOneStreamManager.java
@@ -32,8 +32,8 @@ import org.apache.spark.network.buffer.ManagedBuffer;
import org.apache.spark.network.client.TransportClient;
/**
- * StreamManager which allows registration of an Iterator&lt;ManagedBuffer&gt;, which are individually
- * fetched as chunks by the client. Each registered buffer is one chunk.
+ * StreamManager which allows registration of an Iterator&lt;ManagedBuffer&gt;, which are
+ * individually fetched as chunks by the client. Each registered buffer is one chunk.
*/
public class OneForOneStreamManager extends StreamManager {
private final Logger logger = LoggerFactory.getLogger(OneForOneStreamManager.class);
diff --git a/common/network-common/src/main/java/org/apache/spark/network/server/TransportChannelHandler.java b/common/network-common/src/main/java/org/apache/spark/network/server/TransportChannelHandler.java
index 18a9b7887e..f2223379a9 100644
--- a/common/network-common/src/main/java/org/apache/spark/network/server/TransportChannelHandler.java
+++ b/common/network-common/src/main/java/org/apache/spark/network/server/TransportChannelHandler.java
@@ -141,8 +141,8 @@ public class TransportChannelHandler extends SimpleChannelInboundHandler<Message
if (responseHandler.numOutstandingRequests() > 0) {
String address = NettyUtils.getRemoteAddress(ctx.channel());
logger.error("Connection to {} has been quiet for {} ms while there are outstanding " +
- "requests. Assuming connection is dead; please adjust spark.network.timeout if this " +
- "is wrong.", address, requestTimeoutNs / 1000 / 1000);
+ "requests. Assuming connection is dead; please adjust spark.network.timeout if " +
+ "this is wrong.", address, requestTimeoutNs / 1000 / 1000);
client.timeOut();
ctx.close();
} else if (closeIdleConnections) {
diff --git a/common/network-common/src/main/java/org/apache/spark/network/util/ByteUnit.java b/common/network-common/src/main/java/org/apache/spark/network/util/ByteUnit.java
index a2f018373f..e097714bbc 100644
--- a/common/network-common/src/main/java/org/apache/spark/network/util/ByteUnit.java
+++ b/common/network-common/src/main/java/org/apache/spark/network/util/ByteUnit.java
@@ -24,7 +24,7 @@ public enum ByteUnit {
TiB ((long) Math.pow(1024L, 4L)),
PiB ((long) Math.pow(1024L, 5L));
- private ByteUnit(long multiplier) {
+ ByteUnit(long multiplier) {
this.multiplier = multiplier;
}
diff --git a/common/network-common/src/main/java/org/apache/spark/network/util/SystemPropertyConfigProvider.java b/common/network-common/src/main/java/org/apache/spark/network/util/SystemPropertyConfigProvider.java
index 5f20b70678..f15ec8d294 100644
--- a/common/network-common/src/main/java/org/apache/spark/network/util/SystemPropertyConfigProvider.java
+++ b/common/network-common/src/main/java/org/apache/spark/network/util/SystemPropertyConfigProvider.java
@@ -19,8 +19,6 @@ package org.apache.spark.network.util;
import java.util.NoSuchElementException;
-import org.apache.spark.network.util.ConfigProvider;
-
/** Uses System properties to obtain config values. */
public class SystemPropertyConfigProvider extends ConfigProvider {
@Override
diff --git a/common/network-common/src/main/java/org/apache/spark/network/util/TransportFrameDecoder.java b/common/network-common/src/main/java/org/apache/spark/network/util/TransportFrameDecoder.java
index 3f7024a6aa..bd1830e6ab 100644
--- a/common/network-common/src/main/java/org/apache/spark/network/util/TransportFrameDecoder.java
+++ b/common/network-common/src/main/java/org/apache/spark/network/util/TransportFrameDecoder.java
@@ -205,7 +205,7 @@ public class TransportFrameDecoder extends ChannelInboundHandlerAdapter {
return interceptor != null;
}
- public static interface Interceptor {
+ public interface Interceptor {
/**
* Handles data received from the remote end.