author     Weiqing Yang <yangweiqing001@gmail.com>    2016-08-08 09:24:37 +0100
committer  Sean Owen <sowen@cloudera.com>             2016-08-08 09:24:37 +0100
commit     e10ca8de49206087b336c6db0c40868fa271b989 (patch)
tree       13fbeadf475c07280f55e7d4b24a328ceafa1781
parent     1db1c6567bae0c80fdc522f2cbb65557cd62263f (diff)
[SPARK-16945] Fix Java Lint errors
## What changes were proposed in this pull request?

This PR fixes the following minor Java linter errors:

[ERROR] src/main/java/org/apache/spark/sql/catalyst/expressions/VariableLengthRowBasedKeyValueBatch.java:[42,10] (modifier) RedundantModifier: Redundant 'final' modifier.
[ERROR] src/main/java/org/apache/spark/sql/catalyst/expressions/VariableLengthRowBasedKeyValueBatch.java:[97,10] (modifier) RedundantModifier: Redundant 'final' modifier.

## How was this patch tested?

Manual test:

dev/lint-java
Using `mvn` from path: /usr/local/bin/mvn
Checkstyle checks passed.

Author: Weiqing Yang <yangweiqing001@gmail.com>

Closes #14532 from Sherry302/master.
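For context, Checkstyle's RedundantModifier rule flags a `final` modifier that has no effect. The sketch below illustrates the two cases this patch removes; the class and method names are hypothetical, not taken from the patch:

```java
// Toy class showing both RedundantModifier cases fixed in this commit.
public final class ToyBatch {

  // Case 1: the enclosing class is final, so no subclass can override
  // this method anyway; 'final' on the method adds nothing.
  public final int capacity() {  // checkstyle: Redundant 'final' modifier
    return 0;
  }

  // Case 2: private methods are never inherited, hence never overridden;
  // 'final' is redundant regardless of the class modifier.
  private final long keyOffset(int rowId) {  // checkstyle: Redundant 'final' modifier
    return rowId * 8L;
  }
}
```

Dropping the redundant modifiers changes no runtime behavior: methods of a final class and private methods remain effectively final either way.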
-rw-r--r--  examples/src/main/java/org/apache/spark/examples/sql/JavaSQLDataSourceExample.java  |  3 ++-
-rw-r--r--  sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/FixedLengthRowBasedKeyValueBatch.java  | 10 +++++-----
-rw-r--r--  sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/RowBasedKeyValueBatch.java  |  2 +-
-rw-r--r--  sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/VariableLengthRowBasedKeyValueBatch.java  |  6 +++---
4 files changed, 11 insertions(+), 10 deletions(-)
diff --git a/examples/src/main/java/org/apache/spark/examples/sql/JavaSQLDataSourceExample.java b/examples/src/main/java/org/apache/spark/examples/sql/JavaSQLDataSourceExample.java
index fc92446783..f9087e0593 100644
--- a/examples/src/main/java/org/apache/spark/examples/sql/JavaSQLDataSourceExample.java
+++ b/examples/src/main/java/org/apache/spark/examples/sql/JavaSQLDataSourceExample.java
@@ -221,7 +221,8 @@ public class JavaSQLDataSourceExample {
     // an RDD[String] storing one JSON object per string.
     List<String> jsonData = Arrays.asList(
       "{\"name\":\"Yin\",\"address\":{\"city\":\"Columbus\",\"state\":\"Ohio\"}}");
-    JavaRDD<String> anotherPeopleRDD = new JavaSparkContext(spark.sparkContext()).parallelize(jsonData);
+    JavaRDD<String> anotherPeopleRDD =
+      new JavaSparkContext(spark.sparkContext()).parallelize(jsonData);
     Dataset<Row> anotherPeople = spark.read().json(anotherPeopleRDD);
     anotherPeople.show();
     // +---------------+----+
diff --git a/sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/FixedLengthRowBasedKeyValueBatch.java b/sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/FixedLengthRowBasedKeyValueBatch.java
index b6130d1f33..85529f6a0a 100644
--- a/sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/FixedLengthRowBasedKeyValueBatch.java
+++ b/sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/FixedLengthRowBasedKeyValueBatch.java
@@ -33,7 +33,7 @@ public final class FixedLengthRowBasedKeyValueBatch extends RowBasedKeyValueBatc
   private final int vlen;
   private final int recordLength;
 
-  private final long getKeyOffsetForFixedLengthRecords(int rowId) {
+  private long getKeyOffsetForFixedLengthRecords(int rowId) {
     return recordStartOffset + rowId * (long) recordLength;
   }
@@ -43,7 +43,7 @@ public final class FixedLengthRowBasedKeyValueBatch extends RowBasedKeyValueBatc
    * Returns an UnsafeRow pointing to the value if succeeds, otherwise returns null.
    */
   @Override
-  public final UnsafeRow appendRow(Object kbase, long koff, int klen,
+  public UnsafeRow appendRow(Object kbase, long koff, int klen,
       Object vbase, long voff, int vlen) {
     // if run out of max supported rows or page size, return null
     if (numRows >= capacity || page == null || page.size() - pageCursor < recordLength) {
@@ -71,7 +71,7 @@ public final class FixedLengthRowBasedKeyValueBatch extends RowBasedKeyValueBatc
    * Returns the key row in this batch at `rowId`. Returned key row is reused across calls.
    */
   @Override
-  public final UnsafeRow getKeyRow(int rowId) {
+  public UnsafeRow getKeyRow(int rowId) {
     assert(rowId >= 0);
     assert(rowId < numRows);
     if (keyRowId != rowId) { // if keyRowId == rowId, desired keyRow is already cached
@@ -90,7 +90,7 @@ public final class FixedLengthRowBasedKeyValueBatch extends RowBasedKeyValueBatc
    * In most times, 1) is skipped because `getKeyRow(id)` is often called before `getValueRow(id)`.
    */
   @Override
-  protected final UnsafeRow getValueFromKey(int rowId) {
+  protected UnsafeRow getValueFromKey(int rowId) {
     if (keyRowId != rowId) {
       getKeyRow(rowId);
     }
@@ -103,7 +103,7 @@ public final class FixedLengthRowBasedKeyValueBatch extends RowBasedKeyValueBatc
    * Returns an iterator to go through all rows
    */
   @Override
-  public final org.apache.spark.unsafe.KVIterator<UnsafeRow, UnsafeRow> rowIterator() {
+  public org.apache.spark.unsafe.KVIterator<UnsafeRow, UnsafeRow> rowIterator() {
     return new org.apache.spark.unsafe.KVIterator<UnsafeRow, UnsafeRow>() {
       private final UnsafeRow key = new UnsafeRow(keySchema.length());
       private final UnsafeRow value = new UnsafeRow(valueSchema.length());
diff --git a/sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/RowBasedKeyValueBatch.java b/sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/RowBasedKeyValueBatch.java
index cea9d5d5bc..4899f856c8 100644
--- a/sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/RowBasedKeyValueBatch.java
+++ b/sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/RowBasedKeyValueBatch.java
@@ -123,7 +123,7 @@ public abstract class RowBasedKeyValueBatch extends MemoryConsumer {
     }
   }
 
-  private final boolean acquirePage(long requiredSize) {
+  private boolean acquirePage(long requiredSize) {
     try {
       page = allocatePage(requiredSize);
     } catch (OutOfMemoryError e) {
diff --git a/sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/VariableLengthRowBasedKeyValueBatch.java b/sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/VariableLengthRowBasedKeyValueBatch.java
index f4002ee0d5..ea4f984be2 100644
--- a/sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/VariableLengthRowBasedKeyValueBatch.java
+++ b/sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/VariableLengthRowBasedKeyValueBatch.java
@@ -39,7 +39,7 @@ public final class VariableLengthRowBasedKeyValueBatch extends RowBasedKeyValueB
    * Returns an UnsafeRow pointing to the value if succeeds, otherwise returns null.
    */
   @Override
-  public final UnsafeRow appendRow(Object kbase, long koff, int klen,
+  public UnsafeRow appendRow(Object kbase, long koff, int klen,
       Object vbase, long voff, int vlen) {
     final long recordLength = 8 + klen + vlen + 8;
     // if run out of max supported rows or page size, return null
@@ -94,7 +94,7 @@ public final class VariableLengthRowBasedKeyValueBatch extends RowBasedKeyValueB
    * In most times, 1) is skipped because `getKeyRow(id)` is often called before `getValueRow(id)`.
    */
   @Override
-  public final UnsafeRow getValueFromKey(int rowId) {
+  public UnsafeRow getValueFromKey(int rowId) {
     if (keyRowId != rowId) {
       getKeyRow(rowId);
     }
@@ -110,7 +110,7 @@ public final class VariableLengthRowBasedKeyValueBatch extends RowBasedKeyValueB
    * Returns an iterator to go through all rows
    */
   @Override
-  public final org.apache.spark.unsafe.KVIterator<UnsafeRow, UnsafeRow> rowIterator() {
+  public org.apache.spark.unsafe.KVIterator<UnsafeRow, UnsafeRow> rowIterator() {
     return new org.apache.spark.unsafe.KVIterator<UnsafeRow, UnsafeRow>() {
       private final UnsafeRow key = new UnsafeRow(keySchema.length());
       private final UnsafeRow value = new UnsafeRow(valueSchema.length());