author    Dongjoon Hyun <dongjoon@apache.org>    2016-03-21 07:58:57 +0000
committer Sean Owen <sowen@cloudera.com>    2016-03-21 07:58:57 +0000
commit    20fd254101553cb5a4c932c8d03064899112bee6 (patch)
tree      25b1dd840cd2ec27fd875c3b52987d502e6423f5 /sql/core
parent    e474088144cdd2632cf2fef6b2cf10b3cd191c23 (diff)
[SPARK-14011][CORE][SQL] Enable `LineLength` Java checkstyle rule
## What changes were proposed in this pull request?

The [Spark Coding Style Guide](https://cwiki.apache.org/confluence/display/SPARK/Spark+Code+Style+Guide) has a 100-character limit on lines, but the rule has been disabled for Java since 11/09/15. This PR enables the **LineLength** checkstyle rule again. To support that, it also introduces **RedundantImport** and **RedundantModifier**. The following is the diff on `checkstyle.xml`.

```xml
-        <!-- TODO: 11/09/15 disabled - the lengths are currently > 100 in many places -->
-        <!--
        <module name="LineLength">
            <property name="max" value="100"/>
            <property name="ignorePattern" value="^package.*|^import.*|a href|href|http://|https://|ftp://"/>
        </module>
-        -->
        <module name="NoLineWrap"/>
        <module name="EmptyBlock">
            <property name="option" value="TEXT"/>
@@ -167,5 +164,7 @@
        </module>
        <module name="CommentsIndentation"/>
        <module name="UnusedImports"/>
+        <module name="RedundantImport"/>
+        <module name="RedundantModifier"/>
```

## How was this patch tested?

Currently, `lint-java` is disabled in Jenkins, so this needs a manual test. After the Jenkins tests pass, `dev/lint-java` should pass locally.

Author: Dongjoon Hyun <dongjoon@apache.org>

Closes #11831 from dongjoon-hyun/SPARK-14011.
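Most of the diff below is mechanical fallout from the **RedundantModifier** rule: classes such as `ColumnarBatch`, `OffHeapColumnVector`, and `OnHeapColumnVector` are declared `final`, so an explicit `final` on their instance methods is redundant, and checkstyle now reports it. A minimal sketch of the pattern, using a hypothetical `Example` class rather than anything from this patch:

```java
// Hypothetical class illustrating what checkstyle's RedundantModifier reports.
public final class Example {
  private final int[] data = new int[16];

  // Before: the `final` below is redundant -- no method of a final class can
  // ever be overridden, so checkstyle flags the modifier.
  //   public final int get(int i) { return data[i]; }

  // After: identical semantics, one redundant modifier removed.
  public int get(int i) {
    return data[i];
  }
}
```

Removing the modifier changes nothing at runtime, since `Example` cannot be subclassed either way. The **LineLength** hunks are equally behavior-neutral: they only re-wrap javadoc and test code to fit within the 100-character limit.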
Diffstat (limited to 'sql/core')
-rw-r--r--  sql/core/src/main/java/org/apache/spark/sql/execution/UnsafeFixedWidthAggregationMap.java   4
-rw-r--r--  sql/core/src/main/java/org/apache/spark/sql/execution/UnsafeKVExternalSorter.java            2
-rw-r--r--  sql/core/src/main/java/org/apache/spark/sql/execution/vectorized/ColumnVector.java           6
-rw-r--r--  sql/core/src/main/java/org/apache/spark/sql/execution/vectorized/ColumnarBatch.java         44
-rw-r--r--  sql/core/src/main/java/org/apache/spark/sql/execution/vectorized/OffHeapColumnVector.java   90
-rw-r--r--  sql/core/src/main/java/org/apache/spark/sql/execution/vectorized/OnHeapColumnVector.java    92
-rw-r--r--  sql/core/src/test/java/test/org/apache/spark/sql/JavaApplySchemaSuite.java                  13
-rw-r--r--  sql/core/src/test/java/test/org/apache/spark/sql/JavaDataFrameSuite.java                     3
8 files changed, 128 insertions, 126 deletions
diff --git a/sql/core/src/main/java/org/apache/spark/sql/execution/UnsafeFixedWidthAggregationMap.java b/sql/core/src/main/java/org/apache/spark/sql/execution/UnsafeFixedWidthAggregationMap.java
index 57e8218f3b..acf6c583bb 100644
--- a/sql/core/src/main/java/org/apache/spark/sql/execution/UnsafeFixedWidthAggregationMap.java
+++ b/sql/core/src/main/java/org/apache/spark/sql/execution/UnsafeFixedWidthAggregationMap.java
@@ -236,8 +236,8 @@ public final class UnsafeFixedWidthAggregationMap {
/**
* Sorts the map's records in place, spills them to disk, and returns an [[UnsafeKVExternalSorter]]
*
- * Note that the map will be reset for inserting new records, and the returned sorter can NOT be used
- * to insert records.
+ * Note that the map will be reset for inserting new records, and the returned sorter can NOT be
+ * used to insert records.
*/
public UnsafeKVExternalSorter destructAndCreateExternalSorter() throws IOException {
return new UnsafeKVExternalSorter(
diff --git a/sql/core/src/main/java/org/apache/spark/sql/execution/UnsafeKVExternalSorter.java b/sql/core/src/main/java/org/apache/spark/sql/execution/UnsafeKVExternalSorter.java
index 51e10b0e93..9e08675c3e 100644
--- a/sql/core/src/main/java/org/apache/spark/sql/execution/UnsafeKVExternalSorter.java
+++ b/sql/core/src/main/java/org/apache/spark/sql/execution/UnsafeKVExternalSorter.java
@@ -198,7 +198,7 @@ public final class UnsafeKVExternalSorter {
private final UnsafeRow row2;
private final int numKeyFields;
- public KVComparator(BaseOrdering ordering, int numKeyFields) {
+ KVComparator(BaseOrdering ordering, int numKeyFields) {
this.numKeyFields = numKeyFields;
this.row1 = new UnsafeRow(numKeyFields);
this.row2 = new UnsafeRow(numKeyFields);
diff --git a/sql/core/src/main/java/org/apache/spark/sql/execution/vectorized/ColumnVector.java b/sql/core/src/main/java/org/apache/spark/sql/execution/vectorized/ColumnVector.java
index ffcc9c2ace..04adf1fb6d 100644
--- a/sql/core/src/main/java/org/apache/spark/sql/execution/vectorized/ColumnVector.java
+++ b/sql/core/src/main/java/org/apache/spark/sql/execution/vectorized/ColumnVector.java
@@ -94,7 +94,7 @@ public abstract class ColumnVector {
}
@Override
- public final int numElements() { return length; }
+ public int numElements() { return length; }
@Override
public ArrayData copy() {
@@ -175,10 +175,10 @@ public abstract class ColumnVector {
}
@Override
- public final boolean isNullAt(int ordinal) { return data.getIsNull(offset + ordinal); }
+ public boolean isNullAt(int ordinal) { return data.getIsNull(offset + ordinal); }
@Override
- public final boolean getBoolean(int ordinal) {
+ public boolean getBoolean(int ordinal) {
throw new NotImplementedException();
}
diff --git a/sql/core/src/main/java/org/apache/spark/sql/execution/vectorized/ColumnarBatch.java b/sql/core/src/main/java/org/apache/spark/sql/execution/vectorized/ColumnarBatch.java
index c462ab1a13..b6fa9a0b9e 100644
--- a/sql/core/src/main/java/org/apache/spark/sql/execution/vectorized/ColumnarBatch.java
+++ b/sql/core/src/main/java/org/apache/spark/sql/execution/vectorized/ColumnarBatch.java
@@ -115,20 +115,20 @@ public final class ColumnarBatch {
* Marks this row as being filtered out. This means a subsequent iteration over the rows
* in this batch will not include this row.
*/
- public final void markFiltered() {
+ public void markFiltered() {
parent.markFiltered(rowId);
}
public ColumnVector[] columns() { return columns; }
@Override
- public final int numFields() { return columns.length; }
+ public int numFields() { return columns.length; }
@Override
/**
* Revisit this. This is expensive. This is currently only used in test paths.
*/
- public final InternalRow copy() {
+ public InternalRow copy() {
GenericMutableRow row = new GenericMutableRow(columns.length);
for (int i = 0; i < numFields(); i++) {
if (isNullAt(i)) {
@@ -163,73 +163,73 @@ public final class ColumnarBatch {
}
@Override
- public final boolean anyNull() {
+ public boolean anyNull() {
throw new NotImplementedException();
}
@Override
- public final boolean isNullAt(int ordinal) { return columns[ordinal].getIsNull(rowId); }
+ public boolean isNullAt(int ordinal) { return columns[ordinal].getIsNull(rowId); }
@Override
- public final boolean getBoolean(int ordinal) { return columns[ordinal].getBoolean(rowId); }
+ public boolean getBoolean(int ordinal) { return columns[ordinal].getBoolean(rowId); }
@Override
- public final byte getByte(int ordinal) { return columns[ordinal].getByte(rowId); }
+ public byte getByte(int ordinal) { return columns[ordinal].getByte(rowId); }
@Override
- public final short getShort(int ordinal) { return columns[ordinal].getShort(rowId); }
+ public short getShort(int ordinal) { return columns[ordinal].getShort(rowId); }
@Override
- public final int getInt(int ordinal) { return columns[ordinal].getInt(rowId); }
+ public int getInt(int ordinal) { return columns[ordinal].getInt(rowId); }
@Override
- public final long getLong(int ordinal) { return columns[ordinal].getLong(rowId); }
+ public long getLong(int ordinal) { return columns[ordinal].getLong(rowId); }
@Override
- public final float getFloat(int ordinal) { return columns[ordinal].getFloat(rowId); }
+ public float getFloat(int ordinal) { return columns[ordinal].getFloat(rowId); }
@Override
- public final double getDouble(int ordinal) { return columns[ordinal].getDouble(rowId); }
+ public double getDouble(int ordinal) { return columns[ordinal].getDouble(rowId); }
@Override
- public final Decimal getDecimal(int ordinal, int precision, int scale) {
+ public Decimal getDecimal(int ordinal, int precision, int scale) {
return columns[ordinal].getDecimal(rowId, precision, scale);
}
@Override
- public final UTF8String getUTF8String(int ordinal) {
+ public UTF8String getUTF8String(int ordinal) {
return columns[ordinal].getUTF8String(rowId);
}
@Override
- public final byte[] getBinary(int ordinal) {
+ public byte[] getBinary(int ordinal) {
return columns[ordinal].getBinary(rowId);
}
@Override
- public final CalendarInterval getInterval(int ordinal) {
+ public CalendarInterval getInterval(int ordinal) {
final int months = columns[ordinal].getChildColumn(0).getInt(rowId);
final long microseconds = columns[ordinal].getChildColumn(1).getLong(rowId);
return new CalendarInterval(months, microseconds);
}
@Override
- public final InternalRow getStruct(int ordinal, int numFields) {
+ public InternalRow getStruct(int ordinal, int numFields) {
return columns[ordinal].getStruct(rowId);
}
@Override
- public final ArrayData getArray(int ordinal) {
+ public ArrayData getArray(int ordinal) {
return columns[ordinal].getArray(rowId);
}
@Override
- public final MapData getMap(int ordinal) {
+ public MapData getMap(int ordinal) {
throw new NotImplementedException();
}
@Override
- public final Object get(int ordinal, DataType dataType) {
+ public Object get(int ordinal, DataType dataType) {
throw new NotImplementedException();
}
}
@@ -357,7 +357,7 @@ public final class ColumnarBatch {
* Marks this row as being filtered out. This means a subsequent iteration over the rows
* in this batch will not include this row.
*/
- public final void markFiltered(int rowId) {
+ public void markFiltered(int rowId) {
assert(!filteredRows[rowId]);
filteredRows[rowId] = true;
++numRowsFiltered;
@@ -367,7 +367,7 @@ public final class ColumnarBatch {
* Marks a given column as non-nullable. Any row that has a NULL value for the corresponding
* attribute is filtered out.
*/
- public final void filterNullsInColumn(int ordinal) {
+ public void filterNullsInColumn(int ordinal) {
nullFilteredColumns.add(ordinal);
}
diff --git a/sql/core/src/main/java/org/apache/spark/sql/execution/vectorized/OffHeapColumnVector.java b/sql/core/src/main/java/org/apache/spark/sql/execution/vectorized/OffHeapColumnVector.java
index b06b7f2457..d5a9163274 100644
--- a/sql/core/src/main/java/org/apache/spark/sql/execution/vectorized/OffHeapColumnVector.java
+++ b/sql/core/src/main/java/org/apache/spark/sql/execution/vectorized/OffHeapColumnVector.java
@@ -52,7 +52,7 @@ public final class OffHeapColumnVector extends ColumnVector {
}
@Override
- public final long valuesNativeAddress() {
+ public long valuesNativeAddress() {
return data;
}
@@ -62,7 +62,7 @@ public final class OffHeapColumnVector extends ColumnVector {
}
@Override
- public final void close() {
+ public void close() {
Platform.freeMemory(nulls);
Platform.freeMemory(data);
Platform.freeMemory(lengthData);
@@ -78,19 +78,19 @@ public final class OffHeapColumnVector extends ColumnVector {
//
@Override
- public final void putNotNull(int rowId) {
+ public void putNotNull(int rowId) {
Platform.putByte(null, nulls + rowId, (byte) 0);
}
@Override
- public final void putNull(int rowId) {
+ public void putNull(int rowId) {
Platform.putByte(null, nulls + rowId, (byte) 1);
++numNulls;
anyNullsSet = true;
}
@Override
- public final void putNulls(int rowId, int count) {
+ public void putNulls(int rowId, int count) {
long offset = nulls + rowId;
for (int i = 0; i < count; ++i, ++offset) {
Platform.putByte(null, offset, (byte) 1);
@@ -100,7 +100,7 @@ public final class OffHeapColumnVector extends ColumnVector {
}
@Override
- public final void putNotNulls(int rowId, int count) {
+ public void putNotNulls(int rowId, int count) {
if (!anyNullsSet) return;
long offset = nulls + rowId;
for (int i = 0; i < count; ++i, ++offset) {
@@ -109,7 +109,7 @@ public final class OffHeapColumnVector extends ColumnVector {
}
@Override
- public final boolean getIsNull(int rowId) {
+ public boolean getIsNull(int rowId) {
return Platform.getByte(null, nulls + rowId) == 1;
}
@@ -118,12 +118,12 @@ public final class OffHeapColumnVector extends ColumnVector {
//
@Override
- public final void putBoolean(int rowId, boolean value) {
+ public void putBoolean(int rowId, boolean value) {
Platform.putByte(null, data + rowId, (byte)((value) ? 1 : 0));
}
@Override
- public final void putBooleans(int rowId, int count, boolean value) {
+ public void putBooleans(int rowId, int count, boolean value) {
byte v = (byte)((value) ? 1 : 0);
for (int i = 0; i < count; ++i) {
Platform.putByte(null, data + rowId + i, v);
@@ -131,32 +131,32 @@ public final class OffHeapColumnVector extends ColumnVector {
}
@Override
- public final boolean getBoolean(int rowId) { return Platform.getByte(null, data + rowId) == 1; }
+ public boolean getBoolean(int rowId) { return Platform.getByte(null, data + rowId) == 1; }
//
// APIs dealing with Bytes
//
@Override
- public final void putByte(int rowId, byte value) {
+ public void putByte(int rowId, byte value) {
Platform.putByte(null, data + rowId, value);
}
@Override
- public final void putBytes(int rowId, int count, byte value) {
+ public void putBytes(int rowId, int count, byte value) {
for (int i = 0; i < count; ++i) {
Platform.putByte(null, data + rowId + i, value);
}
}
@Override
- public final void putBytes(int rowId, int count, byte[] src, int srcIndex) {
+ public void putBytes(int rowId, int count, byte[] src, int srcIndex) {
Platform.copyMemory(src, Platform.BYTE_ARRAY_OFFSET + srcIndex, null, data + rowId, count);
}
@Override
- public final byte getByte(int rowId) {
+ public byte getByte(int rowId) {
if (dictionary == null) {
return Platform.getByte(null, data + rowId);
} else {
@@ -169,12 +169,12 @@ public final class OffHeapColumnVector extends ColumnVector {
//
@Override
- public final void putShort(int rowId, short value) {
+ public void putShort(int rowId, short value) {
Platform.putShort(null, data + 2 * rowId, value);
}
@Override
- public final void putShorts(int rowId, int count, short value) {
+ public void putShorts(int rowId, int count, short value) {
long offset = data + 2 * rowId;
for (int i = 0; i < count; ++i, offset += 2) {
Platform.putShort(null, offset, value);
@@ -182,13 +182,13 @@ public final class OffHeapColumnVector extends ColumnVector {
}
@Override
- public final void putShorts(int rowId, int count, short[] src, int srcIndex) {
+ public void putShorts(int rowId, int count, short[] src, int srcIndex) {
Platform.copyMemory(src, Platform.SHORT_ARRAY_OFFSET + srcIndex * 2,
null, data + 2 * rowId, count * 2);
}
@Override
- public final short getShort(int rowId) {
+ public short getShort(int rowId) {
if (dictionary == null) {
return Platform.getShort(null, data + 2 * rowId);
} else {
@@ -201,12 +201,12 @@ public final class OffHeapColumnVector extends ColumnVector {
//
@Override
- public final void putInt(int rowId, int value) {
+ public void putInt(int rowId, int value) {
Platform.putInt(null, data + 4 * rowId, value);
}
@Override
- public final void putInts(int rowId, int count, int value) {
+ public void putInts(int rowId, int count, int value) {
long offset = data + 4 * rowId;
for (int i = 0; i < count; ++i, offset += 4) {
Platform.putInt(null, offset, value);
@@ -214,19 +214,19 @@ public final class OffHeapColumnVector extends ColumnVector {
}
@Override
- public final void putInts(int rowId, int count, int[] src, int srcIndex) {
+ public void putInts(int rowId, int count, int[] src, int srcIndex) {
Platform.copyMemory(src, Platform.INT_ARRAY_OFFSET + srcIndex * 4,
null, data + 4 * rowId, count * 4);
}
@Override
- public final void putIntsLittleEndian(int rowId, int count, byte[] src, int srcIndex) {
+ public void putIntsLittleEndian(int rowId, int count, byte[] src, int srcIndex) {
Platform.copyMemory(src, srcIndex + Platform.BYTE_ARRAY_OFFSET,
null, data + 4 * rowId, count * 4);
}
@Override
- public final int getInt(int rowId) {
+ public int getInt(int rowId) {
if (dictionary == null) {
return Platform.getInt(null, data + 4 * rowId);
} else {
@@ -239,12 +239,12 @@ public final class OffHeapColumnVector extends ColumnVector {
//
@Override
- public final void putLong(int rowId, long value) {
+ public void putLong(int rowId, long value) {
Platform.putLong(null, data + 8 * rowId, value);
}
@Override
- public final void putLongs(int rowId, int count, long value) {
+ public void putLongs(int rowId, int count, long value) {
long offset = data + 8 * rowId;
for (int i = 0; i < count; ++i, offset += 8) {
Platform.putLong(null, offset, value);
@@ -252,19 +252,19 @@ public final class OffHeapColumnVector extends ColumnVector {
}
@Override
- public final void putLongs(int rowId, int count, long[] src, int srcIndex) {
+ public void putLongs(int rowId, int count, long[] src, int srcIndex) {
Platform.copyMemory(src, Platform.LONG_ARRAY_OFFSET + srcIndex * 8,
null, data + 8 * rowId, count * 8);
}
@Override
- public final void putLongsLittleEndian(int rowId, int count, byte[] src, int srcIndex) {
+ public void putLongsLittleEndian(int rowId, int count, byte[] src, int srcIndex) {
Platform.copyMemory(src, srcIndex + Platform.BYTE_ARRAY_OFFSET,
null, data + 8 * rowId, count * 8);
}
@Override
- public final long getLong(int rowId) {
+ public long getLong(int rowId) {
if (dictionary == null) {
return Platform.getLong(null, data + 8 * rowId);
} else {
@@ -277,12 +277,12 @@ public final class OffHeapColumnVector extends ColumnVector {
//
@Override
- public final void putFloat(int rowId, float value) {
+ public void putFloat(int rowId, float value) {
Platform.putFloat(null, data + rowId * 4, value);
}
@Override
- public final void putFloats(int rowId, int count, float value) {
+ public void putFloats(int rowId, int count, float value) {
long offset = data + 4 * rowId;
for (int i = 0; i < count; ++i, offset += 4) {
Platform.putFloat(null, offset, value);
@@ -290,19 +290,19 @@ public final class OffHeapColumnVector extends ColumnVector {
}
@Override
- public final void putFloats(int rowId, int count, float[] src, int srcIndex) {
+ public void putFloats(int rowId, int count, float[] src, int srcIndex) {
Platform.copyMemory(src, Platform.FLOAT_ARRAY_OFFSET + srcIndex * 4,
null, data + 4 * rowId, count * 4);
}
@Override
- public final void putFloats(int rowId, int count, byte[] src, int srcIndex) {
+ public void putFloats(int rowId, int count, byte[] src, int srcIndex) {
Platform.copyMemory(src, Platform.BYTE_ARRAY_OFFSET + srcIndex,
null, data + rowId * 4, count * 4);
}
@Override
- public final float getFloat(int rowId) {
+ public float getFloat(int rowId) {
if (dictionary == null) {
return Platform.getFloat(null, data + rowId * 4);
} else {
@@ -316,12 +316,12 @@ public final class OffHeapColumnVector extends ColumnVector {
//
@Override
- public final void putDouble(int rowId, double value) {
+ public void putDouble(int rowId, double value) {
Platform.putDouble(null, data + rowId * 8, value);
}
@Override
- public final void putDoubles(int rowId, int count, double value) {
+ public void putDoubles(int rowId, int count, double value) {
long offset = data + 8 * rowId;
for (int i = 0; i < count; ++i, offset += 8) {
Platform.putDouble(null, offset, value);
@@ -329,19 +329,19 @@ public final class OffHeapColumnVector extends ColumnVector {
}
@Override
- public final void putDoubles(int rowId, int count, double[] src, int srcIndex) {
+ public void putDoubles(int rowId, int count, double[] src, int srcIndex) {
Platform.copyMemory(src, Platform.DOUBLE_ARRAY_OFFSET + srcIndex * 8,
null, data + 8 * rowId, count * 8);
}
@Override
- public final void putDoubles(int rowId, int count, byte[] src, int srcIndex) {
+ public void putDoubles(int rowId, int count, byte[] src, int srcIndex) {
Platform.copyMemory(src, Platform.BYTE_ARRAY_OFFSET + srcIndex,
null, data + rowId * 8, count * 8);
}
@Override
- public final double getDouble(int rowId) {
+ public double getDouble(int rowId) {
if (dictionary == null) {
return Platform.getDouble(null, data + rowId * 8);
} else {
@@ -353,25 +353,25 @@ public final class OffHeapColumnVector extends ColumnVector {
// APIs dealing with Arrays.
//
@Override
- public final void putArray(int rowId, int offset, int length) {
+ public void putArray(int rowId, int offset, int length) {
assert(offset >= 0 && offset + length <= childColumns[0].capacity);
Platform.putInt(null, lengthData + 4 * rowId, length);
Platform.putInt(null, offsetData + 4 * rowId, offset);
}
@Override
- public final int getArrayLength(int rowId) {
+ public int getArrayLength(int rowId) {
return Platform.getInt(null, lengthData + 4 * rowId);
}
@Override
- public final int getArrayOffset(int rowId) {
+ public int getArrayOffset(int rowId) {
return Platform.getInt(null, offsetData + 4 * rowId);
}
// APIs dealing with ByteArrays
@Override
- public final int putByteArray(int rowId, byte[] value, int offset, int length) {
+ public int putByteArray(int rowId, byte[] value, int offset, int length) {
int result = arrayData().appendBytes(length, value, offset);
Platform.putInt(null, lengthData + 4 * rowId, length);
Platform.putInt(null, offsetData + 4 * rowId, result);
@@ -379,7 +379,7 @@ public final class OffHeapColumnVector extends ColumnVector {
}
@Override
- public final void loadBytes(ColumnVector.Array array) {
+ public void loadBytes(ColumnVector.Array array) {
if (array.tmpByteArray.length < array.length) array.tmpByteArray = new byte[array.length];
Platform.copyMemory(
null, data + array.offset, array.tmpByteArray, Platform.BYTE_ARRAY_OFFSET, array.length);
@@ -388,12 +388,12 @@ public final class OffHeapColumnVector extends ColumnVector {
}
@Override
- public final void reserve(int requiredCapacity) {
+ public void reserve(int requiredCapacity) {
if (requiredCapacity > capacity) reserveInternal(requiredCapacity * 2);
}
// Split out the slow path.
- private final void reserveInternal(int newCapacity) {
+ private void reserveInternal(int newCapacity) {
if (this.resultArray != null) {
this.lengthData =
Platform.reallocateMemory(lengthData, elementsAppended * 4, newCapacity * 4);
diff --git a/sql/core/src/main/java/org/apache/spark/sql/execution/vectorized/OnHeapColumnVector.java b/sql/core/src/main/java/org/apache/spark/sql/execution/vectorized/OnHeapColumnVector.java
index 03160d1ec3..5b671a7432 100644
--- a/sql/core/src/main/java/org/apache/spark/sql/execution/vectorized/OnHeapColumnVector.java
+++ b/sql/core/src/main/java/org/apache/spark/sql/execution/vectorized/OnHeapColumnVector.java
@@ -52,16 +52,16 @@ public final class OnHeapColumnVector extends ColumnVector {
}
@Override
- public final long valuesNativeAddress() {
+ public long valuesNativeAddress() {
throw new RuntimeException("Cannot get native address for on heap column");
}
@Override
- public final long nullsNativeAddress() {
+ public long nullsNativeAddress() {
throw new RuntimeException("Cannot get native address for on heap column");
}
@Override
- public final void close() {
+ public void close() {
}
//
@@ -69,19 +69,19 @@ public final class OnHeapColumnVector extends ColumnVector {
//
@Override
- public final void putNotNull(int rowId) {
+ public void putNotNull(int rowId) {
nulls[rowId] = (byte)0;
}
@Override
- public final void putNull(int rowId) {
+ public void putNull(int rowId) {
nulls[rowId] = (byte)1;
++numNulls;
anyNullsSet = true;
}
@Override
- public final void putNulls(int rowId, int count) {
+ public void putNulls(int rowId, int count) {
for (int i = 0; i < count; ++i) {
nulls[rowId + i] = (byte)1;
}
@@ -90,7 +90,7 @@ public final class OnHeapColumnVector extends ColumnVector {
}
@Override
- public final void putNotNulls(int rowId, int count) {
+ public void putNotNulls(int rowId, int count) {
if (!anyNullsSet) return;
for (int i = 0; i < count; ++i) {
nulls[rowId + i] = (byte)0;
@@ -98,7 +98,7 @@ public final class OnHeapColumnVector extends ColumnVector {
}
@Override
- public final boolean getIsNull(int rowId) {
+ public boolean getIsNull(int rowId) {
return nulls[rowId] == 1;
}
@@ -107,12 +107,12 @@ public final class OnHeapColumnVector extends ColumnVector {
//
@Override
- public final void putBoolean(int rowId, boolean value) {
+ public void putBoolean(int rowId, boolean value) {
byteData[rowId] = (byte)((value) ? 1 : 0);
}
@Override
- public final void putBooleans(int rowId, int count, boolean value) {
+ public void putBooleans(int rowId, int count, boolean value) {
byte v = (byte)((value) ? 1 : 0);
for (int i = 0; i < count; ++i) {
byteData[i + rowId] = v;
@@ -120,7 +120,7 @@ public final class OnHeapColumnVector extends ColumnVector {
}
@Override
- public final boolean getBoolean(int rowId) {
+ public boolean getBoolean(int rowId) {
return byteData[rowId] == 1;
}
@@ -131,24 +131,24 @@ public final class OnHeapColumnVector extends ColumnVector {
//
@Override
- public final void putByte(int rowId, byte value) {
+ public void putByte(int rowId, byte value) {
byteData[rowId] = value;
}
@Override
- public final void putBytes(int rowId, int count, byte value) {
+ public void putBytes(int rowId, int count, byte value) {
for (int i = 0; i < count; ++i) {
byteData[i + rowId] = value;
}
}
@Override
- public final void putBytes(int rowId, int count, byte[] src, int srcIndex) {
+ public void putBytes(int rowId, int count, byte[] src, int srcIndex) {
System.arraycopy(src, srcIndex, byteData, rowId, count);
}
@Override
- public final byte getByte(int rowId) {
+ public byte getByte(int rowId) {
if (dictionary == null) {
return byteData[rowId];
} else {
@@ -161,24 +161,24 @@ public final class OnHeapColumnVector extends ColumnVector {
//
@Override
- public final void putShort(int rowId, short value) {
+ public void putShort(int rowId, short value) {
shortData[rowId] = value;
}
@Override
- public final void putShorts(int rowId, int count, short value) {
+ public void putShorts(int rowId, int count, short value) {
for (int i = 0; i < count; ++i) {
shortData[i + rowId] = value;
}
}
@Override
- public final void putShorts(int rowId, int count, short[] src, int srcIndex) {
+ public void putShorts(int rowId, int count, short[] src, int srcIndex) {
System.arraycopy(src, srcIndex, shortData, rowId, count);
}
@Override
- public final short getShort(int rowId) {
+ public short getShort(int rowId) {
if (dictionary == null) {
return shortData[rowId];
} else {
@@ -192,24 +192,24 @@ public final class OnHeapColumnVector extends ColumnVector {
//
@Override
- public final void putInt(int rowId, int value) {
+ public void putInt(int rowId, int value) {
intData[rowId] = value;
}
@Override
- public final void putInts(int rowId, int count, int value) {
+ public void putInts(int rowId, int count, int value) {
for (int i = 0; i < count; ++i) {
intData[i + rowId] = value;
}
}
@Override
- public final void putInts(int rowId, int count, int[] src, int srcIndex) {
+ public void putInts(int rowId, int count, int[] src, int srcIndex) {
System.arraycopy(src, srcIndex, intData, rowId, count);
}
@Override
- public final void putIntsLittleEndian(int rowId, int count, byte[] src, int srcIndex) {
+ public void putIntsLittleEndian(int rowId, int count, byte[] src, int srcIndex) {
int srcOffset = srcIndex + Platform.BYTE_ARRAY_OFFSET;
for (int i = 0; i < count; ++i) {
intData[i + rowId] = Platform.getInt(src, srcOffset);
@@ -219,7 +219,7 @@ public final class OnHeapColumnVector extends ColumnVector {
}
@Override
- public final int getInt(int rowId) {
+ public int getInt(int rowId) {
if (dictionary == null) {
return intData[rowId];
} else {
@@ -232,24 +232,24 @@ public final class OnHeapColumnVector extends ColumnVector {
//
@Override
- public final void putLong(int rowId, long value) {
+ public void putLong(int rowId, long value) {
longData[rowId] = value;
}
@Override
- public final void putLongs(int rowId, int count, long value) {
+ public void putLongs(int rowId, int count, long value) {
for (int i = 0; i < count; ++i) {
longData[i + rowId] = value;
}
}
@Override
- public final void putLongs(int rowId, int count, long[] src, int srcIndex) {
+ public void putLongs(int rowId, int count, long[] src, int srcIndex) {
System.arraycopy(src, srcIndex, longData, rowId, count);
}
@Override
- public final void putLongsLittleEndian(int rowId, int count, byte[] src, int srcIndex) {
+ public void putLongsLittleEndian(int rowId, int count, byte[] src, int srcIndex) {
int srcOffset = srcIndex + Platform.BYTE_ARRAY_OFFSET;
for (int i = 0; i < count; ++i) {
longData[i + rowId] = Platform.getLong(src, srcOffset);
@@ -259,7 +259,7 @@ public final class OnHeapColumnVector extends ColumnVector {
}
@Override
- public final long getLong(int rowId) {
+ public long getLong(int rowId) {
if (dictionary == null) {
return longData[rowId];
} else {
@@ -272,26 +272,26 @@ public final class OnHeapColumnVector extends ColumnVector {
//
@Override
- public final void putFloat(int rowId, float value) { floatData[rowId] = value; }
+ public void putFloat(int rowId, float value) { floatData[rowId] = value; }
@Override
- public final void putFloats(int rowId, int count, float value) {
+ public void putFloats(int rowId, int count, float value) {
Arrays.fill(floatData, rowId, rowId + count, value);
}
@Override
- public final void putFloats(int rowId, int count, float[] src, int srcIndex) {
+ public void putFloats(int rowId, int count, float[] src, int srcIndex) {
System.arraycopy(src, srcIndex, floatData, rowId, count);
}
@Override
- public final void putFloats(int rowId, int count, byte[] src, int srcIndex) {
+ public void putFloats(int rowId, int count, byte[] src, int srcIndex) {
Platform.copyMemory(src, Platform.BYTE_ARRAY_OFFSET + srcIndex,
floatData, Platform.FLOAT_ARRAY_OFFSET + rowId * 4, count * 4);
}
@Override
- public final float getFloat(int rowId) {
+ public float getFloat(int rowId) {
if (dictionary == null) {
return floatData[rowId];
} else {
@@ -304,28 +304,28 @@ public final class OnHeapColumnVector extends ColumnVector {
//
@Override
- public final void putDouble(int rowId, double value) {
+ public void putDouble(int rowId, double value) {
doubleData[rowId] = value;
}
@Override
- public final void putDoubles(int rowId, int count, double value) {
+ public void putDoubles(int rowId, int count, double value) {
Arrays.fill(doubleData, rowId, rowId + count, value);
}
@Override
- public final void putDoubles(int rowId, int count, double[] src, int srcIndex) {
+ public void putDoubles(int rowId, int count, double[] src, int srcIndex) {
System.arraycopy(src, srcIndex, doubleData, rowId, count);
}
@Override
- public final void putDoubles(int rowId, int count, byte[] src, int srcIndex) {
+ public void putDoubles(int rowId, int count, byte[] src, int srcIndex) {
Platform.copyMemory(src, Platform.BYTE_ARRAY_OFFSET + srcIndex, doubleData,
Platform.DOUBLE_ARRAY_OFFSET + rowId * 8, count * 8);
}
@Override
- public final double getDouble(int rowId) {
+ public double getDouble(int rowId) {
if (dictionary == null) {
return doubleData[rowId];
} else {
@@ -338,22 +338,22 @@ public final class OnHeapColumnVector extends ColumnVector {
//
@Override
- public final int getArrayLength(int rowId) {
+ public int getArrayLength(int rowId) {
return arrayLengths[rowId];
}
@Override
- public final int getArrayOffset(int rowId) {
+ public int getArrayOffset(int rowId) {
return arrayOffsets[rowId];
}
@Override
- public final void putArray(int rowId, int offset, int length) {
+ public void putArray(int rowId, int offset, int length) {
arrayOffsets[rowId] = offset;
arrayLengths[rowId] = length;
}
@Override
- public final void loadBytes(ColumnVector.Array array) {
+ public void loadBytes(ColumnVector.Array array) {
array.byteArray = byteData;
array.byteArrayOffset = array.offset;
}
@@ -363,7 +363,7 @@ public final class OnHeapColumnVector extends ColumnVector {
//
@Override
- public final int putByteArray(int rowId, byte[] value, int offset, int length) {
+ public int putByteArray(int rowId, byte[] value, int offset, int length) {
int result = arrayData().appendBytes(length, value, offset);
arrayOffsets[rowId] = result;
arrayLengths[rowId] = length;
@@ -371,12 +371,12 @@ public final class OnHeapColumnVector extends ColumnVector {
}
@Override
- public final void reserve(int requiredCapacity) {
+ public void reserve(int requiredCapacity) {
if (requiredCapacity > capacity) reserveInternal(requiredCapacity * 2);
}
// Split this function out since it is the slow path.
- private final void reserveInternal(int newCapacity) {
+ private void reserveInternal(int newCapacity) {
if (this.resultArray != null || DecimalType.isByteArrayDecimalType(type)) {
int[] newLengths = new int[newCapacity];
int[] newOffsets = new int[newCapacity];
diff --git a/sql/core/src/test/java/test/org/apache/spark/sql/JavaApplySchemaSuite.java b/sql/core/src/test/java/test/org/apache/spark/sql/JavaApplySchemaSuite.java
index ae9c8cc1ba..189cc3972c 100644
--- a/sql/core/src/test/java/test/org/apache/spark/sql/JavaApplySchemaSuite.java
+++ b/sql/core/src/test/java/test/org/apache/spark/sql/JavaApplySchemaSuite.java
@@ -145,12 +145,13 @@ public class JavaApplySchemaSuite implements Serializable {
Dataset<Row> df = sqlContext.createDataFrame(rowRDD, schema);
df.registerTempTable("people");
- List<String> actual = sqlContext.sql("SELECT * FROM people").toJavaRDD().map(new Function<Row, String>() {
- @Override
- public String call(Row row) {
- return row.getString(0) + "_" + row.get(1);
- }
- }).collect();
+ List<String> actual = sqlContext.sql("SELECT * FROM people").toJavaRDD()
+ .map(new Function<Row, String>() {
+ @Override
+ public String call(Row row) {
+ return row.getString(0) + "_" + row.get(1);
+ }
+ }).collect();
List<String> expected = new ArrayList<>(2);
expected.add("Michael_29");
diff --git a/sql/core/src/test/java/test/org/apache/spark/sql/JavaDataFrameSuite.java b/sql/core/src/test/java/test/org/apache/spark/sql/JavaDataFrameSuite.java
index f3c5a86e20..cf764c645f 100644
--- a/sql/core/src/test/java/test/org/apache/spark/sql/JavaDataFrameSuite.java
+++ b/sql/core/src/test/java/test/org/apache/spark/sql/JavaDataFrameSuite.java
@@ -220,7 +220,8 @@ public class JavaDataFrameSuite {
StructType schema1 = StructType$.MODULE$.apply(fields1);
Assert.assertEquals(0, schema1.fieldIndex("id"));
- List<StructField> fields2 = Arrays.asList(new StructField("id", DataTypes.StringType, true, Metadata.empty()));
+ List<StructField> fields2 =
+ Arrays.asList(new StructField("id", DataTypes.StringType, true, Metadata.empty()));
StructType schema2 = StructType$.MODULE$.apply(fields2);
Assert.assertEquals(0, schema2.fieldIndex("id"));
}