author     Sean Owen <sowen@cloudera.com>    2016-03-03 09:54:09 +0000
committer  Sean Owen <sowen@cloudera.com>    2016-03-03 09:54:09 +0000
commit     e97fc7f176f8bf501c9b3afd8410014e3b0e1602 (patch)
tree       23a11a3646b13195aaf50078a0f35fad96190618 /core/src/test/java
parent     02b7677e9584f5ccd68869abdb0bf980dc847ce1 (diff)
[SPARK-13423][WIP][CORE][SQL][STREAMING] Static analysis fixes for 2.x
## What changes were proposed in this pull request?

Make some cross-cutting code improvements according to static analysis. These are individually up for discussion since they exist in separate commits that can be reverted. The changes are broadly:

- Inner class should be static
- Mismatched hashCode/equals
- Overflow in compareTo
- Unchecked warnings
- Misuse of assert, vs junit.assert
- get(a) + getOrElse(b) -> getOrElse(a,b)
- Array/String .size -> .length (occasionally, -> .isEmpty / .nonEmpty) to avoid implicit conversions
- Dead code
- tailrec
- exists(_ == ) -> contains; find + nonEmpty -> exists; filter + size -> count
- reduce(_+_) -> sum; map + flatten -> map

The most controversial may be .size -> .length, simply because of the sheer size of the change. It is intended to avoid implicits that might be expensive in some places.

## How was this patch tested?

Existing Jenkins unit tests.

Author: Sean Owen <sowen@cloudera.com>

Closes #11292 from srowen/SPARK-13423.
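Of the items above, the "Misuse of assert, vs junit.assert" change is the one this particular diff exercises: the Java `assert` statement is a no-op unless the JVM runs with `-ea`, so a failing condition in a test can pass silently, whereas JUnit's `Assert` methods always execute and report the expected and actual values. A minimal sketch of the before/after pattern, using a hypothetical `Counter` helper that is not part of the patch:

```java
import org.junit.Assert;
import org.junit.Test;

public class AssertStyleSuite {

  // Hypothetical helper used only to illustrate the refactoring; not from the patch.
  // Declared static, in line with the "Inner class should be static" item above.
  static class Counter {
    private int used = 0;
    void use(int amount) { used += amount; }
    int getUsed() { return used; }
  }

  @Test
  public void junitAssertAlwaysRuns() {
    Counter c = new Counter();
    c.use(100);

    // Before: a plain Java assert, silently skipped unless the JVM is started with -ea.
    // assert(c.getUsed() == 100);

    // After: JUnit's Assert always runs and, on failure, reports
    // "expected:<100> but was:<...>" rather than a bare AssertionError.
    Assert.assertEquals(100, c.getUsed());
  }
}
```

The same reasoning explains why several hunks below prefer `assertEquals(expected, actual)` over `assertTrue(a == b)`: the failure message then carries the mismatched values.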
Diffstat (limited to 'core/src/test/java')
-rw-r--r--  core/src/test/java/org/apache/spark/memory/TaskMemoryManagerSuite.java                          | 36
-rw-r--r--  core/src/test/java/org/apache/spark/shuffle/sort/ShuffleInMemorySorterSuite.java                |  2
-rw-r--r--  core/src/test/java/org/apache/spark/shuffle/sort/UnsafeShuffleWriterSuite.java                  |  6
-rw-r--r--  core/src/test/java/org/apache/spark/util/collection/unsafe/sort/UnsafeExternalSorterSuite.java  | 22
-rw-r--r--  core/src/test/java/org/apache/spark/util/collection/unsafe/sort/UnsafeInMemorySorterSuite.java  |  3
5 files changed, 35 insertions, 34 deletions
diff --git a/core/src/test/java/org/apache/spark/memory/TaskMemoryManagerSuite.java b/core/src/test/java/org/apache/spark/memory/TaskMemoryManagerSuite.java
index 776a2997cf..127789b632 100644
--- a/core/src/test/java/org/apache/spark/memory/TaskMemoryManagerSuite.java
+++ b/core/src/test/java/org/apache/spark/memory/TaskMemoryManagerSuite.java
@@ -73,37 +73,37 @@ public class TaskMemoryManagerSuite {
TestMemoryConsumer c1 = new TestMemoryConsumer(manager);
TestMemoryConsumer c2 = new TestMemoryConsumer(manager);
c1.use(100);
- assert(c1.getUsed() == 100);
+ Assert.assertEquals(100, c1.getUsed());
c2.use(100);
- assert(c2.getUsed() == 100);
- assert(c1.getUsed() == 0); // spilled
+ Assert.assertEquals(100, c2.getUsed());
+ Assert.assertEquals(0, c1.getUsed()); // spilled
c1.use(100);
- assert(c1.getUsed() == 100);
- assert(c2.getUsed() == 0); // spilled
+ Assert.assertEquals(100, c1.getUsed());
+ Assert.assertEquals(0, c2.getUsed()); // spilled
c1.use(50);
- assert(c1.getUsed() == 50); // spilled
- assert(c2.getUsed() == 0);
+ Assert.assertEquals(50, c1.getUsed()); // spilled
+ Assert.assertEquals(0, c2.getUsed());
c2.use(50);
- assert(c1.getUsed() == 50);
- assert(c2.getUsed() == 50);
+ Assert.assertEquals(50, c1.getUsed());
+ Assert.assertEquals(50, c2.getUsed());
c1.use(100);
- assert(c1.getUsed() == 100);
- assert(c2.getUsed() == 0); // spilled
+ Assert.assertEquals(100, c1.getUsed());
+ Assert.assertEquals(0, c2.getUsed()); // spilled
c1.free(20);
- assert(c1.getUsed() == 80);
+ Assert.assertEquals(80, c1.getUsed());
c2.use(10);
- assert(c1.getUsed() == 80);
- assert(c2.getUsed() == 10);
+ Assert.assertEquals(80, c1.getUsed());
+ Assert.assertEquals(10, c2.getUsed());
c2.use(100);
- assert(c2.getUsed() == 100);
- assert(c1.getUsed() == 0); // spilled
+ Assert.assertEquals(100, c2.getUsed());
+ Assert.assertEquals(0, c1.getUsed()); // spilled
c1.free(0);
c2.free(100);
- assert(manager.cleanUpAllAllocatedMemory() == 0);
+ Assert.assertEquals(0, manager.cleanUpAllAllocatedMemory());
}
@Test
@@ -114,7 +114,7 @@ public class TaskMemoryManagerSuite {
.set("spark.unsafe.offHeap", "true")
.set("spark.memory.offHeap.size", "1000");
final TaskMemoryManager manager = new TaskMemoryManager(new TestMemoryManager(conf), 0);
- assert(manager.tungstenMemoryMode == MemoryMode.OFF_HEAP);
+ Assert.assertSame(MemoryMode.OFF_HEAP, manager.tungstenMemoryMode);
}
}
diff --git a/core/src/test/java/org/apache/spark/shuffle/sort/ShuffleInMemorySorterSuite.java b/core/src/test/java/org/apache/spark/shuffle/sort/ShuffleInMemorySorterSuite.java
index eb1da8e1b4..b4fa33f32a 100644
--- a/core/src/test/java/org/apache/spark/shuffle/sort/ShuffleInMemorySorterSuite.java
+++ b/core/src/test/java/org/apache/spark/shuffle/sort/ShuffleInMemorySorterSuite.java
@@ -48,7 +48,7 @@ public class ShuffleInMemorySorterSuite {
public void testSortingEmptyInput() {
final ShuffleInMemorySorter sorter = new ShuffleInMemorySorter(consumer, 100);
final ShuffleInMemorySorter.ShuffleSorterIterator iter = sorter.getSortedIterator();
- assert(!iter.hasNext());
+ Assert.assertFalse(iter.hasNext());
}
@Test
diff --git a/core/src/test/java/org/apache/spark/shuffle/sort/UnsafeShuffleWriterSuite.java b/core/src/test/java/org/apache/spark/shuffle/sort/UnsafeShuffleWriterSuite.java
index 876c3a2283..add9d937d3 100644
--- a/core/src/test/java/org/apache/spark/shuffle/sort/UnsafeShuffleWriterSuite.java
+++ b/core/src/test/java/org/apache/spark/shuffle/sort/UnsafeShuffleWriterSuite.java
@@ -139,7 +139,7 @@ public class UnsafeShuffleWriterSuite {
new Answer<InputStream>() {
@Override
public InputStream answer(InvocationOnMock invocation) throws Throwable {
- assert (invocation.getArguments()[0] instanceof TempShuffleBlockId);
+ assertTrue(invocation.getArguments()[0] instanceof TempShuffleBlockId);
InputStream is = (InputStream) invocation.getArguments()[1];
if (conf.getBoolean("spark.shuffle.compress", true)) {
return CompressionCodec$.MODULE$.createCodec(conf).compressedInputStream(is);
@@ -154,7 +154,7 @@ public class UnsafeShuffleWriterSuite {
new Answer<OutputStream>() {
@Override
public OutputStream answer(InvocationOnMock invocation) throws Throwable {
- assert (invocation.getArguments()[0] instanceof TempShuffleBlockId);
+ assertTrue(invocation.getArguments()[0] instanceof TempShuffleBlockId);
OutputStream os = (OutputStream) invocation.getArguments()[1];
if (conf.getBoolean("spark.shuffle.compress", true)) {
return CompressionCodec$.MODULE$.createCodec(conf).compressedOutputStream(os);
@@ -252,7 +252,7 @@ public class UnsafeShuffleWriterSuite {
createWriter(false).stop(false);
}
- class PandaException extends RuntimeException {
+ static class PandaException extends RuntimeException {
}
@Test(expected=PandaException.class)
diff --git a/core/src/test/java/org/apache/spark/util/collection/unsafe/sort/UnsafeExternalSorterSuite.java b/core/src/test/java/org/apache/spark/util/collection/unsafe/sort/UnsafeExternalSorterSuite.java
index 32f5a1a7e6..492fe49ba4 100644
--- a/core/src/test/java/org/apache/spark/util/collection/unsafe/sort/UnsafeExternalSorterSuite.java
+++ b/core/src/test/java/org/apache/spark/util/collection/unsafe/sort/UnsafeExternalSorterSuite.java
@@ -323,23 +323,23 @@ public class UnsafeExternalSorterSuite {
record[0] = (long) i;
sorter.insertRecord(record, Platform.LONG_ARRAY_OFFSET, recordSize, 0);
}
- assert(sorter.getNumberOfAllocatedPages() >= 2);
+ assertTrue(sorter.getNumberOfAllocatedPages() >= 2);
UnsafeExternalSorter.SpillableIterator iter =
(UnsafeExternalSorter.SpillableIterator) sorter.getSortedIterator();
int lastv = 0;
for (int i = 0; i < n / 3; i++) {
iter.hasNext();
iter.loadNext();
- assert(Platform.getLong(iter.getBaseObject(), iter.getBaseOffset()) == i);
+ assertTrue(Platform.getLong(iter.getBaseObject(), iter.getBaseOffset()) == i);
lastv = i;
}
- assert(iter.spill() > 0);
- assert(iter.spill() == 0);
- assert(Platform.getLong(iter.getBaseObject(), iter.getBaseOffset()) == lastv);
+ assertTrue(iter.spill() > 0);
+ assertEquals(0, iter.spill());
+ assertTrue(Platform.getLong(iter.getBaseObject(), iter.getBaseOffset()) == lastv);
for (int i = n / 3; i < n; i++) {
iter.hasNext();
iter.loadNext();
- assert(Platform.getLong(iter.getBaseObject(), iter.getBaseOffset()) == i);
+ assertEquals(i, Platform.getLong(iter.getBaseObject(), iter.getBaseOffset()));
}
sorter.cleanupResources();
assertSpillFilesWereCleanedUp();
@@ -355,15 +355,15 @@ public class UnsafeExternalSorterSuite {
record[0] = (long) i;
sorter.insertRecord(record, Platform.LONG_ARRAY_OFFSET, recordSize, 0);
}
- assert(sorter.getNumberOfAllocatedPages() >= 2);
+ assertTrue(sorter.getNumberOfAllocatedPages() >= 2);
UnsafeExternalSorter.SpillableIterator iter =
(UnsafeExternalSorter.SpillableIterator) sorter.getSortedIterator();
- assert(iter.spill() > 0);
- assert(iter.spill() == 0);
+ assertTrue(iter.spill() > 0);
+ assertEquals(0, iter.spill());
for (int i = 0; i < n; i++) {
iter.hasNext();
iter.loadNext();
- assert(Platform.getLong(iter.getBaseObject(), iter.getBaseOffset()) == i);
+ assertEquals(i, Platform.getLong(iter.getBaseObject(), iter.getBaseOffset()));
}
sorter.cleanupResources();
assertSpillFilesWereCleanedUp();
@@ -394,7 +394,7 @@ public class UnsafeExternalSorterSuite {
for (int i = 0; i < n; i++) {
iter.hasNext();
iter.loadNext();
- assert(Platform.getLong(iter.getBaseObject(), iter.getBaseOffset()) == i);
+ assertEquals(i, Platform.getLong(iter.getBaseObject(), iter.getBaseOffset()));
}
sorter.cleanupResources();
assertSpillFilesWereCleanedUp();
diff --git a/core/src/test/java/org/apache/spark/util/collection/unsafe/sort/UnsafeInMemorySorterSuite.java b/core/src/test/java/org/apache/spark/util/collection/unsafe/sort/UnsafeInMemorySorterSuite.java
index 8e557ec0ab..ff41768df1 100644
--- a/core/src/test/java/org/apache/spark/util/collection/unsafe/sort/UnsafeInMemorySorterSuite.java
+++ b/core/src/test/java/org/apache/spark/util/collection/unsafe/sort/UnsafeInMemorySorterSuite.java
@@ -19,6 +19,7 @@ package org.apache.spark.util.collection.unsafe.sort;
import java.util.Arrays;
+import org.junit.Assert;
import org.junit.Test;
import org.apache.spark.HashPartitioner;
@@ -54,7 +55,7 @@ public class UnsafeInMemorySorterSuite {
mock(PrefixComparator.class),
100);
final UnsafeSorterIterator iter = sorter.getSortedIterator();
- assert(!iter.hasNext());
+ Assert.assertFalse(iter.hasNext());
}
@Test