author    Dongjoon Hyun <dongjoon@apache.org>    2016-03-09 10:31:26 +0000
committer Sean Owen <sowen@cloudera.com>         2016-03-09 10:31:26 +0000
commit    c3689bc24e03a9471cd6e8169da61963c4528252 (patch)
tree      5d1ee90afa2087ede8e4dbc4dd666d699578c230 /core
parent    cbff2803ef117d7cffe6f05fc1bbd395a1e9c587 (diff)
[SPARK-13702][CORE][SQL][MLLIB] Use diamond operator for generic instance creation in Java code.
## What changes were proposed in this pull request?

In order to make `docs/examples` (and other related code) simpler, more readable, and more user-friendly, this PR replaces existing code like the following with the `diamond` operator.

```
- final ArrayList<Product2<Object, Object>> dataToWrite =
-   new ArrayList<Product2<Object, Object>>();
+ final ArrayList<Product2<Object, Object>> dataToWrite = new ArrayList<>();
```

Java 7 and higher support the **diamond** operator, which replaces the type arguments required to invoke the constructor of a generic class with an empty set of type parameters (`<>`). Currently, Spark's Java code mixes both styles.

## How was this patch tested?

Manual. Pass the existing tests.

Author: Dongjoon Hyun <dongjoon@apache.org>

Closes #11541 from dongjoon-hyun/SPARK-13702.
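For illustration only (this snippet is not part of the patch; the class and variable names are hypothetical), a minimal, self-contained Java sketch of the style change the diamond operator enables:

```java
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class DiamondOperatorExample {
  public static void main(String[] args) {
    // Pre-Java 7 style: type arguments repeated on the constructor call.
    Map<String, List<Integer>> before = new HashMap<String, List<Integer>>();

    // Java 7+ diamond operator: the compiler infers the type arguments from the declared type.
    Map<String, List<Integer>> after = new HashMap<>();

    before.put("a", new ArrayList<Integer>());
    after.put("a", new ArrayList<>());

    // Both maps are equivalent at runtime; the diamond form is simply shorter to read and write.
    System.out.println(before.equals(after)); // true
  }
}
```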
Diffstat (limited to 'core')
-rw-r--r--  core/src/main/java/org/apache/spark/shuffle/sort/ShuffleExternalSorter.java                      4
-rw-r--r--  core/src/main/java/org/apache/spark/status/api/v1/TaskSorting.java                               2
-rw-r--r--  core/src/test/java/org/apache/spark/launcher/SparkLauncherSuite.java                             2
-rw-r--r--  core/src/test/java/org/apache/spark/shuffle/sort/UnsafeShuffleWriterSuite.java                  19
-rw-r--r--  core/src/test/java/org/apache/spark/unsafe/map/AbstractBytesToBytesMapSuite.java                 6
-rw-r--r--  core/src/test/java/org/apache/spark/util/collection/TestTimSort.java                             2
-rw-r--r--  core/src/test/java/org/apache/spark/util/collection/unsafe/sort/UnsafeExternalSorterSuite.java   2
7 files changed, 17 insertions(+), 20 deletions(-)
diff --git a/core/src/main/java/org/apache/spark/shuffle/sort/ShuffleExternalSorter.java b/core/src/main/java/org/apache/spark/shuffle/sort/ShuffleExternalSorter.java
index f97e76d7ed..7a114df2d6 100644
--- a/core/src/main/java/org/apache/spark/shuffle/sort/ShuffleExternalSorter.java
+++ b/core/src/main/java/org/apache/spark/shuffle/sort/ShuffleExternalSorter.java
@@ -84,9 +84,9 @@ final class ShuffleExternalSorter extends MemoryConsumer {
* this might not be necessary if we maintained a pool of re-usable pages in the TaskMemoryManager
* itself).
*/
- private final LinkedList<MemoryBlock> allocatedPages = new LinkedList<MemoryBlock>();
+ private final LinkedList<MemoryBlock> allocatedPages = new LinkedList<>();
- private final LinkedList<SpillInfo> spills = new LinkedList<SpillInfo>();
+ private final LinkedList<SpillInfo> spills = new LinkedList<>();
/** Peak memory used by this sorter so far, in bytes. **/
private long peakMemoryUsedBytes;
diff --git a/core/src/main/java/org/apache/spark/status/api/v1/TaskSorting.java b/core/src/main/java/org/apache/spark/status/api/v1/TaskSorting.java
index f19ed01d5a..0cf84d5f9b 100644
--- a/core/src/main/java/org/apache/spark/status/api/v1/TaskSorting.java
+++ b/core/src/main/java/org/apache/spark/status/api/v1/TaskSorting.java
@@ -29,7 +29,7 @@ public enum TaskSorting {
private final Set<String> alternateNames;
private TaskSorting(String... names) {
- alternateNames = new HashSet<String>();
+ alternateNames = new HashSet<>();
for (String n: names) {
alternateNames.add(n);
}
diff --git a/core/src/test/java/org/apache/spark/launcher/SparkLauncherSuite.java b/core/src/test/java/org/apache/spark/launcher/SparkLauncherSuite.java
index 1692df7d30..3e47bfc274 100644
--- a/core/src/test/java/org/apache/spark/launcher/SparkLauncherSuite.java
+++ b/core/src/test/java/org/apache/spark/launcher/SparkLauncherSuite.java
@@ -88,7 +88,7 @@ public class SparkLauncherSuite {
@Test
public void testChildProcLauncher() throws Exception {
SparkSubmitOptionParser opts = new SparkSubmitOptionParser();
- Map<String, String> env = new HashMap<String, String>();
+ Map<String, String> env = new HashMap<>();
env.put("SPARK_PRINT_LAUNCH_COMMAND", "1");
SparkLauncher launcher = new SparkLauncher(env)
diff --git a/core/src/test/java/org/apache/spark/shuffle/sort/UnsafeShuffleWriterSuite.java b/core/src/test/java/org/apache/spark/shuffle/sort/UnsafeShuffleWriterSuite.java
index add9d937d3..ddea6f5a69 100644
--- a/core/src/test/java/org/apache/spark/shuffle/sort/UnsafeShuffleWriterSuite.java
+++ b/core/src/test/java/org/apache/spark/shuffle/sort/UnsafeShuffleWriterSuite.java
@@ -67,7 +67,7 @@ public class UnsafeShuffleWriterSuite {
File mergedOutputFile;
File tempDir;
long[] partitionSizesInMergedFile;
- final LinkedList<File> spillFilesCreated = new LinkedList<File>();
+ final LinkedList<File> spillFilesCreated = new LinkedList<>();
SparkConf conf;
final Serializer serializer = new KryoSerializer(new SparkConf());
TaskMetrics taskMetrics;
@@ -217,7 +217,7 @@ public class UnsafeShuffleWriterSuite {
}
private List<Tuple2<Object, Object>> readRecordsFromFile() throws IOException {
- final ArrayList<Tuple2<Object, Object>> recordsList = new ArrayList<Tuple2<Object, Object>>();
+ final ArrayList<Tuple2<Object, Object>> recordsList = new ArrayList<>();
long startOffset = 0;
for (int i = 0; i < NUM_PARTITITONS; i++) {
final long partitionSize = partitionSizesInMergedFile[i];
@@ -286,8 +286,7 @@ public class UnsafeShuffleWriterSuite {
@Test
public void writeWithoutSpilling() throws Exception {
// In this example, each partition should have exactly one record:
- final ArrayList<Product2<Object, Object>> dataToWrite =
- new ArrayList<Product2<Object, Object>>();
+ final ArrayList<Product2<Object, Object>> dataToWrite = new ArrayList<>();
for (int i = 0; i < NUM_PARTITITONS; i++) {
dataToWrite.add(new Tuple2<Object, Object>(i, i));
}
@@ -325,8 +324,7 @@ public class UnsafeShuffleWriterSuite {
conf.set("spark.shuffle.compress", "false");
}
final UnsafeShuffleWriter<Object, Object> writer = createWriter(transferToEnabled);
- final ArrayList<Product2<Object, Object>> dataToWrite =
- new ArrayList<Product2<Object, Object>>();
+ final ArrayList<Product2<Object, Object>> dataToWrite = new ArrayList<>();
for (int i : new int[] { 1, 2, 3, 4, 4, 2 }) {
dataToWrite.add(new Tuple2<Object, Object>(i, i));
}
@@ -403,7 +401,7 @@ public class UnsafeShuffleWriterSuite {
public void writeEnoughDataToTriggerSpill() throws Exception {
memoryManager.limit(PackedRecordPointer.MAXIMUM_PAGE_SIZE_BYTES);
final UnsafeShuffleWriter<Object, Object> writer = createWriter(false);
- final ArrayList<Product2<Object, Object>> dataToWrite = new ArrayList<Product2<Object, Object>>();
+ final ArrayList<Product2<Object, Object>> dataToWrite = new ArrayList<>();
final byte[] bigByteArray = new byte[PackedRecordPointer.MAXIMUM_PAGE_SIZE_BYTES / 10];
for (int i = 0; i < 10 + 1; i++) {
dataToWrite.add(new Tuple2<Object, Object>(i, bigByteArray));
@@ -445,8 +443,7 @@ public class UnsafeShuffleWriterSuite {
@Test
public void writeRecordsThatAreBiggerThanDiskWriteBufferSize() throws Exception {
final UnsafeShuffleWriter<Object, Object> writer = createWriter(false);
- final ArrayList<Product2<Object, Object>> dataToWrite =
- new ArrayList<Product2<Object, Object>>();
+ final ArrayList<Product2<Object, Object>> dataToWrite = new ArrayList<>();
final byte[] bytes = new byte[(int) (ShuffleExternalSorter.DISK_WRITE_BUFFER_SIZE * 2.5)];
new Random(42).nextBytes(bytes);
dataToWrite.add(new Tuple2<Object, Object>(1, ByteBuffer.wrap(bytes)));
@@ -461,7 +458,7 @@ public class UnsafeShuffleWriterSuite {
@Test
public void writeRecordsThatAreBiggerThanMaxRecordSize() throws Exception {
final UnsafeShuffleWriter<Object, Object> writer = createWriter(false);
- final ArrayList<Product2<Object, Object>> dataToWrite = new ArrayList<Product2<Object, Object>>();
+ final ArrayList<Product2<Object, Object>> dataToWrite = new ArrayList<>();
dataToWrite.add(new Tuple2<Object, Object>(1, ByteBuffer.wrap(new byte[1])));
// We should be able to write a record that's right _at_ the max record size
final byte[] atMaxRecordSize = new byte[(int) taskMemoryManager.pageSizeBytes() - 4];
@@ -498,7 +495,7 @@ public class UnsafeShuffleWriterSuite {
taskMemoryManager = spy(taskMemoryManager);
when(taskMemoryManager.pageSizeBytes()).thenReturn(pageSizeBytes);
final UnsafeShuffleWriter<Object, Object> writer =
- new UnsafeShuffleWriter<Object, Object>(
+ new UnsafeShuffleWriter<>(
blockManager,
shuffleBlockResolver,
taskMemoryManager,
diff --git a/core/src/test/java/org/apache/spark/unsafe/map/AbstractBytesToBytesMapSuite.java b/core/src/test/java/org/apache/spark/unsafe/map/AbstractBytesToBytesMapSuite.java
index 61b94b736d..9aab2265c9 100644
--- a/core/src/test/java/org/apache/spark/unsafe/map/AbstractBytesToBytesMapSuite.java
+++ b/core/src/test/java/org/apache/spark/unsafe/map/AbstractBytesToBytesMapSuite.java
@@ -66,7 +66,7 @@ public abstract class AbstractBytesToBytesMapSuite {
private TaskMemoryManager taskMemoryManager;
private static final long PAGE_SIZE_BYTES = 1L << 26; // 64 megabytes
- final LinkedList<File> spillFilesCreated = new LinkedList<File>();
+ final LinkedList<File> spillFilesCreated = new LinkedList<>();
File tempDir;
@Mock(answer = RETURNS_SMART_NULLS) BlockManager blockManager;
@@ -397,7 +397,7 @@ public abstract class AbstractBytesToBytesMapSuite {
final int size = 65536;
// Java arrays' hashCodes() aren't based on the arrays' contents, so we need to wrap arrays
// into ByteBuffers in order to use them as keys here.
- final Map<ByteBuffer, byte[]> expected = new HashMap<ByteBuffer, byte[]>();
+ final Map<ByteBuffer, byte[]> expected = new HashMap<>();
final BytesToBytesMap map = new BytesToBytesMap(taskMemoryManager, size, PAGE_SIZE_BYTES);
try {
// Fill the map to 90% full so that we can trigger probing
@@ -453,7 +453,7 @@ public abstract class AbstractBytesToBytesMapSuite {
final BytesToBytesMap map = new BytesToBytesMap(taskMemoryManager, 64, pageSizeBytes);
// Java arrays' hashCodes() aren't based on the arrays' contents, so we need to wrap arrays
// into ByteBuffers in order to use them as keys here.
- final Map<ByteBuffer, byte[]> expected = new HashMap<ByteBuffer, byte[]>();
+ final Map<ByteBuffer, byte[]> expected = new HashMap<>();
try {
for (int i = 0; i < 1000; i++) {
final byte[] key = getRandomByteArray(rand.nextInt(128));
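The test comment above notes that Java arrays use identity-based `hashCode()`/`equals()`, which is why the expected keys are wrapped in `ByteBuffer`s. As a minimal stand-alone sketch of that behavior (not taken from the patch; the class and variable names are hypothetical):

```java
import java.nio.ByteBuffer;
import java.util.HashMap;
import java.util.Map;

public class ByteArrayKeyExample {
  public static void main(String[] args) {
    byte[] a = {1, 2, 3};
    byte[] b = {1, 2, 3};

    // byte[] keys use identity hashCode()/equals(), so equal contents do not match.
    Map<byte[], String> rawKeys = new HashMap<>();
    rawKeys.put(a, "value");
    System.out.println(rawKeys.containsKey(b)); // false

    // ByteBuffer defines content-based hashCode()/equals(), so the same bytes find the entry.
    Map<ByteBuffer, String> wrappedKeys = new HashMap<>();
    wrappedKeys.put(ByteBuffer.wrap(a), "value");
    System.out.println(wrappedKeys.containsKey(ByteBuffer.wrap(b))); // true
  }
}
```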
diff --git a/core/src/test/java/org/apache/spark/util/collection/TestTimSort.java b/core/src/test/java/org/apache/spark/util/collection/TestTimSort.java
index 45772b6d3c..e884b1bc12 100644
--- a/core/src/test/java/org/apache/spark/util/collection/TestTimSort.java
+++ b/core/src/test/java/org/apache/spark/util/collection/TestTimSort.java
@@ -76,7 +76,7 @@ public class TestTimSort {
* @param length The sum of all run lengths that will be added to <code>runs</code>.
*/
private static List<Long> runsJDKWorstCase(int minRun, int length) {
- List<Long> runs = new ArrayList<Long>();
+ List<Long> runs = new ArrayList<>();
long runningTotal = 0, Y = minRun + 4, X = minRun;
diff --git a/core/src/test/java/org/apache/spark/util/collection/unsafe/sort/UnsafeExternalSorterSuite.java b/core/src/test/java/org/apache/spark/util/collection/unsafe/sort/UnsafeExternalSorterSuite.java
index 492fe49ba4..b757ddc3b3 100644
--- a/core/src/test/java/org/apache/spark/util/collection/unsafe/sort/UnsafeExternalSorterSuite.java
+++ b/core/src/test/java/org/apache/spark/util/collection/unsafe/sort/UnsafeExternalSorterSuite.java
@@ -56,7 +56,7 @@ import static org.mockito.Mockito.*;
public class UnsafeExternalSorterSuite {
- final LinkedList<File> spillFilesCreated = new LinkedList<File>();
+ final LinkedList<File> spillFilesCreated = new LinkedList<>();
final TestMemoryManager memoryManager =
new TestMemoryManager(new SparkConf().set("spark.memory.offHeap.enabled", "false"));
final TaskMemoryManager taskMemoryManager = new TaskMemoryManager(memoryManager, 0);