author     Reynold Xin <rxin@databricks.com>    2015-08-11 08:41:06 -0700
committer  Davies Liu <davies.liu@gmail.com>    2015-08-11 08:41:06 -0700
commit     d378396f86f625f006738d87fe5dbc2ff8fd913d (patch)
tree       488e09f59d69d52f776b7587c9c0afead051b541 /core
parent     600031ebe27473d8fffe6ea436c2149223b82896 (diff)
[SPARK-9815] Rename PlatformDependent.UNSAFE -> Platform.
PlatformDependent.UNSAFE is way too verbose.

Author: Reynold Xin <rxin@databricks.com>

Closes #8094 from rxin/SPARK-9815 and squashes the following commits:

229b603 [Reynold Xin] [SPARK-9815] Rename PlatformDependent.UNSAFE -> Platform.
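For orientation, here is a minimal sketch (not part of the commit) of the record-read pattern that recurs throughout the hunks below, written against the renamed API. RecordCopyExample and readRecord are hypothetical names used only for illustration; Platform.getInt, Platform.copyMemory, and Platform.BYTE_ARRAY_OFFSET are the calls this patch introduces in place of PlatformDependent.UNSAFE.getInt, PlatformDependent.copyMemory, and PlatformDependent.BYTE_ARRAY_OFFSET.

import org.apache.spark.unsafe.Platform;

// Hypothetical helper showing the before/after shape of a typical call site.
final class RecordCopyExample {
  // Reads the 4-byte length header stored at `offset` within `page`, then
  // copies that many bytes into a fresh array -- the pattern this diff
  // rewrites in the unsafe sorters and BytesToBytesMap.
  static byte[] readRecord(Object page, long offset) {
    // Old form: PlatformDependent.UNSAFE.getInt(page, offset)
    final int length = Platform.getInt(page, offset);
    final byte[] out = new byte[length];
    // Old form: PlatformDependent.copyMemory(page, offset + 4, out,
    //           PlatformDependent.BYTE_ARRAY_OFFSET, length)
    Platform.copyMemory(page, offset + 4, out, Platform.BYTE_ARRAY_OFFSET, length);
    return out;
  }
}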
Diffstat (limited to 'core')
-rw-r--r--  core/src/main/java/org/apache/spark/serializer/DummySerializerInstance.java                       |  6
-rw-r--r--  core/src/main/java/org/apache/spark/shuffle/unsafe/UnsafeShuffleExternalSorter.java                | 22
-rw-r--r--  core/src/main/java/org/apache/spark/shuffle/unsafe/UnsafeShuffleWriter.java                        |  4
-rw-r--r--  core/src/main/java/org/apache/spark/unsafe/map/BytesToBytesMap.java                                | 20
-rw-r--r--  core/src/main/java/org/apache/spark/util/collection/unsafe/sort/PrefixComparators.java             |  5
-rw-r--r--  core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeExternalSorter.java          | 22
-rw-r--r--  core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeInMemorySorter.java          |  4
-rw-r--r--  core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeSorterSpillReader.java       |  4
-rw-r--r--  core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeSorterSpillWriter.java       |  6
-rw-r--r--  core/src/test/java/org/apache/spark/shuffle/unsafe/UnsafeShuffleInMemorySorterSuite.java           | 20
-rw-r--r--  core/src/test/java/org/apache/spark/unsafe/map/AbstractBytesToBytesMapSuite.java                   | 94
-rw-r--r--  core/src/test/java/org/apache/spark/util/collection/unsafe/sort/UnsafeExternalSorterSuite.java     | 20
-rw-r--r--  core/src/test/java/org/apache/spark/util/collection/unsafe/sort/UnsafeInMemorySorterSuite.java     | 20
13 files changed, 105 insertions(+), 142 deletions(-)
diff --git a/core/src/main/java/org/apache/spark/serializer/DummySerializerInstance.java b/core/src/main/java/org/apache/spark/serializer/DummySerializerInstance.java
index 0399abc63c..0e58bb4f71 100644
--- a/core/src/main/java/org/apache/spark/serializer/DummySerializerInstance.java
+++ b/core/src/main/java/org/apache/spark/serializer/DummySerializerInstance.java
@@ -25,7 +25,7 @@ import java.nio.ByteBuffer;
import scala.reflect.ClassTag;
import org.apache.spark.annotation.Private;
-import org.apache.spark.unsafe.PlatformDependent;
+import org.apache.spark.unsafe.Platform;
/**
* Unfortunately, we need a serializer instance in order to construct a DiskBlockObjectWriter.
@@ -49,7 +49,7 @@ public final class DummySerializerInstance extends SerializerInstance {
try {
s.flush();
} catch (IOException e) {
- PlatformDependent.throwException(e);
+ Platform.throwException(e);
}
}
@@ -64,7 +64,7 @@ public final class DummySerializerInstance extends SerializerInstance {
try {
s.close();
} catch (IOException e) {
- PlatformDependent.throwException(e);
+ Platform.throwException(e);
}
}
};
diff --git a/core/src/main/java/org/apache/spark/shuffle/unsafe/UnsafeShuffleExternalSorter.java b/core/src/main/java/org/apache/spark/shuffle/unsafe/UnsafeShuffleExternalSorter.java
index 925b60a145..3d1ef0c48a 100644
--- a/core/src/main/java/org/apache/spark/shuffle/unsafe/UnsafeShuffleExternalSorter.java
+++ b/core/src/main/java/org/apache/spark/shuffle/unsafe/UnsafeShuffleExternalSorter.java
@@ -37,7 +37,7 @@ import org.apache.spark.shuffle.ShuffleMemoryManager;
import org.apache.spark.storage.BlockManager;
import org.apache.spark.storage.DiskBlockObjectWriter;
import org.apache.spark.storage.TempShuffleBlockId;
-import org.apache.spark.unsafe.PlatformDependent;
+import org.apache.spark.unsafe.Platform;
import org.apache.spark.unsafe.array.ByteArrayMethods;
import org.apache.spark.unsafe.memory.MemoryBlock;
import org.apache.spark.unsafe.memory.TaskMemoryManager;
@@ -211,16 +211,12 @@ final class UnsafeShuffleExternalSorter {
final long recordPointer = sortedRecords.packedRecordPointer.getRecordPointer();
final Object recordPage = taskMemoryManager.getPage(recordPointer);
final long recordOffsetInPage = taskMemoryManager.getOffsetInPage(recordPointer);
- int dataRemaining = PlatformDependent.UNSAFE.getInt(recordPage, recordOffsetInPage);
+ int dataRemaining = Platform.getInt(recordPage, recordOffsetInPage);
long recordReadPosition = recordOffsetInPage + 4; // skip over record length
while (dataRemaining > 0) {
final int toTransfer = Math.min(DISK_WRITE_BUFFER_SIZE, dataRemaining);
- PlatformDependent.copyMemory(
- recordPage,
- recordReadPosition,
- writeBuffer,
- PlatformDependent.BYTE_ARRAY_OFFSET,
- toTransfer);
+ Platform.copyMemory(
+ recordPage, recordReadPosition, writeBuffer, Platform.BYTE_ARRAY_OFFSET, toTransfer);
writer.write(writeBuffer, 0, toTransfer);
recordReadPosition += toTransfer;
dataRemaining -= toTransfer;
@@ -447,14 +443,10 @@ final class UnsafeShuffleExternalSorter {
final long recordAddress =
taskMemoryManager.encodePageNumberAndOffset(dataPage, dataPagePosition);
- PlatformDependent.UNSAFE.putInt(dataPageBaseObject, dataPagePosition, lengthInBytes);
+ Platform.putInt(dataPageBaseObject, dataPagePosition, lengthInBytes);
dataPagePosition += 4;
- PlatformDependent.copyMemory(
- recordBaseObject,
- recordBaseOffset,
- dataPageBaseObject,
- dataPagePosition,
- lengthInBytes);
+ Platform.copyMemory(
+ recordBaseObject, recordBaseOffset, dataPageBaseObject, dataPagePosition, lengthInBytes);
assert(inMemSorter != null);
inMemSorter.insertRecord(recordAddress, partitionId);
}
diff --git a/core/src/main/java/org/apache/spark/shuffle/unsafe/UnsafeShuffleWriter.java b/core/src/main/java/org/apache/spark/shuffle/unsafe/UnsafeShuffleWriter.java
index 02084f9122..2389c28b28 100644
--- a/core/src/main/java/org/apache/spark/shuffle/unsafe/UnsafeShuffleWriter.java
+++ b/core/src/main/java/org/apache/spark/shuffle/unsafe/UnsafeShuffleWriter.java
@@ -53,7 +53,7 @@ import org.apache.spark.shuffle.ShuffleMemoryManager;
import org.apache.spark.shuffle.ShuffleWriter;
import org.apache.spark.storage.BlockManager;
import org.apache.spark.storage.TimeTrackingOutputStream;
-import org.apache.spark.unsafe.PlatformDependent;
+import org.apache.spark.unsafe.Platform;
import org.apache.spark.unsafe.memory.TaskMemoryManager;
@Private
@@ -244,7 +244,7 @@ public class UnsafeShuffleWriter<K, V> extends ShuffleWriter<K, V> {
assert (serializedRecordSize > 0);
sorter.insertRecord(
- serBuffer.getBuf(), PlatformDependent.BYTE_ARRAY_OFFSET, serializedRecordSize, partitionId);
+ serBuffer.getBuf(), Platform.BYTE_ARRAY_OFFSET, serializedRecordSize, partitionId);
}
@VisibleForTesting
diff --git a/core/src/main/java/org/apache/spark/unsafe/map/BytesToBytesMap.java b/core/src/main/java/org/apache/spark/unsafe/map/BytesToBytesMap.java
index 7f79cd13aa..85b46ec8bf 100644
--- a/core/src/main/java/org/apache/spark/unsafe/map/BytesToBytesMap.java
+++ b/core/src/main/java/org/apache/spark/unsafe/map/BytesToBytesMap.java
@@ -270,10 +270,10 @@ public final class BytesToBytesMap {
@Override
public Location next() {
- int totalLength = PlatformDependent.UNSAFE.getInt(pageBaseObject, offsetInPage);
+ int totalLength = Platform.getInt(pageBaseObject, offsetInPage);
if (totalLength == END_OF_PAGE_MARKER) {
advanceToNextPage();
- totalLength = PlatformDependent.UNSAFE.getInt(pageBaseObject, offsetInPage);
+ totalLength = Platform.getInt(pageBaseObject, offsetInPage);
}
loc.with(currentPage, offsetInPage);
offsetInPage += 4 + totalLength;
@@ -402,9 +402,9 @@ public final class BytesToBytesMap {
private void updateAddressesAndSizes(final Object page, final long offsetInPage) {
long position = offsetInPage;
- final int totalLength = PlatformDependent.UNSAFE.getInt(page, position);
+ final int totalLength = Platform.getInt(page, position);
position += 4;
- keyLength = PlatformDependent.UNSAFE.getInt(page, position);
+ keyLength = Platform.getInt(page, position);
position += 4;
valueLength = totalLength - keyLength - 4;
@@ -572,7 +572,7 @@ public final class BytesToBytesMap {
// There wasn't enough space in the current page, so write an end-of-page marker:
final Object pageBaseObject = currentDataPage.getBaseObject();
final long lengthOffsetInPage = currentDataPage.getBaseOffset() + pageCursor;
- PlatformDependent.UNSAFE.putInt(pageBaseObject, lengthOffsetInPage, END_OF_PAGE_MARKER);
+ Platform.putInt(pageBaseObject, lengthOffsetInPage, END_OF_PAGE_MARKER);
}
final long memoryGranted = shuffleMemoryManager.tryToAcquire(pageSizeBytes);
if (memoryGranted != pageSizeBytes) {
@@ -608,21 +608,21 @@ public final class BytesToBytesMap {
final long valueDataOffsetInPage = insertCursor;
insertCursor += valueLengthBytes; // word used to store the value size
- PlatformDependent.UNSAFE.putInt(dataPageBaseObject, recordOffset,
+ Platform.putInt(dataPageBaseObject, recordOffset,
keyLengthBytes + valueLengthBytes + 4);
- PlatformDependent.UNSAFE.putInt(dataPageBaseObject, keyLengthOffset, keyLengthBytes);
+ Platform.putInt(dataPageBaseObject, keyLengthOffset, keyLengthBytes);
// Copy the key
- PlatformDependent.copyMemory(
+ Platform.copyMemory(
keyBaseObject, keyBaseOffset, dataPageBaseObject, keyDataOffsetInPage, keyLengthBytes);
// Copy the value
- PlatformDependent.copyMemory(valueBaseObject, valueBaseOffset, dataPageBaseObject,
+ Platform.copyMemory(valueBaseObject, valueBaseOffset, dataPageBaseObject,
valueDataOffsetInPage, valueLengthBytes);
// --- Update bookeeping data structures -----------------------------------------------------
if (useOverflowPage) {
// Store the end-of-page marker at the end of the data page
- PlatformDependent.UNSAFE.putInt(dataPageBaseObject, insertCursor, END_OF_PAGE_MARKER);
+ Platform.putInt(dataPageBaseObject, insertCursor, END_OF_PAGE_MARKER);
} else {
pageCursor += requiredSize;
}
diff --git a/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/PrefixComparators.java b/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/PrefixComparators.java
index 5e002ae1b7..71b76d5ddf 100644
--- a/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/PrefixComparators.java
+++ b/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/PrefixComparators.java
@@ -20,10 +20,9 @@ package org.apache.spark.util.collection.unsafe.sort;
import com.google.common.primitives.UnsignedLongs;
import org.apache.spark.annotation.Private;
-import org.apache.spark.unsafe.PlatformDependent;
+import org.apache.spark.unsafe.Platform;
import org.apache.spark.unsafe.types.UTF8String;
import org.apache.spark.util.Utils;
-import static org.apache.spark.unsafe.PlatformDependent.BYTE_ARRAY_OFFSET;
@Private
public class PrefixComparators {
@@ -73,7 +72,7 @@ public class PrefixComparators {
final int minLen = Math.min(bytes.length, 8);
long p = 0;
for (int i = 0; i < minLen; ++i) {
- p |= (128L + PlatformDependent.UNSAFE.getByte(bytes, BYTE_ARRAY_OFFSET + i))
+ p |= (128L + Platform.getByte(bytes, Platform.BYTE_ARRAY_OFFSET + i))
<< (56 - 8 * i);
}
return p;
diff --git a/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeExternalSorter.java b/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeExternalSorter.java
index 5ebbf9b068..9601aafe55 100644
--- a/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeExternalSorter.java
+++ b/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeExternalSorter.java
@@ -35,7 +35,7 @@ import org.apache.spark.executor.ShuffleWriteMetrics;
import org.apache.spark.shuffle.ShuffleMemoryManager;
import org.apache.spark.storage.BlockManager;
import org.apache.spark.unsafe.array.ByteArrayMethods;
-import org.apache.spark.unsafe.PlatformDependent;
+import org.apache.spark.unsafe.Platform;
import org.apache.spark.unsafe.memory.MemoryBlock;
import org.apache.spark.unsafe.memory.TaskMemoryManager;
import org.apache.spark.util.Utils;
@@ -427,14 +427,10 @@ public final class UnsafeExternalSorter {
final long recordAddress =
taskMemoryManager.encodePageNumberAndOffset(dataPage, dataPagePosition);
- PlatformDependent.UNSAFE.putInt(dataPageBaseObject, dataPagePosition, lengthInBytes);
+ Platform.putInt(dataPageBaseObject, dataPagePosition, lengthInBytes);
dataPagePosition += 4;
- PlatformDependent.copyMemory(
- recordBaseObject,
- recordBaseOffset,
- dataPageBaseObject,
- dataPagePosition,
- lengthInBytes);
+ Platform.copyMemory(
+ recordBaseObject, recordBaseOffset, dataPageBaseObject, dataPagePosition, lengthInBytes);
assert(inMemSorter != null);
inMemSorter.insertRecord(recordAddress, prefix);
}
@@ -493,18 +489,16 @@ public final class UnsafeExternalSorter {
final long recordAddress =
taskMemoryManager.encodePageNumberAndOffset(dataPage, dataPagePosition);
- PlatformDependent.UNSAFE.putInt(dataPageBaseObject, dataPagePosition, keyLen + valueLen + 4);
+ Platform.putInt(dataPageBaseObject, dataPagePosition, keyLen + valueLen + 4);
dataPagePosition += 4;
- PlatformDependent.UNSAFE.putInt(dataPageBaseObject, dataPagePosition, keyLen);
+ Platform.putInt(dataPageBaseObject, dataPagePosition, keyLen);
dataPagePosition += 4;
- PlatformDependent.copyMemory(
- keyBaseObj, keyOffset, dataPageBaseObject, dataPagePosition, keyLen);
+ Platform.copyMemory(keyBaseObj, keyOffset, dataPageBaseObject, dataPagePosition, keyLen);
dataPagePosition += keyLen;
- PlatformDependent.copyMemory(
- valueBaseObj, valueOffset, dataPageBaseObject, dataPagePosition, valueLen);
+ Platform.copyMemory(valueBaseObj, valueOffset, dataPageBaseObject, dataPagePosition, valueLen);
assert(inMemSorter != null);
inMemSorter.insertRecord(recordAddress, prefix);
diff --git a/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeInMemorySorter.java b/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeInMemorySorter.java
index 1e4b8a116e..f7787e1019 100644
--- a/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeInMemorySorter.java
+++ b/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeInMemorySorter.java
@@ -19,7 +19,7 @@ package org.apache.spark.util.collection.unsafe.sort;
import java.util.Comparator;
-import org.apache.spark.unsafe.PlatformDependent;
+import org.apache.spark.unsafe.Platform;
import org.apache.spark.util.collection.Sorter;
import org.apache.spark.unsafe.memory.TaskMemoryManager;
@@ -164,7 +164,7 @@ public final class UnsafeInMemorySorter {
final long recordPointer = sortBuffer[position];
baseObject = memoryManager.getPage(recordPointer);
baseOffset = memoryManager.getOffsetInPage(recordPointer) + 4; // Skip over record length
- recordLength = PlatformDependent.UNSAFE.getInt(baseObject, baseOffset - 4);
+ recordLength = Platform.getInt(baseObject, baseOffset - 4);
keyPrefix = sortBuffer[position + 1];
position += 2;
}
diff --git a/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeSorterSpillReader.java b/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeSorterSpillReader.java
index ca1ccedc93..4989b05d63 100644
--- a/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeSorterSpillReader.java
+++ b/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeSorterSpillReader.java
@@ -23,7 +23,7 @@ import com.google.common.io.ByteStreams;
import org.apache.spark.storage.BlockId;
import org.apache.spark.storage.BlockManager;
-import org.apache.spark.unsafe.PlatformDependent;
+import org.apache.spark.unsafe.Platform;
/**
* Reads spill files written by {@link UnsafeSorterSpillWriter} (see that class for a description
@@ -42,7 +42,7 @@ final class UnsafeSorterSpillReader extends UnsafeSorterIterator {
private byte[] arr = new byte[1024 * 1024];
private Object baseObject = arr;
- private final long baseOffset = PlatformDependent.BYTE_ARRAY_OFFSET;
+ private final long baseOffset = Platform.BYTE_ARRAY_OFFSET;
public UnsafeSorterSpillReader(
BlockManager blockManager,
diff --git a/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeSorterSpillWriter.java b/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeSorterSpillWriter.java
index 44cf6c756d..e59a84ff8d 100644
--- a/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeSorterSpillWriter.java
+++ b/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeSorterSpillWriter.java
@@ -28,7 +28,7 @@ import org.apache.spark.storage.BlockId;
import org.apache.spark.storage.BlockManager;
import org.apache.spark.storage.DiskBlockObjectWriter;
import org.apache.spark.storage.TempLocalBlockId;
-import org.apache.spark.unsafe.PlatformDependent;
+import org.apache.spark.unsafe.Platform;
/**
* Spills a list of sorted records to disk. Spill files have the following format:
@@ -117,11 +117,11 @@ final class UnsafeSorterSpillWriter {
long recordReadPosition = baseOffset;
while (dataRemaining > 0) {
final int toTransfer = Math.min(freeSpaceInWriteBuffer, dataRemaining);
- PlatformDependent.copyMemory(
+ Platform.copyMemory(
baseObject,
recordReadPosition,
writeBuffer,
- PlatformDependent.BYTE_ARRAY_OFFSET + (DISK_WRITE_BUFFER_SIZE - freeSpaceInWriteBuffer),
+ Platform.BYTE_ARRAY_OFFSET + (DISK_WRITE_BUFFER_SIZE - freeSpaceInWriteBuffer),
toTransfer);
writer.write(writeBuffer, 0, (DISK_WRITE_BUFFER_SIZE - freeSpaceInWriteBuffer) + toTransfer);
recordReadPosition += toTransfer;
diff --git a/core/src/test/java/org/apache/spark/shuffle/unsafe/UnsafeShuffleInMemorySorterSuite.java b/core/src/test/java/org/apache/spark/shuffle/unsafe/UnsafeShuffleInMemorySorterSuite.java
index 8fa72597db..40fefe2c9d 100644
--- a/core/src/test/java/org/apache/spark/shuffle/unsafe/UnsafeShuffleInMemorySorterSuite.java
+++ b/core/src/test/java/org/apache/spark/shuffle/unsafe/UnsafeShuffleInMemorySorterSuite.java
@@ -24,7 +24,7 @@ import org.junit.Assert;
import org.junit.Test;
import org.apache.spark.HashPartitioner;
-import org.apache.spark.unsafe.PlatformDependent;
+import org.apache.spark.unsafe.Platform;
import org.apache.spark.unsafe.memory.ExecutorMemoryManager;
import org.apache.spark.unsafe.memory.MemoryAllocator;
import org.apache.spark.unsafe.memory.MemoryBlock;
@@ -34,11 +34,7 @@ public class UnsafeShuffleInMemorySorterSuite {
private static String getStringFromDataPage(Object baseObject, long baseOffset, int strLength) {
final byte[] strBytes = new byte[strLength];
- PlatformDependent.copyMemory(
- baseObject,
- baseOffset,
- strBytes,
- PlatformDependent.BYTE_ARRAY_OFFSET, strLength);
+ Platform.copyMemory(baseObject, baseOffset, strBytes, Platform.BYTE_ARRAY_OFFSET, strLength);
return new String(strBytes);
}
@@ -74,14 +70,10 @@ public class UnsafeShuffleInMemorySorterSuite {
for (String str : dataToSort) {
final long recordAddress = memoryManager.encodePageNumberAndOffset(dataPage, position);
final byte[] strBytes = str.getBytes("utf-8");
- PlatformDependent.UNSAFE.putInt(baseObject, position, strBytes.length);
+ Platform.putInt(baseObject, position, strBytes.length);
position += 4;
- PlatformDependent.copyMemory(
- strBytes,
- PlatformDependent.BYTE_ARRAY_OFFSET,
- baseObject,
- position,
- strBytes.length);
+ Platform.copyMemory(
+ strBytes, Platform.BYTE_ARRAY_OFFSET, baseObject, position, strBytes.length);
position += strBytes.length;
sorter.insertRecord(recordAddress, hashPartitioner.getPartition(str));
}
@@ -98,7 +90,7 @@ public class UnsafeShuffleInMemorySorterSuite {
Assert.assertTrue("Partition id " + partitionId + " should be >= prev id " + prevPartitionId,
partitionId >= prevPartitionId);
final long recordAddress = iter.packedRecordPointer.getRecordPointer();
- final int recordLength = PlatformDependent.UNSAFE.getInt(
+ final int recordLength = Platform.getInt(
memoryManager.getPage(recordAddress), memoryManager.getOffsetInPage(recordAddress));
final String str = getStringFromDataPage(
memoryManager.getPage(recordAddress),
diff --git a/core/src/test/java/org/apache/spark/unsafe/map/AbstractBytesToBytesMapSuite.java b/core/src/test/java/org/apache/spark/unsafe/map/AbstractBytesToBytesMapSuite.java
index e56a3f0b6d..1a79c20c35 100644
--- a/core/src/test/java/org/apache/spark/unsafe/map/AbstractBytesToBytesMapSuite.java
+++ b/core/src/test/java/org/apache/spark/unsafe/map/AbstractBytesToBytesMapSuite.java
@@ -32,9 +32,7 @@ import static org.mockito.Mockito.*;
import org.apache.spark.shuffle.ShuffleMemoryManager;
import org.apache.spark.unsafe.array.ByteArrayMethods;
import org.apache.spark.unsafe.memory.*;
-import org.apache.spark.unsafe.PlatformDependent;
-import static org.apache.spark.unsafe.PlatformDependent.BYTE_ARRAY_OFFSET;
-import static org.apache.spark.unsafe.PlatformDependent.LONG_ARRAY_OFFSET;
+import org.apache.spark.unsafe.Platform;
public abstract class AbstractBytesToBytesMapSuite {
@@ -80,13 +78,8 @@ public abstract class AbstractBytesToBytesMapSuite {
private static byte[] getByteArray(MemoryLocation loc, int size) {
final byte[] arr = new byte[size];
- PlatformDependent.copyMemory(
- loc.getBaseObject(),
- loc.getBaseOffset(),
- arr,
- BYTE_ARRAY_OFFSET,
- size
- );
+ Platform.copyMemory(
+ loc.getBaseObject(), loc.getBaseOffset(), arr, Platform.BYTE_ARRAY_OFFSET, size);
return arr;
}
@@ -108,7 +101,7 @@ public abstract class AbstractBytesToBytesMapSuite {
long actualLengthBytes) {
return (actualLengthBytes == expected.length) && ByteArrayMethods.arrayEquals(
expected,
- BYTE_ARRAY_OFFSET,
+ Platform.BYTE_ARRAY_OFFSET,
actualAddr.getBaseObject(),
actualAddr.getBaseOffset(),
expected.length
@@ -124,7 +117,7 @@ public abstract class AbstractBytesToBytesMapSuite {
final int keyLengthInWords = 10;
final int keyLengthInBytes = keyLengthInWords * 8;
final byte[] key = getRandomByteArray(keyLengthInWords);
- Assert.assertFalse(map.lookup(key, BYTE_ARRAY_OFFSET, keyLengthInBytes).isDefined());
+ Assert.assertFalse(map.lookup(key, Platform.BYTE_ARRAY_OFFSET, keyLengthInBytes).isDefined());
Assert.assertFalse(map.iterator().hasNext());
} finally {
map.free();
@@ -141,14 +134,14 @@ public abstract class AbstractBytesToBytesMapSuite {
final byte[] valueData = getRandomByteArray(recordLengthWords);
try {
final BytesToBytesMap.Location loc =
- map.lookup(keyData, BYTE_ARRAY_OFFSET, recordLengthBytes);
+ map.lookup(keyData, Platform.BYTE_ARRAY_OFFSET, recordLengthBytes);
Assert.assertFalse(loc.isDefined());
Assert.assertTrue(loc.putNewKey(
keyData,
- BYTE_ARRAY_OFFSET,
+ Platform.BYTE_ARRAY_OFFSET,
recordLengthBytes,
valueData,
- BYTE_ARRAY_OFFSET,
+ Platform.BYTE_ARRAY_OFFSET,
recordLengthBytes
));
// After storing the key and value, the other location methods should return results that
@@ -159,7 +152,8 @@ public abstract class AbstractBytesToBytesMapSuite {
Assert.assertArrayEquals(valueData, getByteArray(loc.getValueAddress(), recordLengthBytes));
// After calling lookup() the location should still point to the correct data.
- Assert.assertTrue(map.lookup(keyData, BYTE_ARRAY_OFFSET, recordLengthBytes).isDefined());
+ Assert.assertTrue(
+ map.lookup(keyData, Platform.BYTE_ARRAY_OFFSET, recordLengthBytes).isDefined());
Assert.assertEquals(recordLengthBytes, loc.getKeyLength());
Assert.assertEquals(recordLengthBytes, loc.getValueLength());
Assert.assertArrayEquals(keyData, getByteArray(loc.getKeyAddress(), recordLengthBytes));
@@ -168,10 +162,10 @@ public abstract class AbstractBytesToBytesMapSuite {
try {
Assert.assertTrue(loc.putNewKey(
keyData,
- BYTE_ARRAY_OFFSET,
+ Platform.BYTE_ARRAY_OFFSET,
recordLengthBytes,
valueData,
- BYTE_ARRAY_OFFSET,
+ Platform.BYTE_ARRAY_OFFSET,
recordLengthBytes
));
Assert.fail("Should not be able to set a new value for a key");
@@ -191,25 +185,25 @@ public abstract class AbstractBytesToBytesMapSuite {
for (long i = 0; i < size; i++) {
final long[] value = new long[] { i };
final BytesToBytesMap.Location loc =
- map.lookup(value, PlatformDependent.LONG_ARRAY_OFFSET, 8);
+ map.lookup(value, Platform.LONG_ARRAY_OFFSET, 8);
Assert.assertFalse(loc.isDefined());
// Ensure that we store some zero-length keys
if (i % 5 == 0) {
Assert.assertTrue(loc.putNewKey(
null,
- PlatformDependent.LONG_ARRAY_OFFSET,
+ Platform.LONG_ARRAY_OFFSET,
0,
value,
- PlatformDependent.LONG_ARRAY_OFFSET,
+ Platform.LONG_ARRAY_OFFSET,
8
));
} else {
Assert.assertTrue(loc.putNewKey(
value,
- PlatformDependent.LONG_ARRAY_OFFSET,
+ Platform.LONG_ARRAY_OFFSET,
8,
value,
- PlatformDependent.LONG_ARRAY_OFFSET,
+ Platform.LONG_ARRAY_OFFSET,
8
));
}
@@ -228,14 +222,13 @@ public abstract class AbstractBytesToBytesMapSuite {
Assert.assertTrue(loc.isDefined());
final MemoryLocation keyAddress = loc.getKeyAddress();
final MemoryLocation valueAddress = loc.getValueAddress();
- final long value = PlatformDependent.UNSAFE.getLong(
+ final long value = Platform.getLong(
valueAddress.getBaseObject(), valueAddress.getBaseOffset());
final long keyLength = loc.getKeyLength();
if (keyLength == 0) {
Assert.assertTrue("value " + value + " was not divisible by 5", value % 5 == 0);
} else {
- final long key = PlatformDependent.UNSAFE.getLong(
- keyAddress.getBaseObject(), keyAddress.getBaseOffset());
+ final long key = Platform.getLong(keyAddress.getBaseObject(), keyAddress.getBaseOffset());
Assert.assertEquals(value, key);
}
valuesSeen.set((int) value);
@@ -284,16 +277,16 @@ public abstract class AbstractBytesToBytesMapSuite {
final long[] value = new long[] { i, i, i, i, i }; // 5 * 8 = 40 bytes
final BytesToBytesMap.Location loc = map.lookup(
key,
- LONG_ARRAY_OFFSET,
+ Platform.LONG_ARRAY_OFFSET,
KEY_LENGTH
);
Assert.assertFalse(loc.isDefined());
Assert.assertTrue(loc.putNewKey(
key,
- LONG_ARRAY_OFFSET,
+ Platform.LONG_ARRAY_OFFSET,
KEY_LENGTH,
value,
- LONG_ARRAY_OFFSET,
+ Platform.LONG_ARRAY_OFFSET,
VALUE_LENGTH
));
}
@@ -308,18 +301,18 @@ public abstract class AbstractBytesToBytesMapSuite {
Assert.assertTrue(loc.isDefined());
Assert.assertEquals(KEY_LENGTH, loc.getKeyLength());
Assert.assertEquals(VALUE_LENGTH, loc.getValueLength());
- PlatformDependent.copyMemory(
+ Platform.copyMemory(
loc.getKeyAddress().getBaseObject(),
loc.getKeyAddress().getBaseOffset(),
key,
- LONG_ARRAY_OFFSET,
+ Platform.LONG_ARRAY_OFFSET,
KEY_LENGTH
);
- PlatformDependent.copyMemory(
+ Platform.copyMemory(
loc.getValueAddress().getBaseObject(),
loc.getValueAddress().getBaseOffset(),
value,
- LONG_ARRAY_OFFSET,
+ Platform.LONG_ARRAY_OFFSET,
VALUE_LENGTH
);
for (long j : key) {
@@ -354,16 +347,16 @@ public abstract class AbstractBytesToBytesMapSuite {
expected.put(ByteBuffer.wrap(key), value);
final BytesToBytesMap.Location loc = map.lookup(
key,
- BYTE_ARRAY_OFFSET,
+ Platform.BYTE_ARRAY_OFFSET,
key.length
);
Assert.assertFalse(loc.isDefined());
Assert.assertTrue(loc.putNewKey(
key,
- BYTE_ARRAY_OFFSET,
+ Platform.BYTE_ARRAY_OFFSET,
key.length,
value,
- BYTE_ARRAY_OFFSET,
+ Platform.BYTE_ARRAY_OFFSET,
value.length
));
// After calling putNewKey, the following should be true, even before calling
@@ -379,7 +372,8 @@ public abstract class AbstractBytesToBytesMapSuite {
for (Map.Entry<ByteBuffer, byte[]> entry : expected.entrySet()) {
final byte[] key = entry.getKey().array();
final byte[] value = entry.getValue();
- final BytesToBytesMap.Location loc = map.lookup(key, BYTE_ARRAY_OFFSET, key.length);
+ final BytesToBytesMap.Location loc =
+ map.lookup(key, Platform.BYTE_ARRAY_OFFSET, key.length);
Assert.assertTrue(loc.isDefined());
Assert.assertTrue(arrayEquals(key, loc.getKeyAddress(), loc.getKeyLength()));
Assert.assertTrue(arrayEquals(value, loc.getValueAddress(), loc.getValueLength()));
@@ -405,16 +399,16 @@ public abstract class AbstractBytesToBytesMapSuite {
expected.put(ByteBuffer.wrap(key), value);
final BytesToBytesMap.Location loc = map.lookup(
key,
- BYTE_ARRAY_OFFSET,
+ Platform.BYTE_ARRAY_OFFSET,
key.length
);
Assert.assertFalse(loc.isDefined());
Assert.assertTrue(loc.putNewKey(
key,
- BYTE_ARRAY_OFFSET,
+ Platform.BYTE_ARRAY_OFFSET,
key.length,
value,
- BYTE_ARRAY_OFFSET,
+ Platform.BYTE_ARRAY_OFFSET,
value.length
));
// After calling putNewKey, the following should be true, even before calling
@@ -429,7 +423,8 @@ public abstract class AbstractBytesToBytesMapSuite {
for (Map.Entry<ByteBuffer, byte[]> entry : expected.entrySet()) {
final byte[] key = entry.getKey().array();
final byte[] value = entry.getValue();
- final BytesToBytesMap.Location loc = map.lookup(key, BYTE_ARRAY_OFFSET, key.length);
+ final BytesToBytesMap.Location loc =
+ map.lookup(key, Platform.BYTE_ARRAY_OFFSET, key.length);
Assert.assertTrue(loc.isDefined());
Assert.assertTrue(arrayEquals(key, loc.getKeyAddress(), loc.getKeyLength()));
Assert.assertTrue(arrayEquals(value, loc.getValueAddress(), loc.getValueLength()));
@@ -447,12 +442,10 @@ public abstract class AbstractBytesToBytesMapSuite {
try {
final long[] emptyArray = new long[0];
final BytesToBytesMap.Location loc =
- map.lookup(emptyArray, PlatformDependent.LONG_ARRAY_OFFSET, 0);
+ map.lookup(emptyArray, Platform.LONG_ARRAY_OFFSET, 0);
Assert.assertFalse(loc.isDefined());
Assert.assertFalse(loc.putNewKey(
- emptyArray, LONG_ARRAY_OFFSET, 0,
- emptyArray, LONG_ARRAY_OFFSET, 0
- ));
+ emptyArray, Platform.LONG_ARRAY_OFFSET, 0, emptyArray, Platform.LONG_ARRAY_OFFSET, 0));
} finally {
map.free();
}
@@ -468,8 +461,9 @@ public abstract class AbstractBytesToBytesMapSuite {
int i;
for (i = 0; i < 1024; i++) {
final long[] arr = new long[]{i};
- final BytesToBytesMap.Location loc = map.lookup(arr, PlatformDependent.LONG_ARRAY_OFFSET, 8);
- success = loc.putNewKey(arr, LONG_ARRAY_OFFSET, 8, arr, LONG_ARRAY_OFFSET, 8);
+ final BytesToBytesMap.Location loc = map.lookup(arr, Platform.LONG_ARRAY_OFFSET, 8);
+ success =
+ loc.putNewKey(arr, Platform.LONG_ARRAY_OFFSET, 8, arr, Platform.LONG_ARRAY_OFFSET, 8);
if (!success) {
break;
}
@@ -541,12 +535,12 @@ public abstract class AbstractBytesToBytesMapSuite {
try {
for (long i = 0; i < numRecordsPerPage * 10; i++) {
final long[] value = new long[]{i};
- map.lookup(value, PlatformDependent.LONG_ARRAY_OFFSET, 8).putNewKey(
+ map.lookup(value, Platform.LONG_ARRAY_OFFSET, 8).putNewKey(
value,
- PlatformDependent.LONG_ARRAY_OFFSET,
+ Platform.LONG_ARRAY_OFFSET,
8,
value,
- PlatformDependent.LONG_ARRAY_OFFSET,
+ Platform.LONG_ARRAY_OFFSET,
8);
newPeakMemory = map.getPeakMemoryUsedBytes();
if (i % numRecordsPerPage == 0) {
diff --git a/core/src/test/java/org/apache/spark/util/collection/unsafe/sort/UnsafeExternalSorterSuite.java b/core/src/test/java/org/apache/spark/util/collection/unsafe/sort/UnsafeExternalSorterSuite.java
index 83049b8a21..445a37b83e 100644
--- a/core/src/test/java/org/apache/spark/util/collection/unsafe/sort/UnsafeExternalSorterSuite.java
+++ b/core/src/test/java/org/apache/spark/util/collection/unsafe/sort/UnsafeExternalSorterSuite.java
@@ -49,7 +49,7 @@ import org.apache.spark.executor.TaskMetrics;
import org.apache.spark.serializer.SerializerInstance;
import org.apache.spark.shuffle.ShuffleMemoryManager;
import org.apache.spark.storage.*;
-import org.apache.spark.unsafe.PlatformDependent;
+import org.apache.spark.unsafe.Platform;
import org.apache.spark.unsafe.memory.ExecutorMemoryManager;
import org.apache.spark.unsafe.memory.MemoryAllocator;
import org.apache.spark.unsafe.memory.TaskMemoryManager;
@@ -166,14 +166,14 @@ public class UnsafeExternalSorterSuite {
private static void insertNumber(UnsafeExternalSorter sorter, int value) throws Exception {
final int[] arr = new int[]{ value };
- sorter.insertRecord(arr, PlatformDependent.INT_ARRAY_OFFSET, 4, value);
+ sorter.insertRecord(arr, Platform.INT_ARRAY_OFFSET, 4, value);
}
private static void insertRecord(
UnsafeExternalSorter sorter,
int[] record,
long prefix) throws IOException {
- sorter.insertRecord(record, PlatformDependent.INT_ARRAY_OFFSET, record.length * 4, prefix);
+ sorter.insertRecord(record, Platform.INT_ARRAY_OFFSET, record.length * 4, prefix);
}
private UnsafeExternalSorter newSorter() throws IOException {
@@ -205,7 +205,7 @@ public class UnsafeExternalSorterSuite {
iter.loadNext();
assertEquals(i, iter.getKeyPrefix());
assertEquals(4, iter.getRecordLength());
- assertEquals(i, PlatformDependent.UNSAFE.getInt(iter.getBaseObject(), iter.getBaseOffset()));
+ assertEquals(i, Platform.getInt(iter.getBaseObject(), iter.getBaseOffset()));
}
sorter.cleanupResources();
@@ -253,7 +253,7 @@ public class UnsafeExternalSorterSuite {
iter.loadNext();
assertEquals(i, iter.getKeyPrefix());
assertEquals(4, iter.getRecordLength());
- assertEquals(i, PlatformDependent.UNSAFE.getInt(iter.getBaseObject(), iter.getBaseOffset()));
+ assertEquals(i, Platform.getInt(iter.getBaseObject(), iter.getBaseOffset()));
i++;
}
sorter.cleanupResources();
@@ -265,7 +265,7 @@ public class UnsafeExternalSorterSuite {
final UnsafeExternalSorter sorter = newSorter();
byte[] record = new byte[16];
while (sorter.getNumberOfAllocatedPages() < 2) {
- sorter.insertRecord(record, PlatformDependent.BYTE_ARRAY_OFFSET, record.length, 0);
+ sorter.insertRecord(record, Platform.BYTE_ARRAY_OFFSET, record.length, 0);
}
sorter.cleanupResources();
assertSpillFilesWereCleanedUp();
@@ -292,25 +292,25 @@ public class UnsafeExternalSorterSuite {
iter.loadNext();
assertEquals(123, iter.getKeyPrefix());
assertEquals(smallRecord.length * 4, iter.getRecordLength());
- assertEquals(123, PlatformDependent.UNSAFE.getInt(iter.getBaseObject(), iter.getBaseOffset()));
+ assertEquals(123, Platform.getInt(iter.getBaseObject(), iter.getBaseOffset()));
// Small record
assertTrue(iter.hasNext());
iter.loadNext();
assertEquals(123, iter.getKeyPrefix());
assertEquals(smallRecord.length * 4, iter.getRecordLength());
- assertEquals(123, PlatformDependent.UNSAFE.getInt(iter.getBaseObject(), iter.getBaseOffset()));
+ assertEquals(123, Platform.getInt(iter.getBaseObject(), iter.getBaseOffset()));
// Large record
assertTrue(iter.hasNext());
iter.loadNext();
assertEquals(456, iter.getKeyPrefix());
assertEquals(largeRecord.length * 4, iter.getRecordLength());
- assertEquals(456, PlatformDependent.UNSAFE.getInt(iter.getBaseObject(), iter.getBaseOffset()));
+ assertEquals(456, Platform.getInt(iter.getBaseObject(), iter.getBaseOffset()));
// Large record
assertTrue(iter.hasNext());
iter.loadNext();
assertEquals(456, iter.getKeyPrefix());
assertEquals(largeRecord.length * 4, iter.getRecordLength());
- assertEquals(456, PlatformDependent.UNSAFE.getInt(iter.getBaseObject(), iter.getBaseOffset()));
+ assertEquals(456, Platform.getInt(iter.getBaseObject(), iter.getBaseOffset()));
assertFalse(iter.hasNext());
sorter.cleanupResources();
diff --git a/core/src/test/java/org/apache/spark/util/collection/unsafe/sort/UnsafeInMemorySorterSuite.java b/core/src/test/java/org/apache/spark/util/collection/unsafe/sort/UnsafeInMemorySorterSuite.java
index 9095009305..778e813df6 100644
--- a/core/src/test/java/org/apache/spark/util/collection/unsafe/sort/UnsafeInMemorySorterSuite.java
+++ b/core/src/test/java/org/apache/spark/util/collection/unsafe/sort/UnsafeInMemorySorterSuite.java
@@ -26,7 +26,7 @@ import static org.junit.Assert.*;
import static org.mockito.Mockito.mock;
import org.apache.spark.HashPartitioner;
-import org.apache.spark.unsafe.PlatformDependent;
+import org.apache.spark.unsafe.Platform;
import org.apache.spark.unsafe.memory.ExecutorMemoryManager;
import org.apache.spark.unsafe.memory.MemoryAllocator;
import org.apache.spark.unsafe.memory.MemoryBlock;
@@ -36,11 +36,7 @@ public class UnsafeInMemorySorterSuite {
private static String getStringFromDataPage(Object baseObject, long baseOffset, int length) {
final byte[] strBytes = new byte[length];
- PlatformDependent.copyMemory(
- baseObject,
- baseOffset,
- strBytes,
- PlatformDependent.BYTE_ARRAY_OFFSET, length);
+ Platform.copyMemory(baseObject, baseOffset, strBytes, Platform.BYTE_ARRAY_OFFSET, length);
return new String(strBytes);
}
@@ -76,14 +72,10 @@ public class UnsafeInMemorySorterSuite {
long position = dataPage.getBaseOffset();
for (String str : dataToSort) {
final byte[] strBytes = str.getBytes("utf-8");
- PlatformDependent.UNSAFE.putInt(baseObject, position, strBytes.length);
+ Platform.putInt(baseObject, position, strBytes.length);
position += 4;
- PlatformDependent.copyMemory(
- strBytes,
- PlatformDependent.BYTE_ARRAY_OFFSET,
- baseObject,
- position,
- strBytes.length);
+ Platform.copyMemory(
+ strBytes, Platform.BYTE_ARRAY_OFFSET, baseObject, position, strBytes.length);
position += strBytes.length;
}
// Since the key fits within the 8-byte prefix, we don't need to do any record comparison, so
@@ -113,7 +105,7 @@ public class UnsafeInMemorySorterSuite {
position = dataPage.getBaseOffset();
for (int i = 0; i < dataToSort.length; i++) {
// position now points to the start of a record (which holds its length).
- final int recordLength = PlatformDependent.UNSAFE.getInt(baseObject, position);
+ final int recordLength = Platform.getInt(baseObject, position);
final long address = memoryManager.encodePageNumberAndOffset(dataPage, position);
final String str = getStringFromDataPage(baseObject, position + 4, recordLength);
final int partitionId = hashPartitioner.getPartition(str);