author     Reynold Xin <rxin@databricks.com>       2015-08-11 08:41:06 -0700
committer  Davies Liu <davies.liu@gmail.com>       2015-08-11 08:41:06 -0700
commit     d378396f86f625f006738d87fe5dbc2ff8fd913d (patch)
tree       488e09f59d69d52f776b7587c9c0afead051b541 /core/src/main/java
parent     600031ebe27473d8fffe6ea436c2149223b82896 (diff)
[SPARK-9815] Rename PlatformDependent.UNSAFE -> Platform.
PlatformDependent.UNSAFE is way too verbose.

Author: Reynold Xin <rxin@databricks.com>

Closes #8094 from rxin/SPARK-9815 and squashes the following commits:

229b603 [Reynold Xin] [SPARK-9815] Rename PlatformDependent.UNSAFE -> Platform.
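A minimal sketch (not part of this commit) of what the rename means for call sites, assuming Platform exposes the same static helpers used in the hunks below (putInt, getInt, BYTE_ARRAY_OFFSET):

import org.apache.spark.unsafe.Platform;

public class PlatformRenameExample {
  public static void main(String[] args) {
    byte[] buf = new byte[8];
    // Old call sites went through the UNSAFE singleton:
    //   PlatformDependent.UNSAFE.putInt(buf, PlatformDependent.BYTE_ARRAY_OFFSET, 42);
    // After this patch the same operation is a static call on Platform:
    Platform.putInt(buf, Platform.BYTE_ARRAY_OFFSET, 42);
    int value = Platform.getInt(buf, Platform.BYTE_ARRAY_OFFSET);
    System.out.println(value);  // prints 42
  }
}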
Diffstat (limited to 'core/src/main/java')
-rw-r--r--  core/src/main/java/org/apache/spark/serializer/DummySerializerInstance.java                  |  6
-rw-r--r--  core/src/main/java/org/apache/spark/shuffle/unsafe/UnsafeShuffleExternalSorter.java          | 22
-rw-r--r--  core/src/main/java/org/apache/spark/shuffle/unsafe/UnsafeShuffleWriter.java                  |  4
-rw-r--r--  core/src/main/java/org/apache/spark/unsafe/map/BytesToBytesMap.java                          | 20
-rw-r--r--  core/src/main/java/org/apache/spark/util/collection/unsafe/sort/PrefixComparators.java       |  5
-rw-r--r--  core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeExternalSorter.java    | 22
-rw-r--r--  core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeInMemorySorter.java    |  4
-rw-r--r--  core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeSorterSpillReader.java |  4
-rw-r--r--  core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeSorterSpillWriter.java |  6
9 files changed, 39 insertions(+), 54 deletions(-)
diff --git a/core/src/main/java/org/apache/spark/serializer/DummySerializerInstance.java b/core/src/main/java/org/apache/spark/serializer/DummySerializerInstance.java
index 0399abc63c..0e58bb4f71 100644
--- a/core/src/main/java/org/apache/spark/serializer/DummySerializerInstance.java
+++ b/core/src/main/java/org/apache/spark/serializer/DummySerializerInstance.java
@@ -25,7 +25,7 @@ import java.nio.ByteBuffer;
import scala.reflect.ClassTag;
import org.apache.spark.annotation.Private;
-import org.apache.spark.unsafe.PlatformDependent;
+import org.apache.spark.unsafe.Platform;
/**
* Unfortunately, we need a serializer instance in order to construct a DiskBlockObjectWriter.
@@ -49,7 +49,7 @@ public final class DummySerializerInstance extends SerializerInstance {
try {
s.flush();
} catch (IOException e) {
- PlatformDependent.throwException(e);
+ Platform.throwException(e);
}
}
@@ -64,7 +64,7 @@ public final class DummySerializerInstance extends SerializerInstance {
try {
s.close();
} catch (IOException e) {
- PlatformDependent.throwException(e);
+ Platform.throwException(e);
}
}
};
diff --git a/core/src/main/java/org/apache/spark/shuffle/unsafe/UnsafeShuffleExternalSorter.java b/core/src/main/java/org/apache/spark/shuffle/unsafe/UnsafeShuffleExternalSorter.java
index 925b60a145..3d1ef0c48a 100644
--- a/core/src/main/java/org/apache/spark/shuffle/unsafe/UnsafeShuffleExternalSorter.java
+++ b/core/src/main/java/org/apache/spark/shuffle/unsafe/UnsafeShuffleExternalSorter.java
@@ -37,7 +37,7 @@ import org.apache.spark.shuffle.ShuffleMemoryManager;
import org.apache.spark.storage.BlockManager;
import org.apache.spark.storage.DiskBlockObjectWriter;
import org.apache.spark.storage.TempShuffleBlockId;
-import org.apache.spark.unsafe.PlatformDependent;
+import org.apache.spark.unsafe.Platform;
import org.apache.spark.unsafe.array.ByteArrayMethods;
import org.apache.spark.unsafe.memory.MemoryBlock;
import org.apache.spark.unsafe.memory.TaskMemoryManager;
@@ -211,16 +211,12 @@ final class UnsafeShuffleExternalSorter {
final long recordPointer = sortedRecords.packedRecordPointer.getRecordPointer();
final Object recordPage = taskMemoryManager.getPage(recordPointer);
final long recordOffsetInPage = taskMemoryManager.getOffsetInPage(recordPointer);
- int dataRemaining = PlatformDependent.UNSAFE.getInt(recordPage, recordOffsetInPage);
+ int dataRemaining = Platform.getInt(recordPage, recordOffsetInPage);
long recordReadPosition = recordOffsetInPage + 4; // skip over record length
while (dataRemaining > 0) {
final int toTransfer = Math.min(DISK_WRITE_BUFFER_SIZE, dataRemaining);
- PlatformDependent.copyMemory(
- recordPage,
- recordReadPosition,
- writeBuffer,
- PlatformDependent.BYTE_ARRAY_OFFSET,
- toTransfer);
+ Platform.copyMemory(
+ recordPage, recordReadPosition, writeBuffer, Platform.BYTE_ARRAY_OFFSET, toTransfer);
writer.write(writeBuffer, 0, toTransfer);
recordReadPosition += toTransfer;
dataRemaining -= toTransfer;
@@ -447,14 +443,10 @@ final class UnsafeShuffleExternalSorter {
final long recordAddress =
taskMemoryManager.encodePageNumberAndOffset(dataPage, dataPagePosition);
- PlatformDependent.UNSAFE.putInt(dataPageBaseObject, dataPagePosition, lengthInBytes);
+ Platform.putInt(dataPageBaseObject, dataPagePosition, lengthInBytes);
dataPagePosition += 4;
- PlatformDependent.copyMemory(
- recordBaseObject,
- recordBaseOffset,
- dataPageBaseObject,
- dataPagePosition,
- lengthInBytes);
+ Platform.copyMemory(
+ recordBaseObject, recordBaseOffset, dataPageBaseObject, dataPagePosition, lengthInBytes);
assert(inMemSorter != null);
inMemSorter.insertRecord(recordAddress, partitionId);
}
diff --git a/core/src/main/java/org/apache/spark/shuffle/unsafe/UnsafeShuffleWriter.java b/core/src/main/java/org/apache/spark/shuffle/unsafe/UnsafeShuffleWriter.java
index 02084f9122..2389c28b28 100644
--- a/core/src/main/java/org/apache/spark/shuffle/unsafe/UnsafeShuffleWriter.java
+++ b/core/src/main/java/org/apache/spark/shuffle/unsafe/UnsafeShuffleWriter.java
@@ -53,7 +53,7 @@ import org.apache.spark.shuffle.ShuffleMemoryManager;
import org.apache.spark.shuffle.ShuffleWriter;
import org.apache.spark.storage.BlockManager;
import org.apache.spark.storage.TimeTrackingOutputStream;
-import org.apache.spark.unsafe.PlatformDependent;
+import org.apache.spark.unsafe.Platform;
import org.apache.spark.unsafe.memory.TaskMemoryManager;
@Private
@@ -244,7 +244,7 @@ public class UnsafeShuffleWriter<K, V> extends ShuffleWriter<K, V> {
assert (serializedRecordSize > 0);
sorter.insertRecord(
- serBuffer.getBuf(), PlatformDependent.BYTE_ARRAY_OFFSET, serializedRecordSize, partitionId);
+ serBuffer.getBuf(), Platform.BYTE_ARRAY_OFFSET, serializedRecordSize, partitionId);
}
@VisibleForTesting
diff --git a/core/src/main/java/org/apache/spark/unsafe/map/BytesToBytesMap.java b/core/src/main/java/org/apache/spark/unsafe/map/BytesToBytesMap.java
index 7f79cd13aa..85b46ec8bf 100644
--- a/core/src/main/java/org/apache/spark/unsafe/map/BytesToBytesMap.java
+++ b/core/src/main/java/org/apache/spark/unsafe/map/BytesToBytesMap.java
@@ -270,10 +270,10 @@ public final class BytesToBytesMap {
@Override
public Location next() {
- int totalLength = PlatformDependent.UNSAFE.getInt(pageBaseObject, offsetInPage);
+ int totalLength = Platform.getInt(pageBaseObject, offsetInPage);
if (totalLength == END_OF_PAGE_MARKER) {
advanceToNextPage();
- totalLength = PlatformDependent.UNSAFE.getInt(pageBaseObject, offsetInPage);
+ totalLength = Platform.getInt(pageBaseObject, offsetInPage);
}
loc.with(currentPage, offsetInPage);
offsetInPage += 4 + totalLength;
@@ -402,9 +402,9 @@ public final class BytesToBytesMap {
private void updateAddressesAndSizes(final Object page, final long offsetInPage) {
long position = offsetInPage;
- final int totalLength = PlatformDependent.UNSAFE.getInt(page, position);
+ final int totalLength = Platform.getInt(page, position);
position += 4;
- keyLength = PlatformDependent.UNSAFE.getInt(page, position);
+ keyLength = Platform.getInt(page, position);
position += 4;
valueLength = totalLength - keyLength - 4;
@@ -572,7 +572,7 @@ public final class BytesToBytesMap {
// There wasn't enough space in the current page, so write an end-of-page marker:
final Object pageBaseObject = currentDataPage.getBaseObject();
final long lengthOffsetInPage = currentDataPage.getBaseOffset() + pageCursor;
- PlatformDependent.UNSAFE.putInt(pageBaseObject, lengthOffsetInPage, END_OF_PAGE_MARKER);
+ Platform.putInt(pageBaseObject, lengthOffsetInPage, END_OF_PAGE_MARKER);
}
final long memoryGranted = shuffleMemoryManager.tryToAcquire(pageSizeBytes);
if (memoryGranted != pageSizeBytes) {
@@ -608,21 +608,21 @@ public final class BytesToBytesMap {
final long valueDataOffsetInPage = insertCursor;
insertCursor += valueLengthBytes; // word used to store the value size
- PlatformDependent.UNSAFE.putInt(dataPageBaseObject, recordOffset,
+ Platform.putInt(dataPageBaseObject, recordOffset,
keyLengthBytes + valueLengthBytes + 4);
- PlatformDependent.UNSAFE.putInt(dataPageBaseObject, keyLengthOffset, keyLengthBytes);
+ Platform.putInt(dataPageBaseObject, keyLengthOffset, keyLengthBytes);
// Copy the key
- PlatformDependent.copyMemory(
+ Platform.copyMemory(
keyBaseObject, keyBaseOffset, dataPageBaseObject, keyDataOffsetInPage, keyLengthBytes);
// Copy the value
- PlatformDependent.copyMemory(valueBaseObject, valueBaseOffset, dataPageBaseObject,
+ Platform.copyMemory(valueBaseObject, valueBaseOffset, dataPageBaseObject,
valueDataOffsetInPage, valueLengthBytes);
// --- Update bookeeping data structures -----------------------------------------------------
if (useOverflowPage) {
// Store the end-of-page marker at the end of the data page
- PlatformDependent.UNSAFE.putInt(dataPageBaseObject, insertCursor, END_OF_PAGE_MARKER);
+ Platform.putInt(dataPageBaseObject, insertCursor, END_OF_PAGE_MARKER);
} else {
pageCursor += requiredSize;
}
diff --git a/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/PrefixComparators.java b/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/PrefixComparators.java
index 5e002ae1b7..71b76d5ddf 100644
--- a/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/PrefixComparators.java
+++ b/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/PrefixComparators.java
@@ -20,10 +20,9 @@ package org.apache.spark.util.collection.unsafe.sort;
import com.google.common.primitives.UnsignedLongs;
import org.apache.spark.annotation.Private;
-import org.apache.spark.unsafe.PlatformDependent;
+import org.apache.spark.unsafe.Platform;
import org.apache.spark.unsafe.types.UTF8String;
import org.apache.spark.util.Utils;
-import static org.apache.spark.unsafe.PlatformDependent.BYTE_ARRAY_OFFSET;
@Private
public class PrefixComparators {
@@ -73,7 +72,7 @@ public class PrefixComparators {
final int minLen = Math.min(bytes.length, 8);
long p = 0;
for (int i = 0; i < minLen; ++i) {
- p |= (128L + PlatformDependent.UNSAFE.getByte(bytes, BYTE_ARRAY_OFFSET + i))
+ p |= (128L + Platform.getByte(bytes, Platform.BYTE_ARRAY_OFFSET + i))
<< (56 - 8 * i);
}
return p;
diff --git a/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeExternalSorter.java b/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeExternalSorter.java
index 5ebbf9b068..9601aafe55 100644
--- a/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeExternalSorter.java
+++ b/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeExternalSorter.java
@@ -35,7 +35,7 @@ import org.apache.spark.executor.ShuffleWriteMetrics;
import org.apache.spark.shuffle.ShuffleMemoryManager;
import org.apache.spark.storage.BlockManager;
import org.apache.spark.unsafe.array.ByteArrayMethods;
-import org.apache.spark.unsafe.PlatformDependent;
+import org.apache.spark.unsafe.Platform;
import org.apache.spark.unsafe.memory.MemoryBlock;
import org.apache.spark.unsafe.memory.TaskMemoryManager;
import org.apache.spark.util.Utils;
@@ -427,14 +427,10 @@ public final class UnsafeExternalSorter {
final long recordAddress =
taskMemoryManager.encodePageNumberAndOffset(dataPage, dataPagePosition);
- PlatformDependent.UNSAFE.putInt(dataPageBaseObject, dataPagePosition, lengthInBytes);
+ Platform.putInt(dataPageBaseObject, dataPagePosition, lengthInBytes);
dataPagePosition += 4;
- PlatformDependent.copyMemory(
- recordBaseObject,
- recordBaseOffset,
- dataPageBaseObject,
- dataPagePosition,
- lengthInBytes);
+ Platform.copyMemory(
+ recordBaseObject, recordBaseOffset, dataPageBaseObject, dataPagePosition, lengthInBytes);
assert(inMemSorter != null);
inMemSorter.insertRecord(recordAddress, prefix);
}
@@ -493,18 +489,16 @@ public final class UnsafeExternalSorter {
final long recordAddress =
taskMemoryManager.encodePageNumberAndOffset(dataPage, dataPagePosition);
- PlatformDependent.UNSAFE.putInt(dataPageBaseObject, dataPagePosition, keyLen + valueLen + 4);
+ Platform.putInt(dataPageBaseObject, dataPagePosition, keyLen + valueLen + 4);
dataPagePosition += 4;
- PlatformDependent.UNSAFE.putInt(dataPageBaseObject, dataPagePosition, keyLen);
+ Platform.putInt(dataPageBaseObject, dataPagePosition, keyLen);
dataPagePosition += 4;
- PlatformDependent.copyMemory(
- keyBaseObj, keyOffset, dataPageBaseObject, dataPagePosition, keyLen);
+ Platform.copyMemory(keyBaseObj, keyOffset, dataPageBaseObject, dataPagePosition, keyLen);
dataPagePosition += keyLen;
- PlatformDependent.copyMemory(
- valueBaseObj, valueOffset, dataPageBaseObject, dataPagePosition, valueLen);
+ Platform.copyMemory(valueBaseObj, valueOffset, dataPageBaseObject, dataPagePosition, valueLen);
assert(inMemSorter != null);
inMemSorter.insertRecord(recordAddress, prefix);
diff --git a/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeInMemorySorter.java b/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeInMemorySorter.java
index 1e4b8a116e..f7787e1019 100644
--- a/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeInMemorySorter.java
+++ b/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeInMemorySorter.java
@@ -19,7 +19,7 @@ package org.apache.spark.util.collection.unsafe.sort;
import java.util.Comparator;
-import org.apache.spark.unsafe.PlatformDependent;
+import org.apache.spark.unsafe.Platform;
import org.apache.spark.util.collection.Sorter;
import org.apache.spark.unsafe.memory.TaskMemoryManager;
@@ -164,7 +164,7 @@ public final class UnsafeInMemorySorter {
final long recordPointer = sortBuffer[position];
baseObject = memoryManager.getPage(recordPointer);
baseOffset = memoryManager.getOffsetInPage(recordPointer) + 4; // Skip over record length
- recordLength = PlatformDependent.UNSAFE.getInt(baseObject, baseOffset - 4);
+ recordLength = Platform.getInt(baseObject, baseOffset - 4);
keyPrefix = sortBuffer[position + 1];
position += 2;
}
diff --git a/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeSorterSpillReader.java b/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeSorterSpillReader.java
index ca1ccedc93..4989b05d63 100644
--- a/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeSorterSpillReader.java
+++ b/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeSorterSpillReader.java
@@ -23,7 +23,7 @@ import com.google.common.io.ByteStreams;
import org.apache.spark.storage.BlockId;
import org.apache.spark.storage.BlockManager;
-import org.apache.spark.unsafe.PlatformDependent;
+import org.apache.spark.unsafe.Platform;
/**
* Reads spill files written by {@link UnsafeSorterSpillWriter} (see that class for a description
@@ -42,7 +42,7 @@ final class UnsafeSorterSpillReader extends UnsafeSorterIterator {
private byte[] arr = new byte[1024 * 1024];
private Object baseObject = arr;
- private final long baseOffset = PlatformDependent.BYTE_ARRAY_OFFSET;
+ private final long baseOffset = Platform.BYTE_ARRAY_OFFSET;
public UnsafeSorterSpillReader(
BlockManager blockManager,
diff --git a/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeSorterSpillWriter.java b/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeSorterSpillWriter.java
index 44cf6c756d..e59a84ff8d 100644
--- a/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeSorterSpillWriter.java
+++ b/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeSorterSpillWriter.java
@@ -28,7 +28,7 @@ import org.apache.spark.storage.BlockId;
import org.apache.spark.storage.BlockManager;
import org.apache.spark.storage.DiskBlockObjectWriter;
import org.apache.spark.storage.TempLocalBlockId;
-import org.apache.spark.unsafe.PlatformDependent;
+import org.apache.spark.unsafe.Platform;
/**
* Spills a list of sorted records to disk. Spill files have the following format:
@@ -117,11 +117,11 @@ final class UnsafeSorterSpillWriter {
long recordReadPosition = baseOffset;
while (dataRemaining > 0) {
final int toTransfer = Math.min(freeSpaceInWriteBuffer, dataRemaining);
- PlatformDependent.copyMemory(
+ Platform.copyMemory(
baseObject,
recordReadPosition,
writeBuffer,
- PlatformDependent.BYTE_ARRAY_OFFSET + (DISK_WRITE_BUFFER_SIZE - freeSpaceInWriteBuffer),
+ Platform.BYTE_ARRAY_OFFSET + (DISK_WRITE_BUFFER_SIZE - freeSpaceInWriteBuffer),
toTransfer);
writer.write(writeBuffer, 0, (DISK_WRITE_BUFFER_SIZE - freeSpaceInWriteBuffer) + toTransfer);
recordReadPosition += toTransfer;