author     Josh Rosen <joshrosen@databricks.com>    2015-09-22 11:50:22 -0700
committer  Reynold Xin <rxin@databricks.com>        2015-09-22 11:50:22 -0700
commit     1ca5e2e0b8d8d406c02a74c76ae9d7fc5637c8d3 (patch)
tree       b6c3ee6d3601b9bd510efd3d58a50299f244cca8 /core
parent     22d40159e60dd27a428e4051ef607292cbffbff3 (diff)
[SPARK-10704] Rename HashShuffleReader to BlockStoreShuffleReader
The current shuffle code has an interface named ShuffleReader with only one implementation, HashShuffleReader. This naming is confusing, since the same read path code is used for both sort- and hash-based shuffle. This patch addresses this by renaming HashShuffleReader to BlockStoreShuffleReader.

Author: Josh Rosen <joshrosen@databricks.com>

Closes #8825 from JoshRosen/shuffle-reader-cleanup.
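For orientation, here is a minimal Scala sketch of the shape this patch leaves behind: one ShuffleReader trait and a single implementation carrying the new block-store-oriented name. The parameter lists are condensed from the diff below; the real classes take more arguments (a BaseShuffleHandle, a TaskContext, a BlockManager, and so on), so treat this as an illustration rather than the actual Spark source.

    // Condensed sketch of the post-patch naming; not the real Spark classes.
    trait ShuffleReader[K, C] {
      /** Read the combined key-value pairs for this reducer's partition range. */
      def read(): Iterator[Product2[K, C]]
    }

    // The interface's single implementation, shared by both sort- and
    // hash-based shuffle -- hence the block-store-oriented name.
    class BlockStoreShuffleReader[K, C](
        startPartition: Int,
        endPartition: Int) extends ShuffleReader[K, C] {
      // The real class fetches blocks via ShuffleBlockFetcherIterator; elided here.
      override def read(): Iterator[Product2[K, C]] = Iterator.empty
    }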
Diffstat (limited to 'core')
-rw-r--r--  core/src/main/scala/org/apache/spark/shuffle/BlockStoreShuffleReader.scala (renamed from core/src/main/scala/org/apache/spark/shuffle/hash/HashShuffleReader.scala) | 5
-rw-r--r--  core/src/main/scala/org/apache/spark/shuffle/hash/HashShuffleManager.scala | 2
-rw-r--r--  core/src/main/scala/org/apache/spark/shuffle/sort/SortShuffleManager.scala | 3
-rw-r--r--  core/src/test/scala/org/apache/spark/shuffle/BlockStoreShuffleReaderSuite.scala (renamed from core/src/test/scala/org/apache/spark/shuffle/hash/HashShuffleReaderSuite.scala) | 7
4 files changed, 7 insertions(+), 10 deletions(-)
diff --git a/core/src/main/scala/org/apache/spark/shuffle/hash/HashShuffleReader.scala b/core/src/main/scala/org/apache/spark/shuffle/BlockStoreShuffleReader.scala
index 0c8f08f0f3..6dc9a16e58 100644
--- a/core/src/main/scala/org/apache/spark/shuffle/hash/HashShuffleReader.scala
+++ b/core/src/main/scala/org/apache/spark/shuffle/BlockStoreShuffleReader.scala
@@ -15,16 +15,15 @@
* limitations under the License.
*/
-package org.apache.spark.shuffle.hash
+package org.apache.spark.shuffle
import org.apache.spark._
import org.apache.spark.serializer.Serializer
-import org.apache.spark.shuffle.{BaseShuffleHandle, ShuffleReader}
import org.apache.spark.storage.{BlockManager, ShuffleBlockFetcherIterator}
import org.apache.spark.util.CompletionIterator
import org.apache.spark.util.collection.ExternalSorter
-private[spark] class HashShuffleReader[K, C](
+private[spark] class BlockStoreShuffleReader[K, C](
handle: BaseShuffleHandle[K, _, C],
startPartition: Int,
endPartition: Int,
diff --git a/core/src/main/scala/org/apache/spark/shuffle/hash/HashShuffleManager.scala b/core/src/main/scala/org/apache/spark/shuffle/hash/HashShuffleManager.scala
index 0b46634b8b..d2e2fc4c11 100644
--- a/core/src/main/scala/org/apache/spark/shuffle/hash/HashShuffleManager.scala
+++ b/core/src/main/scala/org/apache/spark/shuffle/hash/HashShuffleManager.scala
@@ -51,7 +51,7 @@ private[spark] class HashShuffleManager(conf: SparkConf) extends ShuffleManager
startPartition: Int,
endPartition: Int,
context: TaskContext): ShuffleReader[K, C] = {
- new HashShuffleReader(
+ new BlockStoreShuffleReader(
handle.asInstanceOf[BaseShuffleHandle[K, _, C]], startPartition, endPartition, context)
}
diff --git a/core/src/main/scala/org/apache/spark/shuffle/sort/SortShuffleManager.scala b/core/src/main/scala/org/apache/spark/shuffle/sort/SortShuffleManager.scala
index 476cc1f303..9df4e55166 100644
--- a/core/src/main/scala/org/apache/spark/shuffle/sort/SortShuffleManager.scala
+++ b/core/src/main/scala/org/apache/spark/shuffle/sort/SortShuffleManager.scala
@@ -21,7 +21,6 @@ import java.util.concurrent.ConcurrentHashMap
import org.apache.spark.{Logging, SparkConf, TaskContext, ShuffleDependency}
import org.apache.spark.shuffle._
-import org.apache.spark.shuffle.hash.HashShuffleReader
private[spark] class SortShuffleManager(conf: SparkConf) extends ShuffleManager with Logging {
@@ -54,7 +53,7 @@ private[spark] class SortShuffleManager(conf: SparkConf) extends ShuffleManager
endPartition: Int,
context: TaskContext): ShuffleReader[K, C] = {
// We currently use the same block store shuffle fetcher as the hash-based shuffle.
- new HashShuffleReader(
+ new BlockStoreShuffleReader(
handle.asInstanceOf[BaseShuffleHandle[K, _, C]], startPartition, endPartition, context)
}
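The comment kept in the hunk above ("We currently use the same block store shuffle fetcher as the hash-based shuffle") is the crux of the rename: both managers hand back the same reader. Continuing the condensed sketch from the commit message, a hypothetical, simplified view of that shared dispatch might look like the following; the real getReader signatures also take a ShuffleHandle and a TaskContext.

    // Hypothetical, simplified managers built on the sketch above;
    // real getReader also takes a ShuffleHandle and a TaskContext.
    class HashShuffleManager {
      def getReader[K, C](startPartition: Int, endPartition: Int): ShuffleReader[K, C] =
        new BlockStoreShuffleReader[K, C](startPartition, endPartition)
    }

    class SortShuffleManager {
      def getReader[K, C](startPartition: Int, endPartition: Int): ShuffleReader[K, C] =
        new BlockStoreShuffleReader[K, C](startPartition, endPartition)
    }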
diff --git a/core/src/test/scala/org/apache/spark/shuffle/hash/HashShuffleReaderSuite.scala b/core/src/test/scala/org/apache/spark/shuffle/BlockStoreShuffleReaderSuite.scala
index 05b3afef5b..a5eafb1b55 100644
--- a/core/src/test/scala/org/apache/spark/shuffle/hash/HashShuffleReaderSuite.scala
+++ b/core/src/test/scala/org/apache/spark/shuffle/BlockStoreShuffleReaderSuite.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package org.apache.spark.shuffle.hash
+package org.apache.spark.shuffle
import java.io.{ByteArrayOutputStream, InputStream}
import java.nio.ByteBuffer
@@ -28,7 +28,6 @@ import org.mockito.stubbing.Answer
import org.apache.spark._
import org.apache.spark.network.buffer.{ManagedBuffer, NioManagedBuffer}
import org.apache.spark.serializer.JavaSerializer
-import org.apache.spark.shuffle.BaseShuffleHandle
import org.apache.spark.storage.{BlockManager, BlockManagerId, ShuffleBlockId}
/**
@@ -56,7 +55,7 @@ class RecordingManagedBuffer(underlyingBuffer: NioManagedBuffer) extends Managed
}
}
-class HashShuffleReaderSuite extends SparkFunSuite with LocalSparkContext {
+class BlockStoreShuffleReaderSuite extends SparkFunSuite with LocalSparkContext {
/**
* This test makes sure that, when data is read from a HashShuffleReader, the underlying
@@ -134,7 +133,7 @@ class HashShuffleReaderSuite extends SparkFunSuite with LocalSparkContext {
new BaseShuffleHandle(shuffleId, numMaps, dependency)
}
- val shuffleReader = new HashShuffleReader(
+ val shuffleReader = new BlockStoreShuffleReader(
shuffleHandle,
reduceId,
reduceId + 1,