Commit diff (cgit export): rename AGodDamnPrimitiveVector to PrimitiveVector and move it
from org.apache.spark.util to org.apache.spark.util.collection, updating its two call sites.
-rw-r--r-- core/src/main/scala/org/apache/spark/storage/DiskBlockManager.scala | 3
-rw-r--r-- core/src/main/scala/org/apache/spark/storage/ShuffleBlockManager.scala | 7
-rw-r--r-- core/src/main/scala/org/apache/spark/util/collection/PrimitiveVector.scala (renamed from core/src/main/scala/org/apache/spark/util/PrimitiveVector.scala) | 7
3 files changed, 9 insertions, 8 deletions
diff --git a/core/src/main/scala/org/apache/spark/storage/DiskBlockManager.scala b/core/src/main/scala/org/apache/spark/storage/DiskBlockManager.scala
index 4f9537d1c7..bde3d1f592 100644
--- a/core/src/main/scala/org/apache/spark/storage/DiskBlockManager.scala
+++ b/core/src/main/scala/org/apache/spark/storage/DiskBlockManager.scala
@@ -34,7 +34,8 @@ import org.apache.spark.util.Utils
*
* @param rootDirs The directories to use for storing block files. Data will be hashed among these.
*/
-private[spark] class DiskBlockManager(shuffleManager: ShuffleBlockManager, rootDirs: String) extends PathResolver with Logging {
+private[spark] class DiskBlockManager(shuffleManager: ShuffleBlockManager, rootDirs: String)
+ extends PathResolver with Logging {
private val MAX_DIR_CREATION_ATTEMPTS: Int = 10
private val subDirsPerLocalDir = System.getProperty("spark.diskStore.subDirectories", "64").toInt
diff --git a/core/src/main/scala/org/apache/spark/storage/ShuffleBlockManager.scala b/core/src/main/scala/org/apache/spark/storage/ShuffleBlockManager.scala
index c61febf830..d718c87cab 100644
--- a/core/src/main/scala/org/apache/spark/storage/ShuffleBlockManager.scala
+++ b/core/src/main/scala/org/apache/spark/storage/ShuffleBlockManager.scala
@@ -27,7 +27,8 @@ import scala.collection.mutable
import org.apache.spark.Logging
import org.apache.spark.serializer.Serializer
-import org.apache.spark.util.{MetadataCleanerType, MetadataCleaner, AGodDamnPrimitiveVector, TimeStampedHashMap}
+import org.apache.spark.util.{MetadataCleanerType, MetadataCleaner, TimeStampedHashMap}
+import org.apache.spark.util.collection.PrimitiveVector
private[spark]
class ShuffleWriterGroup(
@@ -203,7 +204,7 @@ class ShuffleBlockManager(blockManager: BlockManager) extends Logging {
*/
private[spark]
class ShuffleFileGroup(val shuffleId: Int, val fileId: Int, val files: Array[ShuffleFile]) {
- private val mapIds = new AGodDamnPrimitiveVector[Int]()
+ private val mapIds = new PrimitiveVector[Int]()
files.foreach(_.setShuffleFileGroup(this))
@@ -238,7 +239,7 @@ class ShuffleFile(val file: File) {
* Consecutive offsets of blocks into the file, ordered by position in the file.
* This ordering allows us to compute block lengths by examining the following block offset.
*/
- val blockOffsets = new AGodDamnPrimitiveVector[Long]()
+ val blockOffsets = new PrimitiveVector[Long]()
/** Back pointer to whichever ShuffleFileGroup this file is a part of. */
private var shuffleFileGroup : ShuffleFileGroup = _
diff --git a/core/src/main/scala/org/apache/spark/util/PrimitiveVector.scala b/core/src/main/scala/org/apache/spark/util/collection/PrimitiveVector.scala
index d316601b90..721f12b711 100644
--- a/core/src/main/scala/org/apache/spark/util/PrimitiveVector.scala
+++ b/core/src/main/scala/org/apache/spark/util/collection/PrimitiveVector.scala
@@ -15,12 +15,11 @@
* limitations under the License.
*/
-package org.apache.spark.util
+package org.apache.spark.util.collection
/** Provides a simple, non-threadsafe, array-backed vector that can store primitives. */
-class AGodDamnPrimitiveVector[@specialized(Long, Int, Double) V: ClassManifest]
- (initialSize: Int = 64)
-{
+private[spark]
+class PrimitiveVector[@specialized(Long, Int, Double) V: ClassManifest](initialSize: Int = 64) {
private var numElements = 0
private var array = new Array[V](initialSize)