 core/src/main/scala/org/apache/spark/storage/DiskStore.scala | 3 ++-
 docs/configuration.md                                         | 2 +-
 2 files changed, 3 insertions(+), 2 deletions(-)
diff --git a/core/src/main/scala/org/apache/spark/storage/DiskStore.scala b/core/src/main/scala/org/apache/spark/storage/DiskStore.scala
index 8dadf67940..61ef5ff168 100644
--- a/core/src/main/scala/org/apache/spark/storage/DiskStore.scala
+++ b/core/src/main/scala/org/apache/spark/storage/DiskStore.scala
@@ -31,7 +31,8 @@ import org.apache.spark.util.Utils
 private[spark] class DiskStore(blockManager: BlockManager, diskManager: DiskBlockManager)
   extends BlockStore(blockManager) with Logging {
 
-  val minMemoryMapBytes = blockManager.conf.getLong("spark.storage.memoryMapThreshold", 2 * 4096L)
+  val minMemoryMapBytes = blockManager.conf.getLong(
+    "spark.storage.memoryMapThreshold", 2 * 1024L * 1024L)
 
   override def getSize(blockId: BlockId): Long = {
     diskManager.getFile(blockId.name).length
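
For context, a minimal standalone sketch of the pattern this threshold controls: read a block file, memory-mapping it only when it is at least the configured size, otherwise copying it into a heap buffer. This is illustrative only, not the Spark source; the names readBlock and mapThreshold are invented for the example.

import java.io.{EOFException, File, RandomAccessFile}
import java.nio.ByteBuffer
import java.nio.channels.FileChannel.MapMode

// Illustrative sketch: map large blocks, copy small ones into a heap buffer.
def readBlock(file: File, mapThreshold: Long): ByteBuffer = {
  val channel = new RandomAccessFile(file, "r").getChannel
  try {
    if (file.length() >= mapThreshold) {
      // Large block: memory-map it and let the OS page it in lazily.
      channel.map(MapMode.READ_ONLY, 0, file.length())
    } else {
      // Small block: a plain read avoids the fixed cost of creating a mapping.
      val buf = ByteBuffer.allocate(file.length().toInt)
      while (buf.hasRemaining) {
        if (channel.read(buf) < 0) {
          throw new EOFException(s"Unexpected end of file: $file")
        }
      }
      buf.flip()
      buf
    }
  } finally {
    channel.close()
  }
}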
diff --git a/docs/configuration.md b/docs/configuration.md
index 2add48569b..f292bfbb7d 100644
--- a/docs/configuration.md
+++ b/docs/configuration.md
@@ -678,7 +678,7 @@ Apart from these, the following properties are also available, and may be useful
 </tr>
 <tr>
   <td><code>spark.storage.memoryMapThreshold</code></td>
-  <td>8192</td>
+  <td>2097152</td>
   <td>
     Size of a block, in bytes, above which Spark memory maps when reading a block from disk.
     This prevents Spark from memory mapping very small blocks. In general, memory
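
The new documented default, 2097152 bytes, matches the 2 * 1024L * 1024L (2 MB) default now used in DiskStore. As a usage note (an illustrative sketch, not part of this change), the threshold can be overridden like any other Spark property, for example:

import org.apache.spark.SparkConf

// Example only: raise the memory-map threshold to 8 MB (the value is in bytes).
val conf = new SparkConf()
  .setAppName("MemoryMapThresholdExample")
  .set("spark.storage.memoryMapThreshold", (8 * 1024 * 1024).toString)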