aboutsummaryrefslogtreecommitdiff
path: root/core/src/main/scala
diff options
context:
space:
mode:
author: Reynold Xin <rxin@apache.org> 2014-07-15 18:47:39 -0700
committer: Reynold Xin <rxin@apache.org> 2014-07-15 18:47:39 -0700
commit: 4576d80a5155c9fbfebe9c36cca06c208bca5bd3 (patch)
tree: cdda518b057bb1e0c8770d737fcac25f4b8f1c78 /core/src/main/scala
parent: c2048a5165b270f5baf2003fdfef7bc6c5875715 (diff)
downloadspark-4576d80a5155c9fbfebe9c36cca06c208bca5bd3.tar.gz
spark-4576d80a5155c9fbfebe9c36cca06c208bca5bd3.tar.bz2
spark-4576d80a5155c9fbfebe9c36cca06c208bca5bd3.zip
[SPARK-2469] Use Snappy (instead of LZF) for default shuffle compression codec
This reduces shuffle compression memory usage by 3x.

Author: Reynold Xin <rxin@apache.org>

Closes #1415 from rxin/snappy and squashes the following commits:

06c1a01 [Reynold Xin] SPARK-2469: Use Snappy (instead of LZF) for default shuffle compression codec.
Diffstat (limited to 'core/src/main/scala')
-rw-r--r-- core/src/main/scala/org/apache/spark/io/CompressionCodec.scala | 4
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/core/src/main/scala/org/apache/spark/io/CompressionCodec.scala b/core/src/main/scala/org/apache/spark/io/CompressionCodec.scala
index 33402c927c..1b66218d86 100644
--- a/core/src/main/scala/org/apache/spark/io/CompressionCodec.scala
+++ b/core/src/main/scala/org/apache/spark/io/CompressionCodec.scala
@@ -56,7 +56,7 @@ private[spark] object CompressionCodec {
ctor.newInstance(conf).asInstanceOf[CompressionCodec]
}
- val DEFAULT_COMPRESSION_CODEC = classOf[LZFCompressionCodec].getName
+ val DEFAULT_COMPRESSION_CODEC = classOf[SnappyCompressionCodec].getName
}
@@ -103,7 +103,7 @@ class LZFCompressionCodec(conf: SparkConf) extends CompressionCodec {
/**
* :: DeveloperApi ::
* Snappy implementation of [[org.apache.spark.io.CompressionCodec]].
- * Block size can be configured by spark.io.compression.snappy.block.size.
+ * Block size can be configured by `spark.io.compression.snappy.block.size`.
*
* Note: The wire protocol for this codec is not guaranteed to be compatible across versions
* of Spark. This is intended for use as an internal compression utility within a single Spark