diff options
author | Idan Zalzberg <idanzalz@gmail.com> | 2015-01-25 11:28:05 -0800 |
---|---|---|
committer | Josh Rosen <joshrosen@databricks.com> | 2015-01-25 11:28:05 -0800 |
commit | 412a58e118ef083ea1d1d6daccd9c531852baf53 (patch) | |
tree | 5cc6cb2b62fb1394e4356eba308c74a6320c65e5 /core | |
parent | d22ca1e921d792e49600c4c484a846e739c340ca (diff) | |
download | spark-412a58e118ef083ea1d1d6daccd9c531852baf53.tar.gz spark-412a58e118ef083ea1d1d6daccd9c531852baf53.tar.bz2 spark-412a58e118ef083ea1d1d6daccd9c531852baf53.zip |
Add comment about defaultMinPartitions
Added a comment about using math.min for choosing default partition count
Author: Idan Zalzberg <idanzalz@gmail.com>
Closes #4102 from idanz/patch-2 and squashes the following commits:
50e9d58 [Idan Zalzberg] Update SparkContext.scala
Diffstat (limited to 'core')
-rw-r--r-- | core/src/main/scala/org/apache/spark/SparkContext.scala | 6 |
1 file changed, 5 insertions, 1 deletion
diff --git a/core/src/main/scala/org/apache/spark/SparkContext.scala b/core/src/main/scala/org/apache/spark/SparkContext.scala index 8175d175b1..4c4ee04cc5 100644 --- a/core/src/main/scala/org/apache/spark/SparkContext.scala +++ b/core/src/main/scala/org/apache/spark/SparkContext.scala @@ -1514,7 +1514,11 @@ class SparkContext(config: SparkConf) extends Logging with ExecutorAllocationCli @deprecated("use defaultMinPartitions", "1.0.0") def defaultMinSplits: Int = math.min(defaultParallelism, 2) - /** Default min number of partitions for Hadoop RDDs when not given by user */ + /** + * Default min number of partitions for Hadoop RDDs when not given by user + * Notice that we use math.min so the "defaultMinPartitions" cannot be higher than 2. + * The reasons for this are discussed in https://github.com/mesos/spark/pull/718 + */ def defaultMinPartitions: Int = math.min(defaultParallelism, 2) private val nextShuffleId = new AtomicInteger(0) |