aboutsummaryrefslogtreecommitdiff
path: root/python/pyspark/shuffle.py
diff options
context:
space:
mode:
authorDavies Liu <davies.liu@gmail.com>2014-10-06 14:07:53 -0700
committerJosh Rosen <joshrosen@apache.org>2014-10-06 14:07:53 -0700
commit4f01265f7d62e070ba42c251255e385644c1b16c (patch)
treee6dbec031ebe0653ab232ac613548289c720eb48 /python/pyspark/shuffle.py
parent20ea54cc7a5176ebc63bfa9393a9bf84619bfc66 (diff)
downloadspark-4f01265f7d62e070ba42c251255e385644c1b16c.tar.gz
spark-4f01265f7d62e070ba42c251255e385644c1b16c.tar.bz2
spark-4f01265f7d62e070ba42c251255e385644c1b16c.zip
[SPARK-3786] [PySpark] speedup tests
This patch tries to speed up the PySpark tests by re-using the SparkContext in tests.py and mllib/tests.py, reducing the overhead of creating a SparkContext, and removing some test cases that did not make sense. It also improves the performance of some cases, such as MergerTests and SortTests. before this patch: real 21m27.320s user 4m42.967s sys 0m17.343s after this patch: real 9m47.541s user 2m12.947s sys 0m14.543s It almost cuts the time in half. Author: Davies Liu <davies.liu@gmail.com> Closes #2646 from davies/tests and squashes the following commits: c54de60 [Davies Liu] revert change about memory limit 6a2a4b0 [Davies Liu] refactor of tests, speedup 100%
Diffstat (limited to 'python/pyspark/shuffle.py')
-rw-r--r--python/pyspark/shuffle.py5
1 file changed, 2 insertions, 3 deletions
diff --git a/python/pyspark/shuffle.py b/python/pyspark/shuffle.py
index ce597cbe91..d57a802e47 100644
--- a/python/pyspark/shuffle.py
+++ b/python/pyspark/shuffle.py
@@ -396,7 +396,6 @@ class ExternalMerger(Merger):
for v in self.data.iteritems():
yield v
self.data.clear()
- gc.collect()
# remove the merged partition
for j in range(self.spills):
@@ -428,7 +427,7 @@ class ExternalMerger(Merger):
subdirs = [os.path.join(d, "parts", str(i))
for d in self.localdirs]
m = ExternalMerger(self.agg, self.memory_limit, self.serializer,
- subdirs, self.scale * self.partitions)
+ subdirs, self.scale * self.partitions, self.partitions)
m.pdata = [{} for _ in range(self.partitions)]
limit = self._next_limit()
@@ -486,7 +485,7 @@ class ExternalSorter(object):
goes above the limit.
"""
global MemoryBytesSpilled, DiskBytesSpilled
- batch = 10
+ batch = 100
chunks, current_chunk = [], []
iterator = iter(iterator)
while True: