From 92132d6efd281cbbd7dcfcb2853d4d9183370d45 Mon Sep 17 00:00:00 2001
From: Aleksandar Pokopec
Date: Mon, 10 Jan 2011 10:47:20 +0000
Subject: Enabled parallel group by.

No review.
---
 src/library/scala/collection/parallel/ParIterableLike.scala | 10 +++++-----
 src/library/scala/collection/parallel/ParSeqViewLike.scala  |  2 +-
 .../src/scala/collection/parallel/Benchmarking.scala        |  1 +
 test/files/scalacheck/parallel-collections/pc.scala         |  2 +-
 4 files changed, 8 insertions(+), 7 deletions(-)

diff --git a/src/library/scala/collection/parallel/ParIterableLike.scala b/src/library/scala/collection/parallel/ParIterableLike.scala
index 9f894c0af8..190752a3ca 100644
--- a/src/library/scala/collection/parallel/ParIterableLike.scala
+++ b/src/library/scala/collection/parallel/ParIterableLike.scala
@@ -520,11 +520,11 @@ self =>
     executeAndWaitResult(new Partition(pred, cbfactory, parallelIterator) mapResult { p => (p._1.result, p._2.result) })
   }
 
-  // override def groupBy[K](f: T => K): immutable.ParMap[K, Repr] = {
-  //   executeAndWaitResult(new GroupBy(f, () => HashMapCombiner[K, T], parallelIterator) mapResult {
-  //     rcb => rcb.groupByKey(cbfactory)
-  //   })
-  // }
+  override def groupBy[K](f: T => K): immutable.ParMap[K, Repr] = {
+    executeAndWaitResult(new GroupBy(f, () => HashMapCombiner[K, T], parallelIterator) mapResult {
+      rcb => rcb.groupByKey(cbfactory)
+    })
+  }
 
   override def take(n: Int): Repr = {
     val actualn = if (size > n) n else size
diff --git a/src/library/scala/collection/parallel/ParSeqViewLike.scala b/src/library/scala/collection/parallel/ParSeqViewLike.scala
index 1b5ae06c42..7b564a9ad4 100644
--- a/src/library/scala/collection/parallel/ParSeqViewLike.scala
+++ b/src/library/scala/collection/parallel/ParSeqViewLike.scala
@@ -160,7 +160,7 @@ self =>
   override def collect[S, That](pf: PartialFunction[T, S])(implicit bf: CanBuildFrom[This, S, That]): That = filter(pf.isDefinedAt).map(pf)(bf)
   override def scanLeft[S, That](z: S)(op: (S, T) => S)(implicit bf: CanBuildFrom[This, S, That]): That = newForced(thisParSeq.scanLeft(z)(op)).asInstanceOf[That]
   override def scanRight[S, That](z: S)(op: (T, S) => S)(implicit bf: CanBuildFrom[This, S, That]): That = newForced(thisParSeq.scanRight(z)(op)).asInstanceOf[That]
-  override def groupBy[K](f: T => K): collection.immutable.Map[K, This] = thisParSeq.groupBy(f).mapValues(xs => newForced(xs).asInstanceOf[This])
+  override def groupBy[K](f: T => K): immutable.ParMap[K, This] = thisParSeq.groupBy(f).map(kv => (kv._1, newForced(kv._2).asInstanceOf[This]))
   override def force[U >: T, That](implicit bf: CanBuildFrom[Coll, U, That]) = bf ifParallel { pbf =>
     executeAndWaitResult(new Force(pbf, parallelIterator).mapResult(_.result).asInstanceOf[Task[That, _]])
   } otherwise {
diff --git a/test/benchmarks/src/scala/collection/parallel/Benchmarking.scala b/test/benchmarks/src/scala/collection/parallel/Benchmarking.scala
index cbda3551e0..e413fb537e 100644
--- a/test/benchmarks/src/scala/collection/parallel/Benchmarking.scala
+++ b/test/benchmarks/src/scala/collection/parallel/Benchmarking.scala
@@ -131,6 +131,7 @@ trait BenchmarkRegister {
 
   // general examples
   register(misc.Coder)
+  register(misc.Loader)
 
 }
diff --git a/test/files/scalacheck/parallel-collections/pc.scala b/test/files/scalacheck/parallel-collections/pc.scala
index 598c5a3751..4be7b0ec4d 100644
--- a/test/files/scalacheck/parallel-collections/pc.scala
+++ b/test/files/scalacheck/parallel-collections/pc.scala
@@ -53,7 +53,7 @@ object Test {
       workers = 1,
       minSize = 0,
       maxSize = 4000,
-      minSuccessfulTests = 10
+      minSuccessfulTests = 5
     ),
     pc
   )
-- 
cgit v1.2.3
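
A minimal usage sketch follows; it is not part of the patch. It assumes a Scala build where parallel collections are available with the 2.9-style API (or, on newer versions, the separate scala-parallel-collections module), and the object name ParGroupByDemo and its sample data are made up for illustration. It shows the user-visible effect of the ParIterableLike change above: groupBy on a parallel collection is computed in parallel and, as the ParSeqViewLike signature change also reflects, yields an immutable.ParMap rather than a sequential Map.

import scala.collection.parallel.immutable.ParVector

// Hypothetical demo, not part of the patch: exercises the parallel groupBy
// enabled above. Grouping runs in parallel and the result is a parallel map
// whose values are parallel vectors of the grouped elements.
object ParGroupByDemo {
  def main(args: Array[String]): Unit = {
    val words = ParVector("apple", "avocado", "banana", "blueberry", "cherry")

    // Group the words by their first character.
    val byInitial = words.groupBy(_.head)   // immutable.ParMap[Char, ParVector[String]]

    byInitial.foreach {
      case (initial, group) => println(initial + " -> " + group.mkString(", "))
    }
  }
}

Returning immutable.ParMap here, rather than the collection.immutable.Map of the view's old signature, keeps the result of groupBy parallel, so subsequent transformations on the groups do not silently fall back to sequential execution.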