author     Aleksandar Pokopec <aleksandar.prokopec@epfl.ch>    2010-07-26 16:31:47 +0000
committer  Aleksandar Pokopec <aleksandar.prokopec@epfl.ch>    2010-07-26 16:31:47 +0000
commit     c6cc8c72820a5d540b9bfa4a8dc52eb0923936f1 (patch)
tree       965972aad5bbd243e9438ea9bbe73a1d5766135c
parent     0ce0ad51284ba129a7553ea2ec98d1be45660706 (diff)
Refactored benchmarks.
-rw-r--r--  src/library/scala/collection/parallel/ParIterableLike.scala  95
-rw-r--r--  src/library/scala/collection/parallel/RemainsIterator.scala  11
-rw-r--r--  src/library/scala/collection/parallel/mutable/ParArray.scala  42
-rw-r--r--  test/benchmarks/src/scala/collection/parallel/Benchmarking.scala  13
-rw-r--r--  test/benchmarks/src/scala/collection/parallel/benchmarks/generic/ParallelBenches.scala  28
-rw-r--r--  test/benchmarks/src/scala/collection/parallel/benchmarks/hashtries/ParallelHashTries.scala  18
-rw-r--r--  test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/AggregateLight.scala  2
-rw-r--r--  test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/Companion.scala  2
-rw-r--r--  test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/Corresponds.scala  2
-rw-r--r--  test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/DiffHalf.scala  2
-rw-r--r--  test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/IntersectHalf.scala  2
-rw-r--r--  test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/MatrixMultiplication.scala  4
-rw-r--r--  test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/PadToDouble.scala  2
-rw-r--r--  test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/PatchHalf.scala  2
-rw-r--r--  test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/PlusPlus.scala  4
-rw-r--r--  test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ReduceLight.scala  2
-rw-r--r--  test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/Resettable.scala  6
-rw-r--r--  test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/SameElementsLong.scala  2
-rw-r--r--  test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ScanLight.scala  40
-rw-r--r--  test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/SequentialOps.scala  15
-rw-r--r--  test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_range/RangeBenches.scala  22
-rw-r--r--  test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_view/SeqViewBenches.scala  4
22 files changed, 260 insertions, 60 deletions
diff --git a/src/library/scala/collection/parallel/ParIterableLike.scala b/src/library/scala/collection/parallel/ParIterableLike.scala
index 82e1b60e25..0769181150 100644
--- a/src/library/scala/collection/parallel/ParIterableLike.scala
+++ b/src/library/scala/collection/parallel/ParIterableLike.scala
@@ -518,6 +518,29 @@ extends IterableLike[T, Repr]
executeAndWaitResult(new SplitAt(n, cbfactory, parallelIterator) mapResult { p => (p._1.result, p._2.result) })
}
+ /** Computes a prefix scan of the elements of the collection.
+ *
+ * Note: The neutral element `z` may be applied more than once.
+ *
+ * @tparam U element type of the resulting collection
+ * @tparam That type of the resulting collection
+ * @param z neutral element for the operator `op`
+ * @param op the associative operator for the scan
+ * @param cbf combiner factory which provides a combiner
+ * @return a collection containing the prefix scan of the elements in the original collection
+ *
+ * @usecase def scan(z: T)(op: (T, T) => T): $Coll[T]
+ *
+ * @return a new $coll containing the prefix scan of the elements in this $coll
+ */
+ def scan[U >: T, That](z: U)(op: (U, U) => U)(implicit cbf: CanCombineFrom[Repr, U, That]): That = {
+ val array = new Array[Any](size + 1)
+ array(0) = z
+ executeAndWaitResult(new ScanToArray[U, Any](z, op, 1, size, array, parallelIterator) mapResult { u =>
+ executeAndWaitResult(new FromArray(array, 0, size + 1, cbf) mapResult { _.result })
+ })
+ }
+
/** Takes the longest prefix of elements that satisfy the predicate.
*
* $indexsignalling
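
For reference, a minimal sequential sketch of the prefix-scan semantics documented above (plain Scala, not part of this patch): the result has size + 1 elements and starts with z, and since z may be applied more than once, op should be associative and op(z, x) should equal x for the parallel result to match the sequential one.

    object ScanSemanticsSketch {
      // Sequential reference: scanLeft produces exactly the documented result shape.
      def prefixScan[T](xs: Seq[T], z: T)(op: (T, T) => T): Seq[T] =
        xs.scanLeft(z)(op)

      def main(args: Array[String]): Unit = {
        println(prefixScan(List(1, 2, 3), 0)(_ + _))  // List(0, 1, 3, 6)
      }
    }
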
@@ -899,7 +922,7 @@ extends IterableLike[T, Repr]
extends Accessor[Unit, CopyToArray[U, This]] {
var result: Unit = ()
def leaf(prev: Option[Unit]) = pit.copyToArray(array, from, len)
- def newSubtask(p: ParIterator) = throw new UnsupportedOperationException
+ def newSubtask(p: ParIterator) = unsupported
override def split = {
val pits = pit.split
for ((p, untilp) <- pits zip pits.scanLeft(0)(_ + _.remaining); if untilp < len) yield {
@@ -909,6 +932,76 @@ extends IterableLike[T, Repr]
}
}
+ protected[this] class ScanToArray[U >: T, A >: U](z: U, op: (U, U) => U, val from: Int, val len: Int, array: Array[A], val pit: ParIterator)
+ extends Accessor[Boolean, ScanToArray[U, A]] {
+ var result: Boolean = false // whether it was prefix-scanned, because previous result was already available
+ def leaf(prev: Option[Boolean]) = if (prev.isDefined) { // use prev result as an optimisation
+ val lastelem = array(from - 1)
+ pit.scanToArray(lastelem.asInstanceOf[U], op, array, from)
+ result = true
+ } else pit.scanToArray(z, op, array, from)
+ def newSubtask(p: ParIterator) = unsupported
+ override def shouldSplitFurther = len > size / 2
+ override def split = {
+ val pits = pit.split
+ for ((p, untilp) <- pits zip pits.scanLeft(0)(_ + _.remaining); if untilp < len) yield {
+ val plen = p.remaining min (len - untilp)
+ new ScanToArray[U, A](z, op, from + untilp, plen, array, p)
+ }
+ }
+ override def merge(that: ScanToArray[U, A]) = if (!that.result) { // if previous result wasn't available when task was initiated
+ // apply the rightmost element of this array part to all the elements of `that`
+ executeAndWait(new ApplyToArray(array(that.from - 1).asInstanceOf[U], op, that.from, that.len, array))
+ }
+ }
+
+ protected[this] class ApplyToArray[U >: T, A >: U](elem: U, op: (U, U) => U, from: Int, len: Int, array: Array[A])
+ extends super.Task[Unit, ApplyToArray[U, A]] {
+ var result: Unit = ()
+ def leaf(prev: Option[Unit]) = {
+ var i = from
+ val until = from + len
+ while (i < until) {
+ array(i) = op(elem, array(i).asInstanceOf[U])
+ i += 1
+ }
+ }
+ def shouldSplitFurther = len > threshold(size, parallelismLevel)
+ def split = {
+ val fp = len / 2
+ val sp = len - fp
+ Seq(
+ new ApplyToArray(elem, op, from, fp, array),
+ new ApplyToArray(elem, op, from + fp, sp, array)
+ )
+ }
+ }
+
+ protected[this] class FromArray[S, A, That](array: Array[A], from: Int, len: Int, cbf: CanCombineFrom[Repr, S, That])
+ extends super.Task[Combiner[S, That], FromArray[S, A, That]] {
+ var result: Result = null
+ def leaf(prev: Option[Result]) = {
+ val cb = prev getOrElse cbf(self.repr)
+ var i = from
+ val until = from + len
+ while (i < until) {
+ cb += array(i).asInstanceOf[S]
+ i += 1
+ }
+ result = cb
+ }
+ def shouldSplitFurther = len > threshold(size, parallelismLevel)
+ def split = {
+ val fp = len / 2
+ val sp = len - fp
+ Seq(
+ new FromArray(array, from, fp, cbf),
+ new FromArray(array, from + fp, sp, cbf)
+ )
+ }
+ override def merge(that: FromArray[S, A, That]) = result = result combine that.result
+ }
+
}
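
To illustrate how the tasks above cooperate, here is a self-contained sequential simulation (illustrative names only, not library code) of the fallback path: each chunk is scanned locally seeded with z, as ScanToArray's leaf does, and on merge the last prefix of the left chunk is applied to every slot of the right chunk, as ApplyToArray does.

    object TwoPhaseScanSketch {
      def main(args: Array[String]): Unit = {
        val op = (a: Int, b: Int) => a + b
        val z = 0
        val data = Array(1, 2, 3, 4, 5, 6)
        val out = new Array[Int](data.length + 1)
        out(0) = z
        val mid = data.length / 2
        // Phase 1: each chunk is scanned independently, seeded with z.
        def scanChunk(from: Int, until: Int, seed: Int): Unit = {
          var last = seed
          var i = from
          while (i < until) { last = op(last, data(i)); out(i + 1) = last; i += 1 }
        }
        scanChunk(0, mid, z)
        scanChunk(mid, data.length, z)
        // Phase 2 (merge): the right chunk was not seeded with the left chunk's
        // last prefix, so apply that element to every slot of the right chunk.
        val carry = out(mid)
        for (i <- mid + 1 to data.length) out(i) = op(carry, out(i))
        println(out.mkString(", "))  // 0, 1, 3, 6, 10, 15, 21
      }
    }
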
diff --git a/src/library/scala/collection/parallel/RemainsIterator.scala b/src/library/scala/collection/parallel/RemainsIterator.scala
index bf8ae4a834..18878a3bba 100644
--- a/src/library/scala/collection/parallel/RemainsIterator.scala
+++ b/src/library/scala/collection/parallel/RemainsIterator.scala
@@ -209,6 +209,17 @@ trait AugmentedIterableIterator[+T, +Repr <: Parallel] extends RemainsIterator[T
while (hasNext) after += next
(before, after)
}
+
+ def scanToArray[U >: T, A >: U](z: U, op: (U, U) => U, array: Array[A], from: Int) {
+ var last = z
+ var i = from
+ while (hasNext) {
+ last = op(last, next)
+ array(i) = last
+ i += 1
+ }
+ }
+
}
diff --git a/src/library/scala/collection/parallel/mutable/ParArray.scala b/src/library/scala/collection/parallel/mutable/ParArray.scala
index 2443888465..da99db860b 100644
--- a/src/library/scala/collection/parallel/mutable/ParArray.scala
+++ b/src/library/scala/collection/parallel/mutable/ParArray.scala
@@ -510,6 +510,34 @@ extends ParSeq[T]
}
}
+ override def scanToArray[U >: T, A >: U](z: U, op: (U, U) => U, destarr: Array[A], from: Int) {
+ // var last = z
+ // var j = from
+ // var k = i
+ // val ntil = until
+ // val a = arr
+ // while (k < ntil) {
+ // last = op(last, a(k).asInstanceOf[U])
+ // destarr(j) = last
+ // k += 1
+ // }
+ // i = k
+ scanToArray_quick[U](array, destarr.asInstanceOf[Array[Any]], op, z, i, until, from)
+ i = until
+ }
+
+ protected def scanToArray_quick[U](srcarr: Array[Any], destarr: Array[Any], op: (U, U) => U, z: U, srcfrom: Int, srcntil: Int, destfrom: Int) {
+ var last = z
+ var j = srcfrom
+ var k = destfrom
+ while (j < srcntil) {
+ last = op(last, srcarr(j).asInstanceOf[U])
+ destarr(k) = last
+ j += 1
+ k += 1
+ }
+ }
+
}
/* operations */
@@ -517,7 +545,7 @@ extends ParSeq[T]
private def buildsArray[S, That](c: Builder[S, That]) = c.isInstanceOf[ParArrayCombiner[_]]
override def map[S, That](f: T => S)(implicit bf: CanBuildFrom[ParArray[T], S, That]) = if (buildsArray(bf(repr))) {
- // reserve array
+ // reserve an array
val targetarr = new Array[Any](length)
// fill it in parallel
@@ -527,6 +555,18 @@ extends ParSeq[T]
(new ParArray[S](new ExposedArraySeq[S](targetarr.asInstanceOf[Array[AnyRef]], length))).asInstanceOf[That]
} else super.map(f)(bf)
+ override def scan[U >: T, That](z: U)(op: (U, U) => U)(implicit cbf: CanCombineFrom[ParArray[T], U, That]): That = if (buildsArray(cbf(repr))) {
+ // reserve an array
+ val targetarr = new Array[Any](length + 1)
+ targetarr(0) = z
+
+ // do a parallel prefix scan
+ executeAndWait(new ScanToArray[U, Any](z, op, 1, size, targetarr, parallelIterator))
+
+ // wrap the array into a parallel array
+ (new ParArray[U](new ExposedArraySeq[U](targetarr.asInstanceOf[Array[AnyRef]], length + 1))).asInstanceOf[That]
+ } else super.scan(z)(op)(cbf)
+
/* tasks */
class Map[S](f: T => S, targetarr: Array[Any], offset: Int, howmany: Int) extends super.Task[Unit, Map[S]] {
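
A usage sketch of the ParArray.scan override added above, following the constructor-and-update pattern that the benchmarks in this patch use; it assumes the usual implicit combiner factory for ParArray is in scope at this revision.

    import scala.collection.parallel.mutable.ParArray

    object ParArrayScanSketch {
      def main(args: Array[String]): Unit = {
        val pa = new ParArray[Int](4)
        for (i <- 0 until 4) pa(i) = i + 1   // fill with 1, 2, 3, 4
        val scanned = pa.scan(0)(_ + _)      // expected: 0, 1, 3, 6, 10 (length + 1 elements)
        println(scanned)
      }
    }
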
diff --git a/test/benchmarks/src/scala/collection/parallel/Benchmarking.scala b/test/benchmarks/src/scala/collection/parallel/Benchmarking.scala
index 0054893b8a..89b5696f9d 100644
--- a/test/benchmarks/src/scala/collection/parallel/Benchmarking.scala
+++ b/test/benchmarks/src/scala/collection/parallel/Benchmarking.scala
@@ -64,6 +64,7 @@ trait BenchmarkRegister {
register(parallel_array.PatchHalf)
register(parallel_array.PadToDouble)
register(parallel_array.AggregateLight)
+ register(parallel_array.ScanLight)
register(parallel_array.MatrixMultiplication)
// parallel views
@@ -96,10 +97,10 @@ trait BenchmarkRegister {
register(hashtries.MultipleCombine)
// parallel hash trie benchmarks
- register(hashtries.RefParallelHashTrieBenches.Reduce)
- register(hashtries.RefParallelHashTrieBenches.ReduceMedium)
- register(hashtries.RefParallelHashTrieBenches.Map)
- register(hashtries.RefParallelHashTrieBenches.Map2)
+ register(hashtries.RefParHashTrieBenches.Reduce)
+ register(hashtries.RefParHashTrieBenches.ReduceMedium)
+ register(hashtries.RefParHashTrieBenches.Map)
+ register(hashtries.RefParHashTrieBenches.Map2)
}
@@ -110,8 +111,8 @@ object Benchmarking extends BenchmarkRegister {
def printHelp {
println("Must enter at least four arguments: <collection> <benchmark> <size of the collection> <type>")
- println(" Example: ParallelArray reduce-light 50000 par")
- println(" Example: ParallelArray -all 50000 par")
+ println(" Example: ParArray reduce-light 50000 par")
+ println(" Example: ParArray -all 50000 par")
println
println("General synthax: <collection> <benchmark> <size> <type> <parallelism-level>")
println(" <collection> - name of the collection to test, `-all` runs benchmarks for all collections")
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/generic/ParallelBenches.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/generic/ParallelBenches.scala
index 3db33ebaed..da5a2a63ca 100644
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/generic/ParallelBenches.scala
+++ b/test/benchmarks/src/scala/collection/parallel/benchmarks/generic/ParallelBenches.scala
@@ -8,7 +8,7 @@ package generic
-trait ParallelIterableBench[T, Coll <: ParallelIterable[T]] extends collection.parallel.benchmarks.Bench {
+trait ParIterableBench[T, Coll <: ParIterable[T]] extends collection.parallel.benchmarks.Bench {
self =>
protected var seqcoll: Iterable[T] = null
@@ -31,7 +31,7 @@ self =>
def collectionName = self.nameOfCollection
}
- trait IterableBench extends ParallelIterableBench[T, Coll] {
+ trait IterableBench extends ParIterableBench[T, Coll] {
def nameOfCollection = self.nameOfCollection
def operators = self.operators
def createSequential(sz: Int, p: Int) = self.createSequential(size, parallelism)
@@ -51,7 +51,7 @@ self =>
}
-trait ParallelSeqBench[T, Coll <: ParallelSeq[T]] extends ParallelIterableBench[T, Coll] {
+trait ParSeqBench[T, Coll <: ParSeq[T]] extends ParIterableBench[T, Coll] {
self =>
def createSequential(sz: Int, p: Int): Seq[T]
@@ -60,7 +60,7 @@ trait ParallelSeqBench[T, Coll <: ParallelSeq[T]] extends ParallelIterableBench[
def collectionName = self.nameOfCollection
}
- trait SeqBench extends IterableBench with ParallelSeqBench[T, Coll] {
+ trait SeqBench extends IterableBench with ParSeqBench[T, Coll] {
override def createSequential(sz: Int, p: Int) = self.createSequential(size, parallelism)
}
@@ -80,7 +80,7 @@ trait NotBenchmark {
/**
* Standard benchmarks for collections.
*/
-trait StandardParallelIterableBench[T, Coll <: ParallelIterable[T]] extends ParallelIterableBench[T, Coll] {
+trait StandardParIterableBench[T, Coll <: ParIterable[T]] extends ParIterableBench[T, Coll] {
object Reduce extends IterableBenchCompanion {
override def defaultSize = 50000
@@ -89,7 +89,7 @@ trait StandardParallelIterableBench[T, Coll <: ParallelIterable[T]] extends Para
}
class Reduce(val size: Int, val parallelism: Int, val runWhat: String)
- extends IterableBench with StandardParallelIterableBench[T, Coll] {
+ extends IterableBench with StandardParIterableBench[T, Coll] {
def comparisonMap = collection.Map()
def runseq = this.seqcoll.reduceLeft(operators.reducer)
def runpar = this.parcoll.reduce(operators.reducer)
@@ -103,7 +103,7 @@ trait StandardParallelIterableBench[T, Coll <: ParallelIterable[T]] extends Para
}
class ReduceMedium(val size: Int, val parallelism: Int, val runWhat: String)
- extends IterableBench with StandardParallelIterableBench[T, Coll] {
+ extends IterableBench with StandardParIterableBench[T, Coll] {
def comparisonMap = collection.Map()
def runseq = this.seqcoll.reduceLeft(operators.mediumreducer)
def runpar = this.parcoll.reduce(operators.mediumreducer)
@@ -117,7 +117,7 @@ trait StandardParallelIterableBench[T, Coll <: ParallelIterable[T]] extends Para
}
class Map(val size: Int, val parallelism: Int, val runWhat: String)
- extends IterableBench with StandardParallelIterableBench[T, Coll] {
+ extends IterableBench with StandardParIterableBench[T, Coll] {
def comparisonMap = collection.Map()
def runseq = this.seqcoll.map(operators.mapper)
def runpar = this.parcoll.map(operators.mapper)
@@ -131,7 +131,7 @@ trait StandardParallelIterableBench[T, Coll <: ParallelIterable[T]] extends Para
/**
* Benchmarks for sequence views.
*/
-trait ParallelSeqViewBench[T, Coll <: ParallelSeqView[T, ParallelSeq[T], CollSeq], CollSeq] extends ParallelSeqBench[T, Coll] {
+trait ParSeqViewBench[T, Coll <: ParSeqView[T, ParSeq[T], CollSeq], CollSeq] extends ParSeqBench[T, Coll] {
object Reduce extends IterableBenchCompanion {
override def defaultSize = 50000
@@ -140,7 +140,7 @@ trait ParallelSeqViewBench[T, Coll <: ParallelSeqView[T, ParallelSeq[T], CollSeq
}
class Reduce(val size: Int, val parallelism: Int, val runWhat: String)
- extends SeqBench with ParallelSeqViewBench[T, Coll, CollSeq] {
+ extends SeqBench with ParSeqViewBench[T, Coll, CollSeq] {
def comparisonMap = collection.Map()
def runseq = this.seqcoll.reduceLeft(operators.reducer)
def runpar = this.parcoll.reduce(operators.reducer)
@@ -154,7 +154,7 @@ trait ParallelSeqViewBench[T, Coll <: ParallelSeqView[T, ParallelSeq[T], CollSeq
}
class MediumReduce(val size: Int, val parallelism: Int, val runWhat: String)
- extends SeqBench with ParallelSeqViewBench[T, Coll, CollSeq] {
+ extends SeqBench with ParSeqViewBench[T, Coll, CollSeq] {
def comparisonMap = collection.Map()
def runseq = this.seqcoll.reduceLeft(operators.mediumreducer)
def runpar = this.parcoll.reduce(operators.mediumreducer)
@@ -168,7 +168,7 @@ trait ParallelSeqViewBench[T, Coll <: ParallelSeqView[T, ParallelSeq[T], CollSeq
}
class ModifyThenReduce(val size: Int, val parallelism: Int, val runWhat: String)
- extends SeqBench with ParallelSeqViewBench[T, Coll, CollSeq] {
+ extends SeqBench with ParSeqViewBench[T, Coll, CollSeq] {
val toadd = createSequential(size, parallelism)
def comparisonMap = collection.Map()
def runseq = {
@@ -189,12 +189,12 @@ trait ParallelSeqViewBench[T, Coll <: ParallelSeqView[T, ParallelSeq[T], CollSeq
}
class ModifyThenForce(val size: Int, val parallelism: Int, val runWhat: String)
- extends SeqBench with ParallelSeqViewBench[T, Coll, CollSeq] {
+ extends SeqBench with ParSeqViewBench[T, Coll, CollSeq] {
val toadd = createSequential(size, parallelism)
def comparisonMap = collection.Map()
def runseq = (seqcoll ++ toadd).drop(size).map(operators.mapper).++(toadd).take(size)
def runpar = {
- val r: ParallelSeqView[T, ParallelSeq[T], Seq[T]] = (parcoll ++ toadd).drop(size).map(operators.mapper).++(toadd).take(size)
+ val r: ParSeqView[T, ParSeq[T], Seq[T]] = (parcoll ++ toadd).drop(size).map(operators.mapper).++(toadd).take(size)
r.force
}
def companion = ModifyThenForce
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/hashtries/ParallelHashTries.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/hashtries/ParallelHashTries.scala
index c617f69161..bec8ba6650 100644
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/hashtries/ParallelHashTries.scala
+++ b/test/benchmarks/src/scala/collection/parallel/benchmarks/hashtries/ParallelHashTries.scala
@@ -3,19 +3,19 @@ package scala.collection.parallel.benchmarks.hashtries
-import scala.collection.parallel.benchmarks.generic.StandardParallelIterableBench
+import scala.collection.parallel.benchmarks.generic.StandardParIterableBench
import scala.collection.parallel.benchmarks.generic.NotBenchmark
import scala.collection.parallel.benchmarks.generic.Dummy
import scala.collection.parallel.benchmarks.generic.Operators
-import scala.collection.parallel.immutable.ParallelHashTrie
+import scala.collection.parallel.immutable.ParHashTrie
-trait ParallelHashTrieBenches[K, V] extends StandardParallelIterableBench[(K, V), ParallelHashTrie[K, V]] {
+trait ParHashTrieBenches[K, V] extends StandardParIterableBench[(K, V), ParHashTrie[K, V]] {
- def nameOfCollection = "ParallelHashTrie"
+ def nameOfCollection = "ParHashTrie"
def comparisonMap = collection.Map()
val forkJoinPool = new scala.concurrent.forkjoin.ForkJoinPool
@@ -26,7 +26,7 @@ trait ParallelHashTrieBenches[K, V] extends StandardParallelIterableBench[(K, V)
}
class Map2(val size: Int, val parallelism: Int, val runWhat: String)
- extends IterableBench with StandardParallelIterableBench[(K, V), ParallelHashTrie[K, V]] {
+ extends IterableBench with StandardParIterableBench[(K, V), ParHashTrie[K, V]] {
var result: Int = 0
def comparisonMap = collection.Map()
def runseq = {
@@ -35,13 +35,13 @@ trait ParallelHashTrieBenches[K, V] extends StandardParallelIterableBench[(K, V)
}
def runpar = {
result = this.parcoll.map(operators.mapper2).size
- //println(collection.parallel.immutable.ParallelHashTrie.totalcombines)
+ //println(collection.parallel.immutable.ParHashTrie.totalcombines)
//System.exit(1)
}
def companion = Map2
override def repetitionsPerRun = 50
override def printResults {
- println("Total combines: " + collection.parallel.immutable.ParallelHashTrie.totalcombines)
+ println("Total combines: " + collection.parallel.immutable.ParHashTrie.totalcombines)
println("Size of last result: " + result)
}
}
@@ -52,7 +52,7 @@ trait ParallelHashTrieBenches[K, V] extends StandardParallelIterableBench[(K, V)
-object RefParallelHashTrieBenches extends ParallelHashTrieBenches[Dummy, Dummy] with NotBenchmark {
+object RefParHashTrieBenches extends ParHashTrieBenches[Dummy, Dummy] with NotBenchmark {
type DPair = (Dummy, Dummy)
@@ -111,7 +111,7 @@ object RefParallelHashTrieBenches extends ParallelHashTrieBenches[Dummy, Dummy]
}
def createParallel(sz: Int, p: Int) = {
- var pht = new ParallelHashTrie[Dummy, Dummy]
+ var pht = new ParHashTrie[Dummy, Dummy]
for (i <- 0 until sz) pht += ((new Dummy(i), new Dummy(i)))
forkJoinPool.setParallelism(p)
pht.environment = forkJoinPool
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/AggregateLight.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/AggregateLight.scala
index 540c7550c7..446f4a6735 100644
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/AggregateLight.scala
+++ b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/AggregateLight.scala
@@ -2,7 +2,7 @@ package scala.collection.parallel.benchmarks.parallel_array
import scala.collection.parallel.benchmarks._
-import scala.collection.parallel.mutable.ParallelArray
+import scala.collection.parallel.mutable.ParArray
import extra166y.{ParallelArray => JSR166Array}
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/Companion.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/Companion.scala
index 744351a39b..f5d6c75abb 100644
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/Companion.scala
+++ b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/Companion.scala
@@ -5,5 +5,5 @@ import scala.collection.parallel.benchmarks._
trait Companion extends BenchCompanion {
- def collectionName = "ParallelArray"
+ def collectionName = "ParArray"
}
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/Corresponds.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/Corresponds.scala
index 2e461460a8..ae50958bdc 100644
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/Corresponds.scala
+++ b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/Corresponds.scala
@@ -14,7 +14,7 @@ with HavingResult[Boolean] {
override def repetitionsPerRun = 400
val same = {
- val p = new collection.parallel.mutable.ParallelArray[Cont](sz)
+ val p = new collection.parallel.mutable.ParArray[Cont](sz)
for (i <- 0 until sz) p(i) = what match {
case "seq" => arr(i).asInstanceOf[Cont]
case "par" => pa(i)
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/DiffHalf.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/DiffHalf.scala
index 3d4221d945..bc0ff3fe74 100644
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/DiffHalf.scala
+++ b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/DiffHalf.scala
@@ -14,7 +14,7 @@ with HavingResult[Int] {
override def repetitionsPerRun = 400
val similar = {
- val p = new collection.parallel.mutable.ParallelArray[Cont](sz)
+ val p = new collection.parallel.mutable.ParArray[Cont](sz)
for (i <- 0 until sz) p(i) = what match {
case "seq" => arr(i).asInstanceOf[Cont]
case "par" => pa(i)
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/IntersectHalf.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/IntersectHalf.scala
index 4d71bf8590..fa0b9f8baf 100644
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/IntersectHalf.scala
+++ b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/IntersectHalf.scala
@@ -14,7 +14,7 @@ with HavingResult[Int] {
override def repetitionsPerRun = 400
val similar = {
- val p = new collection.parallel.mutable.ParallelArray[Cont](sz)
+ val p = new collection.parallel.mutable.ParArray[Cont](sz)
for (i <- 0 until sz) p(i) = what match {
case "seq" => arr(i).asInstanceOf[Cont]
case "par" => pa(i)
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/MatrixMultiplication.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/MatrixMultiplication.scala
index a8bb5ea1ca..be49995589 100644
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/MatrixMultiplication.scala
+++ b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/MatrixMultiplication.scala
@@ -2,7 +2,7 @@ package scala.collection.parallel.benchmarks.parallel_array
-import collection.parallel.immutable.ParallelRange
+import collection.parallel.immutable.ParRange
object MatrixMultiplication extends Companion {
@@ -38,7 +38,7 @@ extends Resettable(sz, p, what, new Cont(_), new Array[Any](_), classOf[Cont]) {
}
def assignProduct(a: Matrix[T], b: Matrix[T]) = {
- val range = new ParallelRange(0, n * n, 1, false)
+ val range = new ParRange(0, n * n, 1, false)
range.environment = forkjoinpool
for (i <- range) this(i / n, i % n) = calcProduct(a, b, i / n, i % n);
}
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/PadToDouble.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/PadToDouble.scala
index 3bbe99516b..26c5dd2114 100644
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/PadToDouble.scala
+++ b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/PadToDouble.scala
@@ -14,7 +14,7 @@ with HavingResult[Int] {
override def repetitionsPerRun = 400
val similar = {
- val p = new collection.parallel.mutable.ParallelArray[Cont](sz)
+ val p = new collection.parallel.mutable.ParArray[Cont](sz)
for (i <- 0 until sz) p(i) = what match {
case "seq" => arr(i).asInstanceOf[Cont]
case "par" => pa(i)
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/PatchHalf.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/PatchHalf.scala
index e1fc0f9f2a..73bbc5ee1e 100644
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/PatchHalf.scala
+++ b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/PatchHalf.scala
@@ -14,7 +14,7 @@ with HavingResult[Int] {
override def repetitionsPerRun = 400
val similar = {
- val p = new collection.parallel.mutable.ParallelArray[Cont](sz)
+ val p = new collection.parallel.mutable.ParArray[Cont](sz)
for (i <- 0 until sz) p(i) = what match {
case "seq" => arr(i).asInstanceOf[Cont]
case "par" => pa(i)
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/PlusPlus.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/PlusPlus.scala
index ca500281e3..f9d6e277a5 100644
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/PlusPlus.scala
+++ b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/PlusPlus.scala
@@ -1,7 +1,7 @@
package scala.collection.parallel.benchmarks.parallel_array
-import scala.collection.parallel.mutable.ParallelArray
+import scala.collection.parallel.mutable.ParArray
object PlusPlus extends Companion {
@@ -16,7 +16,7 @@ extends Resettable(sz, p, what, new Cont(_), new Array[Any](_), classOf[Cont]) {
def companion = PlusPlus
val thatarr = new Array[Cont](sz)
- val thatpa = new ParallelArray[Cont](sz)
+ val thatpa = new ParArray[Cont](sz)
def runpar = pa ++ thatpa
def runseq = arr ++ thatarr
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ReduceLight.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ReduceLight.scala
index 890cbf5108..668590d463 100644
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ReduceLight.scala
+++ b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ReduceLight.scala
@@ -2,7 +2,7 @@ package scala.collection.parallel.benchmarks.parallel_array
import scala.collection.parallel.benchmarks._
-import scala.collection.parallel.mutable.ParallelArray
+import scala.collection.parallel.mutable.ParArray
import extra166y.{ParallelArray => JSR166Array}
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/Resettable.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/Resettable.scala
index 83168ca979..68ceac2b53 100644
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/Resettable.scala
+++ b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/Resettable.scala
@@ -2,7 +2,7 @@ package scala.collection.parallel.benchmarks.parallel_array
import scala.collection.parallel.benchmarks._
-import scala.collection.parallel.mutable.ParallelArray
+import scala.collection.parallel.mutable.ParArray
import extra166y.{ParallelArray => JSR166Array}
@@ -67,7 +67,7 @@ extends Bench with SequentialOps[T] {
val papool = new jsr166y.ForkJoinPool(parallelism)
papool.setMaximumPoolSize(parallelism)
- var pa: ParallelArray[T] = null
+ var pa: ParArray[T] = null
var jsrarr: JSR166Array[T] = null
reset
@@ -76,7 +76,7 @@ extends Bench with SequentialOps[T] {
arr = arrcreator(size)
for (i <- 0 until size) arr(i) = elemcreator(i)
case "par" =>
- pa = new ParallelArray[T](size)
+ pa = new ParArray[T](size)
pa.environment = forkjoinpool
for (i <- 0 until size) pa(i) = elemcreator(i)
case "jsr" =>
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/SameElementsLong.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/SameElementsLong.scala
index 54f9519a1b..8ca0bedfde 100644
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/SameElementsLong.scala
+++ b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/SameElementsLong.scala
@@ -14,7 +14,7 @@ with HavingResult[Boolean] {
override def repetitionsPerRun = 400
val same = {
- val p = new collection.parallel.mutable.ParallelArray[Cont](sz)
+ val p = new collection.parallel.mutable.ParArray[Cont](sz)
for (i <- 0 until sz) p(i) = what match {
case "seq" => arr(i).asInstanceOf[Cont]
case "par" => pa(i)
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ScanLight.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ScanLight.scala
new file mode 100644
index 0000000000..0aa9a71cb8
--- /dev/null
+++ b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ScanLight.scala
@@ -0,0 +1,40 @@
+package scala.collection.parallel.benchmarks.parallel_array
+
+
+import scala.collection.parallel.benchmarks._
+import scala.collection.parallel.mutable.ParArray
+
+
+object ScanLight extends Companion {
+ def benchName = "scan-light";
+ def apply(sz: Int, parallelism: Int, what: String) = new ScanLight(sz, parallelism, what)
+ override def comparisons = List()
+ override def defaultSize = 40000
+
+ val op = (a: Cont, b: Cont) => {
+ val m = if (a.in < 0) 1 else 0
+ new Cont(a.in + b.in + m * (0 until 100).reduceLeft(_ + _))
+ }
+}
+
+
+class ScanLight(sz: Int, p: Int, what: String)
+extends Resettable[Cont](sz, p, what, new Cont(_), new Array[Any](_), classOf[Cont]) {
+ def companion = AggregateLight
+ override def repetitionsPerRun = 10
+ override val runs = 10
+
+ def runpar = pa.scan(new Cont(0))(ScanLight.op)
+ def runseq = sequentialScan(new Cont(0), ScanLight.op, sz)
+ override def comparisonMap = collection.Map()
+}
+
+
+
+
+
+
+
+
+
+
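
A note on the kernel in ScanLight.op: (0 until 100).reduceLeft(_ + _) always evaluates to 4950, and with the index-based elements the Resettable harness creates, a.in stays non-negative, so m is 0 and the scan still computes plain sums of the in fields while paying a fixed per-application cost. A self-contained check, using a stand-in Cont (the benchmark's Cont class is defined elsewhere in this tree):

    object ScanLightOpSketch {
      final class Cont(val in: Int)  // stand-in for the benchmark's Cont
      val op = (a: Cont, b: Cont) => {
        val m = if (a.in < 0) 1 else 0
        new Cont(a.in + b.in + m * (0 until 100).reduceLeft(_ + _))  // constant term is 4950
      }
      def main(args: Array[String]): Unit = {
        println(op(new Cont(2), new Cont(3)).in)  // 5, since m = 0 for non-negative a.in
      }
    }
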
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/SequentialOps.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/SequentialOps.scala
index 36e1d22d7e..18568ab7e9 100644
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/SequentialOps.scala
+++ b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/SequentialOps.scala
@@ -17,6 +17,21 @@ trait SequentialOps[T] {
sum
}
+ def sequentialScan(z: T, op: (T, T) => T, sz: Int) = {
+ var outarr = new Array[Any](sz + 1)
+ outarr(0) = z
+ var last = z
+ var i = 0
+ var j = 1
+ val until = sz
+ while (i < until) {
+ last = op(last, arr(i).asInstanceOf[T])
+ outarr(j) = last
+ i += 1
+ j += 1
+ }
+ }
+
def sequentialCount(pred: T => Boolean, sz: Int) = {
var i = 0
val until = sz
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_range/RangeBenches.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_range/RangeBenches.scala
index ccc0568b2b..8a01d668fb 100644
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_range/RangeBenches.scala
+++ b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_range/RangeBenches.scala
@@ -5,22 +5,22 @@ package scala.collection.parallel.benchmarks.parallel_range
import scala.collection.parallel.benchmarks.generic._
-import scala.collection.parallel.immutable.ParallelRange
+import scala.collection.parallel.immutable.ParRange
-object RangeBenches extends StandardParallelIterableBench[Int, ParallelRange] with NotBenchmark {
+object RangeBenches extends StandardParIterableBench[Int, ParRange] with NotBenchmark {
- def nameOfCollection = "ParallelRange"
+ def nameOfCollection = "ParRange"
def operators = new IntOperators {}
def comparisonMap = collection.Map()
val forkJoinPool = new scala.concurrent.forkjoin.ForkJoinPool
def createSequential(sz: Int, p: Int) = new collection.immutable.Range(0, sz, 1)
def createParallel(sz: Int, p: Int) = {
- val pr = new collection.parallel.immutable.ParallelRange(0, sz, 1, false)
+ val pr = new collection.parallel.immutable.ParRange(0, sz, 1, false)
forkJoinPool.setParallelism(p)
pr.environment = forkJoinPool
pr
@@ -33,7 +33,7 @@ object RangeBenches extends StandardParallelIterableBench[Int, ParallelRange] wi
}
class MapLight(val size: Int, val parallelism: Int, val runWhat: String)
- extends IterableBench with StandardParallelIterableBench[Int, ParallelRange] {
+ extends IterableBench with StandardParIterableBench[Int, ParRange] {
def calc(n: Int) = n % 2 + 1
def comparisonMap = collection.Map()
@@ -49,7 +49,7 @@ object RangeBenches extends StandardParallelIterableBench[Int, ParallelRange] wi
}
class MapMedium(val size: Int, val parallelism: Int, val runWhat: String)
- extends IterableBench with StandardParallelIterableBench[Int, ParallelRange] {
+ extends IterableBench with StandardParIterableBench[Int, ParRange] {
def calc(n: Int) = {
var i = 0
var sum = n
@@ -73,7 +73,7 @@ object RangeBenches extends StandardParallelIterableBench[Int, ParallelRange] wi
}
class ForeachModify(val size: Int, val parallelism: Int, val runWhat: String)
- extends IterableBench with StandardParallelIterableBench[Int, ParallelRange] {
+ extends IterableBench with StandardParIterableBench[Int, ParRange] {
val array = new Array[Int](size)
def modify(n: Int) = array(n) += 1
@@ -90,7 +90,7 @@ object RangeBenches extends StandardParallelIterableBench[Int, ParallelRange] wi
}
class ForeachModifyMedium(val size: Int, val parallelism: Int, val runWhat: String)
- extends IterableBench with StandardParallelIterableBench[Int, ParallelRange] {
+ extends IterableBench with StandardParIterableBench[Int, ParRange] {
val array = new Array[Int](size)
def modify(n: Int) = array(n) = {
var i = 0
@@ -115,7 +115,7 @@ object RangeBenches extends StandardParallelIterableBench[Int, ParallelRange] wi
}
class ForeachModifyHeavy(val size: Int, val parallelism: Int, val runWhat: String)
- extends IterableBench with StandardParallelIterableBench[Int, ParallelRange] {
+ extends IterableBench with StandardParIterableBench[Int, ParRange] {
val array = new Array[Int](size)
def modify(n: Int) = array(n) = collatz(10000 + array(n))
@@ -133,7 +133,7 @@ object RangeBenches extends StandardParallelIterableBench[Int, ParallelRange] wi
}
class ForeachAdd(val size: Int, val parallelism: Int, val runWhat: String)
- extends IterableBench with StandardParallelIterableBench[Int, ParallelRange] {
+ extends IterableBench with StandardParIterableBench[Int, ParRange] {
val cmap = new java.util.concurrent.ConcurrentHashMap[Int, Int]
val hmap = new java.util.HashMap[Int, Int]
@@ -157,7 +157,7 @@ object RangeBenches extends StandardParallelIterableBench[Int, ParallelRange] wi
}
class ForeachAddCollatz(val size: Int, val parallelism: Int, val runWhat: String)
- extends IterableBench with StandardParallelIterableBench[Int, ParallelRange] {
+ extends IterableBench with StandardParIterableBench[Int, ParRange] {
val cmap = new java.util.concurrent.ConcurrentHashMap[Int, Int]
val hmap = new java.util.HashMap[Int, Int]
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_view/SeqViewBenches.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_view/SeqViewBenches.scala
index f174dec7db..1328416b78 100644
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_view/SeqViewBenches.scala
+++ b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_view/SeqViewBenches.scala
@@ -16,7 +16,7 @@ import scala.collection.SeqView
trait DummyViewBenches
-extends ParallelSeqViewBench[Dummy, ParallelSeqView[Dummy, ParallelSeq[Dummy], Seq[Dummy]], Seq[Dummy]] {
+extends ParSeqViewBench[Dummy, ParSeqView[Dummy, ParSeq[Dummy], Seq[Dummy]], Seq[Dummy]] {
def nameOfCollection = "ParallelView"
def operators = DummyOperators
def comparisonMap = collection.Map()
@@ -27,7 +27,7 @@ extends ParallelSeqViewBench[Dummy, ParallelSeqView[Dummy, ParallelSeq[Dummy], S
s
}
def createParallel(sz: Int, p: Int) = {
- val pa = new collection.parallel.mutable.ParallelArray[Dummy](sz)
+ val pa = new collection.parallel.mutable.ParArray[Dummy](sz)
forkJoinPool.setParallelism(p)
for (i <- 0 until sz) pa(i) = new Dummy(i)
val v = pa.view