author     Hubert Plociniczak <hubert.plociniczak@epfl.ch>  2011-11-02 14:34:35 +0000
committer  Hubert Plociniczak <hubert.plociniczak@epfl.ch>  2011-11-02 14:34:35 +0000
commit     b6778be91900b8161e705dc2598ef7af86842b0b (patch)
tree       d15e8ec18a37eec212f50f1ace27714d7e7d4d34 /test/benchmarks
parent     ac6c76f26d884a94d0c9ff54f055d3f9ab750bac (diff)
Begone t1737...
Diffstat (limited to 'test/benchmarks')
-rw-r--r--  test/benchmarks/src/scala/collection/mutable/hashtable-bench.scala  16
-rw-r--r--  test/benchmarks/src/scala/collection/parallel/Benchmarking.scala  38
-rw-r--r--  test/benchmarks/src/scala/collection/parallel/benchmarks/Bench.scala  46
-rw-r--r--  test/benchmarks/src/scala/collection/parallel/benchmarks/arrays/Arrays.scala  10
-rw-r--r--  test/benchmarks/src/scala/collection/parallel/benchmarks/arrays/IntAccess.scala  14
-rw-r--r--  test/benchmarks/src/scala/collection/parallel/benchmarks/arrays/ObjectAccess.scala  14
-rw-r--r--  test/benchmarks/src/scala/collection/parallel/benchmarks/arrays/Resetting.scala  6
-rw-r--r--  test/benchmarks/src/scala/collection/parallel/benchmarks/arrays/UnknownManif.scala  4
-rw-r--r--  test/benchmarks/src/scala/collection/parallel/benchmarks/generic/Operators.scala  8
-rw-r--r--  test/benchmarks/src/scala/collection/parallel/benchmarks/generic/ParallelBenches.scala  100
-rw-r--r--  test/benchmarks/src/scala/collection/parallel/benchmarks/hashtables/ParallelHashTableSets.scala  34
-rw-r--r--  test/benchmarks/src/scala/collection/parallel/benchmarks/hashtables/ParallelHashTables.scala  42
-rw-r--r--  test/benchmarks/src/scala/collection/parallel/benchmarks/hashtries/Combine.scala  2
-rw-r--r--  test/benchmarks/src/scala/collection/parallel/benchmarks/hashtries/Construct.scala  4
-rw-r--r--  test/benchmarks/src/scala/collection/parallel/benchmarks/hashtries/IntInit.scala  2
-rw-r--r--  test/benchmarks/src/scala/collection/parallel/benchmarks/hashtries/MultipleCombine.scala  6
-rw-r--r--  test/benchmarks/src/scala/collection/parallel/benchmarks/hashtries/ParallelHashTries.scala  26
-rw-r--r--  test/benchmarks/src/scala/collection/parallel/benchmarks/misc/Coder.scala  72
-rw-r--r--  test/benchmarks/src/scala/collection/parallel/benchmarks/misc/Loader.scala  24
-rw-r--r--  test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/AggregateLight.scala  4
-rw-r--r--  test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/CopyToArray.scala  2
-rw-r--r--  test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/Corresponds.scala  6
-rw-r--r--  test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/CountHeavy.scala  6
-rw-r--r--  test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/CountLight.scala  2
-rw-r--r--  test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/CountList.scala  6
-rw-r--r--  test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/DiffHalf.scala  6
-rw-r--r--  test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/DropMany.scala  2
-rw-r--r--  test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ExistsLight.scala  4
-rw-r--r--  test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/FilterLight.scala  6
-rw-r--r--  test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/FindLight.scala  4
-rw-r--r--  test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/FlatMapLight.scala  4
-rw-r--r--  test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ForallHeavy.scala  6
-rw-r--r--  test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ForallLight.scala  4
-rw-r--r--  test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ForallQuickStop.scala  4
-rw-r--r--  test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ForallStop80k.scala  4
-rw-r--r--  test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ForeachHeavy.scala  10
-rw-r--r--  test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ForeachLight.scala  4
-rw-r--r--  test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/GroupBy.scala  6
-rw-r--r--  test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/IndexWhere.scala  4
-rw-r--r--  test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/IntersectHalf.scala  6
-rw-r--r--  test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/LastIndexWhere.scala  4
-rw-r--r--  test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/MapLight.scala  4
-rw-r--r--  test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/MatrixMultiplication.scala  22
-rw-r--r--  test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/MinLight.scala  2
-rw-r--r--  test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/PadToDouble.scala  6
-rw-r--r--  test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/PartialMapLight.scala  4
-rw-r--r--  test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/PartitionLight.scala  6
-rw-r--r--  test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/PatchHalf.scala  4
-rw-r--r--  test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/PlusPlus.scala  4
-rw-r--r--  test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ReduceHeavy.scala  2
-rw-r--r--  test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ReduceLight.scala  10
-rw-r--r--  test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ReduceList.scala  2
-rw-r--r--  test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ReduceNew.scala  4
-rw-r--r--  test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ReducePrime.scala  6
-rw-r--r--  test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/RemoveDuplicates.scala  2
-rw-r--r--  test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/Resettable.scala  32
-rw-r--r--  test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/Reverse.scala  2
-rw-r--r--  test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ReverseMap.scala  4
-rw-r--r--  test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/SameElementsLong.scala  4
-rw-r--r--  test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ScanLight.scala  4
-rw-r--r--  test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ScanMedium.scala  4
-rw-r--r--  test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/SegmentLength.scala  4
-rw-r--r--  test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/SequentialOps.scala  142
-rw-r--r--  test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/SliceFew.scala  2
-rw-r--r--  test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/SliceMany.scala  2
-rw-r--r--  test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/SliceMedium.scala  2
-rw-r--r--  test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/SpanLight.scala  6
-rw-r--r--  test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/SplitHalf.scala  2
-rw-r--r--  test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/SumLight.scala  2
-rw-r--r--  test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/TakeMany.scala  2
-rw-r--r--  test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/TakeWhileLight.scala  6
-rw-r--r--  test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_range/RangeBenches.scala  54
-rw-r--r--  test/benchmarks/src/scala/util/HashSpeedTest.scala  30
73 files changed, 477 insertions, 477 deletions
diff --git a/test/benchmarks/src/scala/collection/mutable/hashtable-bench.scala b/test/benchmarks/src/scala/collection/mutable/hashtable-bench.scala
index bea1b1df46..c01e7cb46e 100644
--- a/test/benchmarks/src/scala/collection/mutable/hashtable-bench.scala
+++ b/test/benchmarks/src/scala/collection/mutable/hashtable-bench.scala
@@ -16,20 +16,20 @@ object Test {
def go(xs: Iterable[Int], reps: Int) = {
_foreach = 0L
- _iterator = 0L
+ _iterator = 0L
0 until reps foreach { _ =>
_foreach += timed(xs foreach (dummy += _))
_iterator += timed(xs.iterator foreach (dummy += _))
}
-
+
" foreach avg " + (_foreach / reps) + "\n iterator avg " + (_iterator / reps) + "\n"
}
-
+
def go2(xs: collection.Map[Int, Int], reps: Int) = {
_foreach = 0L
- _iterator = 0L
-
+ _iterator = 0L
+
def incDummy(nums: (Int, Int)) = {
dummy += nums._1
dummy -= nums._2
@@ -47,15 +47,15 @@ object Test {
println("map.keys:")
go(map.keys, 10) // warm
println(go(map.keys, 10))
-
+
println("map.values:")
go(map.values, 10) // warm
println(go(map.values, 10))
-
+
println("map:")
go2(map, 10) // warm
println(go2(map, 10))
-
+
println("// pay me no mind ... " + dummy)
}
}
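The `go` and `go2` helpers above accumulate results of a `timed` call that is defined elsewhere in hashtable-bench.scala and is not visible in these hunks. As a hypothetical sketch only (the real helper may differ), a timer of the shape these call sites expect could look like:

  // Hypothetical sketch, not part of this commit: a by-name nanosecond
  // timer matching the way `timed(...)` is used in the hunks above.
  def timed(body: => Unit): Long = {
    val start = System.nanoTime
    body
    System.nanoTime - start
  }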
diff --git a/test/benchmarks/src/scala/collection/parallel/Benchmarking.scala b/test/benchmarks/src/scala/collection/parallel/Benchmarking.scala
index c3811601e3..bd75764636 100644
--- a/test/benchmarks/src/scala/collection/parallel/Benchmarking.scala
+++ b/test/benchmarks/src/scala/collection/parallel/Benchmarking.scala
@@ -8,15 +8,15 @@ import benchmarks._
/**
* All benchmarks are registered here.
- *
+ *
* @author prokopec
*/
trait BenchmarkRegister {
-
+
val benchcreators = LinkedHashSet[BenchCompanion]()
-
+
def register(companion: BenchCompanion) = benchcreators += companion
-
+
// parallel array benchmarks
register(parallel_array.ReduceLight)
register(parallel_array.ReduceNew)
@@ -68,7 +68,7 @@ trait BenchmarkRegister {
register(parallel_array.ScanMedium)
register(parallel_array.GroupByLight)
register(parallel_array.MatrixMultiplication)
-
+
// parallel views
register(parallel_view.DummyViewBenchList.Reduce)
register(parallel_view.DummyViewBenchList.MediumReduce)
@@ -80,7 +80,7 @@ trait BenchmarkRegister {
register(parallel_view.DummyViewBenchList.IterationA)
register(parallel_view.DummyViewBenchList.IterationZ)
register(parallel_view.DummyViewBenchList.IterationP)
-
+
// parallel ranges
register(parallel_range.RangeBenches.Reduce)
register(parallel_range.RangeBenches.ReduceMedium)
@@ -91,11 +91,11 @@ trait BenchmarkRegister {
register(parallel_range.RangeBenches.ForeachModifyHeavy)
register(parallel_range.RangeBenches.MapLight)
register(parallel_range.RangeBenches.MapMedium)
-
+
// array benchmarks
register(arrays.ObjectAccess)
register(arrays.IntAccess)
-
+
// hash benchmarks
register(hashtries.Foreach)
register(hashtries.Iterate)
@@ -103,14 +103,14 @@ trait BenchmarkRegister {
register(hashtries.Lookup)
register(hashtries.Combine)
register(hashtries.MultipleCombine)
-
+
// parallel hash trie benchmarks
register(hashtries.RefParHashTrieBenches.Reduce)
register(hashtries.RefParHashTrieBenches.ReduceMedium)
register(hashtries.RefParHashTrieBenches.Reduce2)
register(hashtries.RefParHashTrieBenches.Map)
register(hashtries.RefParHashTrieBenches.Map2)
-
+
// parallel hash table map benchmarks
register(hashtables.RefParHashTableBenches.Reduce)
register(hashtables.RefParHashTableBenches.Reduce2)
@@ -133,7 +133,7 @@ trait BenchmarkRegister {
register(hashtables.RefParHashTableSetBenches.HeavyMap)
register(hashtables.RefParHashTableSetBenches.Filter)
register(hashtables.RefParHashTableSetBenches.FlatMap)
-
+
// general examples
register(misc.Coder)
register(misc.Loader)
@@ -144,7 +144,7 @@ trait BenchmarkRegister {
* Serves as an entrypoint to run all the benchmarks.
*/
object Benchmarking extends BenchmarkRegister {
-
+
def printHelp {
println("Must enter at least four arguments: <collection> <benchmark> <size of the collection> <type>")
println(" Example: ParArray reduce-light 50000 par")
@@ -160,7 +160,7 @@ object Benchmarking extends BenchmarkRegister {
println(" `-all` for running sequential, parallel and comparison benchmarks")
println(" <parallelism-level> - the level of parallelism used (default 2)")
}
-
+
def otherOptions(args: Array[String]) {
if (args.length == 0) printHelp
else args(0) match {
@@ -169,19 +169,19 @@ object Benchmarking extends BenchmarkRegister {
case _ => printHelp
}
}
-
+
def main(args: Array[String]) {
if (args.length < 4) {
otherOptions(args)
return
}
-
+
val collname = args(0)
val benchname = args(1)
val size = if (args(2) == "-default") -1 else args(2).toInt
val tpe = args(3)
val parlevel = if (args.length >= 5) args(4).toInt else 2
-
+
// find all benchmarks to run
val benches = benchcreators.filter(comp => {
(collname, benchname) match {
@@ -193,14 +193,14 @@ object Benchmarking extends BenchmarkRegister {
}).flatMap(comp => {
val collsz = if (size != -1) size else comp.defaultSize
if (tpe != "-all") List(comp.apply(collsz, parlevel, tpe))
- else for (benchtype <- "seq" :: "par" :: comp.comparisons)
+ else for (benchtype <- "seq" :: "par" :: comp.comparisons)
yield comp.apply(collsz, parlevel, benchtype)
})
-
+
println("Running benchmarks...")
for (b <- benches) b.executeBenchmark
}
-
+
}
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/Bench.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/Bench.scala
index 29a34e513d..e8dfe0ac50 100644
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/Bench.scala
+++ b/test/benchmarks/src/scala/collection/parallel/benchmarks/Bench.scala
@@ -22,88 +22,88 @@ trait BenchCompanion {
*/
trait Bench extends Benchmark {
val size: Int
-
+
val parallelism: Int
-
+
val runWhat: String
-
+
/**
* Name of the benchmark. Convention is for it to start with the name of the collection being
- * tested, continuing '.' and ending with the name of the specific functionality being benchmarked.
+ * tested, continuing '.' and ending with the name of the specific functionality being benchmarked.
* @return
*/
def name: String = companion.fullname
def collectionName: String = companion.collectionName
def benchName: String = companion.benchName
-
+
def companion: BenchCompanion
-
+
def runseq: Unit
-
+
def runpar: Unit
-
+
/**
* Describes the number of runs of the test.
*/
val runs = 10
-
+
/**
* Returns the number of repetitions for this benchmark.
*/
def repetitionsPerRun = 500
-
+
/**
* Resets the benchmark object. Typically, this means recreating
* the collection being tested.
*/
def reset: Unit
-
+
/**
* Returns a map of available comparison tests.
*/
def comparisons: List[String] = companion.comparisons
-
+
def comparison(name: String): Option[() => Unit] = comparisonMap.get(name)
-
+
def comparisonMap: Map[String, () => Unit]
-
+
def run = runWhat match {
case "seq" => for (i <- 0 until repetitionsPerRun) runseq
case "par" => for (i <- 0 until repetitionsPerRun) runpar
case _ => comparison(runWhat) match {
case Some(fun) => for (i <- 0 until repetitionsPerRun) fun()
- case None => throw new IllegalArgumentException("Unknown bench option: `" + runWhat +
+ case None => throw new IllegalArgumentException("Unknown bench option: `" + runWhat +
"`, need `seq`, `par` or one of: " + comparisons.mkString("`", "`, `", "`"))
}
}
-
+
/**
* Prints results of the benchmark. May be overidden in benchmarks.
*/
def printResults {}
-
+
def onEnd {}
-
+
def executeBenchmark = {
println("-----------------------")
print(name + ", " + runWhat + ", par.=" + parallelism + ", sz=" + niceSize + ": ")
-
+
val times = runBenchmark(runs)
-
+
onEnd
-
+
for (t <- times) print(t + " ")
println
printResults
}
-
+
private def niceSize = if (size < 1000 || size % 1000 != 0) size.toString else size / 1000 + "k"
}
trait HavingResult[T] extends Bench {
var runresult: T = null.asInstanceOf[T]
-
+
abstract override def printResults {
println("result: " + (if (runresult != null) runresult else "<not set>"))
super.printResults
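For readers skimming the whitespace-only hunks above: concrete benchmarks implement this `Bench` trait in tandem with a `BenchCompanion`, the pattern used throughout the rest of this patch (see the `Reduce`/`ReduceMedium` pairs further down). A minimal sketch of that pattern, with a hypothetical `SumLightDemo` benchmark standing in for a real one, might look like:

  // Hypothetical example, not part of this commit: a companion/benchmark
  // pair wired into the Bench API shown in the hunks above.
  object SumLightDemo extends BenchCompanion {
    def collectionName = "Range"
    def benchName = "sum-light-demo"
    override def defaultSize = 50000
    def apply(sz: Int, p: Int, w: String) = new SumLightDemo(sz, p, w)
  }

  class SumLightDemo(val size: Int, val parallelism: Int, val runWhat: String) extends Bench {
    def companion = SumLightDemo
    var coll: Range = null
    reset
    def reset = coll = 0 until size         // recreate the collection under test
    def runseq = coll.reduceLeft(_ + _)     // sequential variant
    def runpar = coll.par.reduce(_ + _)     // parallel variant (assumes the 2.9-era .par API used elsewhere in these benchmarks)
    def comparisonMap = collection.Map()    // no extra comparison runs
  }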
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/arrays/Arrays.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/arrays/Arrays.scala
index fd3b4aab08..39232122a9 100644
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/arrays/Arrays.scala
+++ b/test/benchmarks/src/scala/collection/parallel/benchmarks/arrays/Arrays.scala
@@ -7,12 +7,12 @@ package scala.collection.parallel.benchmarks.arrays
object Arrays {
-
+
@inline def genericApply[T](xs: Array[T], idx: Int): T = xs.asInstanceOf[AnyRef] match {
case x: Array[AnyRef] => x(idx).asInstanceOf[T]
case _ => genericApplyNotAnyRef(xs, idx)
}
-
+
@noinline private def genericApplyNotAnyRef[T](xs: Array[T], idx: Int): T = xs.asInstanceOf[AnyRef] match {
case x: Array[Int] => x(idx).asInstanceOf[T]
case x: Array[Double] => x(idx).asInstanceOf[T]
@@ -25,12 +25,12 @@ object Arrays {
case x: Array[Unit] => x(idx).asInstanceOf[T]
case null => throw new NullPointerException
}
-
+
@inline def apply(xs: AnyRef, idx: Int): Any = xs match {
case x: Array[AnyRef] => x(idx).asInstanceOf[Any]
case _ => applyNotAnyRef(xs, idx)
}
-
+
@noinline private def applyNotAnyRef(xs: AnyRef, idx: Int): Any = xs match {
case x: Array[Int] => x(idx).asInstanceOf[Any]
case x: Array[Double] => x(idx).asInstanceOf[Any]
@@ -43,7 +43,7 @@ object Arrays {
case x: Array[Unit] => x(idx).asInstanceOf[Any]
case null => throw new NullPointerException
}
-
+
}
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/arrays/IntAccess.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/arrays/IntAccess.scala
index 948ecb419e..81f0e4da03 100644
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/arrays/IntAccess.scala
+++ b/test/benchmarks/src/scala/collection/parallel/benchmarks/arrays/IntAccess.scala
@@ -17,10 +17,10 @@ object IntAccess extends BenchCompanion {
class IntAccess(sz: Int, p: Int, what: String)
extends Resetting(n => n, sz, p, what) with UnknownManif[Int] {
def companion = IntAccess
-
+
def runseq {}
def runpar {}
-
+
def runany = {
var i = 0
while (i < sz) {
@@ -28,7 +28,7 @@ extends Resetting(n => n, sz, p, what) with UnknownManif[Int] {
i += 1
}
}
-
+
def runcast = {
var i = 0
while (i < sz) {
@@ -36,7 +36,7 @@ extends Resetting(n => n, sz, p, what) with UnknownManif[Int] {
i += 1
}
}
-
+
def runmanif = {
var i = 0
while (i < sz) {
@@ -45,12 +45,12 @@ extends Resetting(n => n, sz, p, what) with UnknownManif[Int] {
i += 1
}
}
-
+
def op(a: Int) = a < 0
-
+
def comparisonMap = collection.Map("any" -> runany _, "cast" -> runcast _,
"manif" -> runmanif _, "unknown" -> rununknown _)
-
+
}
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/arrays/ObjectAccess.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/arrays/ObjectAccess.scala
index 3cc38f1b58..29251f3719 100644
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/arrays/ObjectAccess.scala
+++ b/test/benchmarks/src/scala/collection/parallel/benchmarks/arrays/ObjectAccess.scala
@@ -17,10 +17,10 @@ object ObjectAccess extends BenchCompanion {
class ObjectAccess(sz: Int, p: Int, what: String)
extends Resetting(Dummy(_), sz, p, what) with UnknownManif[Dummy] {
def companion = ObjectAccess
-
+
def runseq {}
def runpar {}
-
+
def runany = {
var i = 0
while (i < sz) {
@@ -29,7 +29,7 @@ extends Resetting(Dummy(_), sz, p, what) with UnknownManif[Dummy] {
i += 1
}
}
-
+
def runcast = {
var i = 0
while (i < sz) {
@@ -37,7 +37,7 @@ extends Resetting(Dummy(_), sz, p, what) with UnknownManif[Dummy] {
i += 1
}
}
-
+
def rungenericcast = {
var i = 0
while (i < sz) {
@@ -45,7 +45,7 @@ extends Resetting(Dummy(_), sz, p, what) with UnknownManif[Dummy] {
i += 1
}
}
-
+
def runmanif = {
var i = 0
while (i < sz) {
@@ -54,10 +54,10 @@ extends Resetting(Dummy(_), sz, p, what) with UnknownManif[Dummy] {
i += 1
}
}
-
+
def comparisonMap = collection.Map("any" -> runany _, "cast" -> runcast _, "gencast" -> rungenericcast _,
"manif" -> runmanif _, "unknown" -> rununknown _)
-
+
}
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/arrays/Resetting.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/arrays/Resetting.scala
index 9e6102fb94..e6feb59fcd 100644
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/arrays/Resetting.scala
+++ b/test/benchmarks/src/scala/collection/parallel/benchmarks/arrays/Resetting.scala
@@ -9,14 +9,14 @@ extends Bench {
val size = sz
val parallelism = p
val runWhat = what
-
+
var anyarray: Array[Any] = null
var castarray: AnyRef = null
var gencastarray: Array[T] = null
var manifarray: Array[T] = null
-
+
reset
-
+
def reset = what match {
case "any" =>
anyarray = new Array[Any](sz)
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/arrays/UnknownManif.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/arrays/UnknownManif.scala
index d7196c0277..46a28ae111 100644
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/arrays/UnknownManif.scala
+++ b/test/benchmarks/src/scala/collection/parallel/benchmarks/arrays/UnknownManif.scala
@@ -6,7 +6,7 @@ package scala.collection.parallel.benchmarks.arrays
trait UnknownManif[T] {
def manifarray: Array[T]
def size: Int
-
+
def rununknown {
val arr = manifarray
val sz = size
@@ -17,7 +17,7 @@ trait UnknownManif[T] {
i += 1
}
}
-
+
def op(d: Any) {}
}
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/generic/Operators.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/generic/Operators.scala
index 4fb76542e1..539e3f6972 100644
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/generic/Operators.scala
+++ b/test/benchmarks/src/scala/collection/parallel/benchmarks/generic/Operators.scala
@@ -6,7 +6,7 @@ package scala.collection.parallel.benchmarks.generic
trait Operators[T] {
-
+
def foreachFun: T => Unit
def reducer: (T, T) => T
def mediumreducer: (T, T) => T
@@ -19,13 +19,13 @@ trait Operators[T] {
def eachFun: T => Unit
def eachPairFun: ((T, T)) => Unit = error("unsupported")
def sequence(sz: Int): Seq[T] = error("unsupported")
-
+
}
trait IntOperators extends Operators[Int] {
-
+
val foreachFun: Int => Unit = x => ()
val reducer: (Int, Int) => Int = _ + _
val mediumreducer: (Int, Int) => Int = (a: Int, b: Int) => {
@@ -52,7 +52,7 @@ trait IntOperators extends Operators[Int] {
val eachFun: Int => Unit = { n =>
n % 2 == 0
}
-
+
}
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/generic/ParallelBenches.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/generic/ParallelBenches.scala
index b14a0b3aab..c39c384927 100644
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/generic/ParallelBenches.scala
+++ b/test/benchmarks/src/scala/collection/parallel/benchmarks/generic/ParallelBenches.scala
@@ -10,77 +10,77 @@ import scala.collection.SeqView
trait ParIterableBenches[T, Coll <: ParIterable[T]] {
self =>
-
+
def createSequential(sz: Int, p: Int): Iterable[T]
def createParallel(sz: Int, p: Int): Coll
def nameOfCollection: String
def operators: Operators[T]
-
+
trait IterableBenchCompanion extends BenchCompanion {
def collectionName = self.nameOfCollection
}
-
+
trait IterableBench extends collection.parallel.benchmarks.Bench {
protected var seqcoll: Iterable[T] = null
protected var parcoll: Coll = null.asInstanceOf[Coll]
-
+
reset
-
+
def reset = runWhat match {
case "seq" => this.seqcoll = createSequential(size, parallelism)
case "par" => this.parcoll = createParallel(size, parallelism)
case _ =>
}
-
+
def nameOfCollection = self.nameOfCollection
def operators = self.operators
def createSequential(sz: Int, p: Int) = self.createSequential(size, parallelism)
def createParallel(sz: Int, p: Int) = self.createParallel(size, parallelism)
def forkJoinPool: scala.concurrent.forkjoin.ForkJoinPool = self.forkJoinPool
-
+
override def printResults {
println(" --- Fork join pool state --- ")
println("Parallelism: " + forkJoinPool.getParallelism)
println("Active threads: " + forkJoinPool.getActiveThreadCount)
println("Work stealings: " + forkJoinPool.getStealCount)
}
-
+
}
-
+
def forkJoinPool: scala.concurrent.forkjoin.ForkJoinPool
-
+
}
trait ParSeqBenches[T, Coll <: ParSeq[T]] extends ParIterableBenches[T, Coll] {
self =>
-
+
def createSequential(sz: Int, p: Int): Seq[T]
-
+
trait SeqBenchCompanion extends BenchCompanion {
def collectionName = self.nameOfCollection
}
-
+
trait SeqBench extends IterableBench {
def seqcollAsSeq = seqcoll.asInstanceOf[Seq[T]]
override def createSequential(sz: Int, p: Int) = self.createSequential(sz, p)
}
-
+
}
/** Standard benchmarks for collections.
- */
+ */
trait StandardParIterableBenches[T, Coll <: ParIterable[T]] extends ParIterableBenches[T, Coll] {
-
+
object Reduce extends IterableBenchCompanion {
override def defaultSize = 50000
def benchName = "reduce";
def apply(sz: Int, p: Int, w: String) = new Reduce(sz, p, w)
}
-
+
class Reduce(val size: Int, val parallelism: Int, val runWhat: String)
extends IterableBench {
def comparisonMap = collection.Map()
@@ -88,13 +88,13 @@ trait StandardParIterableBenches[T, Coll <: ParIterable[T]] extends ParIterableB
def runpar = this.parcoll.reduce(operators.reducer)
def companion = Reduce
}
-
+
object ReduceMedium extends IterableBenchCompanion {
override def defaultSize = 5000
def benchName = "reduce-medium";
def apply(sz: Int, p: Int, w: String) = new ReduceMedium(sz, p, w)
}
-
+
class ReduceMedium(val size: Int, val parallelism: Int, val runWhat: String)
extends IterableBench {
def comparisonMap = collection.Map()
@@ -102,13 +102,13 @@ trait StandardParIterableBenches[T, Coll <: ParIterable[T]] extends ParIterableB
def runpar = this.parcoll.reduce(operators.mediumreducer)
def companion = ReduceMedium
}
-
+
object Map extends IterableBenchCompanion {
override def defaultSize = 5000
def benchName = "map";
def apply(sz: Int, p: Int, w: String) = new Map(sz, p, w)
}
-
+
class Map(val size: Int, val parallelism: Int, val runWhat: String)
extends IterableBench {
def comparisonMap = collection.Map()
@@ -116,13 +116,13 @@ trait StandardParIterableBenches[T, Coll <: ParIterable[T]] extends ParIterableB
def runpar = this.parcoll.map(operators.mapper)
def companion = Map
}
-
+
object Filter extends IterableBenchCompanion {
override def defaultSize = 5000
def benchName = "filter";
def apply(sz: Int, p: Int, w: String) = new Filter(sz, p, w)
}
-
+
class Filter(val size: Int, val parallelism: Int, val runWhat: String)
extends IterableBench {
def comparisonMap = collection.Map()
@@ -130,21 +130,21 @@ trait StandardParIterableBenches[T, Coll <: ParIterable[T]] extends ParIterableB
def runpar = this.parcoll.filter(operators.filterer)
def companion = Filter
}
-
+
object FlatMap extends IterableBenchCompanion {
override def defaultSize = 5000
def benchName = "flatmap";
def apply(sz: Int, p: Int, w: String) = new FlatMap(sz, p, w)
}
-
+
class FlatMap(val size: Int, val parallelism: Int, val runWhat: String)
extends IterableBench {
def comparisonMap = collection.Map()
def runseq = this.seqcoll.flatMap(operators.flatmapper)
def runpar = this.parcoll.flatMap(operators.flatmapper)
def companion = FlatMap
- }
-
+ }
+
}
@@ -153,21 +153,21 @@ trait StandardParIterableBenches[T, Coll <: ParIterable[T]] extends ParIterableB
*/
trait ParSeqViewBenches[T, Coll <: ParSeqView[T, ParSeq[T], CollSeq], CollSeq] extends ParSeqBenches[T, Coll] {
self =>
-
+
trait SeqViewBench extends SeqBench {
lazy val seqview: SeqView[T, Seq[T]] = createSeqView(size, parallelism)
-
+
def createSeqView(sz: Int, p: Int) = self.createSeqView(sz, p)
}
-
+
def createSeqView(sz: Int, p: Int): SeqView[T, Seq[T]]
-
+
object Iteration extends SeqBenchCompanion {
override def defaultSize = 250000
def benchName = "iter"
def apply(sz: Int, p: Int, w: String) = new Iteration(sz, p, w)
}
-
+
class Iteration(val size: Int, val parallelism: Int, val runWhat: String)
extends SeqBench with SeqViewBench {
def comparisonMap = collection.Map("seqview" -> runseqview _)
@@ -178,13 +178,13 @@ self =>
}
def companion = Iteration
}
-
+
object IterationS extends SeqBenchCompanion {
override def defaultSize = 250000
def benchName = "iter-s"
def apply(sz: Int, p: Int, w: String) = new IterationS(sz, p, w)
}
-
+
class IterationS(val size: Int, val parallelism: Int, val runWhat: String)
extends SeqBench with SeqViewBench {
def comparisonMap = collection.Map("seqview" -> runseqview _)
@@ -199,7 +199,7 @@ self =>
def benchName = "iter-m"
def apply(sz: Int, p: Int, w: String) = new IterationM(sz, p, w)
}
-
+
class IterationM(val size: Int, val parallelism: Int, val runWhat: String)
extends SeqBench with SeqViewBench {
def comparisonMap = collection.Map("seqview" -> runseqview _)
@@ -208,13 +208,13 @@ self =>
def runseqview = this.seqview.map(operators.mapper).foreach(operators.eachFun)
def companion = IterationM
}
-
+
object IterationA extends SeqBenchCompanion {
override def defaultSize = 50000
def benchName = "iter-a"
def apply(sz: Int, p: Int, w: String) = new IterationA(sz, p, w)
}
-
+
class IterationA(val size: Int, val parallelism: Int, val runWhat: String)
extends SeqBench with SeqViewBench {
val appended = operators.sequence(size)
@@ -228,13 +228,13 @@ self =>
def runseqview = this.seqview.++(appended).foreach(operators.eachFun)
def companion = IterationA
}
-
+
object IterationZ extends SeqBenchCompanion {
override def defaultSize = 50000
def benchName = "iter-z"
def apply(sz: Int, p: Int, w: String) = new IterationZ(sz, p, w)
}
-
+
class IterationZ(val size: Int, val parallelism: Int, val runWhat: String)
extends SeqBench with SeqViewBench {
val zipped = operators.sequence(size)
@@ -247,13 +247,13 @@ self =>
def runseqview = this.seqview.zip(zipped).foreach(operators.eachPairFun)
def companion = IterationZ
}
-
+
object IterationP extends SeqBenchCompanion {
override def defaultSize = 50000
def benchName = "iter-p"
def apply(sz: Int, p: Int, w: String) = new IterationP(sz, p, w)
}
-
+
class IterationP(val size: Int, val parallelism: Int, val runWhat: String)
extends SeqBench with SeqViewBench {
val patch = operators.sequence(size / 4)
@@ -267,13 +267,13 @@ self =>
def runseqview = this.seqview.patch(size / 4, patch, size / 2).foreach(operators.eachFun)
def companion = IterationP
}
-
+
object Reduce extends SeqBenchCompanion {
override def defaultSize = 50000
def benchName = "reduce";
def apply(sz: Int, p: Int, w: String) = new Reduce(sz, p, w)
}
-
+
class Reduce(val size: Int, val parallelism: Int, val runWhat: String)
extends SeqBench with SeqViewBench {
def comparisonMap = collection.Map()
@@ -281,13 +281,13 @@ self =>
def runpar = this.parcoll.reduce(operators.reducer)
def companion = Reduce
}
-
+
object MediumReduce extends SeqBenchCompanion {
override def defaultSize = 50000
def benchName = "reduce-medium";
def apply(sz: Int, p: Int, w: String) = new MediumReduce(sz, p, w)
}
-
+
class MediumReduce(val size: Int, val parallelism: Int, val runWhat: String)
extends SeqBench with SeqViewBench {
def comparisonMap = collection.Map()
@@ -295,13 +295,13 @@ self =>
def runpar = this.parcoll.reduce(operators.mediumreducer)
def companion = Reduce
}
-
+
object ModifyThenReduce extends SeqBenchCompanion {
override def defaultSize = 20000
def benchName = "modify-then-reduce";
def apply(sz: Int, p: Int, w: String) = new ModifyThenReduce(sz, p, w)
}
-
+
class ModifyThenReduce(val size: Int, val parallelism: Int, val runWhat: String)
extends SeqBench with SeqViewBench {
val toadd = createSequential(size, parallelism)
@@ -316,13 +316,13 @@ self =>
}
def companion = ModifyThenReduce
}
-
+
object ModifyThenForce extends SeqBenchCompanion {
override def defaultSize = 20000
def benchName = "modify-then-force";
def apply(sz: Int, p: Int, w: String) = new ModifyThenForce(sz, p, w)
}
-
+
class ModifyThenForce(val size: Int, val parallelism: Int, val runWhat: String)
extends SeqBench with SeqViewBench {
val toadd = createSequential(size, parallelism)
@@ -334,7 +334,7 @@ self =>
}
def companion = ModifyThenForce
}
-
+
}
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/hashtables/ParallelHashTableSets.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/hashtables/ParallelHashTableSets.scala
index 88640f3644..17f0315103 100644
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/hashtables/ParallelHashTableSets.scala
+++ b/test/benchmarks/src/scala/collection/parallel/benchmarks/hashtables/ParallelHashTableSets.scala
@@ -13,18 +13,18 @@ import scala.collection.parallel.mutable.ParHashSet
trait ParHashTableSetBenches[T] extends StandardParIterableBenches[T, ParHashSet[T]] {
-
+
def nameOfCollection = "mutable.ParHashSet"
def comparisonMap = collection.mutable.Set()
val forkJoinPool = new scala.concurrent.forkjoin.ForkJoinPool
-
+
object Map2 extends IterableBenchCompanion {
override def defaultSize = 50000
override def comparisons = List()
def benchName = "map2";
def apply(sz: Int, p: Int, w: String) = new Map2(sz, p, w)
}
-
+
class Map2(val size: Int, val parallelism: Int, val runWhat: String)
extends IterableBench {
var result: Int = 0
@@ -42,14 +42,14 @@ trait ParHashTableSetBenches[T] extends StandardParIterableBenches[T, ParHashSet
println("Size of last result: " + result)
}
}
-
+
object HeavyMap extends IterableBenchCompanion {
override def defaultSize = 5000
override def comparisons = List()
def benchName = "heavy-map";
def apply(sz: Int, p: Int, w: String) = new HeavyMap(sz, p, w)
}
-
+
class HeavyMap(val size: Int, val parallelism: Int, val runWhat: String)
extends IterableBench {
var result: Int = 0
@@ -64,14 +64,14 @@ trait ParHashTableSetBenches[T] extends StandardParIterableBenches[T, ParHashSet
def companion = HeavyMap
override def repetitionsPerRun = 50
}
-
+
object Reduce2 extends IterableBenchCompanion {
override def defaultSize = 50000
override def comparisons = List()
def benchName = "reduce2";
def apply(sz: Int, p: Int, w: String) = new Reduce2(sz, p, w)
}
-
+
class Reduce2(val size: Int, val parallelism: Int, val runWhat: String)
extends IterableBench {
def comparisonMap = collection.Map()
@@ -79,14 +79,14 @@ trait ParHashTableSetBenches[T] extends StandardParIterableBenches[T, ParHashSet
def runpar = this.parcoll.reduce(operators.mediumreducer)
def companion = Reduce2
}
-
+
object Foreach extends IterableBenchCompanion {
override def defaultSize = 50000
override def comparisons = List()
def benchName = "foreach";
def apply(sz: Int, p: Int, w: String) = new Foreach(sz, p, w)
}
-
+
class Foreach(val size: Int, val parallelism: Int, val runWhat: String)
extends IterableBench {
def comparisonMap = collection.Map()
@@ -94,7 +94,7 @@ trait ParHashTableSetBenches[T] extends StandardParIterableBenches[T, ParHashSet
def runpar = this.parcoll.pforeach(operators.foreachFun)
def companion = Foreach
}
-
+
}
@@ -102,14 +102,14 @@ trait ParHashTableSetBenches[T] extends StandardParIterableBenches[T, ParHashSet
object RefParHashTableSetBenches extends ParHashTableSetBenches[Dummy] {
-
+
object ForeachSet extends IterableBenchCompanion {
override def defaultSize = 50000
override def comparisons = List()
def benchName = "foreach-set";
def apply(sz: Int, p: Int, w: String) = new ForeachSet(sz, p, w)
}
-
+
class ForeachSet(val size: Int, val parallelism: Int, val runWhat: String)
extends IterableBench {
val array = new Array[Int](size)
@@ -117,22 +117,22 @@ object RefParHashTableSetBenches extends ParHashTableSetBenches[Dummy] {
def runseq = for (x <- this.seqcoll) array(x.in) += 1
def runpar = this.parcoll.pforeach { x => array(x.in) += 1 }
def companion = ForeachSet
-
+
override def onEnd {
for (i <- 0 until array.length) {
assert(array(i) == repetitionsPerRun * runs)
}
}
}
-
+
val operators = DummyOperators
-
+
def createSequential(sz: Int, p: Int) = {
val ht = new collection.mutable.HashSet[Dummy]
for (i <- 0 until sz) ht += new Dummy(i)
ht
}
-
+
def createParallel(sz: Int, p: Int) = {
val phm = new ParHashSet[Dummy]
for (i <- 0 until sz) phm += new Dummy(i)
@@ -140,5 +140,5 @@ object RefParHashTableSetBenches extends ParHashTableSetBenches[Dummy] {
collection.parallel.tasksupport.environment = forkJoinPool
phm
}
-
+
}
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/hashtables/ParallelHashTables.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/hashtables/ParallelHashTables.scala
index bdb1dff56d..79d038bf3d 100644
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/hashtables/ParallelHashTables.scala
+++ b/test/benchmarks/src/scala/collection/parallel/benchmarks/hashtables/ParallelHashTables.scala
@@ -13,18 +13,18 @@ import scala.collection.parallel.mutable.ParHashMap
trait ParHashTableBenches[K, V] extends StandardParIterableBenches[(K, V), ParHashMap[K, V]] {
-
+
def nameOfCollection = "mutable.ParHashMap"
def comparisonMap = collection.mutable.Map()
val forkJoinPool = new scala.concurrent.forkjoin.ForkJoinPool
-
+
object Map2 extends IterableBenchCompanion {
override def defaultSize = 40000
override def comparisons = List("jhashtable")
def benchName = "map2";
def apply(sz: Int, p: Int, w: String) = new Map2(sz, p, w)
}
-
+
class Map2(val size: Int, val parallelism: Int, val runWhat: String)
extends IterableBench {
var result: Int = 0
@@ -56,13 +56,13 @@ trait ParHashTableBenches[K, V] extends StandardParIterableBenches[(K, V), ParHa
println("Size of last result: " + result)
}
}
-
+
object FlatMap2 extends IterableBenchCompanion {
override def defaultSize = 5000
def benchName = "flatmap2";
def apply(sz: Int, p: Int, w: String) = new FlatMap2(sz, p, w)
}
-
+
class FlatMap2(val size: Int, val parallelism: Int, val runWhat: String)
extends IterableBench {
def comparisonMap = collection.Map()
@@ -70,15 +70,15 @@ trait ParHashTableBenches[K, V] extends StandardParIterableBenches[(K, V), ParHa
def runseq = this.seqcoll.flatMap(operators.flatmapper)
def runpar = this.parcoll.flatMap(operators.flatmapper)
def companion = FlatMap2
- }
-
+ }
+
object HeavyMap extends IterableBenchCompanion {
override def defaultSize = 5000
override def comparisons = List()
def benchName = "heavy-map";
def apply(sz: Int, p: Int, w: String) = new HeavyMap(sz, p, w)
}
-
+
class HeavyMap(val size: Int, val parallelism: Int, val runWhat: String)
extends IterableBench {
var result: Int = 0
@@ -93,14 +93,14 @@ trait ParHashTableBenches[K, V] extends StandardParIterableBenches[(K, V), ParHa
def companion = HeavyMap
override def repetitionsPerRun = 50
}
-
+
object Reduce2 extends IterableBenchCompanion {
override def defaultSize = 50000
override def comparisons = List()
def benchName = "reduce2";
def apply(sz: Int, p: Int, w: String) = new Reduce2(sz, p, w)
}
-
+
class Reduce2(val size: Int, val parallelism: Int, val runWhat: String)
extends IterableBench {
def comparisonMap = collection.Map()
@@ -108,14 +108,14 @@ trait ParHashTableBenches[K, V] extends StandardParIterableBenches[(K, V), ParHa
def runpar = this.parcoll.reduce(operators.mediumreducer)
def companion = Reduce2
}
-
+
object Foreach extends IterableBenchCompanion {
override def defaultSize = 50000
override def comparisons = List()
def benchName = "foreach";
def apply(sz: Int, p: Int, w: String) = new Foreach(sz, p, w)
}
-
+
class Foreach(val size: Int, val parallelism: Int, val runWhat: String)
extends IterableBench {
def comparisonMap = collection.Map()
@@ -123,7 +123,7 @@ trait ParHashTableBenches[K, V] extends StandardParIterableBenches[(K, V), ParHa
def runpar = this.parcoll.pforeach(operators.foreachFun)
def companion = Foreach
}
-
+
}
@@ -131,16 +131,16 @@ trait ParHashTableBenches[K, V] extends StandardParIterableBenches[(K, V), ParHa
object RefParHashTableBenches extends ParHashTableBenches[Dummy, Dummy] {
-
+
type DPair = (Dummy, Dummy);
-
+
object ForeachSet extends IterableBenchCompanion {
override def defaultSize = 50000
override def comparisons = List()
def benchName = "foreach-set";
def apply(sz: Int, p: Int, w: String) = new ForeachSet(sz, p, w)
}
-
+
class ForeachSet(val size: Int, val parallelism: Int, val runWhat: String)
extends IterableBench {
val array = new Array[Int](size)
@@ -148,14 +148,14 @@ object RefParHashTableBenches extends ParHashTableBenches[Dummy, Dummy] {
def runseq = for (p <- this.seqcoll) array(p._1.in) += 1
def runpar = this.parcoll.pforeach { p => array(p._1.in) += 1 }
def companion = ForeachSet
-
+
override def onEnd {
for (i <- 0 until array.length) {
assert(array(i) == repetitionsPerRun * runs)
}
}
}
-
+
object operators extends Operators[DPair] {
def gcd(a: Int, b: Int): Int = {
val result = if (b == 0) a else {
@@ -214,13 +214,13 @@ object RefParHashTableBenches extends ParHashTableBenches[Dummy, Dummy] {
dp._1.dummy
}
}
-
+
def createSequential(sz: Int, p: Int) = {
val ht = new collection.mutable.HashMap[Dummy, Dummy]
for (i <- 0 until sz) ht += ((new Dummy(i), new Dummy(i)))
ht
}
-
+
def createParallel(sz: Int, p: Int) = {
val phm = new ParHashMap[Dummy, Dummy]
for (i <- 0 until sz) phm += ((new Dummy(i), new Dummy(i)))
@@ -228,5 +228,5 @@ object RefParHashTableBenches extends ParHashTableBenches[Dummy, Dummy] {
collection.parallel.tasksupport.environment = forkJoinPool
phm
}
-
+
}
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/hashtries/Combine.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/hashtries/Combine.scala
index 3a070fb6ff..96598840fd 100644
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/hashtries/Combine.scala
+++ b/test/benchmarks/src/scala/collection/parallel/benchmarks/hashtries/Combine.scala
@@ -17,7 +17,7 @@ class Combine(val size: Int, val parallelism: Int, val runWhat: String) extends
for (i <- size until 2 * size) thattrie += ((i, i))
val thatmap = new HashMap[Int, Int]
for (i <- size until 2 * size) thatmap += ((i, i))
-
+
def runpar = throw new UnsupportedOperationException
def runseq = runhashtrie
def runhashtrie = {
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/hashtries/Construct.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/hashtries/Construct.scala
index 7c15df1fe2..f65a349ec5 100644
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/hashtries/Construct.scala
+++ b/test/benchmarks/src/scala/collection/parallel/benchmarks/hashtries/Construct.scala
@@ -14,7 +14,7 @@ import collection.mutable.HashMap
class Construct(val size: Int, val parallelism: Int, val runWhat: String) extends Bench {
def reset {}
-
+
def runpar = throw new UnsupportedOperationException
def runseq = throw new UnsupportedOperationException
def runhashmap = {
@@ -25,7 +25,7 @@ class Construct(val size: Int, val parallelism: Int, val runWhat: String) extend
var hashtrie = new HashTrie[Int, Int]
for (i <- 0 until size) hashtrie += ((i, i))
}
-
+
def companion = Construct
def comparisonMap = Map("hashmap" -> runhashmap _, "hashtrie" -> runhashtrie _)
}
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/hashtries/IntInit.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/hashtries/IntInit.scala
index dbbe64e290..79ebd0e98c 100644
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/hashtries/IntInit.scala
+++ b/test/benchmarks/src/scala/collection/parallel/benchmarks/hashtries/IntInit.scala
@@ -12,7 +12,7 @@ import collection.mutable.HashMap
trait IntInit extends Bench {
var hashmap: HashMap[Int, Int] = null
var hashtrie: HashTrie[Int, Int] = null
-
+
reset
def reset = runWhat match {
case "hashmap" => initHashMap
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/hashtries/MultipleCombine.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/hashtries/MultipleCombine.scala
index 033c211849..c08d6b5cad 100644
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/hashtries/MultipleCombine.scala
+++ b/test/benchmarks/src/scala/collection/parallel/benchmarks/hashtries/MultipleCombine.scala
@@ -14,7 +14,7 @@ import collection.mutable.HashMap
class MultipleCombine(val size: Int, val parallelism: Int, val runWhat: String) extends Bench with IntInit {
var combines = 10
-
+
var thattries = new Array[HashTrie[Int, Int]](combines)
def initTries = for (r <- 0 until combines) {
var thattrie = new HashTrie[Int, Int]
@@ -22,7 +22,7 @@ class MultipleCombine(val size: Int, val parallelism: Int, val runWhat: String)
thattries(r) = thattrie
}
initTries
-
+
val thatmaps = new Array[HashMap[Int, Int]](10)
def initMaps = for (r <- 0 until combines) {
var thatmap = new HashMap[Int, Int]
@@ -30,7 +30,7 @@ class MultipleCombine(val size: Int, val parallelism: Int, val runWhat: String)
thatmaps(r) = thatmap
}
initMaps
-
+
override def repetitionsPerRun = 25
def runpar = throw new UnsupportedOperationException
def runseq = runhashtrie
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/hashtries/ParallelHashTries.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/hashtries/ParallelHashTries.scala
index 3976b72d1a..dc8804cf57 100644
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/hashtries/ParallelHashTries.scala
+++ b/test/benchmarks/src/scala/collection/parallel/benchmarks/hashtries/ParallelHashTries.scala
@@ -13,18 +13,18 @@ import scala.collection.parallel.immutable.ParHashMap
trait ParHashTrieBenches[K, V] extends StandardParIterableBenches[(K, V), ParHashMap[K, V]] {
-
+
def nameOfCollection = "immutable.ParHashMap"
def comparisonMap = collection.Map()
val forkJoinPool = new scala.concurrent.forkjoin.ForkJoinPool
-
+
object Map2 extends IterableBenchCompanion {
override def defaultSize = 5000
override def comparisons = List("jhashtable", "hashtable")
def benchName = "map2";
def apply(sz: Int, p: Int, w: String) = new Map2(sz, p, w)
}
-
+
class Map2(val size: Int, val parallelism: Int, val runWhat: String)
extends IterableBench {
var result: Int = 0
@@ -65,14 +65,14 @@ trait ParHashTrieBenches[K, V] extends StandardParIterableBenches[(K, V), ParHas
println("Size of last result: " + result)
}
}
-
+
object Reduce2 extends IterableBenchCompanion {
override def defaultSize = 50000
override def comparisons = List("hashtable")
def benchName = "reduce2";
def apply(sz: Int, p: Int, w: String) = new Reduce2(sz, p, w)
}
-
+
class Reduce2(val size: Int, val parallelism: Int, val runWhat: String)
extends IterableBench {
private var ht: collection.mutable.HashMap[K, V] = _
@@ -86,9 +86,9 @@ trait ParHashTrieBenches[K, V] extends StandardParIterableBenches[(K, V), ParHas
}
def companion = Reduce2
}
-
+
def createHashTable(sz: Int): collection.mutable.HashMap[K, V]
-
+
}
@@ -96,9 +96,9 @@ trait ParHashTrieBenches[K, V] extends StandardParIterableBenches[(K, V), ParHas
object RefParHashTrieBenches extends ParHashTrieBenches[Dummy, Dummy] {
-
+
type DPair = (Dummy, Dummy)
-
+
object operators extends Operators[DPair] {
def gcd(a: Int, b: Int): Int = {
val result = if (b == 0) a else {
@@ -156,13 +156,13 @@ object RefParHashTrieBenches extends ParHashTrieBenches[Dummy, Dummy] {
dp._1.dummy
}
}
-
+
def createSequential(sz: Int, p: Int) = {
var ht = new collection.immutable.HashMap[Dummy, Dummy]
for (i <- 0 until sz) ht += ((new Dummy(i), new Dummy(i)))
ht
}
-
+
def createParallel(sz: Int, p: Int) = {
var pht = new ParHashMap[Dummy, Dummy]
for (i <- 0 until sz) pht += ((new Dummy(i), new Dummy(i)))
@@ -170,11 +170,11 @@ object RefParHashTrieBenches extends ParHashTrieBenches[Dummy, Dummy] {
collection.parallel.tasksupport.environment = forkJoinPool
pht
}
-
+
def createHashTable(sz: Int) = {
val hm = collection.mutable.HashMap[Dummy, Dummy]()
for (i <- 0 until sz) hm.put(new Dummy(i), new Dummy(i))
hm
}
-
+
}
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/misc/Coder.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/misc/Coder.scala
index 5ed0ca317d..9cc1f4a0f6 100644
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/misc/Coder.scala
+++ b/test/benchmarks/src/scala/collection/parallel/benchmarks/misc/Coder.scala
@@ -11,27 +11,27 @@ import collection.parallel._//immutable._
class SeqCoder(words: List[String]) {
-
+
private val m = Map(
- '2' -> "ABC", '3' -> "DEF", '4' -> "GHI", '5' -> "JKL",
+ '2' -> "ABC", '3' -> "DEF", '4' -> "GHI", '5' -> "JKL",
'6' -> "MNO", '7' -> "PQRS", '8' -> "TUV", '9' -> "WXYZ")
-
+
/** Invert the mnemnonics map to give a map from chars 'A' ... 'Z' to '2' ... '9' */
- private val charCode: Map[Char, Char] =
+ private val charCode: Map[Char, Char] =
for ((digit, letters) <- m; letter <- letters) yield letter -> digit
-
- /** Maps a word to the digit string it represents,
+
+ /** Maps a word to the digit string it represents,
* e.g. `Java` -> `5282` */
private def wordCode(word: String): String = word.toUpperCase map charCode
-
- /** A map from digit strings to the words that represent
+
+ /** A map from digit strings to the words that represent
* them e.g. `5282` -> List(`Java`, `Kata`, `Lava`, ...)
*/
- val wordsForNum: Map[String, Seq[String]] =
+ val wordsForNum: Map[String, Seq[String]] =
(words groupBy wordCode).map(t => (t._1, t._2.toSeq)) withDefaultValue Seq()
-
+
/** All ways to encode a number as a list of words */
- def encode(number: String): Set[Seq[String]] =
+ def encode(number: String): Set[Seq[String]] =
if (number.isEmpty) Set(Seq())
else {
val splits = (1 to number.length).toSet
@@ -49,34 +49,34 @@ class SeqCoder(words: List[String]) {
})
r
}
-
- /** Maps a number to a list of all word phrases that can
+
+ /** Maps a number to a list of all word phrases that can
* represent it */
def translate(number: String) = encode(number)// map (_ mkString " ")
-
+
def ??? : Nothing = throw new UnsupportedOperationException
}
class ParCoder(words: List[String]) {
-
+
private val m = Map(
- '2' -> "ABC", '3' -> "DEF", '4' -> "GHI", '5' -> "JKL",
+ '2' -> "ABC", '3' -> "DEF", '4' -> "GHI", '5' -> "JKL",
'6' -> "MNO", '7' -> "PQRS", '8' -> "TUV", '9' -> "WXYZ")
-
+
/** Invert the mnemnonics map to give a map from chars 'A' ... 'Z' to '2' ... '9' */
- private val charCode: Map[Char, Char] =
+ private val charCode: Map[Char, Char] =
for ((digit, letters) <- m; letter <- letters) yield letter -> digit
-
- /** Maps a word to the digit string it represents,
+
+ /** Maps a word to the digit string it represents,
* e.g. `Java` -> `5282` */
private def wordCode(word: String): String = word.toUpperCase map charCode
-
- /** A map from digit strings to the words that represent
+
+ /** A map from digit strings to the words that represent
* them e.g. `5282` -> List(`Java`, `Kata`, `Lava`, ...)
*/
- val wordsForNum: Map[String, Seq[String]] =
+ val wordsForNum: Map[String, Seq[String]] =
(words groupBy wordCode).map(t => (t._1, t._2)) withDefaultValue Seq()
-
+
/** All ways to encode a number as a list of words */
def encode(number: String): Set[Seq[String]] = if (number.length > 12) {
if (number.isEmpty) ParSet(ParSeq())
@@ -99,13 +99,13 @@ class ParCoder(words: List[String]) {
} yield word +: rest
}
}
-
- /** Maps a number to a list of all word phrases that can
+
+ /** Maps a number to a list of all word phrases that can
* represent it */
def translate(number: String) = {
encode(number)// map (_ mkString " ")
}
-
+
def ??? : Nothing = throw new UnsupportedOperationException
}
@@ -122,26 +122,26 @@ object Coder extends BenchCompanion {
class Coder(val size: Int, val parallelism: Int, val runWhat: String) extends Bench {
def companion = Coder
-
+
var seqcoder: SeqCoder = null
var parcoder: ParCoder = null
-
+
override def repetitionsPerRun = 1
-
+
val code = "23284374729473626268379762538"
-
+
reset
-
+
def runseq {
val translations = seqcoder.translate(code)
//println(translations)
}
-
+
def runpar {
val translations = parcoder.translate(code)
//println(translations)
}
-
+
def reset = runWhat match {
case "seq" =>
seqcoder = new SeqCoder(Dictionary.wordlist)
@@ -155,8 +155,8 @@ class Coder(val size: Int, val parallelism: Int, val runWhat: String) extends Be
println("Translation check: " + t.size)
//println(t)
}
-
+
def comparisonMap = Map()
-
+
}
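The docstrings in the `SeqCoder`/`ParCoder` hunks above describe the keypad mapping with the example `Java` -> `5282`. Extracted standalone (illustrative only, mirroring the definitions in the hunks), that mapping works out as follows:

  // Illustrative snippet reusing the definitions from the hunks above.
  val m = Map(
    '2' -> "ABC", '3' -> "DEF", '4' -> "GHI", '5' -> "JKL",
    '6' -> "MNO", '7' -> "PQRS", '8' -> "TUV", '9' -> "WXYZ")
  // Invert it: each letter maps back to the digit of its key.
  val charCode: Map[Char, Char] =
    for ((digit, letters) <- m; letter <- letters) yield letter -> digit
  def wordCode(word: String): String = word.toUpperCase map charCode
  wordCode("Java")   // "5282": J -> '5', A -> '2', V -> '8', A -> '2'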
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/misc/Loader.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/misc/Loader.scala
index 1b73f2b868..c7e4723e64 100644
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/misc/Loader.scala
+++ b/test/benchmarks/src/scala/collection/parallel/benchmarks/misc/Loader.scala
@@ -25,42 +25,42 @@ object Loader extends BenchCompanion {
class Loader(val size: Int, val parallelism: Int, val runWhat: String) extends Bench {
def companion = Loader
-
+
override def repetitionsPerRun = 1
-
+
reset
-
+
val wa = Dictionary.wordarray ++ Dictionary.wordarray ++ Dictionary.wordarray
-
+
def runseq {
val m = Map(
- '2' -> "ABC", '3' -> "DEF", '4' -> "GHI", '5' -> "JKL",
+ '2' -> "ABC", '3' -> "DEF", '4' -> "GHI", '5' -> "JKL",
'6' -> "MNO", '7' -> "PQRS", '8' -> "TUV", '9' -> "WXYZ"
)
val charCode: Map[Char, Char] = for ((digit, letters) <- m; letter <- letters) yield letter -> digit
def wordCode(word: String): String = (word.toUpperCase.toList map charCode).toString
-
+
wa groupBy wordCode
}
-
+
def runpar {
val m = Map(
- '2' -> "ABC", '3' -> "DEF", '4' -> "GHI", '5' -> "JKL",
+ '2' -> "ABC", '3' -> "DEF", '4' -> "GHI", '5' -> "JKL",
'6' -> "MNO", '7' -> "PQRS", '8' -> "TUV", '9' -> "WXYZ"
)
val charCode: Map[Char, Char] = for ((digit, letters) <- m; letter <- letters) yield letter -> digit
def wordCode(word: String): String = (word.toUpperCase.toList map charCode).toString
-
+
wa.par groupBy wordCode
}
-
+
def reset = runWhat match {
case "seq" =>
case "par" =>
collection.parallel.tasksupport.environment.asInstanceOf[concurrent.forkjoin.ForkJoinPool].setParallelism(parallelism)
}
-
+
def comparisonMap = Map()
-
+
}
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/AggregateLight.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/AggregateLight.scala
index 446f4a6735..2eaddd7572 100644
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/AggregateLight.scala
+++ b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/AggregateLight.scala
@@ -11,7 +11,7 @@ object AggregateLight extends Companion {
def apply(sz: Int, parallelism: Int, what: String) = new AggregateLight(sz, parallelism, what)
override def comparisons = List()
override def defaultSize = 200000
-
+
val seqop = (a: Cont, b: Cont) => b
val combop = (a: Cont, b: Cont) => a
}
@@ -22,7 +22,7 @@ extends Resettable[Cont](sz, p, what, new Cont(_), new Array[Any](_), classOf[Co
def companion = AggregateLight
override def repetitionsPerRun = 350
override val runs = 20
-
+
def runpar = pa.aggregate(new Cont(0))(companion.seqop, companion.combop)
def runseq = sequentialReduce(companion.seqop, sz, new Cont(0))
override def comparisonMap = collection.Map()
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/CopyToArray.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/CopyToArray.scala
index 0f743eeb96..033921d451 100644
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/CopyToArray.scala
+++ b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/CopyToArray.scala
@@ -14,7 +14,7 @@ class CopyToArray(sz: Int, p: Int, what: String)
extends Resettable(sz, p, what, new Cont(_), new Array[Any](_), classOf[Cont]) {
def companion = CopyToArray
val destarr = new Array[Any](sz)
-
+
def runpar = pa.copyToArray(destarr, 0, sz)
def runseq = sequentialCopyToArray(destarr, 0, sz)
def comparisonMap = collection.Map()
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/Corresponds.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/Corresponds.scala
index ae50958bdc..c9b3f07ff3 100644
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/Corresponds.scala
+++ b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/Corresponds.scala
@@ -12,7 +12,7 @@ extends Resettable[Cont](sz, p, what, (i: Int) => new Cont(i), new Array[Any](_
with HavingResult[Boolean] {
def companion = Corresponds
override def repetitionsPerRun = 400
-
+
val same = {
val p = new collection.parallel.mutable.ParArray[Cont](sz)
for (i <- 0 until sz) p(i) = what match {
@@ -21,11 +21,11 @@ with HavingResult[Boolean] {
}
p
}
-
+
def runpar = runresult = pa.corresponds(same)(corr)
def runseq = runresult = sequentialCorresponds(same, corr, sz)
override def comparisonMap = collection.Map()
-
+
val corr = (a: Cont, b: Cont) => a.in == b.in
}
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/CountHeavy.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/CountHeavy.scala
index 722d721288..7438be8447 100644
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/CountHeavy.scala
+++ b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/CountHeavy.scala
@@ -8,12 +8,12 @@ object CountHeavy extends Companion {
def apply(sz: Int, parallelism: Int, what: String) = new CountHeavy(sz, parallelism, what)
override def comparisons = List("jsr")
override def defaultSize = 16
-
+
val pred = (a: Cont) => heavyCheck(a)
val predjsr = new extra166y.Ops.Predicate[Cont] {
def op(a: Cont) = heavyCheck(a)
}
-
+
def heavyCheck(a: Cont) = {
val n = a.in
(n until (n + 200)).map(checkPrime(_)).reduceLeft(_ && _)
@@ -28,7 +28,7 @@ object CountHeavy extends Companion {
class CountHeavy(sz: Int, p: Int, what: String)
extends Resettable(sz, p, what, new Cont(_), new Array[Any](_), classOf[Cont]) {
def companion = CountHeavy
-
+
def runpar = pa.count(CountHeavy.pred)
def runseq = sequentialCount(CountHeavy.pred, sz)
def runjsr = jsrarr.withFilter(CountHeavy.predjsr).size
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/CountLight.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/CountLight.scala
index 569b304660..21c64358b4 100644
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/CountLight.scala
+++ b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/CountLight.scala
@@ -13,7 +13,7 @@ object CountLight extends Companion {
class CountLight(sz: Int, p: Int, what: String)
extends Resettable(sz, p, what, new Cont(_), new Array[Any](_), classOf[Cont]) {
def companion = CountLight
-
+
def runpar = pa.count(Cont.pred)
def runseq = sequentialCount(Cont.pred, sz)
def runjsr = jsrarr.withFilter(Cont.predjsr).size
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/CountList.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/CountList.scala
index 0d9550d2bd..9c6ac19229 100644
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/CountList.scala
+++ b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/CountList.scala
@@ -8,13 +8,13 @@ object CountList extends Companion {
def apply(sz: Int, parallelism: Int, what: String) = new CountList(sz, parallelism, what)
override def comparisons = List("jsr")
override def defaultSize = 1000
-
+
val listCreator = (i: Int) => (0 until (i % 50 + 50)).toList
val pred = (lst: List[Int]) => check(lst)
val predjsr = new extra166y.Ops.Predicate[List[Int]] {
def op(lst: List[Int]) = check(lst)
}
-
+
def check(lst: List[Int]) = lst.foldLeft(0)((sum, n) => sum + n * n) % 2 == 0
}
@@ -22,7 +22,7 @@ class CountList(sz: Int, p: Int, what: String)
extends Resettable(sz, p, what, CountList.listCreator, new Array[Any](_), classOf[List[Int]]) {
def companion = CountList
override def repetitionsPerRun = 250
-
+
def runpar = pa.count(CountList.pred)
def runseq = sequentialCount(CountList.pred, sz)
def runjsr = jsrarr.withFilter(CountList.predjsr).size
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/DiffHalf.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/DiffHalf.scala
index bc0ff3fe74..4b27569239 100644
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/DiffHalf.scala
+++ b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/DiffHalf.scala
@@ -12,7 +12,7 @@ extends Resettable[Cont](sz, p, what, (i: Int) => new Cont(i), new Array[Any](_
with HavingResult[Int] {
def companion = DiffHalf
override def repetitionsPerRun = 400
-
+
val similar = {
val p = new collection.parallel.mutable.ParArray[Cont](sz)
for (i <- 0 until sz) p(i) = what match {
@@ -21,11 +21,11 @@ with HavingResult[Int] {
}
p.drop(p.size / 2)
}
-
+
def runpar = runresult = pa.diff(similar).size
def runseq = runresult = sequentialDiff(similar, sz).size
override def comparisonMap = collection.Map()
-
+
val corr = (a: Cont, b: Cont) => a.in == b.in
}
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/DropMany.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/DropMany.scala
index d80ba91a29..443ef2b500 100644
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/DropMany.scala
+++ b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/DropMany.scala
@@ -17,7 +17,7 @@ with HavingResult[Int] {
def companion = DropMany
override def repetitionsPerRun = 400
runresult = -1
-
+
def runpar = runresult = pa.drop(pa.size / 2).size
def runseq = runresult = sequentialDrop(sz / 2, sz).size
def comparisonMap = collection.Map()
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ExistsLight.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ExistsLight.scala
index 401ab38e0b..2749216735 100644
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ExistsLight.scala
+++ b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ExistsLight.scala
@@ -9,7 +9,7 @@ object ExistsLight extends Companion {
def apply(sz: Int, parallelism: Int, what: String) = new ExistsLight(sz, parallelism, what)
override def comparisons = List("jsr")
override def defaultSize = 200000
-
+
val pred = (a: Cont) => a.in < 0
val predjsr = new extra166y.Ops.Predicate[Cont] {
def op(a: Cont) = a.in < 0
@@ -21,7 +21,7 @@ extends Resettable(sz, p, what, new Cont(_), new Array[Any](_), classOf[Cont])
with HavingResult[Boolean] {
def companion = ExistsLight
runresult = false
-
+
def runpar = runresult = pa.exists(ExistsLight.pred)
def runseq = runresult = sequentialExists(ExistsLight.pred, sz)
def runjsr = runresult = jsrarr.withFilter(ExistsLight.predjsr).size > 0
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/FilterLight.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/FilterLight.scala
index ee6545efbf..d4c8395951 100644
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/FilterLight.scala
+++ b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/FilterLight.scala
@@ -9,12 +9,12 @@ object FilterLight extends Companion {
def apply(sz: Int, parallelism: Int, what: String) = new FilterLight(sz, parallelism, what)
override def comparisons = List("jsr")
override def defaultSize = 10000
-
+
val pred = (a: Cont) => check(a.in)
val predjsr = new extra166y.Ops.Predicate[Cont] {
def op(a: Cont) = check(a.in)
}
-
+
def check(n: Int) = {
var res = n
// var i = 1
@@ -33,7 +33,7 @@ with HavingResult[Int] {
override def repetitionsPerRun = 250
override val runs = 30
runresult = -1
-
+
def runpar = runresult = pa.filter(FilterLight.pred).size
def runseq = runresult = sequentialFilter(FilterLight.pred, sz).size
def runjsr = runresult = { jsrarr.withFilter(FilterLight.predjsr).all.size }
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/FindLight.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/FindLight.scala
index 11cb6c69fd..f08ddf29e3 100644
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/FindLight.scala
+++ b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/FindLight.scala
@@ -9,7 +9,7 @@ object FindLight extends Companion {
def apply(sz: Int, parallelism: Int, what: String) = new FindLight(sz, parallelism, what)
override def comparisons = List("jsr")
override def defaultSize = 200000
-
+
val pred = (a: Cont) => a.in < -10
val predjsr = new extra166y.Ops.Predicate[Cont] {
def op(a: Cont) = a.in < -10
@@ -21,7 +21,7 @@ extends Resettable(sz, p, what, new Cont(_), new Array[Any](_), classOf[Cont])
with HavingResult[Option[Cont]] {
def companion = FindLight
runresult = None
-
+
def runpar = runresult = pa.find(FindLight.pred)
def runseq = runresult = sequentialFind(FindLight.pred, sz)
def runjsr = runresult = { jsrarr.withFilter(FindLight.predjsr).size > 0; None }
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/FlatMapLight.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/FlatMapLight.scala
index b1f8942d94..01ecbbf016 100644
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/FlatMapLight.scala
+++ b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/FlatMapLight.scala
@@ -10,14 +10,14 @@ object FlatMapLight extends Companion {
def apply(sz: Int, parallelism: Int, what: String) = new FlatMapLight(sz, parallelism, what)
override def comparisons = List("jsr")
override def defaultSize = 10000
-
+
def fun = (a: Cont) => { List(1, 2, 3, 4, a.in) }
}
class FlatMapLight(sz: Int, p: Int, what: String)
extends Resettable(sz, p, what, new Cont(_), new Array[Any](_), classOf[Cont]) {
def companion = FlatMapLight
-
+
def runpar = pa.flatMap(FlatMapLight.fun)
def runseq = sequentialFlatMap(FlatMapLight.fun, sz)
def comparisonMap = collection.Map()
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ForallHeavy.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ForallHeavy.scala
index c354f65ec9..0d61e5aeb5 100644
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ForallHeavy.scala
+++ b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ForallHeavy.scala
@@ -8,12 +8,12 @@ object ForallHeavy extends Companion {
def apply(sz: Int, parallelism: Int, what: String) = new ForallHeavy(sz, parallelism, what)
override def comparisons = List("jsr")
override def defaultSize = 16
-
+
val pred = (a: Cont) => heavyCheck(a)
val predjsr = new extra166y.Ops.Predicate[Cont] {
def op(a: Cont) = heavyCheck(a)
}
-
+
def heavyCheck(a: Cont) = {
val init = a.in + 1
var cnt = init
@@ -31,7 +31,7 @@ object ForallHeavy extends Companion {
class ForallHeavy(sz: Int, p: Int, what: String)
extends Resettable(sz, p, what, new Cont(_), new Array[Any](_), classOf[Cont]) {
def companion = ForallHeavy
-
+
def runpar = pa.forall(ForallHeavy.pred)
def runseq = sequentialForall(ForallHeavy.pred, sz)
def runjsr = jsrarr.withFilter(ForallHeavy.predjsr).size == sz
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ForallLight.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ForallLight.scala
index 079f2ccc32..19671d2bc4 100644
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ForallLight.scala
+++ b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ForallLight.scala
@@ -8,7 +8,7 @@ object ForallLight extends Companion {
def apply(sz: Int, parallelism: Int, what: String) = new ForallLight(sz, parallelism, what)
override def comparisons = List("jsr")
override def defaultSize = 200000
-
+
val pred = (a: Cont) => a.in >= 0
val predjsr = new extra166y.Ops.Predicate[Cont] {
def op(a: Cont) = a.in >= 0
@@ -18,7 +18,7 @@ object ForallLight extends Companion {
class ForallLight(sz: Int, p: Int, what: String)
extends Resettable(sz, p, what, new Cont(_), new Array[Any](_), classOf[Cont]) {
def companion = ForallLight
-
+
def runpar = pa.forall(ForallLight.pred)
def runseq = sequentialForall(ForallLight.pred, sz)
def runjsr = jsrarr.withFilter(ForallLight.predjsr).size == sz
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ForallQuickStop.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ForallQuickStop.scala
index 310105dd41..624266e49d 100644
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ForallQuickStop.scala
+++ b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ForallQuickStop.scala
@@ -8,7 +8,7 @@ object ForallQuickStop extends Companion {
def benchName = "forall-quickstop";
def apply(sz: Int, parallelism: Int, what: String) = new ForallQuickStop(sz, parallelism, what)
override def defaultSize = 200000
-
+
val pred = (a: Cont) => a.in != 50
val predjsr = new extra166y.Ops.Predicate[Cont] {
def op(a: Cont) = a.in != 50
@@ -19,7 +19,7 @@ class ForallQuickStop(sz: Int, p: Int, what: String)
extends Resettable(sz, p, what, new Cont(_), new Array[Any](_), classOf[Cont])
with HavingResult[Boolean] {
def companion = ForallQuickStop
-
+
def runpar = runresult = pa.forall(ForallQuickStop.pred)
def runseq = runresult = sequentialForall(ForallQuickStop.pred, sz)
def comparisonMap = collection.Map()
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ForallStop80k.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ForallStop80k.scala
index cbfa6ebb42..c7462ed04b 100644
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ForallStop80k.scala
+++ b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ForallStop80k.scala
@@ -8,7 +8,7 @@ object ForallStop80k extends Companion {
def benchName = "forall-stop80k";
def apply(sz: Int, parallelism: Int, what: String) = new ForallStop80k(sz, parallelism, what)
override def defaultSize = 100000
-
+
val pred = (a: Cont) => a.in != 80000
val predjsr = new extra166y.Ops.Predicate[Cont] {
def op(a: Cont) = a.in != 80000
@@ -19,7 +19,7 @@ class ForallStop80k(sz: Int, p: Int, what: String)
extends Resettable(sz, p, what, new Cont(_), new Array[Any](_), classOf[Cont])
with HavingResult[Boolean] {
def companion = ForallStop80k
-
+
def runpar = runresult = pa.forall(ForallStop80k.pred)
def runseq = runresult = sequentialForall(ForallStop80k.pred, sz)
def comparisonMap = collection.Map()
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ForeachHeavy.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ForeachHeavy.scala
index 4d7c48b0b4..d1a3f8085c 100644
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ForeachHeavy.scala
+++ b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ForeachHeavy.scala
@@ -8,18 +8,18 @@ object ForeachHeavy extends Companion {
def apply(sz: Int, parallelism: Int, what: String) = new ForeachHeavy(sz, parallelism, what)
override def comparisons = List("jsr")
override def defaultSize = 2048
-
+
@volatile var z = 0
-
+
val fun = (a: Cont) => heavyOperation(a)
val funjsr = new extra166y.Ops.Procedure[Cont] {
def op(a: Cont) = heavyOperation(a)
}
-
+
def heavyOperation(a: Cont) {
checkPrime(a.in + 1000000000)
}
-
+
def checkPrime(n: Int) = {
var isPrime = true
var i = 2
@@ -37,7 +37,7 @@ class ForeachHeavy(sz: Int, p: Int, what: String)
extends Resettable(sz, p, what, new Cont(_), new Array[Any](_), classOf[Cont]) {
def companion = ForeachHeavy
override def repetitionsPerRun = 250
-
+
def runpar = pa.pforeach(ForeachHeavy.fun)
def runseq = sequentialForeach(ForeachHeavy.fun, sz)
def runjsr = jsrarr.apply(ForeachHeavy.funjsr)
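ForeachHeavy's payload is a trial-division primality check applied to a shifted range of inputs, so each foreach call is deliberately CPU-bound. A stand-alone sketch of that kind of check (hypothetical names; the exact loop bounds used in the benchmark may differ):

object PrimeSketch {
  def checkPrime(n: Int): Boolean = {
    var isPrime = true
    var i = 2
    val until = math.sqrt(n).toInt + 1
    while (i < until) {
      if (n % i == 0) isPrime = false   // found a divisor: not prime
      i += 1
    }
    isPrime
  }
  def main(args: Array[String]): Unit =
    println((2 to 20).filter(checkPrime).mkString(",")) // 2,3,5,7,11,13,17,19
}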
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ForeachLight.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ForeachLight.scala
index 5ef41d8d5b..3d0c5c45c4 100644
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ForeachLight.scala
+++ b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ForeachLight.scala
@@ -8,7 +8,7 @@ object ForeachLight extends Companion {
def apply(sz: Int, parallelism: Int, what: String) = new ForeachLight(sz, parallelism, what)
override def comparisons = List("jsr")
override def defaultSize = 200000
-
+
val fun = (a: Cont) => a.num = a.in
val funjsr = new extra166y.Ops.Procedure[Cont] {
def op(a: Cont) = a.num = a.in
@@ -18,7 +18,7 @@ object ForeachLight extends Companion {
class ForeachLight(sz: Int, p: Int, what: String)
extends Resettable(sz, p, what, new Cont(_), new Array[Any](_), classOf[Cont]) {
def companion = ForeachLight
-
+
def runpar = pa.pforeach(ForeachLight.fun)
def runseq = sequentialForeach(ForeachLight.fun, sz)
def runjsr = jsrarr.apply(ForeachLight.funjsr)
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/GroupBy.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/GroupBy.scala
index ec83f2982b..a90227a6e4 100644
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/GroupBy.scala
+++ b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/GroupBy.scala
@@ -8,7 +8,7 @@ object GroupByLight extends Companion {
def apply(sz: Int, parallelism: Int, what: String) = new GroupByLight(sz, parallelism, what)
override def comparisons = List()
override def defaultSize = 10000
-
+
val fun = (a: Cont) => a.in % 32
}
@@ -18,10 +18,10 @@ extends Resettable(sz, p, what, new Cont(_), new Array[Any](_), classOf[Cont])
with HavingResult[Int] {
def companion = GroupByLight
runresult = -1
-
+
val array = new Array[Cont](sz)
for (i <- 0 until sz) array(i) = new Cont(i)
-
+
def runpar = runresult = pa.groupBy(GroupByLight.fun).size
def runseq = runresult = array.asInstanceOf[Array[Cont]].groupBy(GroupByLight.fun).size
def comparisonMap = collection.Map()
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/IndexWhere.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/IndexWhere.scala
index e8a74286ae..3a22bdd1db 100644
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/IndexWhere.scala
+++ b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/IndexWhere.scala
@@ -12,7 +12,7 @@ extends Resettable[Cont](sz, p, what, (i: Int) => new Cont(i), new Array[Any](_
with HavingResult[Int] {
def companion = IndexWhere
override def repetitionsPerRun = 400
-
+
def runpar = runresult = pa.indexWhere(IndexWhere.pred2, 0)
def runseq = runresult = sequentialIndexWhere(IndexWhere.pred2, 0, sz)
override def comparisonMap = collection.Map()
@@ -22,7 +22,7 @@ object IndexWhere extends Companion {
def benchName = "index-where";
def apply(sz: Int, p: Int, what: String) = new IndexWhere(sz, p, what)
override def comparisons = List()
-
+
val pred = (c: Cont) => {
var in = c.in
var i = 2
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/IntersectHalf.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/IntersectHalf.scala
index fa0b9f8baf..e429fb288e 100644
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/IntersectHalf.scala
+++ b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/IntersectHalf.scala
@@ -12,7 +12,7 @@ extends Resettable[Cont](sz, p, what, (i: Int) => new Cont(i), new Array[Any](_
with HavingResult[Int] {
def companion = IntersectHalf
override def repetitionsPerRun = 400
-
+
val similar = {
val p = new collection.parallel.mutable.ParArray[Cont](sz)
for (i <- 0 until sz) p(i) = what match {
@@ -21,11 +21,11 @@ with HavingResult[Int] {
}
p.drop(p.size / 2)
}
-
+
def runpar = runresult = pa.intersect(similar).size
def runseq = runresult = sequentialIntersect(similar, sz).size
override def comparisonMap = collection.Map()
-
+
val corr = (a: Cont, b: Cont) => a.in == b.in
}
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/LastIndexWhere.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/LastIndexWhere.scala
index dbba807390..427afa5571 100644
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/LastIndexWhere.scala
+++ b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/LastIndexWhere.scala
@@ -12,7 +12,7 @@ extends Resettable[Cont](sz, p, what, (i: Int) => new Cont(i), new Array[Any](_
with HavingResult[Int] {
def companion = LastIndexWhere
override def repetitionsPerRun = 400
-
+
def runpar = runresult = pa.lastIndexWhere(LastIndexWhere.pred2, pa.size - 1)
def runseq = runresult = sequentialLastIndexWhere(LastIndexWhere.pred2, sz - 1, sz)
override def comparisonMap = collection.Map()
@@ -22,7 +22,7 @@ object LastIndexWhere extends Companion {
def benchName = "last-index-where";
def apply(sz: Int, p: Int, what: String) = new LastIndexWhere(sz, p, what)
override def comparisons = List()
-
+
val pred = (c: Cont) => {
var in = c.in
var i = 2
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/MapLight.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/MapLight.scala
index f6a5985cb7..1451f6a57a 100644
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/MapLight.scala
+++ b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/MapLight.scala
@@ -8,7 +8,7 @@ object MapLight extends Companion {
def apply(sz: Int, parallelism: Int, what: String) = new MapLight(sz, parallelism, what)
override def comparisons = List("jsr")
override def defaultSize = 100000
-
+
def fun = (a: Cont) => { a }
def funjsr = new extra166y.Ops.Op[Cont, Cont] {
def op(a: Cont) = { a }
@@ -18,7 +18,7 @@ object MapLight extends Companion {
class MapLight(sz: Int, p: Int, what: String)
extends Resettable(sz, p, what, new Cont(_), new Array[Any](_), classOf[Cont]) {
def companion = MapLight
-
+
def runpar = pa.map(MapLight.fun)
def runseq = sequentialMap(MapLight.fun, sz)
// def runseq = sequentialMapOpt(MapLight.fun, sz)
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/MatrixMultiplication.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/MatrixMultiplication.scala
index 29d6ff7580..5f902ff483 100644
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/MatrixMultiplication.scala
+++ b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/MatrixMultiplication.scala
@@ -16,46 +16,46 @@ class MatrixMultiplication(sz: Int, p: Int, what: String)
extends Resettable(sz, p, what, new Cont(_), new Array[Any](_), classOf[Cont]) {
def companion = MatrixMultiplication
collection.parallel.tasksupport.environment = forkjoinpool
-
+
val a = Matrix.unit[Int](sz)
val b = Matrix.unit[Int](sz)
var c = new Matrix[Int](sz)
-
+
def runpar = c = a * b //{ c.assignProduct(a, b) } //; println("--------"); c.output }
def runseq = throw new UnsupportedOperationException
def comparisonMap = collection.Map()
-
+
class Matrix[T](n: Int)(implicit num: Numeric[T], man: Manifest[T]) {
val array = new Array[T](n * n)
-
+
def apply(y: Int, x: Int) = array(y * n + x)
-
+
def update(y: Int, x: Int, elem: T) = array(y * n + x) = elem
-
+
def *(b: Matrix[T]) = {
val m = new Matrix[T](n)
m.assignProduct(this, b)
m
}
-
+
def assignProduct(a: Matrix[T], b: Matrix[T]) = {
val range = ParRange(0, n * n, 1, false)
for (i <- range) this(i / n, i % n) = calcProduct(a, b, i / n, i % n);
}
-
+
private def calcProduct(a: Matrix[T], b: Matrix[T], y: Int, x: Int): T = {
import num._
var sum = zero
for (i <- 0 until n) sum += a(y, i) * b(i, x)
sum
}
-
+
def output = for (y <- 0 until n) {
for (x <- 0 until n) print(this(y, x))
println
}
}
-
+
object Matrix {
def unit[T](n: Int)(implicit num: Numeric[T], man: Manifest[T]) = {
val m = new Matrix[T](n)
@@ -63,7 +63,7 @@ extends Resettable(sz, p, what, new Cont(_), new Array[Any](_), classOf[Cont]) {
m
}
}
-
+
}
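The MatrixMultiplication benchmark above stores n x n matrices in a flat row-major array and recovers each cell's row and column from a linear index as i / n and i % n, computing the cell as a dot product. A minimal sequential sketch of that indexing scheme (hypothetical names, Int elements assumed):

object MatrixSketch {
  def multiply(a: Array[Int], b: Array[Int], n: Int): Array[Int] = {
    val c = new Array[Int](n * n)
    for (i <- 0 until n * n) {
      val y = i / n   // row of the output cell
      val x = i % n   // column of the output cell
      var sum = 0
      for (k <- 0 until n) sum += a(y * n + k) * b(k * n + x)
      c(i) = sum
    }
    c
  }
  def main(args: Array[String]): Unit = {
    val id = Array(1, 0, 0, 1)   // 2 x 2 identity, row-major
    val m  = Array(1, 2, 3, 4)
    println(multiply(id, m, 2).mkString(",")) // 1,2,3,4
  }
}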
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/MinLight.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/MinLight.scala
index 66cd29807a..a51b5d6176 100644
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/MinLight.scala
+++ b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/MinLight.scala
@@ -9,7 +9,7 @@ class MinLight(sz: Int, p: Int, what: String)
extends Resettable[Int](sz, p, what, (i: Int) => i, new Array[Any](_), classOf[Int]) {
def companion = MinLight
override def repetitionsPerRun = 400
-
+
def runpar = pa.min(Ordering[Int])
def runseq = sequentialMin(sz)
override def comparisonMap = collection.Map()
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/PadToDouble.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/PadToDouble.scala
index 26c5dd2114..f8a985c349 100644
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/PadToDouble.scala
+++ b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/PadToDouble.scala
@@ -12,7 +12,7 @@ extends Resettable[Cont](sz, p, what, (i: Int) => new Cont(i), new Array[Any](_
with HavingResult[Int] {
def companion = PadToDouble
override def repetitionsPerRun = 400
-
+
val similar = {
val p = new collection.parallel.mutable.ParArray[Cont](sz)
for (i <- 0 until sz) p(i) = what match {
@@ -21,11 +21,11 @@ with HavingResult[Int] {
}
p.drop(p.size / 2)
}
-
+
def runpar = runresult = pa.padTo(size * 2, padder).size
def runseq = runresult = sequentialPadTo(size * 2, padder, size).size
override def comparisonMap = collection.Map()
-
+
val padder = new Cont(0)
}
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/PartialMapLight.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/PartialMapLight.scala
index e06720ae37..57f8536b9e 100644
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/PartialMapLight.scala
+++ b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/PartialMapLight.scala
@@ -8,7 +8,7 @@ object PartialMapLight extends Companion {
def apply(sz: Int, parallelism: Int, what: String) = new PartialMapLight(sz, parallelism, what)
override def comparisons = List()
override def defaultSize = 100000
-
+
def fun: PartialFunction[Cont, Cont] = {
case c: Cont if c.in >= 0 => c
}
@@ -17,7 +17,7 @@ object PartialMapLight extends Companion {
class PartialMapLight(sz: Int, p: Int, what: String)
extends Resettable(sz, p, what, new Cont(_), new Array[Any](_), classOf[Cont]) {
def companion = PartialMapLight
-
+
def runpar = pa.collect(PartialMapLight.fun)
def runseq = sequentialPartialMap(PartialMapLight.fun, sz)
def comparisonMap = collection.Map()
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/PartitionLight.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/PartitionLight.scala
index c0fb0454ec..b99a25b285 100644
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/PartitionLight.scala
+++ b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/PartitionLight.scala
@@ -9,12 +9,12 @@ object PartitionLight extends Companion {
def apply(sz: Int, parallelism: Int, what: String) = new PartitionLight(sz, parallelism, what)
override def comparisons = Nil
override def defaultSize = 20000
-
+
val pred = (a: Cont) => check(a.in)
val predjsr = new extra166y.Ops.Predicate[Cont] {
def op(a: Cont) = check(a.in)
}
-
+
def check(n: Int) = {
var res = n
var i = 1
@@ -31,7 +31,7 @@ extends Resettable(sz, p, what, new Cont(_), new Array[Any](_), classOf[Cont])
with HavingResult[Int] {
def companion = PartitionLight
runresult = -1
-
+
def runpar = runresult = pa.partition(PartitionLight.pred)._1.size
def runseq = runresult = sequentialPartition(PartitionLight.pred, sz)._1.size
def comparisonMap = collection.Map()
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/PatchHalf.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/PatchHalf.scala
index 73bbc5ee1e..55cc71f129 100644
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/PatchHalf.scala
+++ b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/PatchHalf.scala
@@ -12,7 +12,7 @@ extends Resettable[Cont](sz, p, what, (i: Int) => new Cont(i), new Array[Any](_
with HavingResult[Int] {
def companion = PatchHalf
override def repetitionsPerRun = 400
-
+
val similar = {
val p = new collection.parallel.mutable.ParArray[Cont](sz)
for (i <- 0 until sz) p(i) = what match {
@@ -21,7 +21,7 @@ with HavingResult[Int] {
}
p.drop(p.size / 2)
}
-
+
def runpar = runresult = pa.patch(size / 2, similar, 0).size
def runseq = runresult = sequentialPatch(size / 2, similar, 0, size).size
override def comparisonMap = collection.Map()
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/PlusPlus.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/PlusPlus.scala
index f9d6e277a5..2574621212 100644
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/PlusPlus.scala
+++ b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/PlusPlus.scala
@@ -14,10 +14,10 @@ object PlusPlus extends Companion {
class PlusPlus(sz: Int, p: Int, what: String)
extends Resettable(sz, p, what, new Cont(_), new Array[Any](_), classOf[Cont]) {
def companion = PlusPlus
-
+
val thatarr = new Array[Cont](sz)
val thatpa = new ParArray[Cont](sz)
-
+
def runpar = pa ++ thatpa
def runseq = arr ++ thatarr
def comparisonMap = collection.Map()
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ReduceHeavy.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ReduceHeavy.scala
index 5806dd7831..dd660ba8e0 100644
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ReduceHeavy.scala
+++ b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ReduceHeavy.scala
@@ -7,7 +7,7 @@ class ReduceHeavy(sz: Int, p: Int, what: String)
extends Resettable[Cont](sz, p, what, new Cont(_), new Array[Any](_), classOf[Cont]) {
def companion = ReduceHeavy
override def repetitionsPerRun = 100
-
+
def runseq = sequentialReduce(Cont.opheavy, sz, new Cont(0))
def runpar = pa.reduce(Cont.opheavy)
def runjsr = jsrarr.reduce(Cont.reducerheavy, new Cont(0))
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ReduceLight.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ReduceLight.scala
index 668590d463..f1f2a32403 100644
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ReduceLight.scala
+++ b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ReduceLight.scala
@@ -19,24 +19,24 @@ extends Resettable[Cont](sz, p, what, new Cont(_), new Array[Any](_), classOf[Co
def companion = ReduceLight
override def repetitionsPerRun = 350
override val runs = 20
-
+
def runpar = {
pa.reduce(Cont.op)
// updatePar
}
-
+
def runjsr = {
jsrarr.reduce(Cont.reducer, new Cont(0))
// updateJsr
}
-
+
def runseq = {
sequentialReduce(Cont.op, sz, new Cont(0))
// updateSeq
}
-
+
override def comparisonMap = collection.Map("jsr" -> runjsr _)
-
+
}
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ReduceList.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ReduceList.scala
index db4fb3331f..f095797d1c 100644
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ReduceList.scala
+++ b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ReduceList.scala
@@ -32,7 +32,7 @@ extends Resettable[List[Int]](sz, p, what, ListCreator, new Array[Any](_), class
def companion = ReduceList
override def repetitionsPerRun = 10
override val runs = 15
-
+
def runpar = pa.reduce(ListOps.redop)
def runseq = sequentialReduce(ListOps.redop, sz, List[Int]())
def runjsr = jsrarr.reduce(ListOps.reducer, List[Int]())
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ReduceNew.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ReduceNew.scala
index c69f64d329..1cf4f4169a 100644
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ReduceNew.scala
+++ b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ReduceNew.scala
@@ -6,11 +6,11 @@ package scala.collection.parallel.benchmarks.parallel_array
/** Tests reduce method using an operator creating an object as a result. */
class ReduceNew(sz: Int, p: Int, what: String)
-extends Resettable[Cont](sz, p, what, (i: Int) => new Cont(i),
+extends Resettable[Cont](sz, p, what, (i: Int) => new Cont(i),
new Array[Any](_), classOf[Cont]) {
def companion = ReduceNew
override def repetitionsPerRun = 200
-
+
def runpar = pa.reduce(Cont.opnew)
def runseq = sequentialReduce(Cont.opnew, sz, new Cont(0))
def runjsr = jsrarr.reduce(Cont.reducernew, new Cont(0))
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ReducePrime.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ReducePrime.scala
index b6ff69e37b..8fb90981ac 100644
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ReducePrime.scala
+++ b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ReducePrime.scala
@@ -16,12 +16,12 @@ object IntOps {
val reducer = new extra166y.Ops.Reducer[IntWrap] {
def op(a: IntWrap, b: IntWrap) = primereduce(a, b)
}
-
+
def primereduce(a: IntWrap, b: IntWrap) = {
val check = (checkPrime(a.num), checkPrime(b.num))
if (a.num > b.num) a else b
}
-
+
def checkPrime(n: Int) = {
var isPrime = true
var i = 2
@@ -38,7 +38,7 @@ class ReducePrime(sz: Int, p: Int, what: String)
extends Resettable[IntWrap](sz, p, what, IntWrapCreator, new Array[Any](_), classOf[IntWrap])
with HavingResult[IntWrap] {
def companion = ReducePrime
-
+
def runseq = runresult = sequentialReduce(IntOps.op, sz, new IntWrap(0))
def runpar = runresult = pa.reduce(IntOps.op)
def runjsr = runresult = jsrarr.reduce(IntOps.reducer, new IntWrap(0))
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/RemoveDuplicates.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/RemoveDuplicates.scala
index a66d2fb1f8..feb1bd9466 100644
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/RemoveDuplicates.scala
+++ b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/RemoveDuplicates.scala
@@ -12,7 +12,7 @@ extends Resettable[Cont](sz, p, what, (i: Int) => new Cont(i), new Array[Any](_
with HavingResult[Int] {
def companion = RemoveDuplicates
override def repetitionsPerRun = 400
-
+
def runpar = runresult = pa.distinct.size
def runseq = runresult = sequentialRemoveDuplicates(size).size
override def comparisonMap = collection.Map()
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/Resettable.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/Resettable.scala
index 957e258ca8..b4403fcb9c 100644
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/Resettable.scala
+++ b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/Resettable.scala
@@ -13,37 +13,37 @@ class Cont(val in: Int) {
object Cont {
val pred = (a: Cont) => a.in > 100
-
+
val predjsr = new extra166y.Ops.Predicate[Cont] {
def op(a: Cont) = a.in > 100
}
-
+
val op = (a: Cont, b: Cont) => {
b.num = a.in + b.in
b
}
-
+
val opnew = (a: Cont, b: Cont) => new Cont(a.in + b.in)
-
+
val opheavy = (a: Cont, b: Cont) => {
heavyComputation(a, b)
}
-
+
val reducer = new extra166y.Ops.Reducer[Cont] {
def op(a: Cont, b: Cont) = {
b.num = a.in + b.in
b
}
}
-
+
val reducernew = new extra166y.Ops.Reducer[Cont] {
def op(a: Cont, b: Cont) = new Cont(a.in + b.in)
}
-
+
val reducerheavy = new extra166y.Ops.Reducer[Cont] {
def op(a: Cont, b: Cont) = heavyComputation(a, b)
}
-
+
def heavyComputation(a: Cont, b: Cont) = {
val f = a.in
val s = b.in
@@ -59,18 +59,18 @@ object Cont {
}
}
-abstract class Resettable[T](val size: Int, val parallelism: Int, val runWhat: String,
+abstract class Resettable[T](val size: Int, val parallelism: Int, val runWhat: String,
elemcreator: Int => T, arrcreator: Int => Array[Any], cls: Class[T])
extends Bench with SequentialOps[T] {
val forkjoinpool = new scala.concurrent.forkjoin.ForkJoinPool(parallelism)
forkjoinpool.setMaximumPoolSize(parallelism)
val papool = new jsr166y.ForkJoinPool(parallelism)
papool.setMaximumPoolSize(parallelism)
-
+
var pa: ParArray[T] = null
var jsrarr: JSR166Array[T] = null
reset
-
+
def reset = runWhat match {
case "seq" =>
arr = arrcreator(size)
@@ -84,34 +84,34 @@ extends Bench with SequentialOps[T] {
for (i <- 0 until size) jsrarr.set(i, elemcreator(i))
case _ => throw new IllegalArgumentException("Unknown type: " + runWhat)
}
-
+
var updateCounter = 0
def incUpdateCounter {
updateCounter += 1
if (updateCounter > size) updateCounter = 0
}
-
+
def updateSeq {
val tmp = arr(updateCounter)
arr(updateCounter) = arr(size - updateCounter - 1)
arr(size - updateCounter - 1) = tmp
incUpdateCounter
}
-
+
def updatePar {
val tmp = pa(updateCounter)
pa(updateCounter) = pa(size - updateCounter - 1)
pa(size - updateCounter - 1) = tmp
incUpdateCounter
}
-
+
def updateJsr {
val tmp = jsrarr.get(updateCounter)
jsrarr.set(updateCounter, jsrarr.get(size - updateCounter - 1))
jsrarr.set(size - updateCounter - 1, tmp)
incUpdateCounter
}
-
+
override def printResults {
println(" --- Fork join pool state --- ")
println("Parallelism: " + forkjoinpool.getParallelism)
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/Reverse.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/Reverse.scala
index 0d00e60731..ec690d4b2d 100644
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/Reverse.scala
+++ b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/Reverse.scala
@@ -11,7 +11,7 @@ class Reverse(sz: Int, p: Int, what: String)
extends Resettable[Cont](sz, p, what, (i: Int) => new Cont(i), new Array[Any](_), classOf[Cont]) {
def companion = Reverse
override def repetitionsPerRun = 400
-
+
def runpar = pa.reverse
def runseq = sequentialReverse(sz)
override def comparisonMap = collection.Map()
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ReverseMap.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ReverseMap.scala
index c9f4a02baa..47ae108c45 100644
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ReverseMap.scala
+++ b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ReverseMap.scala
@@ -11,11 +11,11 @@ class ReverseMap(sz: Int, p: Int, what: String)
extends Resettable[Cont](sz, p, what, (i: Int) => new Cont(i), new Array[Any](_), classOf[Cont]) {
def companion = ReverseMap
override def repetitionsPerRun = 100
-
+
def runpar = pa.reverseMap(compl)
def runseq = sequentialReverseMap(compl, sz)
override def comparisonMap = collection.Map()
-
+
val id = (c: Cont) => c
val compl = (c: Cont) => {
var in = c.in
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/SameElementsLong.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/SameElementsLong.scala
index 8ca0bedfde..d22c4df661 100644
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/SameElementsLong.scala
+++ b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/SameElementsLong.scala
@@ -12,7 +12,7 @@ extends Resettable[Cont](sz, p, what, (i: Int) => new Cont(i), new Array[Any](_
with HavingResult[Boolean] {
def companion = SameElementsLong
override def repetitionsPerRun = 400
-
+
val same = {
val p = new collection.parallel.mutable.ParArray[Cont](sz)
for (i <- 0 until sz) p(i) = what match {
@@ -21,7 +21,7 @@ with HavingResult[Boolean] {
}
p
}
-
+
def runpar = runresult = pa.sameElements(same)
def runseq = runresult = sequentialSameElements(same, sz)
override def comparisonMap = collection.Map()
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ScanLight.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ScanLight.scala
index 85bfabfea7..d0ddf9f70e 100644
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ScanLight.scala
+++ b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ScanLight.scala
@@ -10,7 +10,7 @@ object ScanLight extends Companion {
def apply(sz: Int, parallelism: Int, what: String) = new ScanLight(sz, parallelism, what)
override def comparisons = List("jsr")
override def defaultSize = 40000
-
+
val op = (a: Cont, b: Cont) => {
operation(a, b)
}
@@ -26,7 +26,7 @@ extends Resettable[Cont](sz, p, what, new Cont(_), new Array[Any](_), classOf[Co
def companion = ScanLight
override def repetitionsPerRun = 50
override val runs = 12
-
+
def runpar = pa.scan(new Cont(0))(ScanLight.op)
def runseq = sequentialScan(new Cont(0), ScanLight.op, sz)
def runjsr = jsrarr.cumulate(new extra166y.Ops.Reducer[Cont] {
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ScanMedium.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ScanMedium.scala
index 73a237189f..a60ba7aa33 100644
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ScanMedium.scala
+++ b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ScanMedium.scala
@@ -10,7 +10,7 @@ object ScanMedium extends Companion {
def apply(sz: Int, parallelism: Int, what: String) = new ScanMedium(sz, parallelism, what)
override def comparisons = List("jsr")
override def defaultSize = 5000
-
+
val op = (a: Cont, b: Cont) => {
operation(a, b)
}
@@ -35,7 +35,7 @@ extends Resettable[Cont](sz, p, what, new Cont(_), new Array[Any](_), classOf[Co
def companion = ScanMedium
override def repetitionsPerRun = 50
override val runs = 12
-
+
def runpar = pa.scan(new Cont(0))(ScanMedium.op)
def runseq = sequentialScan(new Cont(0), ScanMedium.op, sz)
def runjsr = jsrarr.cumulate(new extra166y.Ops.Reducer[Cont] {
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/SegmentLength.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/SegmentLength.scala
index 1f9041c373..8fae899b45 100644
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/SegmentLength.scala
+++ b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/SegmentLength.scala
@@ -12,7 +12,7 @@ extends Resettable[Cont](sz, p, what, (i: Int) => new Cont(i), new Array[Any](_
with HavingResult[Int] {
def companion = SegmentLength
override def repetitionsPerRun = 400
-
+
def runpar = runresult = pa.segmentLength(SegmentLength.pred2, 0)
def runseq = runresult = sequentialSegmentLength(SegmentLength.pred2, 0, sz)
override def comparisonMap = collection.Map()
@@ -22,7 +22,7 @@ object SegmentLength extends Companion {
def benchName = "segment-length";
def apply(sz: Int, p: Int, what: String) = new SegmentLength(sz, p, what)
override def comparisons = List()
-
+
val pred = (c: Cont) => {
var in = c.in
var i = 2
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/SequentialOps.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/SequentialOps.scala
index 18568ab7e9..ba32036002 100644
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/SequentialOps.scala
+++ b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/SequentialOps.scala
@@ -3,9 +3,9 @@ package scala.collection.parallel.benchmarks.parallel_array
trait SequentialOps[T] {
-
+
var arr: Array[Any] = null
-
+
def sequentialReduce(op: (T, T) => T, sz: Int, init: T) = {
var i = 0
val until = sz
@@ -16,7 +16,7 @@ trait SequentialOps[T] {
}
sum
}
-
+
def sequentialScan(z: T, op: (T, T) => T, sz: Int) = {
var outarr = new Array[Any](sz + 1)
outarr(0) = z
@@ -31,7 +31,7 @@ trait SequentialOps[T] {
j += 1
}
}
-
+
def sequentialCount(pred: T => Boolean, sz: Int) = {
var i = 0
val until = sz
@@ -42,7 +42,7 @@ trait SequentialOps[T] {
}
sum
}
-
+
def sequentialForeach[U](f: T => U, sz: Int) = {
var i = 0
val until = sz
@@ -52,7 +52,7 @@ trait SequentialOps[T] {
i += 1
}
}
-
+
def sequentialSum[U >: T](sz: Int)(implicit num: Numeric[U]) = {
var i = 0
val until = sz
@@ -63,7 +63,7 @@ trait SequentialOps[T] {
}
sum
}
-
+
def sequentialMin[U >: T](sz: Int)(implicit ord: Ordering[U]) = {
var i = 1
val until = sz
@@ -75,12 +75,12 @@ trait SequentialOps[T] {
}
min
}
-
+
def sequentialForall(pred: T => Boolean, sz: Int) = {
var i = 0
val until = sz
var all = true
- while (i < until) {
+ while (i < until) {
if (pred(arr(i).asInstanceOf[T])) i += 1
else {
all = false
@@ -94,7 +94,7 @@ trait SequentialOps[T] {
var i = 0
val until = sz
var some = false
- while (i < until) {
+ while (i < until) {
if (pred(arr(i).asInstanceOf[T])) {
some = true
i = until
@@ -102,12 +102,12 @@ trait SequentialOps[T] {
}
some
}
-
+
def sequentialFind(pred: T => Boolean, sz: Int) = {
var i = 0
val until = sz
var opt: Option[T] = None
- while (i < until) {
+ while (i < until) {
if (pred(arr(i).asInstanceOf[T])) {
opt = Some(arr(i).asInstanceOf[T])
i = until
@@ -115,7 +115,7 @@ trait SequentialOps[T] {
}
opt
}
-
+
def sequentialFilter(pred: T => Boolean, sz: Int) = {
var i = 0
val buff = new collection.mutable.ArrayBuffer[T]
@@ -128,7 +128,7 @@ trait SequentialOps[T] {
buff.copyToArray(resarr, 0)
resarr
}
-
+
def sequentialPartition(pred: T => Boolean, sz: Int) = {
var i = 0
val btrue = new collection.mutable.ArrayBuffer[T]
@@ -145,7 +145,7 @@ trait SequentialOps[T] {
bfalse.copyToArray(resfalse, 0)
(restrue, resfalse)
}
-
+
def sequentialTakeOpt(n: Int, sz: Int) = {
var i = 0
val until = if (n < sz) n else sz
@@ -172,7 +172,7 @@ trait SequentialOps[T] {
b.copyToArray(res, 0)
res
}
-
+
def sequentialDrop(n: Int, sz: Int) = {
var i = n
val b = new collection.mutable.ArrayBuffer[T]
@@ -186,7 +186,7 @@ trait SequentialOps[T] {
b.copyToArray(res, 0)
res
}
-
+
def sequentialSlice(from: Int, until: Int, sz: Int) = {
var i = from
val b = new collection.mutable.ArrayBuffer[T]
@@ -200,7 +200,7 @@ trait SequentialOps[T] {
b.copyToArray(res, 0)
res
}
-
+
def sequentialSplitAtOpt(n: Int, sz: Int) = {
var i = 0
val before = new Array[Any](n)
@@ -209,7 +209,7 @@ trait SequentialOps[T] {
Array.copy(arr, n, after, 0, sz - n)
(before, after)
}
-
+
def sequentialSplitAt(n: Int, sz: Int) = {
var i = 0
val before = new collection.mutable.ArrayBuffer[T]
@@ -227,7 +227,7 @@ trait SequentialOps[T] {
after.copyToArray(resaft, 0)
(resbef, resaft)
}
-
+
def sequentialTakeWhile(p: T => Boolean, sz: Int) = {
var i = 0
val b = new collection.mutable.ArrayBuffer[T]
@@ -242,7 +242,7 @@ trait SequentialOps[T] {
b.copyToArray(res, 0)
res
}
-
+
def sequentialSpan(p: T => Boolean, sz: Int) = {
val bpref = new collection.mutable.ArrayBuffer[T]
val brest = new collection.mutable.ArrayBuffer[T]
@@ -267,112 +267,112 @@ trait SequentialOps[T] {
brest.copyToArray(resrest, 0)
(respref, resrest)
}
-
+
def sequentialMap(f: T => T, sz: Int) = {
val b = new collection.mutable.ArrayBuffer[T](sz)
-
+
var i = 0
while (i < sz) {
b += f(arr(i).asInstanceOf[T])
i += 1
}
-
+
val res = new Array[Any](sz)
b.copyToArray(res, 0)
res
}
-
+
def sequentialMapOpt(f: T => T, sz: Int) = {
val res = new Array[Any](sz)
-
+
var i = 0
while (i < sz) {
res(i) = f(arr(i).asInstanceOf[T])
i += 1
}
-
+
res
}
-
+
def sequentialPartialMap(f: PartialFunction[T, T], sz: Int) = {
val b = new collection.mutable.ArrayBuffer[T](sz)
-
+
var i = 0
while (i < sz) {
val elem = arr(i).asInstanceOf[T]
if (f.isDefinedAt(elem)) b += f(elem)
i += 1
}
-
+
val res = new Array[Any](b.size)
b.copyToArray(res, 0)
res
}
-
+
def sequentialFlatMap(f: T => Traversable[Int], sz: Int) = {
val b = new collection.mutable.ArrayBuffer[Int](sz)
-
+
var i = 0
while (i < sz) {
val ts = f(arr(i).asInstanceOf[T])
for (elem <- ts) b += elem
i += 1
}
-
+
val res = new Array[Any](b.size)
b.copyToArray(res, 0)
res
- }
-
+ }
+
def sequentialCopyToArray(destarr: Array[Any], pos: Int, sz: Int) = {
Array.copy(arr, 0, destarr, pos, sz)
}
-
+
def sequentialSegmentLength(pred: T => Boolean, from: Int, sz: Int) = {
var i = from
var cnt = 0
-
+
while (i < sz) {
if (pred(arr(i).asInstanceOf[T])) {
cnt += 1
i += 1
} else i = sz
}
-
+
cnt
}
-
+
def sequentialIndexWhere(pred: T => Boolean, from: Int, sz: Int) = {
var i = from
var pos = -1
-
+
while (i < sz) {
if (pred(arr(i).asInstanceOf[T])) {
pos = i
i = sz
} else i += 1
}
-
+
pos
}
-
+
def sequentialLastIndexWhere(pred: T => Boolean, end: Int, sz: Int) = {
var i = end
var pos = -1
-
+
while (i >= 0) {
if (pred(arr(i).asInstanceOf[T])) {
pos = i
i = -1
} else i -= 1
}
-
+
pos
}
-
+
def sequentialReverse(sz: Int) = {
val res = new Array[Any](sz)
-
+
var i = sz - 1
var j = 0
while (i >= 0) {
@@ -382,10 +382,10 @@ trait SequentialOps[T] {
}
res
}
-
+
def sequentialReverseMap(f: T => T, sz: Int) = {
val res = new Array[Any](sz)
-
+
var i = sz - 1
var j = 0
while (i >= 0) {
@@ -395,7 +395,7 @@ trait SequentialOps[T] {
}
res
}
-
+
def sequentialSameElements(sq: Seq[T], sz: Int): Boolean = {
if (sz != sq.length) false
else {
@@ -409,7 +409,7 @@ trait SequentialOps[T] {
else false
}
}
-
+
def sequentialCorresponds(sq: Seq[T], f: (T, T) => Boolean, sz: Int): Boolean = {
if (sz != sq.length) false
else {
@@ -423,11 +423,11 @@ trait SequentialOps[T] {
else false
}
}
-
+
def sequentialDiff(sq: Seq[T], sz: Int) = {
val occmap = occurences(sq)
val b = new collection.mutable.ArrayBuffer[T]
-
+
var i = 0
while (i < sz) {
val elem = arr(i).asInstanceOf[T]
@@ -435,16 +435,16 @@ trait SequentialOps[T] {
else occmap(elem) -= 1
i += 1
}
-
+
val res = new Array[Any](b.size)
b.copyToArray(res, 0)
res
}
-
+
def sequentialIntersect(sq: Seq[T], sz: Int) = {
val occmap = occurences(sq)
val b = new collection.mutable.ArrayBuffer[T]
-
+
var i = 0
while (i < sz) {
val elem = arr(i).asInstanceOf[T]
@@ -455,22 +455,22 @@ trait SequentialOps[T] {
}
i += 1
}
-
+
val res = new Array[Any](b.size)
b.copyToArray(res, 0)
res
}
-
+
private def occurences(sq: Seq[T]) = {
val occmap = new collection.mutable.HashMap[T, Int] { override def default(k: T) = 0 }
for (elem <- sq.iterator) occmap(elem) += 1
occmap
}
-
+
def sequentialRemoveDuplicates(sz: Int) = {
val occ = new collection.mutable.HashSet[T]
val b = new collection.mutable.ArrayBuffer[T]
-
+
var i = 0
while (i < sz) {
val elem = arr(i).asInstanceOf[T]
@@ -480,58 +480,58 @@ trait SequentialOps[T] {
}
i += 1
}
-
+
val res = new Array[Any](b.size)
b.copyToArray(res, 0)
res
}
-
+
def sequentialPatch(from: Int, p: Seq[T], replaced: Int, sz: Int) = {
val b = new collection.mutable.ArrayBuffer[T]
b.sizeHint(from + (sz - from - replaced) + p.size)
-
+
var i = 0
while (i < from) {
b += arr(i).asInstanceOf[T]
i += 1
}
-
+
val jt = p.iterator
while (jt.hasNext) b += jt.next
-
+
val skipto = from + replaced
while (i < from + replaced) i += 1
-
+
while (i < sz) {
b += arr(i).asInstanceOf[T]
i += 1
}
-
+
val res = new Array[Any](b.size)
b.copyToArray(res, 0)
res
}
-
+
def sequentialPadTo(tosize: Int, elem: T, sz: Int) = {
val b = new collection.mutable.ArrayBuffer[T]
b.sizeHint(tosize)
-
+
var i = 0
while (i < sz) {
b += arr(i).asInstanceOf[T]
i += 1
}
-
+
while (i < tosize) {
b += elem
i += 1
}
-
+
val res = new Array[Any](b.size)
b.copyToArray(res, 0)
res
}
-
+
}
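sequentialDiff and sequentialIntersect above both scan the array against an occurrence map built from the other sequence, decrementing a count each time a match is consumed. A small stand-alone sketch of that multiset-difference idea (hypothetical names, Int elements):

object DiffSketch {
  def multisetDiff(xs: Seq[Int], ys: Seq[Int]): Seq[Int] = {
    val occ = new collection.mutable.HashMap[Int, Int] { override def default(k: Int) = 0 }
    for (y <- ys) occ(y) += 1                    // count occurrences in ys
    val b = collection.mutable.ArrayBuffer.empty[Int]
    for (x <- xs) {
      if (occ(x) == 0) b += x                    // no unmatched occurrence left in ys: keep it
      else occ(x) -= 1                           // cancel one occurrence from ys
    }
    b.toSeq
  }
  def main(args: Array[String]): Unit =
    println(multisetDiff(Seq(1, 1, 2, 3), Seq(1, 3)).mkString(",")) // 1,2
}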
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/SliceFew.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/SliceFew.scala
index c22ae47400..450d640b8d 100644
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/SliceFew.scala
+++ b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/SliceFew.scala
@@ -17,7 +17,7 @@ with HavingResult[Int] {
def companion = SliceFew
override def repetitionsPerRun = 200
runresult = -1
-
+
def runpar = runresult = pa.slice(5, 25).size
def runseq = runresult = sequentialSlice(5, 25, sz).size
def comparisonMap = collection.Map()
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/SliceMany.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/SliceMany.scala
index 37ad666d93..4a30b60e1f 100644
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/SliceMany.scala
+++ b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/SliceMany.scala
@@ -17,7 +17,7 @@ with HavingResult[Int] {
def companion = SliceMany
override def repetitionsPerRun = 200
runresult = -1
-
+
def runpar = runresult = pa.slice(pa.size / 4, pa.size * 3 / 4).size
def runseq = runresult = sequentialSlice(sz / 4, sz * 3 / 4, sz).size
def comparisonMap = collection.Map()
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/SliceMedium.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/SliceMedium.scala
index 7da94a4a20..e16002f15d 100644
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/SliceMedium.scala
+++ b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/SliceMedium.scala
@@ -17,7 +17,7 @@ with HavingResult[Int] {
def companion = SliceMedium
override def repetitionsPerRun = 200
runresult = -1
-
+
def runpar = runresult = pa.slice(pa.size / 7, pa.size * 4 / 7).size
def runseq = runresult = sequentialSlice(sz / 7, sz * 4 / 7, sz).size
def comparisonMap = collection.Map()
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/SpanLight.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/SpanLight.scala
index 4d8b128e1f..5f1e631bce 100644
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/SpanLight.scala
+++ b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/SpanLight.scala
@@ -9,12 +9,12 @@ object SpanLight extends Companion {
def apply(sz: Int, parallelism: Int, what: String) = new SpanLight(sz, parallelism, what)
override def comparisons = Nil
override def defaultSize = 20000
-
+
val pred = (a: Cont) => check(a.in)
val predjsr = new extra166y.Ops.Predicate[Cont] {
def op(a: Cont) = check(a.in)
}
-
+
def check(n: Int) = {
var res = n
var i = 1
@@ -32,7 +32,7 @@ extends Resettable(sz, p, what, new Cont(_), new Array[Any](_), classOf[Cont])
with HavingResult[Int] {
def companion = SpanLight
runresult = -1
-
+
def runpar = runresult = pa.span(SpanLight.pred)._1.size
def runseq = runresult = sequentialSpan(SpanLight.pred, sz)._1.size
def comparisonMap = collection.Map()
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/SplitHalf.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/SplitHalf.scala
index d671e56c3d..ff1e009481 100644
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/SplitHalf.scala
+++ b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/SplitHalf.scala
@@ -17,7 +17,7 @@ with HavingResult[Int] {
def companion = SplitHalf
override def repetitionsPerRun = 300
runresult = -1
-
+
def runpar = runresult = pa.splitAt(pa.size / 2)._1.size
def runseq = runresult = sequentialSplitAtOpt(sz / 2, sz)._1.size
def comparisonMap = collection.Map()
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/SumLight.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/SumLight.scala
index 67ac1c6478..6ed6d14370 100644
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/SumLight.scala
+++ b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/SumLight.scala
@@ -9,7 +9,7 @@ class SumLight(sz: Int, p: Int, what: String)
extends Resettable[Int](sz, p, what, (i: Int) => i, new Array[Any](_), classOf[Int]) {
def companion = SumLight
override def repetitionsPerRun = 500
-
+
def runpar = pa.sum
def runseq = sequentialSum(sz)
override def comparisonMap = collection.Map()
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/TakeMany.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/TakeMany.scala
index 2f735c1e45..9ddfb77a9d 100644
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/TakeMany.scala
+++ b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/TakeMany.scala
@@ -17,7 +17,7 @@ with HavingResult[Int] {
def companion = TakeMany
override def repetitionsPerRun = 400
runresult = -1
-
+
def runpar = runresult = pa.take(pa.size / 2).size
def runseq = runresult = sequentialTake(sz / 2, sz).size
def comparisonMap = collection.Map()
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/TakeWhileLight.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/TakeWhileLight.scala
index 255cb695d7..a86c67d0d8 100644
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/TakeWhileLight.scala
+++ b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/TakeWhileLight.scala
@@ -9,12 +9,12 @@ object TakeWhileLight extends Companion {
def apply(sz: Int, parallelism: Int, what: String) = new TakeWhileLight(sz, parallelism, what)
override def comparisons = Nil
override def defaultSize = 10000
-
+
val pred = (a: Cont) => check(a.in)
val predjsr = new extra166y.Ops.Predicate[Cont] {
def op(a: Cont) = check(a.in)
}
-
+
def check(n: Int) = {
var res = n
var i = 1
@@ -31,7 +31,7 @@ extends Resettable(sz, p, what, new Cont(_), new Array[Any](_), classOf[Cont])
with HavingResult[Int] {
def companion = TakeWhileLight
runresult = -1
-
+
def runpar = runresult = pa.takeWhile(TakeWhileLight.pred).size
def runseq = runresult = sequentialTakeWhile(TakeWhileLight.pred, sz).size
def comparisonMap = collection.Map()
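Again for illustration only: TakeWhileLight drives takeWhile with a check(n) predicate whose loop body is elided by the hunk above. The checkCost function in the sketch below is a hypothetical stand-in that merely burns a little arithmetic per element; it is not the benchmark's actual predicate.

    // Illustrative sketch, not repository code: a hypothetical light predicate
    // driving takeWhile on sequential and parallel arrays.
    object TakeWhileSketch {
      def checkCost(n: Int): Boolean = {
        var res = n
        var i = 1
        while (i < 4) { res = (res + i) % 1000000; i += 1 }
        res >= 0   // always true here, so the whole prefix is scanned
      }

      def main(args: Array[String]): Unit = {
        val xs = (0 until 10000).toArray      // TakeWhileLight's defaultSize
        val seqPrefix = xs.takeWhile(checkCost).length
        val parPrefix = xs.par.takeWhile(checkCost(_)).size
        println("seq prefix: " + seqPrefix + ", par prefix: " + parPrefix)
      }
    }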
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_range/RangeBenches.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_range/RangeBenches.scala
index 1bd35180c8..af852ce992 100644
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_range/RangeBenches.scala
+++ b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_range/RangeBenches.scala
@@ -13,7 +13,7 @@ import scala.collection.parallel.benchmarks.generic.StandardParIterableBenches
object RangeBenches extends StandardParIterableBenches[Int, ParRange] {
-
+
def nameOfCollection = "ParRange"
def operators = new IntOperators {}
def comparisonMap = collection.Map()
@@ -25,29 +25,29 @@ object RangeBenches extends StandardParIterableBenches[Int, ParRange] {
collection.parallel.tasksupport.environment = forkJoinPool
pr
}
-
+
object MapLight extends IterableBenchCompanion {
override def defaultSize = 20000
def benchName = "map-light";
def apply(sz: Int, p: Int, w: String) = new MapLight(sz, p, w)
}
-
+
class MapLight(val size: Int, val parallelism: Int, val runWhat: String)
extends IterableBench {
def calc(n: Int) = n % 2 + 1
-
+
def comparisonMap = collection.Map()
def runseq = for (n <- this.seqcoll) yield calc(n)
def runpar = for (n <- this.parcoll) yield calc(n)
def companion = MapLight
}
-
+
object MapMedium extends IterableBenchCompanion {
override def defaultSize = 5000
def benchName = "map-medium";
def apply(sz: Int, p: Int, w: String) = new MapMedium(sz, p, w)
}
-
+
class MapMedium(val size: Int, val parallelism: Int, val runWhat: String)
extends IterableBench {
def calc(n: Int) = {
@@ -59,24 +59,24 @@ object RangeBenches extends StandardParIterableBenches[Int, ParRange] {
}
sum
}
-
+
def comparisonMap = collection.Map()
def runseq = for (n <- this.seqcoll) yield calc(n)
def runpar = for (n <- this.parcoll) yield calc(n)
def companion = MapMedium
}
-
+
object ForeachModify extends IterableBenchCompanion {
override def defaultSize = 150000
def benchName = "foreach-modify";
def apply(sz: Int, p: Int, w: String) = new ForeachModify(sz, p, w)
}
-
+
class ForeachModify(val size: Int, val parallelism: Int, val runWhat: String)
extends IterableBench {
val array = new Array[Int](size)
def modify(n: Int) = array(n) += 1
-
+
def comparisonMap = collection.Map()
def runseq = for (n <- this.seqcoll) modify(n)
def runpar = for (n <- this.parcoll.asInstanceOf[ParRange]) {
@@ -85,13 +85,13 @@ object RangeBenches extends StandardParIterableBenches[Int, ParRange] {
}
def companion = ForeachModify
}
-
+
object ForeachModifyMedium extends IterableBenchCompanion {
override def defaultSize = 20000
def benchName = "foreach-modify-medium";
def apply(sz: Int, p: Int, w: String) = new ForeachModifyMedium(sz, p, w)
}
-
+
class ForeachModifyMedium(val size: Int, val parallelism: Int, val runWhat: String)
extends IterableBench {
val array = new Array[Int](size)
@@ -104,78 +104,78 @@ object RangeBenches extends StandardParIterableBenches[Int, ParRange] {
}
sum
}
-
+
def comparisonMap = collection.Map()
def runseq = for (n <- this.seqcoll) modify(n)
def runpar = for (n <- this.parcoll) modify(n)
def companion = ForeachModifyMedium
}
-
+
object ForeachModifyHeavy extends IterableBenchCompanion {
override def defaultSize = 1000
def benchName = "foreach-modify-heavy";
def apply(sz: Int, p: Int, w: String) = new ForeachModifyHeavy(sz, p, w)
}
-
+
class ForeachModifyHeavy(val size: Int, val parallelism: Int, val runWhat: String)
extends IterableBench {
val array = new Array[Int](size)
def modify(n: Int) = array(n) = collatz(10000 + array(n))
-
+
def comparisonMap = collection.Map()
def runseq = for (n <- this.seqcoll) modify(n)
def runpar = for (n <- this.parcoll) modify(n)
def companion = ForeachModifyHeavy
}
-
+
object ForeachAdd extends IterableBenchCompanion {
override def defaultSize = 10000
def benchName = "foreach-add";
def apply(sz: Int, p: Int, w: String) = new ForeachAdd(sz, p, w)
override def comparisons = List("seq-hashmap")
}
-
+
class ForeachAdd(val size: Int, val parallelism: Int, val runWhat: String)
extends IterableBench {
val cmap = new java.util.concurrent.ConcurrentHashMap[Int, Int]
val hmap = new java.util.HashMap[Int, Int]
-
+
override def reset = runWhat match {
case "seq-hashmap" => seqcoll = createSequential(size, parallelism)
case _ => super.reset
}
-
+
def comparisonMap = collection.Map("seq-hashmap" -> runseqhashmap _)
def runseqhashmap = for (i <- seqcoll) hmap put (i, onesum(i))
def runseq = for (i <- seqcoll) cmap put (i, onesum(i))
def runpar = for (i <- parcoll) cmap put (i, onesum(i))
def companion = ForeachAdd
}
-
+
object ForeachAddCollatz extends IterableBenchCompanion {
override def defaultSize = 5000
def benchName = "foreach-add-collatz";
def apply(sz: Int, p: Int, w: String) = new ForeachAddCollatz(sz, p, w)
override def comparisons = List("seq-hashmap")
}
-
+
class ForeachAddCollatz(val size: Int, val parallelism: Int, val runWhat: String)
extends IterableBench {
val cmap = new java.util.concurrent.ConcurrentHashMap[Int, Int]
val hmap = new java.util.HashMap[Int, Int]
-
+
override def reset = runWhat match {
case "seq-hashmap" => seqcoll = createSequential(size, parallelism)
case _ => super.reset
}
-
+
def comparisonMap = collection.Map("seq-hashmap" -> runseqhashmap _)
def runseqhashmap = for (i <- seqcoll) hmap put (i, collatz(i))
def runseq = for (i <- seqcoll) cmap put (i, collatz(i))
def runpar = for (i <- parcoll) cmap put (i, collatz(i))
def companion = ForeachAddCollatz
}
-
+
def collatz(n: Int) = {
var curr = n
var sum = 0
@@ -186,7 +186,7 @@ object RangeBenches extends StandardParIterableBenches[Int, ParRange] {
}
sum
}
-
+
def onesum(n: Int) = {
var left = n
var sum = 0
@@ -196,7 +196,7 @@ object RangeBenches extends StandardParIterableBenches[Int, ParRange] {
}
sum
}
-
+
}
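One more illustrative sketch, not part of the patch: the RangeBenches hunks reference collatz and onesum workloads and ForeachAdd-style benchmarks that fill a ConcurrentHashMap from a parallel traversal, but the loop bodies of collatz and onesum are elided above. The versions below are plausible reconstructions, labeled as assumptions, meant only to show the shape of the per-element work, not the repository's exact code.

    // Illustrative sketch, not repository code.
    import java.util.concurrent.ConcurrentHashMap

    object RangeForeachSketch {
      // Assumed workload: number of 3n+1 steps until the sequence reaches 1.
      def collatz(n: Int): Int = {
        var curr = math.max(n, 1)
        var steps = 0
        while (curr != 1) {
          curr = if (curr % 2 == 0) curr / 2 else 3 * curr + 1
          steps += 1
        }
        steps
      }

      // Assumed workload: count of set bits in n.
      def onesum(n: Int): Int = {
        var left = n
        var sum = 0
        while (left > 0) { sum += left & 1; left >>>= 1 }
        sum
      }

      def main(args: Array[String]): Unit = {
        val cmap = new ConcurrentHashMap[Int, Int]
        // ForeachAdd-style traversal: each element inserts its computed value.
        for (i <- (0 until 10000).par) cmap.put(i, onesum(i))
        println("entries: " + cmap.size + ", collatz(27) steps: " + collatz(27))
      }
    }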
diff --git a/test/benchmarks/src/scala/util/HashSpeedTest.scala b/test/benchmarks/src/scala/util/HashSpeedTest.scala
index 5f6915e4fc..a4d310e6d1 100644
--- a/test/benchmarks/src/scala/util/HashSpeedTest.scala
+++ b/test/benchmarks/src/scala/util/HashSpeedTest.scala
@@ -1,5 +1,5 @@
object HashSpeedTest {
-
+
import System.{ nanoTime => now }
def time[A](f: => A) = {
@@ -7,7 +7,7 @@ object HashSpeedTest {
val ans = f
(ans, now - t0)
}
-
+
def ptime[A](f: => A) = {
val (ans, dt) = time(f)
printf("Elapsed: %.3f\n", dt * 1e-9)
@@ -40,17 +40,17 @@ object HashSpeedTest {
// If you have MurmurHash3 installed, uncomment below (and in main)
import scala.util.{ MurmurHash3 => MH3 }
-
+
val justCountString: String => Unit = str => {
var s, i = 0
while (i < str.length) { s += str.charAt(i); i += 1 }
HashHist.add(s)
}
-
+
val defaultHashString: String => Unit = str => HashHist.add(str.hashCode)
-
+
val murmurHashString: String => Unit = str => HashHist.add(MH3.stringHash(str))
-
+
def makeCharStrings = {
val a = new Array[Byte](4)
val buffer = new collection.mutable.ArrayBuffer[String]
@@ -77,7 +77,7 @@ object HashSpeedTest {
}
buffer.toArray
}
-
+
def hashCharStrings(ss: Array[String], hash: String => Unit) {
var i = 0
while (i < ss.length) {
@@ -91,9 +91,9 @@ object HashSpeedTest {
lli.foreach(_.foreach(s += _))
HashHist.add(s)
}
-
+
def defaultHashList: List[List[Int]] => Unit = lli => HashHist.add(lli.hashCode)
-
+
def makeBinaryLists = {
def singleLists(depth: Int): List[List[Int]] = {
if (depth <= 0) List(Nil)
@@ -132,7 +132,7 @@ object HashSpeedTest {
}
buffer.toArray
}
-
+
def hashBinaryLists(ls: Array[List[List[Int]]], hash: List[List[Int]] => Unit) {
var i = 0
while (i < ls.length) {
@@ -146,9 +146,9 @@ object HashSpeedTest {
si.foreach(s += _)
HashHist.add(s)
}
-
+
def defaultHashSets: Set[Int] => Unit = si => HashHist.add(si.hashCode)
-
+
def makeIntSets = {
def sets(depth: Int): List[Set[Int]] = {
if (depth <= 0) List(Set.empty[Int])
@@ -159,7 +159,7 @@ object HashSpeedTest {
}
sets(20).toArray
}
-
+
def hashIntSets(ss: Array[Set[Int]], hash: Set[Int] => Unit) {
var i = 0
while (i < ss.length) {
@@ -169,7 +169,7 @@ object HashSpeedTest {
}
def defaultHashTuples: (Product with Serializable) => Unit = p => HashHist.add(p.hashCode)
-
+
def makeNestedTuples = {
val basic = Array(
(0, 0),
@@ -199,7 +199,7 @@ object HashSpeedTest {
(for (i <- basic; j <- basic; k <- basic; l <- basic; m <- basic) yield (i, j, k, l, m)) ++
(for (i <- basic; j <- basic; k <- basic; l <- basic; m <- basic) yield (i, (j, (k, (l, m)))))
}
-
+
def hashNestedTuples(ts: Array[Product with Serializable], hash: (Product with Serializable) => Unit) {
var i = 0
while (i < ts.length) {