From 18ad78dd73b29c0c8b34e970c58cd86232cdc4f5 Mon Sep 17 00:00:00 2001
From: Aleksandar Pokopec
Date: Fri, 18 Jun 2010 07:49:14 +0000
Subject: Refactorings and hash trie combiners.

No review.
---
 .../benchmarks/hashtries/MultipleCombine.scala     |  87 +++++++++++++++
 .../benchmarks/hashtries/ParallelHashTries.scala   | 118 +++++++++++++++++++++
 2 files changed, 205 insertions(+)
 create mode 100644 test/benchmarks/src/scala/collection/parallel/benchmarks/hashtries/MultipleCombine.scala
 create mode 100644 test/benchmarks/src/scala/collection/parallel/benchmarks/hashtries/ParallelHashTries.scala

(limited to 'test/benchmarks')

diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/hashtries/MultipleCombine.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/hashtries/MultipleCombine.scala
new file mode 100644
index 0000000000..a944a7fb39
--- /dev/null
+++ b/test/benchmarks/src/scala/collection/parallel/benchmarks/hashtries/MultipleCombine.scala
@@ -0,0 +1,87 @@
+package scala.collection.parallel.benchmarks
+package hashtries
+
+
+
+
+import collection.immutable.{HashMap => HashTrie}
+import collection.mutable.HashMap
+
+
+
+
+
+
+class MultipleCombine(val size: Int, val parallelism: Int, val runWhat: String) extends Bench with IntInit {
+  var combines = 10
+
+  var thattries = new Array[HashTrie[Int, Int]](combines)
+  def initTries = for (r <- 0 until combines) {
+    var thattrie = new HashTrie[Int, Int]
+    for (i <- ((r + 1) * size) until ((r + 2) * size)) thattrie += ((i, i))
+    thattries(r) = thattrie
+  }
+  initTries
+
+  val thatmaps = new Array[HashMap[Int, Int]](10)
+  def initMaps = for (r <- 0 until combines) {
+    var thatmap = new HashMap[Int, Int]
+    for (i <- ((r + 1) * size) until ((r + 2) * size)) thatmap += ((i, i))
+    thatmaps(r) = thatmap
+  }
+  initMaps
+
+  override def repetitionsPerRun = 25
+  def runpar = throw new UnsupportedOperationException
+  def runseq = runhashtrie
+  def runhashtrie = {
+    initHashTrie
+    var trie = hashtrie
+    for (r <- 0 until combines) trie = trie combine thattries(r)
+  }
+  def runappendtrie = {
+    initHashTrie
+    var trie = hashtrie
+    for (r <- 0 until combines) trie = trie ++ thattries(r)
+  }
+  def runhashmap = {
+    initHashMap
+    var map = hashmap
+    for (r <- 0 until combines) map = map ++ thatmaps(r)
+  }
+  def rundestructive = {
+    initHashTrie
+    var trie = hashtrie
+    for (r <- 0 until combines) trie = trie combine thattries(r)
+  }
+  def companion = MultipleCombine
+  def comparisonMap = Map("hashtrie" -> runhashtrie _, "hashmap" -> runhashmap _, "appendtrie" -> runappendtrie _, "destruct" -> rundestructive _)
+  override def reset = runWhat match {
+    case "appendtrie" => initHashTrie
+    case "destruct" => initHashTrie
+    case _ => super.reset
+  }
+}
+
+
+object MultipleCombine extends BenchCompanion {
+  def collectionName = "HashTrie"
+  def benchName = "multi-combine";
+  def apply(sz: Int, p: Int, what: String) = new MultipleCombine(sz, p, what)
+  override def defaultSize = 5000
+}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/hashtries/ParallelHashTries.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/hashtries/ParallelHashTries.scala
new file mode 100644
index 0000000000..747178c1a4
--- /dev/null
+++ b/test/benchmarks/src/scala/collection/parallel/benchmarks/hashtries/ParallelHashTries.scala
@@ -0,0 +1,118 @@
+package scala.collection.parallel.benchmarks.hashtries
+
+
+
+
+import scala.collection.parallel.benchmarks.generic.StandardParallelIterableBench
+import scala.collection.parallel.benchmarks.generic.NotBenchmark
+import scala.collection.parallel.benchmarks.generic.Dummy
+import scala.collection.parallel.benchmarks.generic.Operators
+import scala.collection.parallel.immutable.ParallelHashTrie
+
+
+
+
+
+trait ParallelHashTrieBenches[K, V] extends StandardParallelIterableBench[(K, V), ParallelHashTrie[K, V]] {
+
+  def nameOfCollection = "ParallelHashTrie"
+  def comparisonMap = collection.Map()
+  val forkJoinPool = new scala.concurrent.forkjoin.ForkJoinPool
+
+  object Map2 extends IterableBenchCompanion {
+    override def defaultSize = 5000
+    def benchName = "map2";
+    def apply(sz: Int, p: Int, w: String) = new Map2(sz, p, w)
+  }
+
+  class Map2(val size: Int, val parallelism: Int, val runWhat: String)
+  extends IterableBench with StandardParallelIterableBench[(K, V), ParallelHashTrie[K, V]] {
+    var result: Int = 0
+    def comparisonMap = collection.Map()
+    def runseq = result = this.seqcoll.map(operators.mapper2).size
+    def runpar = {
+      result = this.parcoll.map(operators.mapper2).size
+      //println(collection.parallel.immutable.ParallelHashTrie.totalcombines)
+      //System.exit(1)
+    }
+    def companion = Map2
+    override def repetitionsPerRun = 50
+    override def printResults {
+      println("Total combines: " + collection.parallel.immutable.ParallelHashTrie.totalcombines)
+      println("Size of last result: " + result)
+    }
+  }
+
+}
+
+
+
+
+
+object RefParallelHashTrieBenches extends ParallelHashTrieBenches[Dummy, Dummy] with NotBenchmark {
+
+  type DPair = (Dummy, Dummy)
+
+  object operators extends Operators[DPair] {
+    def gcd(a: Int, b: Int): Int = {
+      val result = if (b == 0) a else {
+        gcd(b, a - b * (a / b))
+      }
+      result + 1000
+    }
+    def heavy(a: Int): Int = {
+      var i = 0
+      var sum = a
+      while (i < 3000) {
+        i += 1
+        sum += a + i
+      }
+      sum
+    }
+    val reducer = (x: DPair, y: DPair) => {
+      y._2.num = x._2.in + y._2.in
+      y
+    }
+    val mediumreducer = (x: DPair, y: DPair) => {
+      y._2.num = gcd(x._2.in, y._2.in)
+      y
+    }
+    val filterer = (p: DPair) => {
+      p._1.num % 2 == 0
+    }
+    val mapper = (p: DPair) => {
+      val a = p._1
+      a.num = a.in % 2
+      (a, p._2)
+    }
+    override val mapper2 = (p: DPair) => {
+      val a = 1 //heavy(p._1.in)
+      (new Dummy(p._1.in * -2 + a), p._2)
+    }
+    val heavymapper = (p: DPair) => {
+      val a = p._1
+      var i = -100
+      while (i < 0) {
+        if (a.in < i) a.num += 1
+        i += 1
+      }
+      (a, p._2)
+    }
+    val taker = (p: DPair) => true
+  }
+
+  def createSequential(sz: Int, p: Int) = {
+    var ht = new collection.immutable.HashMap[Dummy, Dummy]
+    for (i <- 0 until sz) ht += ((new Dummy(i), new Dummy(i)))
+    ht
+  }
+
+  def createParallel(sz: Int, p: Int) = {
+    var pht = new ParallelHashTrie[Dummy, Dummy]
+    for (i <- 0 until sz) pht += ((new Dummy(i), new Dummy(i)))
+    forkJoinPool.setParallelism(p)
+    pht.environment = forkJoinPool
+    pht
+  }
+
+}
--
cgit v1.2.3
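
The patch above is reproduced verbatim; for readers who want to see the shape of what MultipleCombine measures without the benchmark suite's Bench/IntInit harness, the following is a minimal standalone sketch. It is not part of the patch: the CombineSketch object and the time helper are hypothetical names introduced only for illustration, it uses nothing beyond the standard-library immutable and mutable HashMaps, and it exercises plain ++ appends rather than the trie-specific combine method that the patch's "hashtrie" and "destruct" runs rely on.

import scala.collection.mutable

// Hypothetical standalone sketch (not part of the patch): merge several
// disjoint maps into a base map, once with immutable ++ and once into a
// mutable HashMap, mirroring the shape of the MultipleCombine benchmark.
object CombineSketch {
  // Crude one-shot timing helper; the harness in the patch handles warm-up
  // and repetitions properly.
  def time[A](label: String)(body: => A): A = {
    val start = System.nanoTime()
    val result = body
    println(f"$label: ${(System.nanoTime() - start) / 1e6}%.2f ms")
    result
  }

  def main(args: Array[String]): Unit = {
    val size = 5000      // defaultSize used by the MultipleCombine companion
    val combines = 10    // number of disjoint maps merged into the base map

    // Base map with keys [0, size), as the harness's IntInit would build.
    val base: Map[Int, Int] = (0 until size).map(i => i -> i).toMap
    // `combines` disjoint maps with keys [(r + 1) * size, (r + 2) * size).
    val others: Seq[Map[Int, Int]] = (0 until combines).map { r =>
      (((r + 1) * size) until ((r + 2) * size)).map(i => i -> i).toMap
    }

    // Immutable append (++), roughly what the "appendtrie" run in the patch does.
    time("immutable ++") {
      others.foldLeft(base)(_ ++ _).size
    }

    // Destructive merge into a mutable HashMap, included here only as a simple
    // baseline; the patch's "hashmap" run instead rebuilds the map with ++.
    time("mutable ++=") {
      val m = mutable.HashMap.empty[Int, Int]
      m ++= base
      others.foreach(m ++= _)
      m.size
    }
  }
}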