diff options
author | Aleksandar Prokopec <aleksandar.prokopec@epfl.ch> | 2010-11-08 08:52:20 +0000 |
---|---|---|
committer | Aleksandar Prokopec <aleksandar.prokopec@epfl.ch> | 2010-11-08 08:52:20 +0000 |
commit | 09ed9d12c343ee972861c8439fd10596903efe59 (patch) | |
tree | ea2735b13b43d4132664d8b3d6a9c23e2b709b7e /test | |
parent | 056663c3f22b8c03f222856305ef99e3ed029889 (diff) | |
download | scala-09ed9d12c343ee972861c8439fd10596903efe59.tar.gz scala-09ed9d12c343ee972861c8439fd10596903efe59.tar.bz2 scala-09ed9d12c343ee972861c8439fd10596903efe59.zip |
Added size maps to flat hash tables.
Added parallel mutable hash sets.
Implemented parallel mutable hash set iterators.
Implemented parallel mutable hash set combiners.
Factored out unrolled linked lists into a separate class UnrolledBuffer, added tests.
Added parallel mutable hash set tests, and debugged hashsets.
No review.
Diffstat (limited to 'test')
6 files changed, 262 insertions, 7 deletions
diff --git a/test/files/run/UnrolledBuffer.scala b/test/files/run/UnrolledBuffer.scala new file mode 100644 index 0000000000..7e113c3e04 --- /dev/null +++ b/test/files/run/UnrolledBuffer.scala @@ -0,0 +1,125 @@ + + + + +import collection.parallel.UnrolledBuffer + + + +object Test { + + def main(args: Array[String]) { + val u1 = new UnrolledBuffer[Int] + assert(u1.isEmpty) + assert(u1.size == 0) + + u1 += 1 + u1 += 2 + u1 += 3 + assert(u1 == UnrolledBuffer(1, 2, 3)) + assert(u1.toList == List(1, 2, 3)) + assert(u1.nonEmpty) + assert(u1.size == 3) + + u1.clear + assert(u1.isEmpty) + assert(u1.size == 0) + + u1 += 1 + u1 += 2 + u1 += 3 + u1.remove(1) + assert(u1.nonEmpty) + assert(u1.size == 2) + assert(u1 == UnrolledBuffer(1, 3)) + assert(u1.toList == List(1, 3)) + + u1 concat UnrolledBuffer(5, 7, 9) + assert(u1 == UnrolledBuffer(1, 3, 5, 7, 9)) + + val u2 = u1 map { x => (x - 1) / 2 } + assert(u2 == UnrolledBuffer(0, 1, 2, 3, 4)) + + u1.clear + u2.clear + assert(u1.size == 0) + assert(u2.size == 0) + + for (i <- 0 until 500) u1 += i + for (i <- 500 until 1000) u2 += i + assert(u1.size == 500) + assert(u2.size == 500) + assert(u1.iterator.toList == (0 until 500).toList) + assert((for (elem <- u1) yield elem) sameElements (0 until 500)) + + u1 concat u2 + assert(u1.size == 1000) + assert(u2.size == 0) + assertCorrect(u1) + + u1 concat UnrolledBuffer() + assertCorrect(u1) + + val u3 = u1 map { x => x } + var i = 0 + for (elem <- u1) { + assert(elem == u3(i)) + i += 1 + } + + u1.remove(999) + assert(u1.size == 999) + assertCorrect(u1) + + u1.remove(500) + assert(u1.size == 998) + assertCorrect(u1) + + u1.remove(5) + assert(u1.size == 997) + assertCorrect(u1) + + u1.remove(0) + assert(u1.size == 996) + assertCorrect(u1) + + u1.insert(0, 0) + assert(u1.size == 997) + assertCorrect(u1) + + u1.insert(5, 5) + assert(u1.size == 998) + assertCorrect(u1) + + u1.insert(500, 500) + assert(u1.size == 999) + assertCorrect(u1) + + u1.insert(999, 999) + assert(u1.size == 1000) + 
assertCorrect(u1) + + for (i <- -100 until 0) { + i +=: u1 + assertCorrect(u1) + } + assert(u1.size == 1100) + assertCorrect(u1) + } + + def assertCorrect(u1: UnrolledBuffer[Int]) { + val sz = u1.size + val store = new Array[Int](sz) + for (i <- 0 until sz) { + store(i) = u1(i) + u1(i) = sz - i + } + for (i <- 0 until sz) assert(u1(i) == (sz - i)) + for (i <- 0 until sz) u1(i) = store(i) + for (i <- 0 until sz) assert(store(i) == u1(i)) + + assert((u1 map { x => x }) == u1) + assert(u1.iterator.toSeq.size == u1.size) + } + +} diff --git a/test/files/scalacheck/Unrolled.scala b/test/files/scalacheck/Unrolled.scala new file mode 100644 index 0000000000..d69e62dd01 --- /dev/null +++ b/test/files/scalacheck/Unrolled.scala @@ -0,0 +1,26 @@ +import org.scalacheck._ +import Prop._ +import Gen._ + +import collection.parallel.UnrolledBuffer + +object Test extends Properties("UnrolledBuffer") { + + property("concat size") = forAll { (l1: List[Int], l2: List[Int]) => + val u1 = new UnrolledBuffer[Int] + u1 ++= l1 + val u2 = new UnrolledBuffer[Int] + u2 ++= l2 + val totalsz = u1.size + u2.size + u1 concat u2 + totalsz == u1.size + } + + property("adding") = forAll { (l: List[Int]) => + val u = new UnrolledBuffer[Int] + u ++= l + u == l + } + +} + diff --git a/test/files/scalacheck/parallel-collections/PairOperators.scala b/test/files/scalacheck/parallel-collections/PairOperators.scala index 48cbd136e5..2055c29d38 100644 --- a/test/files/scalacheck/parallel-collections/PairOperators.scala +++ b/test/files/scalacheck/parallel-collections/PairOperators.scala @@ -49,7 +49,7 @@ trait PairOperators[K, V] extends Operators[(K, V)] { def apply(kv: (K, V)) = kfm(kv._1).toIterable zip vfm(kv._2).toIterable } - def filterPredicates = zipPredicates(koperators.filterPredicates, voperators.existsPredicates) + def filterPredicates = zipPredicates(koperators.filterPredicates, voperators.filterPredicates) def filterNotPredicates = filterPredicates diff --git 
a/test/files/scalacheck/parallel-collections/ParallelHashSetCheck.scala b/test/files/scalacheck/parallel-collections/ParallelHashSetCheck.scala new file mode 100644 index 0000000000..973a5cdf4b --- /dev/null +++ b/test/files/scalacheck/parallel-collections/ParallelHashSetCheck.scala @@ -0,0 +1,94 @@ +package scala.collection.parallel +package mutable + + + +import org.scalacheck._ +import org.scalacheck.Gen +import org.scalacheck.Gen._ +import org.scalacheck.Prop._ +import org.scalacheck.Properties +import org.scalacheck.Arbitrary._ + +import scala.collection._ +import scala.collection.parallel.ops._ + + +abstract class ParallelHashSetCheck[T](tp: String) extends ParallelSetCheck[T]("mutable.ParHashSet[" + tp + "]") { + ForkJoinTasks.defaultForkJoinPool.setMaximumPoolSize(Runtime.getRuntime.availableProcessors * 2) + ForkJoinTasks.defaultForkJoinPool.setParallelism(Runtime.getRuntime.availableProcessors * 2) + + type CollType = ParHashSet[T] + + def isCheckingViews = false + + def hasStrictOrder = false + + def ofSize(vals: Seq[Gen[T]], sz: Int) = { + val hm = new mutable.HashSet[T] + val gen = vals(rnd.nextInt(vals.size)) + for (i <- 0 until sz) hm += sample(gen) + hm + } + + def fromTraversable(t: Traversable[T]) = { + val phm = new ParHashSet[T] + var i = 0 + for (kv <- t.toList) { + phm += kv + i += 1 + } + phm + } + +} + + +object IntParallelHashSetCheck extends ParallelHashSetCheck[Int]("Int") +with IntOperators +with IntValues +{ + override def printDataStructureDebugInfo(ds: AnyRef) = ds match { + case pm: ParHashSet[t] => + println("Mutable parallel hash set") + case _ => + println("could not match data structure type: " + ds.getClass) + } + + override def checkDataStructureInvariants(orig: Traversable[Int], ds: AnyRef) = ds match { + case pm: ParHashSet[t] => + // for an example of how not to write code proceed below + val invs = pm.brokenInvariants + + val containsall = (for (elem <- orig) yield { + if (pm.asInstanceOf[ParHashSet[Int]](elem) == true) 
true + else { + println("Does not contain original element: " + elem) + println(pm.hashTableContents.table.find(_ == elem)) + println(pm.hashTableContents.table.indexOf(elem)) + false + } + }).foldLeft(true)(_ && _) + + + if (invs.isEmpty) { + if (!containsall) println(pm.debugInformation) + containsall + } else { + println("Invariants broken:\n" + invs.mkString("\n")) + false + } + case _ => true + } + +} + + + + + + + + + + diff --git a/test/files/scalacheck/parallel-collections/ParallelIterableCheck.scala b/test/files/scalacheck/parallel-collections/ParallelIterableCheck.scala index 0acdb2b0a7..d2d6119997 100644 --- a/test/files/scalacheck/parallel-collections/ParallelIterableCheck.scala +++ b/test/files/scalacheck/parallel-collections/ParallelIterableCheck.scala @@ -146,7 +146,10 @@ abstract class ParallelIterableCheck[T](collName: String) extends Properties(col println("mapped to: ") println(ms) println(mp) - println("valid: " + !checkDataStructureInvariants(ms, mp)) + println("sizes: ") + println(ms.size) + println(mp.size) + println("valid: " + checkDataStructureInvariants(ms, mp)) } ("op index: " + ind) |: (areEqual(ms, mp) && checkDataStructureInvariants(ms, mp)) } diff --git a/test/files/scalacheck/parallel-collections/pc.scala b/test/files/scalacheck/parallel-collections/pc.scala index 24e7d09918..4e2d5611f0 100644 --- a/test/files/scalacheck/parallel-collections/pc.scala +++ b/test/files/scalacheck/parallel-collections/pc.scala @@ -11,20 +11,25 @@ class ParCollProperties extends Properties("Parallel collections") { /* Collections */ // parallel arrays - //include(mutable.IntParallelArrayCheck) + // include(mutable.IntParallelArrayCheck) // parallel ranges - //include(immutable.ParallelRangeCheck) + // include(immutable.ParallelRangeCheck) // parallel immutable hash maps (tries) - //include(immutable.IntIntParallelHashMapCheck) + // include(immutable.IntIntParallelHashMapCheck) // parallel immutable hash sets (tries) - 
//include(immutable.IntParallelHashSetCheck) + // include(immutable.IntParallelHashSetCheck) // parallel mutable hash maps (tables) // include(mutable.IntIntParallelHashMapCheck) + // parallel mutable hash sets (tables) + include(mutable.IntParallelHashSetCheck) + + // parallel vectors + /* Views */ // parallel array views @@ -32,6 +37,8 @@ class ParCollProperties extends Properties("Parallel collections") { // parallel immutable hash map views // parallel mutable hash map views + + // parallel vector views } @@ -45,7 +52,7 @@ object Test { workers = 1, minSize = 0, maxSize = 4000, - minSuccessfulTests = 100 + minSuccessfulTests = 250 ), pc ) |