From e7ca142b45255f6b41582c25fe590a664d5fc8b9 Mon Sep 17 00:00:00 2001
From: Aleksandar Pokopec
Date: Wed, 20 Oct 2010 20:20:00 +0000
Subject: Some exception handling fixes in parallel colle...

Some exception handling fixes in parallel collections.

Fixed some regressions. Fixed some tests.

No review.
---
 .../parallel-collections/PairOperators.scala       | 97 ++++++++++++++++++++++
 .../parallel-collections/PairValues.scala          | 28 +++++++
 .../parallel-collections/ParallelArrayCheck.scala  |  2 +-
 .../ParallelHashMapCheck.scala                     | 66 +++++++++++++++
 .../ParallelIterableCheck.scala                    | 17 +++-
 .../files/scalacheck/parallel-collections/pc.scala | 29 ++++++-
 6 files changed, 232 insertions(+), 7 deletions(-)
 create mode 100644 test/files/scalacheck/parallel-collections/PairOperators.scala
 create mode 100644 test/files/scalacheck/parallel-collections/PairValues.scala
 create mode 100644 test/files/scalacheck/parallel-collections/ParallelHashMapCheck.scala
(limited to 'test/files/scalacheck')

diff --git a/test/files/scalacheck/parallel-collections/PairOperators.scala b/test/files/scalacheck/parallel-collections/PairOperators.scala
new file mode 100644
index 0000000000..48cbd136e5
--- /dev/null
+++ b/test/files/scalacheck/parallel-collections/PairOperators.scala
@@ -0,0 +1,97 @@
+package scala.collection.parallel.ops
+
+
+import scala.collection.parallel._
+
+
+trait PairOperators[K, V] extends Operators[(K, V)] {
+  def koperators: Operators[K]
+  def voperators: Operators[V]
+
+  private def zipPredicates(kps: List[K => Boolean], vps: List[V => Boolean]): List[((K, V)) => Boolean] = for {
+    (kp, vp) <- koperators.countPredicates zip voperators.countPredicates
+  } yield new Function1[(K, V), Boolean] {
+    def apply(kv: (K, V)) = kp(kv._1) && vp(kv._2)
+  }
+
+  /* operators */
+
+  def reduceOperators = for {
+    (kop, vop) <- koperators.reduceOperators zip voperators.reduceOperators
+  } yield new Function2[(K, V), (K, V), (K, V)] {
+    def apply(kv1: (K, V), kv2: (K, V)) = (kop(kv1._1, kv2._1), vop(kv1._2, kv2._2))
+  }
+
+  def countPredicates = zipPredicates(koperators.countPredicates, voperators.countPredicates)
+
+  def forallPredicates = zipPredicates(koperators.forallPredicates, voperators.forallPredicates)
+
+  def existsPredicates = zipPredicates(koperators.existsPredicates, voperators.existsPredicates)
+
+  def findPredicates = zipPredicates(koperators.findPredicates, voperators.findPredicates)
+
+  def mapFunctions = for {
+    (km, vm) <- koperators.mapFunctions zip voperators.mapFunctions
+  } yield new Function1[(K, V), (K, V)] {
+    def apply(kv: (K, V)) = (km(kv._1), vm(kv._2))
+  }
+
+  def partialMapFunctions = for {
+    (kpm, vpm) <- koperators.partialMapFunctions zip voperators.partialMapFunctions
+  } yield new PartialFunction[(K, V), (K, V)] {
+    def isDefinedAt(kv: (K, V)) = kpm.isDefinedAt(kv._1) && vpm.isDefinedAt(kv._2)
+    def apply(kv: (K, V)) = (kpm(kv._1), vpm(kv._2))
+  }
+
+  def flatMapFunctions = for {
+    (kfm, vfm) <- koperators.flatMapFunctions zip voperators.flatMapFunctions
+  } yield new Function1[(K, V), Traversable[(K, V)]] {
+    def apply(kv: (K, V)) = kfm(kv._1).toIterable zip vfm(kv._2).toIterable
+  }
+
+  def filterPredicates = zipPredicates(koperators.filterPredicates, voperators.existsPredicates)
+
+  def filterNotPredicates = filterPredicates
+
+  def partitionPredicates = filterPredicates
+
+  def takeWhilePredicates = zipPredicates(koperators.takeWhilePredicates, voperators.takeWhilePredicates)
+
+  def dropWhilePredicates = takeWhilePredicates
+
+  def spanPredicates = takeWhilePredicates
+
+  def foldArguments = for {
+    ((kinit, kop), (vinit, vop)) <- koperators.foldArguments zip voperators.foldArguments
+  } yield ((kinit, vinit), new Function2[(K, V), (K, V), (K, V)] {
+    def apply(kv1: (K, V), kv2: (K, V)) = (kop(kv1._1, kv2._1), vop(kv1._2, kv2._2))
+  })
+
+  def addAllTraversables = for {
+    (kt, vt) <- koperators.addAllTraversables zip voperators.addAllTraversables
+  } yield kt.toIterable zip vt.toIterable
+
+  def newArray(sz: Int) = new Array[(K, V)](sz)
+
+}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/test/files/scalacheck/parallel-collections/PairValues.scala b/test/files/scalacheck/parallel-collections/PairValues.scala
new file mode 100644
index 0000000000..864dad2425
--- /dev/null
+++ b/test/files/scalacheck/parallel-collections/PairValues.scala
@@ -0,0 +1,28 @@
+package scala.collection.parallel.ops
+
+
+
+
+
+import org.scalacheck._
+import org.scalacheck.Gen
+import org.scalacheck.Gen._
+import org.scalacheck.Prop._
+import org.scalacheck.Properties
+import org.scalacheck.Arbitrary._
+
+
+
+
+trait PairValues[K, V] {
+  def kvalues: Seq[Gen[K]]
+  def vvalues: Seq[Gen[V]]
+
+  def values = for {
+    kg <- kvalues
+    vg <- vvalues
+  } yield for {
+    k <- kg
+    v <- vg
+  } yield (k, v)
+}
diff --git a/test/files/scalacheck/parallel-collections/ParallelArrayCheck.scala b/test/files/scalacheck/parallel-collections/ParallelArrayCheck.scala
index 9169890e98..394dc6b370 100644
--- a/test/files/scalacheck/parallel-collections/ParallelArrayCheck.scala
+++ b/test/files/scalacheck/parallel-collections/ParallelArrayCheck.scala
@@ -14,7 +14,7 @@ import scala.collection._
 import scala.collection.parallel.ops._
 
 
-abstract class ParallelArrayCheck[T](tp: String) extends ParallelSeqCheck[T]("ParallelArray[" + tp + "]") {
+abstract class ParallelArrayCheck[T](tp: String) extends ParallelSeqCheck[T]("ParArray[" + tp + "]") {
   ForkJoinTasks.defaultForkJoinPool.setMaximumPoolSize(Runtime.getRuntime.availableProcessors * 2)
   ForkJoinTasks.defaultForkJoinPool.setParallelism(Runtime.getRuntime.availableProcessors * 2)
 
diff --git a/test/files/scalacheck/parallel-collections/ParallelHashMapCheck.scala b/test/files/scalacheck/parallel-collections/ParallelHashMapCheck.scala
new file mode 100644
index 0000000000..1224ec8d4d
--- /dev/null
+++ b/test/files/scalacheck/parallel-collections/ParallelHashMapCheck.scala
@@ -0,0 +1,66 @@
+package scala.collection.parallel
+package immutable
+
+
+
+import org.scalacheck._
+import org.scalacheck.Gen
+import org.scalacheck.Gen._
+import org.scalacheck.Prop._
+import org.scalacheck.Properties
+import org.scalacheck.Arbitrary._
+
+import scala.collection._
+import scala.collection.parallel.ops._
+
+
+abstract class ParallelHashMapCheck[K, V](tp: String) extends ParallelIterableCheck[(K, V)]("immutable.ParHashMap[" + tp + "]") {
+  ForkJoinTasks.defaultForkJoinPool.setMaximumPoolSize(Runtime.getRuntime.availableProcessors * 2)
+  ForkJoinTasks.defaultForkJoinPool.setParallelism(Runtime.getRuntime.availableProcessors * 2)
+
+  type CollType = ParHashMap[K, V]
+
+  def isCheckingViews = false
+
+  def instances(vals: Seq[Gen[(K, V)]]): Gen[Iterable[(K, V)]] = sized { sz =>
+    var hm = new immutable.HashMap[K, V]
+    val gen = vals(rnd.nextInt(vals.size))
+    for (i <- 0 until sz) hm += sample(gen)
+    hm
+  }
+
+  def fromTraversable(t: Traversable[(K, V)]) = {
+    var phm = new ParHashMap[K, V]
+    var i = 0
+    for (kv <- t.toList) {
+      phm += kv
+      i += 1
+    }
+    phm
+  }
+
+}
+
+
+object IntIntParallelHashMapCheck extends ParallelHashMapCheck[Int, Int]("Int, Int")
+with PairOperators[Int, Int]
+with PairValues[Int, Int]
+{
+  def intvalues = new IntValues {}
+  def kvalues = intvalues.values
+  def vvalues = intvalues.values
+
+  val intoperators = new IntOperators {}
+  def voperators = intoperators
+  def koperators = intoperators
+}
+
+
+
+
+
+
+
+
+
+
diff --git a/test/files/scalacheck/parallel-collections/ParallelIterableCheck.scala b/test/files/scalacheck/parallel-collections/ParallelIterableCheck.scala
index fd323ef82c..bc08947af4 100644
--- a/test/files/scalacheck/parallel-collections/ParallelIterableCheck.scala
+++ b/test/files/scalacheck/parallel-collections/ParallelIterableCheck.scala
@@ -95,8 +95,18 @@ abstract class ParallelIterableCheck[T](collName: String) extends Properties(col
   }
 
   property("mappings must be equal") = forAll(collectionPairs) { case (t, coll) =>
-    val results = for ((f, ind) <- mapFunctions.zipWithIndex)
-      yield ("op index: " + ind) |: t.map(f) == coll.map(f)
+    val results = for ((f, ind) <- mapFunctions.zipWithIndex) yield {
+      val ms = t.map(f)
+      val mp = coll.map(f)
+      if (ms != mp) {
+        println(t)
+        println(coll)
+        println("mapped to: ")
+        println(ms)
+        println(mp)
+      }
+      ("op index: " + ind) |: ms == mp
+    }
     results.reduceLeft(_ && _)
   }
 
@@ -107,7 +117,7 @@ abstract class ParallelIterableCheck[T](collName: String) extends Properties(col
       if (ps != pp) {
         println(t)
         println(coll)
-        println("partially mapped to: ")
+        println("collected to: ")
         println(ps)
         println(pp)
       }
@@ -166,7 +176,6 @@ abstract class ParallelIterableCheck[T](collName: String) extends Properties(col
         println(tsl)
         println(collsl)
         println("as list: " + collsl.toList)
-        println(tsl.asInstanceOf[Seq[T]].sameElements(collsl))
         println(collsl.iterator.hasNext)
         println(collsl.iterator.next)
         println(collsl.iterator.hasNext)
diff --git a/test/files/scalacheck/parallel-collections/pc.scala b/test/files/scalacheck/parallel-collections/pc.scala
index f77c6db435..04b7168286 100644
--- a/test/files/scalacheck/parallel-collections/pc.scala
+++ b/test/files/scalacheck/parallel-collections/pc.scala
@@ -3,21 +3,46 @@ import org.scalacheck._
 
 
+
 import scala.collection.parallel._
 
 
 class ParCollProperties extends Properties("Parallel collections") {
+  /* Collections */
+
   // parallel arrays
   //include(mutable.IntParallelArrayCheck)
 
   // parallel ranges
   //include(immutable.ParallelRangeCheck)
+
+  // parallel immutable hash maps (tries)
+  include(immutable.IntIntParallelHashMapCheck)
+
+  // parallel immutable hash sets (tries)
+
+  // parallel mutable hash maps (tables)
+
+
+  /* Views */
+
+  // parallel array views
+
+  // parallel immutable hash map views
+
+  // parallel mutable hash map views
 }
 
 
 object Test {
   def main(args: Array[String]) {
-    val results = org.scalacheck.Test.checkProperties(new ParCollProperties)
-    if (!results.forall(_._2.passed)) println("Test results: " + results.mkString("\n"))
+    val pc = new ParCollProperties
+    org.scalacheck.Test.checkProperties(
+      org.scalacheck.Test.Params(
+        rng = new java.util.Random(5134L),
+        testCallback = new ConsoleReporter(0)
+      ),
+      pc
+    )
   }
 }
 
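A usage sketch (not part of the commit): the new PairOperators and PairValues
mix-ins let a key/value collection check reuse the existing single-element test
suites. A concrete check only supplies the key-side and value-side operators and
generators; every pair-level operator is then derived component-wise (the key
function is applied to ._1, the value function to ._2), which is essentially how
IntIntParallelHashMapCheck above is wired. The object name IntIntPairSuite below
is hypothetical; IntOperators and IntValues are the existing Int suites that the
patch already uses.

  package scala.collection.parallel.ops

  // Hypothetical wiring of the two mix-ins for (Int, Int) elements.
  object IntIntPairSuite extends PairOperators[Int, Int] with PairValues[Int, Int] {
    // the existing single-element suites from the scalacheck test sources
    val intoperators = new IntOperators {}
    val intvalues = new IntValues {}

    def koperators = intoperators   // operators applied to the key (._1)
    def voperators = intoperators   // operators applied to the value (._2)
    def kvalues = intvalues.values  // generators for the keys
    def vvalues = intvalues.values  // generators for the values
  }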