path: root/test/scalacheck/Ctrie.scala
author    Seth Tisue <seth@tisue.net>  2017-01-23 14:35:47 -0800
committer Seth Tisue <seth@tisue.net>  2017-01-27 09:29:53 -0800
commit    4386b948a0b597cc78e4f3b22b51e0588a5b6d60 (patch)
tree      eca8cea41d110d8b14d27a83ae06a90e966f9621 /test/scalacheck/Ctrie.scala
parent    27c10db549e6f43571663d0162b58fc04fbb34bf (diff)
download  scala-4386b948a0b597cc78e4f3b22b51e0588a5b6d60.tar.gz
          scala-4386b948a0b597cc78e4f3b22b51e0588a5b6d60.tar.bz2
          scala-4386b948a0b597cc78e4f3b22b51e0588a5b6d60.zip
run ScalaCheck tests directly, not through partest
ScalaCheck ever being under partest in the first place is ancient history, from back in the Ant build days (shudder). ScalaCheck support was removed in partest 1.1.0, which we already upgraded to in a recent commit. This also upgrades ScalaCheck from 1.11.6 to 1.13.4, since we might as well. No source changes were necessary.
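With partest out of the picture, the suites under test/scalacheck build as an ordinary ScalaCheck test project; a plausible direct invocation from the sbt build (the exact task name is an assumption, it is not spelled out on this page) is:

    sbt scalacheck/test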
Diffstat (limited to 'test/scalacheck/Ctrie.scala')
-rw-r--r--  test/scalacheck/Ctrie.scala  215
1 file changed, 215 insertions(+), 0 deletions(-)
diff --git a/test/scalacheck/Ctrie.scala b/test/scalacheck/Ctrie.scala
new file mode 100644
index 0000000000..8a24079ad3
--- /dev/null
+++ b/test/scalacheck/Ctrie.scala
@@ -0,0 +1,215 @@
+import org.scalacheck._
+import Prop._
+import org.scalacheck.Gen._
+import collection._
+import collection.concurrent.TrieMap
+import scala.language.reflectiveCalls // spawn's structural result type triggers a feature warning otherwise
+
+
+
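+/** Key wrapper whose hashCode is the wrapped Int itself, keeping hash
+ *  placement in the trie deterministic (a scrambling multiplier is left
+ *  disabled below).
+ */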
+case class Wrap(i: Int) {
+ override def hashCode = i // * 0x9e3775cd
+}
+
+
+/** A check mainly oriented towards checking snapshot correctness.
+ */
+object CtrieTest extends Properties("concurrent.TrieMap") {
+
+ /* generators */
+
+ val sizes = choose(0, 200000)
+
+ val threadCounts = choose(2, 16)
+
+ val threadCountsAndSizes = for {
+ p <- threadCounts
+ sz <- sizes
+ } yield (p, sz)
+
+
+ /* helpers */
+
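+ /** Runs `body` on `totalThreads` freshly started threads, passing each
+  *  thread its index, then joins them all in order and collects the results.
+  */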
+ def inParallel[T](totalThreads: Int)(body: Int => T): Seq[T] = {
+ val threads = for (idx <- 0 until totalThreads) yield new Thread {
+ setName("ParThread-" + idx)
+ private var res: T = _
+ override def run() {
+ res = body(idx)
+ }
+ def result = {
+ this.join()
+ res
+ }
+ }
+
+ threads foreach (_.start())
+ threads map (_.result)
+ }
+
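+ /** Starts `body` on a new thread and returns a handle whose `get` joins
+  *  the thread and yields the computed value.
+  */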
+ def spawn[T](body: =>T): { def get: T } = {
+ val t = new Thread {
+ setName("SpawnThread")
+ private var res: T = _
+ override def run() {
+ res = body
+ }
+ def result = res
+ }
+ t.start()
+ new {
+ def get: T = {
+ t.join()
+ t.result
+ }
+ }
+ }
+
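+ /** The contiguous slice of 0 until totalElems owned by thread threadIdx:
+  *  slice sizes differ by at most one, with the first
+  *  totalElems % totalThreads threads taking the extra element.
+  */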
+ def elementRange(threadIdx: Int, totalThreads: Int, totalElems: Int): Range = {
+ val sz = totalElems
+ val idx = threadIdx
+ val p = totalThreads
+ val start = (sz / p) * idx + math.min(idx, sz % p)
+ val elems = (sz / p) + (if (idx < sz % p) 1 else 0)
+ val end = start + elems
+ (start until end)
+ }
+
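+ /** True iff current extends last: it is at least as large and still holds
+  *  every binding of last.
+  */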
+ def hasGrown[K, V](last: Map[K, V], current: Map[K, V]) = {
+ (last.size <= current.size) && {
+ last forall {
+ case (k, v) => current.get(k) == Some(v)
+ }
+ }
+ }
+
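+ /** Buffered stand-in for console logging, so debug output from racing
+  *  threads can be dumped in one piece via flush().
+  */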
+ object err {
+ var buffer = new StringBuilder
+ def println(a: AnyRef) = buffer.append(a.toString).append("\n")
+ def clear() = buffer.clear()
+ def flush() = {
+ Console.out.println(buffer)
+ clear()
+ }
+ }
+
+
+ /* properties */
+
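+ // Writer threads fill disjoint key ranges while a checker thread takes
+ // successive read-only snapshots, verifying that each snapshot only ever
+ // extends the previous one until all sz elements are present.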
+ property("concurrent growing snapshots") = forAll(threadCounts, sizes) {
+ (numThreads, numElems) =>
+ val p = 3 // thread count pinned for reproducibility; the generated numThreads is unused
+ val sz = 102 // element count pinned for reproducibility; the generated numElems is unused
+ val ct = new TrieMap[Wrap, Int]
+
+ // checker
+ val checker = spawn {
+ def check(last: Map[Wrap, Int], iterationsLeft: Int): Boolean = {
+ val current = ct.readOnlySnapshot()
+ if (!hasGrown(last, current)) false
+ else if (current.size >= sz) true
+ else if (iterationsLeft < 0) false
+ else check(current, iterationsLeft - 1)
+ }
+ check(ct.readOnlySnapshot(), 500)
+ }
+
+ // fillers
+ inParallel(p) {
+ idx =>
+ elementRange(idx, p, sz) foreach (i => ct.update(Wrap(i), i))
+ }
+
+ // wait for checker to finish
+ val growing = checker.get
+
+ val ok = growing && ((0 until sz) forall {
+ case i => ct.get(Wrap(i)) == Some(i)
+ })
+
+ ok
+ }
+
+ property("update") = forAll(sizes) {
+ (n: Int) =>
+ val ct = new TrieMap[Int, Int]
+ for (i <- 0 until n) ct(i) = i
+ (0 until n) forall {
+ case i => ct(i) == i
+ }
+ }
+
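+ // p threads concurrently write disjoint key ranges into one map;
+ // afterwards every key must be bound to its own value.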
+ property("concurrent update") = forAll(threadCountsAndSizes) {
+ case (p, sz) =>
+ val ct = new TrieMap[Wrap, Int]
+
+ inParallel(p) {
+ idx =>
+ for (i <- elementRange(idx, p, sz)) ct(Wrap(i)) = i
+ }
+
+ (0 until sz) forall {
+ case i => ct(Wrap(i)) == i
+ }
+ }
+
+
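+ // Pre-fill the map, then remove disjoint key ranges from p threads;
+ // afterwards no key may remain.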
+ property("concurrent remove") = forAll(threadCounts, sizes) {
+ (p, sz) =>
+ val ct = new TrieMap[Wrap, Int]
+ for (i <- 0 until sz) ct(Wrap(i)) = i
+
+ inParallel(p) {
+ idx =>
+ for (i <- elementRange(idx, p, sz)) ct.remove(Wrap(i))
+ }
+
+ (0 until sz) forall {
+ case i => ct.get(Wrap(i)) == None
+ }
+ }
+
+
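+ // Threads putIfAbsent over disjoint ranges, so no call should ever find an
+ // existing binding: find must come up empty on every thread.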
+ property("concurrent putIfAbsent") = forAll(threadCounts, sizes) {
+ (p, sz) =>
+ val ct = new TrieMap[Wrap, Int]
+
+ val results = inParallel(p) {
+ idx =>
+ elementRange(idx, p, sz) find (i => ct.putIfAbsent(Wrap(i), i) != None)
+ }
+
+ (results forall (_ == None)) && ((0 until sz) forall {
+ case i => ct.get(Wrap(i)) == Some(i)
+ })
+ }
+
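+ // All p threads race getOrElseUpdate over the same sz keys with per-thread
+ // values; the test relies on TrieMap's getOrElseUpdate being atomic, so
+ // exactly one insert wins per key and sz inserts are counted in total.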
+ property("concurrent getOrElseUpdate") = forAll(threadCounts, sizes) {
+ (p, sz) =>
+ val totalInserts = new java.util.concurrent.atomic.AtomicInteger
+ val ct = new TrieMap[Wrap, String]
+
+ val results = inParallel(p) {
+ idx =>
+ (0 until sz) foreach {
+ i =>
+ val v = ct.getOrElseUpdate(Wrap(i), idx + ":" + i)
+ if (v == idx + ":" + i) totalInserts.incrementAndGet()
+ }
+ }
+
+ (totalInserts.get == sz) && ((0 until sz) forall {
+ case i => ct(Wrap(i)).split(":")(1).toInt == i
+ })
+ }
+
+}
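Since CtrieTest is a plain org.scalacheck.Properties, the suite can also be exercised standalone. A minimal sketch, assuming scalacheck 1.13.4 and the compiled test classes on the classpath (Properties ships its own main method, so no separate runner is needed):

    $ scala -cp scalacheck-1.13.4.jar:. CtrieTest

or, from code, through ScalaCheck's plain API:

    org.scalacheck.Test.checkProperties(
      org.scalacheck.Test.Parameters.default, CtrieTest)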