path: root/test/scalacheck
Diffstat (limited to 'test/scalacheck')
-rw-r--r--  test/scalacheck/CheckCollections.scala | 52
-rw-r--r--  test/scalacheck/CheckEither.scala | 243
-rw-r--r--  test/scalacheck/Ctrie.scala | 215
-rw-r--r--  test/scalacheck/MutablePriorityQueue.scala | 102
-rw-r--r--  test/scalacheck/ReflectionExtractors.scala | 52
-rw-r--r--  test/scalacheck/Unrolled.scala | 26
-rw-r--r--  test/scalacheck/array-new.scala | 37
-rw-r--r--  test/scalacheck/array-old.scala | 37
-rw-r--r--  test/scalacheck/concurrent-map.scala | 70
-rw-r--r--  test/scalacheck/duration.scala | 72
-rw-r--r--  test/scalacheck/list.scala | 21
-rw-r--r--  test/scalacheck/nan-ordering.scala | 130
-rw-r--r--  test/scalacheck/primitive-eqeq.scala | 37
-rw-r--r--  test/scalacheck/range.scala | 304
-rw-r--r--  test/scalacheck/redblacktree.scala | 247
-rw-r--r--  test/scalacheck/scala/collection/mutable/MutableTreeMap.scala | 337
-rw-r--r--  test/scalacheck/scala/collection/mutable/MutableTreeSet.scala | 209
-rw-r--r--  test/scalacheck/scala/collection/parallel/IntOperators.scala | 110
-rw-r--r--  test/scalacheck/scala/collection/parallel/IntValues.scala | 29
-rw-r--r--  test/scalacheck/scala/collection/parallel/Operators.scala | 36
-rw-r--r--  test/scalacheck/scala/collection/parallel/PairOperators.scala | 101
-rw-r--r--  test/scalacheck/scala/collection/parallel/PairValues.scala | 28
-rw-r--r--  test/scalacheck/scala/collection/parallel/ParallelHashTrieCheck.scala | 144
-rw-r--r--  test/scalacheck/scala/collection/parallel/ParallelIterableCheck.scala | 477
-rw-r--r--  test/scalacheck/scala/collection/parallel/ParallelMapCheck1.scala | 67
-rw-r--r--  test/scalacheck/scala/collection/parallel/ParallelRangeCheck.scala | 75
-rw-r--r--  test/scalacheck/scala/collection/parallel/ParallelSeqCheck.scala | 300
-rw-r--r--  test/scalacheck/scala/collection/parallel/ParallelSetCheck.scala | 62
-rw-r--r--  test/scalacheck/scala/collection/parallel/immutable/ParallelVectorCheck.scala | 67
-rw-r--r--  test/scalacheck/scala/collection/parallel/mutable/ParallelArrayCheck.scala | 62
-rw-r--r--  test/scalacheck/scala/collection/parallel/mutable/ParallelArrayTest.scala | 112
-rw-r--r--  test/scalacheck/scala/collection/parallel/mutable/ParallelArrayViewCheck.scala | 122
-rw-r--r--  test/scalacheck/scala/collection/parallel/mutable/ParallelCtrieCheck.scala | 101
-rw-r--r--  test/scalacheck/scala/collection/parallel/mutable/ParallelHashMapCheck.scala | 100
-rw-r--r--  test/scalacheck/scala/collection/parallel/mutable/ParallelHashSetCheck.scala | 97
-rw-r--r--  test/scalacheck/scala/pc.scala | 61
-rw-r--r--  test/scalacheck/scala/reflect/quasiquotes/ArbitraryTreesAndNames.scala | 297
-rw-r--r--  test/scalacheck/scala/reflect/quasiquotes/DefinitionConstructionProps.scala | 455
-rw-r--r--  test/scalacheck/scala/reflect/quasiquotes/DefinitionDeconstructionProps.scala | 292
-rw-r--r--  test/scalacheck/scala/reflect/quasiquotes/DeprecationProps.scala | 54
-rw-r--r--  test/scalacheck/scala/reflect/quasiquotes/ErrorProps.scala | 215
-rw-r--r--  test/scalacheck/scala/reflect/quasiquotes/ForProps.scala | 72
-rw-r--r--  test/scalacheck/scala/reflect/quasiquotes/LiftableProps.scala | 176
-rw-r--r--  test/scalacheck/scala/reflect/quasiquotes/PatternConstructionProps.scala | 38
-rw-r--r--  test/scalacheck/scala/reflect/quasiquotes/PatternDeconstructionProps.scala | 46
-rw-r--r--  test/scalacheck/scala/reflect/quasiquotes/QuasiquoteProperties.scala | 122
-rw-r--r--  test/scalacheck/scala/reflect/quasiquotes/RuntimeErrorProps.scala | 77
-rw-r--r--  test/scalacheck/scala/reflect/quasiquotes/TermConstructionProps.scala | 327
-rw-r--r--  test/scalacheck/scala/reflect/quasiquotes/TermDeconstructionProps.scala | 258
-rw-r--r--  test/scalacheck/scala/reflect/quasiquotes/TypeConstructionProps.scala | 44
-rw-r--r--  test/scalacheck/scala/reflect/quasiquotes/TypeDeconstructionProps.scala | 80
-rw-r--r--  test/scalacheck/scala/reflect/quasiquotes/TypecheckedProps.scala | 217
-rw-r--r--  test/scalacheck/scala/reflect/quasiquotes/UnliftableProps.scala | 168
-rw-r--r--  test/scalacheck/scala/tools/nsc/scaladoc/CommentFactoryTest.scala | 189
-rw-r--r--  test/scalacheck/scala/tools/nsc/scaladoc/HtmlFactoryTest.scala | 786
-rw-r--r--  test/scalacheck/scala/tools/nsc/scaladoc/IndexScriptTest.scala | 57
-rw-r--r--  test/scalacheck/scan.scala | 16
-rw-r--r--  test/scalacheck/substringTests.scala | 19
-rw-r--r--  test/scalacheck/t2460.scala | 27
-rw-r--r--  test/scalacheck/t4147.scala | 68
-rw-r--r--  test/scalacheck/treemap.scala | 154
-rw-r--r--  test/scalacheck/treeset.scala | 155
62 files changed, 8751 insertions, 0 deletions
diff --git a/test/scalacheck/CheckCollections.scala b/test/scalacheck/CheckCollections.scala
new file mode 100644
index 0000000000..d94b71d150
--- /dev/null
+++ b/test/scalacheck/CheckCollections.scala
@@ -0,0 +1,52 @@
+import org.scalacheck.Properties
+import org.scalacheck.Prop._
+
+import scala.reflect.internal.util.Collections._
+
+object CheckCollectionsTest extends Properties("reflect.internal.util.Collections") {
+ def map2ConserveOld[A <: AnyRef, B](xs: List[A], ys: List[B])(f: (A, B) => A): List[A] =
+ if (xs.isEmpty || ys.isEmpty) xs
+ else {
+ val x1 = f(xs.head, ys.head)
+ val xs1 = map2Conserve(xs.tail, ys.tail)(f)
+ if ((x1 eq xs.head) && (xs1 eq xs.tail)) xs
+ else x1 :: xs1
+ }
+
+ val testfun: (String, Int) => String = { case(x, y) =>
+ x.toLowerCase + y.toString
+ }
+ val testid: (String, Int) => String = { case (x, y) => x }
+
+ val prop1_map2Conserve = forAll { (xs: List[String], ys: List[Int]) =>
+ val res = map2Conserve(xs, ys)(testid)
+ res eq xs
+ }
+
+ val prop2_map2Conserve = forAll { (xs: List[String], ys: List[Int]) =>
+ map2Conserve(xs, ys)(testid) == map2ConserveOld(xs, ys)(testid) &&
+ map2Conserve(xs, ys)(testfun) == map2ConserveOld(xs, ys)(testfun)
+ }
+
+ def checkStackOverflow() {
+ var xs: List[String] = Nil
+ var ys: List[Int] = Nil
+ for (i <- 0 until 250000) {
+ xs = "X" :: xs
+ ys = 1 :: ys
+ }
+ map2Conserve(xs, ys){ case(x, y) => x.toLowerCase + y.toString }
+ }
+
+
+ val tests = List(
+ ("map2Conserve(identity)", prop1_map2Conserve),
+ ("map2Conserve == old impl", prop2_map2Conserve)
+ )
+
+ checkStackOverflow()
+
+ for {
+ (label, prop) <- tests
+ } property(label) = prop
+}
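A minimal sketch (not part of the patch) of the contract that prop1_map2Conserve and prop2_map2Conserve pin down, shown on concrete lists; it assumes scala-reflect is on the classpath, just as the test itself does:

    import scala.reflect.internal.util.Collections._

    val xs = List("A", "B")
    val ys = List(1, 2)

    // If the function returns every original element, the very same list
    // instance comes back (reference equality, not just ==).
    assert(map2Conserve(xs, ys)((x, _) => x) eq xs)

    // If any element changes, a fresh list is built.
    assert(map2Conserve(xs, ys)((x, y) => x.toLowerCase + y) == List("a1", "b2"))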
diff --git a/test/scalacheck/CheckEither.scala b/test/scalacheck/CheckEither.scala
new file mode 100644
index 0000000000..48b90c1d9b
--- /dev/null
+++ b/test/scalacheck/CheckEither.scala
@@ -0,0 +1,243 @@
+import org.scalacheck.{ Arbitrary, Prop, Properties }
+import org.scalacheck.Arbitrary.{arbitrary, arbThrowable}
+import org.scalacheck.Gen.oneOf
+import org.scalacheck.Prop._
+import org.scalacheck.Test.check
+import Function.tupled
+
+object CheckEitherTest extends Properties("Either") {
+ implicit def arbitraryEither[X, Y](implicit xa: Arbitrary[X], ya: Arbitrary[Y]): Arbitrary[Either[X, Y]] =
+ Arbitrary[Either[X, Y]](oneOf(arbitrary[X].map(Left(_)), arbitrary[Y].map(Right(_))))
+
+ val prop_either1 = forAll((n: Int) => Left(n).fold(x => x, b => sys.error("fail")) == n)
+
+ val prop_either2 = forAll((n: Int) => Right(n).fold(a => sys.error("fail"), x => x) == n)
+
+ val prop_swap = forAll((e: Either[Int, Int]) => e match {
+ case Left(a) => e.swap.right.get == a
+ case Right(b) => e.swap.left.get == b
+ })
+
+ val prop_isLeftRight = forAll((e: Either[Int, Int]) => e.isLeft != e.isRight)
+
+ object CheckLeftProjection {
+ val prop_value = forAll((n: Int) => Left(n).left.get == n)
+
+ val prop_getOrElse = forAll((e: Either[Int, Int], or: Int) => e.left.getOrElse(or) == (e match {
+ case Left(a) => a
+ case Right(_) => or
+ }))
+
+ val prop_forall = forAll((e: Either[Int, Int]) =>
+ e.left.forall(_ % 2 == 0) == (e.isRight || e.left.get % 2 == 0))
+
+ val prop_exists = forAll((e: Either[Int, Int]) =>
+ e.left.exists(_ % 2 == 0) == (e.isLeft && e.left.get % 2 == 0))
+
+ val prop_flatMapLeftIdentity = forAll((e: Either[Int, Int], n: Int, s: String) => {
+ def f(x: Int) = if(x % 2 == 0) Left(s) else Right(s)
+ Left(n).left.flatMap(f(_)) == f(n)})
+
+ val prop_flatMapRightIdentity = forAll((e: Either[Int, Int]) => e.left.flatMap(Left(_)) == e)
+
+ val prop_flatMapComposition = forAll((e: Either[Int, Int]) => {
+ def f(x: Int) = if(x % 2 == 0) Left(x) else Right(x)
+ def g(x: Int) = if(x % 7 == 0) Right(x) else Left(x)
+ e.left.flatMap(f(_)).left.flatMap(g(_)) == e.left.flatMap(f(_).left.flatMap(g(_)))})
+
+ val prop_mapIdentity = forAll((e: Either[Int, Int]) => e.left.map(x => x) == e)
+
+ val prop_mapComposition = forAll((e: Either[String, Int]) => {
+ def f(s: String) = s.toLowerCase
+ def g(s: String) = s.reverse
+ e.left.map(x => f(g(x))) == e.left.map(x => g(x)).left.map(f(_))})
+
+ val prop_filter = forAll((e: Either[Int, Int], x: Int) => e.left.filter(_ % 2 == 0) ==
+ (if(e.isRight || e.left.get % 2 != 0) None else Some(e)))
+
+ val prop_seq = forAll((e: Either[Int, Int]) => e.left.toSeq == (e match {
+ case Left(a) => Seq(a)
+ case Right(_) => Seq.empty
+ }))
+
+ val prop_option = forAll((e: Either[Int, Int]) => e.left.toOption == (e match {
+ case Left(a) => Some(a)
+ case Right(_) => None
+ }))
+ }
+
+ object CheckRightProjection {
+ val prop_value = forAll((n: Int) => Right(n).right.get == n)
+
+ val prop_getOrElse = forAll((e: Either[Int, Int], or: Int) => e.right.getOrElse(or) == (e match {
+ case Left(_) => or
+ case Right(b) => b
+ }))
+
+ val prop_forall = forAll((e: Either[Int, Int]) =>
+ e.right.forall(_ % 2 == 0) == (e.isLeft || e.right.get % 2 == 0))
+
+ val prop_exists = forAll((e: Either[Int, Int]) =>
+ e.right.exists(_ % 2 == 0) == (e.isRight && e.right.get % 2 == 0))
+
+ val prop_flatMapLeftIdentity = forAll((e: Either[Int, Int], n: Int, s: String) => {
+ def f(x: Int) = if(x % 2 == 0) Left(s) else Right(s)
+ Right(n).right.flatMap(f(_)) == f(n)})
+
+ val prop_flatMapRightIdentity = forAll((e: Either[Int, Int]) => e.right.flatMap(Right(_)) == e)
+
+ val prop_flatMapComposition = forAll((e: Either[Int, Int]) => {
+ def f(x: Int) = if(x % 2 == 0) Left(x) else Right(x)
+ def g(x: Int) = if(x % 7 == 0) Right(x) else Left(x)
+ e.right.flatMap(f(_)).right.flatMap(g(_)) == e.right.flatMap(f(_).right.flatMap(g(_)))})
+
+ val prop_mapIdentity = forAll((e: Either[Int, Int]) => e.right.map(x => x) == e)
+
+ val prop_mapComposition = forAll((e: Either[Int, String]) => {
+ def f(s: String) = s.toLowerCase
+ def g(s: String) = s.reverse
+ e.right.map(x => f(g(x))) == e.right.map(x => g(x)).right.map(f(_))})
+
+ val prop_filter = forAll((e: Either[Int, Int], x: Int) => e.right.filter(_ % 2 == 0) ==
+ (if(e.isLeft || e.right.get % 2 != 0) None else Some(e)))
+
+ val prop_seq = forAll((e: Either[Int, Int]) => e.right.toSeq == (e match {
+ case Left(_) => Seq.empty
+ case Right(b) => Seq(b)
+ }))
+
+ val prop_option = forAll((e: Either[Int, Int]) => e.right.toOption == (e match {
+ case Left(_) => None
+ case Right(b) => Some(b)
+ }))
+ }
+
+ val prop_Either_left = forAll((n: Int) => Left(n).left.get == n)
+
+ val prop_Either_right = forAll((n: Int) => Right(n).right.get == n)
+
+ val prop_Either_joinLeft = forAll((e: Either[Either[Int, Int], Int]) => e match {
+ case Left(ee) => e.joinLeft == ee
+ case Right(n) => e.joinLeft == Right(n)
+ })
+
+ val prop_Either_joinRight = forAll((e: Either[Int, Either[Int, Int]]) => e match {
+ case Left(n) => e.joinRight == Left(n)
+ case Right(ee) => e.joinRight == ee
+ })
+
+ val prop_Either_reduce = forAll((e: Either[Int, Int]) =>
+ e.merge == (e match {
+ case Left(a) => a
+ case Right(a) => a
+ }))
+
+ val prop_getOrElse = forAll((e: Either[Int, Int], or: Int) => e.getOrElse(or) == (e match {
+ case Left(_) => or
+ case Right(b) => b
+ }))
+
+ val prop_contains = forAll((e: Either[Int, Int], n: Int) =>
+ e.contains(n) == (e.isRight && e.right.get == n))
+
+ val prop_forall = forAll((e: Either[Int, Int]) =>
+ e.forall(_ % 2 == 0) == (e.isLeft || e.right.get % 2 == 0))
+
+ val prop_exists = forAll((e: Either[Int, Int]) =>
+ e.exists(_ % 2 == 0) == (e.isRight && e.right.get % 2 == 0))
+
+ val prop_flatMapLeftIdentity = forAll((e: Either[Int, Int], n: Int, s: String) => {
+ def f(x: Int) = if(x % 2 == 0) Left(s) else Right(s)
+ Right(n).flatMap(f(_)) == f(n)})
+
+ val prop_flatMapRightIdentity = forAll((e: Either[Int, Int]) => e.flatMap(Right(_)) == e)
+
+ val prop_flatMapComposition = forAll((e: Either[Int, Int]) => {
+ def f(x: Int) = if(x % 2 == 0) Left(x) else Right(x)
+ def g(x: Int) = if(x % 7 == 0) Right(x) else Left(x)
+ e.flatMap(f(_)).flatMap(g(_)) == e.flatMap(f(_).flatMap(g(_)))})
+
+ val prop_mapIdentity = forAll((e: Either[Int, Int]) => e.map(x => x) == e)
+
+ val prop_mapComposition = forAll((e: Either[Int, String]) => {
+ def f(s: String) = s.toLowerCase
+ def g(s: String) = s.reverse
+ e.map(x => f(g(x))) == e.map(x => g(x)).map(f(_))})
+
+ val prop_filterOrElse = forAll((e: Either[Int, Int], x: Int) => e.filterOrElse(_ % 2 == 0, -x) ==
+ (if(e.isLeft) e
+ else if(e.right.get % 2 == 0) e
+ else Left(-x)))
+
+ val prop_seq = forAll((e: Either[Int, Int]) => e.toSeq == (e match {
+ case Left(_) => collection.immutable.Seq.empty
+ case Right(b) => collection.immutable.Seq(b)
+ }))
+
+ val prop_option = forAll((e: Either[Int, Int]) => e.toOption == (e match {
+ case Left(_) => None
+ case Right(b) => Some(b)
+ }))
+
+ val prop_try = forAll((e: Either[Throwable, Int]) => e.toTry == (e match {
+ case Left(a) => util.Failure(a)
+ case Right(b) => util.Success(b)
+ }))
+
+ /** Hard to believe I'm "fixing" a test to reflect B before A ... */
+ val prop_Either_cond = forAll((c: Boolean, a: Int, b: Int) =>
+ Either.cond(c, a, b) == (if(c) Right(a) else Left(b)))
+
+ val tests = List(
+ ("prop_either1", prop_either1),
+ ("prop_either2", prop_either2),
+ ("prop_swap", prop_swap),
+ ("prop_isLeftRight", prop_isLeftRight),
+ ("Left.prop_value", CheckLeftProjection.prop_value),
+ ("Left.prop_getOrElse", CheckLeftProjection.prop_getOrElse),
+ ("Left.prop_forall", CheckLeftProjection.prop_forall),
+ ("Left.prop_exists", CheckLeftProjection.prop_exists),
+ ("Left.prop_flatMapLeftIdentity", CheckLeftProjection.prop_flatMapLeftIdentity),
+ ("Left.prop_flatMapRightIdentity", CheckLeftProjection.prop_flatMapRightIdentity),
+ ("Left.prop_flatMapComposition", CheckLeftProjection.prop_flatMapComposition),
+ ("Left.prop_mapIdentity", CheckLeftProjection.prop_mapIdentity),
+ ("Left.prop_mapComposition", CheckLeftProjection.prop_mapComposition),
+ ("Left.prop_filter", CheckLeftProjection.prop_filter),
+ ("Left.prop_seq", CheckLeftProjection.prop_seq),
+ ("Left.prop_option", CheckLeftProjection.prop_option),
+ ("Right.prop_value", CheckRightProjection.prop_value),
+ ("Right.prop_getOrElse", CheckRightProjection.prop_getOrElse),
+ ("Right.prop_forall", CheckRightProjection.prop_forall),
+ ("Right.prop_exists", CheckRightProjection.prop_exists),
+ ("Right.prop_flatMapLeftIdentity", CheckRightProjection.prop_flatMapLeftIdentity),
+ ("Right.prop_flatMapRightIdentity", CheckRightProjection.prop_flatMapRightIdentity),
+ ("Right.prop_flatMapComposition", CheckRightProjection.prop_flatMapComposition),
+ ("Right.prop_mapIdentity", CheckRightProjection.prop_mapIdentity),
+ ("Right.prop_mapComposition", CheckRightProjection.prop_mapComposition),
+ ("Right.prop_filter", CheckRightProjection.prop_filter),
+ ("Right.prop_seq", CheckRightProjection.prop_seq),
+ ("Right.prop_option", CheckRightProjection.prop_option),
+ ("prop_Either_left", prop_Either_left),
+ ("prop_Either_right", prop_Either_right),
+ ("prop_Either_joinLeft", prop_Either_joinLeft),
+ ("prop_Either_joinRight", prop_Either_joinRight),
+ ("prop_Either_reduce", prop_Either_reduce),
+ ("prop_getOrElse", prop_getOrElse),
+ ("prop_contains", prop_contains),
+ ("prop_forall", prop_forall),
+ ("prop_exists", prop_exists),
+ ("prop_flatMapLeftIdentity", prop_flatMapLeftIdentity),
+ ("prop_flatMapRightIdentity", prop_flatMapRightIdentity),
+ ("prop_flatMapComposition", prop_flatMapComposition),
+ ("prop_mapIdentity", prop_mapIdentity),
+ ("prop_mapComposition", prop_mapComposition),
+ ("prop_filterOrElse", prop_filterOrElse),
+ ("prop_seq", prop_seq),
+ ("prop_option", prop_option),
+ ("prop_try", prop_try),
+ ("prop_Either_cond", prop_Either_cond))
+
+ for ((label, prop) <- tests) {
+ property(label) = prop
+ }
+}
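The "B before A" comment above refers to the argument order of Either.cond: the Right (success) value comes before the Left value. A minimal sketch, not part of the patch, with illustrative values:

    // Either.cond(test, right, left) is Right(right) when test holds, else Left(left).
    def parsePositive(n: Int): Either[String, Int] =
      Either.cond(n > 0, n, s"$n is not positive")

    assert(parsePositive(3)  == Right(3))
    assert(parsePositive(-1) == Left("-1 is not positive"))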
diff --git a/test/scalacheck/Ctrie.scala b/test/scalacheck/Ctrie.scala
new file mode 100644
index 0000000000..8a24079ad3
--- /dev/null
+++ b/test/scalacheck/Ctrie.scala
@@ -0,0 +1,215 @@
+import org.scalacheck._
+import Prop._
+import org.scalacheck.Gen._
+import collection._
+import collection.concurrent.TrieMap
+
+
+
+case class Wrap(i: Int) {
+ override def hashCode = i // * 0x9e3775cd
+}
+
+
+/** A check mainly oriented towards checking snapshot correctness.
+ */
+object CtrieTest extends Properties("concurrent.TrieMap") {
+
+ /* generators */
+
+ val sizes = choose(0, 200000)
+
+ val threadCounts = choose(2, 16)
+
+ val threadCountsAndSizes = for {
+ p <- threadCounts
+ sz <- sizes
+ } yield (p, sz);
+
+
+ /* helpers */
+
+ def inParallel[T](totalThreads: Int)(body: Int => T): Seq[T] = {
+ val threads = for (idx <- 0 until totalThreads) yield new Thread {
+ setName("ParThread-" + idx)
+ private var res: T = _
+ override def run() {
+ res = body(idx)
+ }
+ def result = {
+ this.join()
+ res
+ }
+ }
+
+ threads foreach (_.start())
+ threads map (_.result)
+ }
+
+ def spawn[T](body: =>T): { def get: T } = {
+ val t = new Thread {
+ setName("SpawnThread")
+ private var res: T = _
+ override def run() {
+ res = body
+ }
+ def result = res
+ }
+ t.start()
+ new {
+ def get: T = {
+ t.join()
+ t.result
+ }
+ }
+ }
+
+ def elementRange(threadIdx: Int, totalThreads: Int, totalElems: Int): Range = {
+ val sz = totalElems
+ val idx = threadIdx
+ val p = totalThreads
+ val start = (sz / p) * idx + math.min(idx, sz % p)
+ val elems = (sz / p) + (if (idx < sz % p) 1 else 0)
+ val end = start + elems
+ (start until end)
+ }
+
+ def hasGrown[K, V](last: Map[K, V], current: Map[K, V]) = {
+ (last.size <= current.size) && {
+ last forall {
+ case (k, v) => current.get(k) == Some(v)
+ }
+ }
+ }
+
+ object err {
+ var buffer = new StringBuilder
+ def println(a: AnyRef) = buffer.append(a.toString).append("\n")
+ def clear() = buffer.clear()
+ def flush() = {
+ Console.out.println(buffer)
+ clear()
+ }
+ }
+
+
+ /* properties */
+
+ property("concurrent growing snapshots") = forAll(threadCounts, sizes) {
+ (numThreads, numElems) =>
+ val p = 3 //numThreads
+ val sz = 102 //numElems
+ val ct = new TrieMap[Wrap, Int]
+
+ // checker
+ val checker = spawn {
+ def check(last: Map[Wrap, Int], iterationsLeft: Int): Boolean = {
+ val current = ct.readOnlySnapshot()
+ if (!hasGrown(last, current)) false
+ else if (current.size >= sz) true
+ else if (iterationsLeft < 0) false
+ else check(current, iterationsLeft - 1)
+ }
+ check(ct.readOnlySnapshot(), 500)
+ }
+
+ // fillers
+ inParallel(p) {
+ idx =>
+ elementRange(idx, p, sz) foreach (i => ct.update(Wrap(i), i))
+ }
+
+ // wait for checker to finish
+ val growing = true//checker.get
+
+ val ok = growing && ((0 until sz) forall {
+ case i => ct.get(Wrap(i)) == Some(i)
+ })
+
+ ok
+ }
+
+ property("update") = forAll(sizes) {
+ (n: Int) =>
+ val ct = new TrieMap[Int, Int]
+ for (i <- 0 until n) ct(i) = i
+ (0 until n) forall {
+ case i => ct(i) == i
+ }
+ }
+
+ property("concurrent update") = forAll(threadCountsAndSizes) {
+ case (p, sz) =>
+ val ct = new TrieMap[Wrap, Int]
+
+ inParallel(p) {
+ idx =>
+ for (i <- elementRange(idx, p, sz)) ct(Wrap(i)) = i
+ }
+
+ (0 until sz) forall {
+ case i => ct(Wrap(i)) == i
+ }
+ }
+
+
+ property("concurrent remove") = forAll(threadCounts, sizes) {
+ (p, sz) =>
+ val ct = new TrieMap[Wrap, Int]
+ for (i <- 0 until sz) ct(Wrap(i)) = i
+
+ inParallel(p) {
+ idx =>
+ for (i <- elementRange(idx, p, sz)) ct.remove(Wrap(i))
+ }
+
+ (0 until sz) forall {
+ case i => ct.get(Wrap(i)) == None
+ }
+ }
+
+
+ property("concurrent putIfAbsent") = forAll(threadCounts, sizes) {
+ (p, sz) =>
+ val ct = new TrieMap[Wrap, Int]
+
+ val results = inParallel(p) {
+ idx =>
+ elementRange(idx, p, sz) find (i => ct.putIfAbsent(Wrap(i), i) != None)
+ }
+
+ (results forall (_ == None)) && ((0 until sz) forall {
+ case i => ct.get(Wrap(i)) == Some(i)
+ })
+ }
+
+ property("concurrent getOrElseUpdate") = forAll(threadCounts, sizes) {
+ (p, sz) =>
+ val totalInserts = new java.util.concurrent.atomic.AtomicInteger
+ val ct = new TrieMap[Wrap, String]
+
+ val results = inParallel(p) {
+ idx =>
+ (0 until sz) foreach {
+ i =>
+ val v = ct.getOrElseUpdate(Wrap(i), idx + ":" + i)
+ if (v == idx + ":" + i) totalInserts.incrementAndGet()
+ }
+ }
+
+ (totalInserts.get == sz) && ((0 until sz) forall {
+ case i => ct(Wrap(i)).split(":")(1).toInt == i
+ })
+ }
+
+}
+
+
+
+
+
+
+
+
+
+
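The "concurrent growing snapshots" property leans on TrieMap's snapshot semantics: readOnlySnapshot() is a constant-time operation whose result is not affected by later updates to the live map, so snapshots taken while writers insert can only grow relative to each other (hasGrown). A minimal single-threaded sketch, not part of the patch:

    import scala.collection.concurrent.TrieMap

    val ct = TrieMap(1 -> "a")
    val snap = ct.readOnlySnapshot()
    ct.update(2, "b")

    assert(snap.get(2) == None)     // the snapshot does not see the later insert
    assert(ct.get(2) == Some("b"))  // the live map does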
diff --git a/test/scalacheck/MutablePriorityQueue.scala b/test/scalacheck/MutablePriorityQueue.scala
new file mode 100644
index 0000000000..1df432d811
--- /dev/null
+++ b/test/scalacheck/MutablePriorityQueue.scala
@@ -0,0 +1,102 @@
+import scala.collection.mutable.PriorityQueue
+import org.scalacheck._
+import Prop._
+import Arbitrary._
+
+object MutablePriorityQueueTest extends Properties("PriorityQueue") {
+ type E = Int // the element type used for most/all of the tests
+
+ def checkInvariant[A](pq: PriorityQueue[A])(implicit ord: Ordering[A]): Boolean = {
+ // The ordering invariant in the heap is that parent >= child.
+ // A child at index i has a parent at index i/2 in the priority
+ // queue's internal array. However, that array is padded with
+ // an extra slot in front so that the first real element is at
+ // index 1. The vector below is not padded, so subtract 1 from
+ // every index.
+ import ord._
+ val vec = pq.toVector // elements in same order as pq's internal array
+ 2 until pq.size forall { i => vec(i/2-1) >= vec(i-1) }
+ }
+
+ property("newBuilder (in companion)") = forAll { list: List[E] =>
+ val builder = PriorityQueue.newBuilder[E]
+ for (x <- list) builder += x
+ val pq = builder.result()
+ checkInvariant(pq) &&
+ pq.dequeueAll == list.sorted.reverse
+ }
+
+ property("to[PriorityQueue]") = forAll { list: List[E] =>
+ val pq = list.to[PriorityQueue]
+ checkInvariant(pq) &&
+ pq.dequeueAll == list.sorted.reverse
+ }
+
+ property("apply (in companion)") = forAll { list: List[E] =>
+ val pq = PriorityQueue.apply(list : _*)
+ checkInvariant(pq) &&
+ pq.dequeueAll == list.sorted.reverse
+ }
+
+ property("size, isEmpty") = forAll { list: List[E] =>
+ val pq = PriorityQueue(list : _*)
+ pq.size == list.size && pq.isEmpty == list.isEmpty
+ }
+
+ property("+=") = forAll { (x: E, list: List[E]) =>
+ val pq = PriorityQueue(list : _*)
+ pq += x
+ checkInvariant(pq) &&
+ pq.dequeueAll == (x :: list).sorted.reverse
+ }
+
+ property("++= on empty") = forAll { list: List[E] =>
+ val pq = PriorityQueue.empty[E]
+ pq ++= list
+ checkInvariant(pq) &&
+ pq.dequeueAll == list.sorted.reverse
+ }
+
+ property("++=") = forAll { (list1: List[E], list2: List[E]) =>
+ val pq = PriorityQueue(list1 : _*)
+ pq ++= list2
+ checkInvariant(pq) &&
+ pq.dequeueAll == (list1 ++ list2).sorted.reverse
+ }
+
+ property("reverse") = forAll { list: List[E] =>
+ val pq = PriorityQueue(list : _*).reverse
+ checkInvariant(pq)(implicitly[Ordering[E]].reverse) &&
+ pq.dequeueAll == list.sorted
+ }
+
+ property("reverse then ++=") = forAll { list: List[E] =>
+ val pq = PriorityQueue.empty[E].reverse ++= list
+ checkInvariant(pq)(implicitly[Ordering[E]].reverse) &&
+ pq.dequeueAll == list.sorted
+ }
+
+ property("reverse then +=") = forAll { (x: E, list: List[E]) =>
+ val pq = PriorityQueue(list : _*).reverse += x
+ checkInvariant(pq)(implicitly[Ordering[E]].reverse) &&
+ pq.dequeueAll == (x +: list).sorted
+ }
+
+ property("clone") = forAll { list: List[E] =>
+ val pq = PriorityQueue(list : _*)
+ val c = pq.clone()
+ (pq ne c) &&
+ checkInvariant(c) &&
+ c.dequeueAll == pq.dequeueAll
+ }
+
+ property("dequeue") = forAll { list: List[E] =>
+ list.nonEmpty ==> {
+ val pq = PriorityQueue(list : _*)
+ val x = pq.dequeue()
+ checkInvariant(pq) &&
+ x == list.max && pq.dequeueAll == list.sorted.reverse.tail
+ }
+ }
+
+}
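The properties above all reduce to the same contract: a mutable PriorityQueue is a max-heap, so draining it yields elements in descending order (list.sorted.reverse). A minimal sketch with illustrative values, not part of the patch:

    import scala.collection.mutable.PriorityQueue

    val pq = PriorityQueue(3, 1, 4, 1, 5)
    assert(pq.head == 5)                        // head is always the maximum
    assert(pq.dequeueAll == Seq(5, 4, 3, 1, 1)) // descending, i.e. sorted.reverse
    assert(pq.isEmpty)                          // dequeueAll drains the queue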
diff --git a/test/scalacheck/ReflectionExtractors.scala b/test/scalacheck/ReflectionExtractors.scala
new file mode 100644
index 0000000000..23076662fd
--- /dev/null
+++ b/test/scalacheck/ReflectionExtractors.scala
@@ -0,0 +1,52 @@
+import org.scalacheck._
+import Prop._
+import Gen._
+import Arbitrary._
+
+import scala.reflect.runtime.universe._
+import Flag._
+
+object ReflectionExtractorsTest extends Properties("reflection extractors") {
+
+ val genFlag = oneOf(
+ TRAIT, INTERFACE, MUTABLE, MACRO, DEFERRED, ABSTRACT, FINAL, SEALED,
+ IMPLICIT, LAZY, OVERRIDE, PRIVATE, PROTECTED, LOCAL, CASE, ABSOVERRIDE,
+ BYNAMEPARAM, PARAM, COVARIANT, CONTRAVARIANT, DEFAULTPARAM, PRESUPER,
+ DEFAULTINIT
+ )
+ val genModifiers =
+ for(flag <- genFlag; privateWithin <- genName)
+ yield Modifiers(flag, privateWithin, Nil)
+ val genTermName = for(name <- arbitrary[String]) yield TermName(name)
+ val genTypeName = for(name <- arbitrary[String]) yield TypeName(name)
+ val genName = oneOf(genTermName, genTypeName)
+
+ implicit val arbTermName: Arbitrary[TermName] = Arbitrary(genTermName)
+ implicit val arbTypeName: Arbitrary[TypeName] = Arbitrary(genTypeName)
+ implicit val arbName: Arbitrary[Name] = Arbitrary(genName)
+ implicit val arbMods: Arbitrary[Modifiers] = Arbitrary(genModifiers)
+
+ property("extract term name") = forAll { (name: TermName) =>
+ val TermName(s) = name
+ s == name.toString
+ }
+
+ property("extract type name") = forAll { (name: TypeName) =>
+ val TypeName(s) = name
+ s == name.toString
+ }
+
+ property("extract term or type name") = forAll { (name: Name) =>
+ name match {
+ case TermName(s) => s == name.toString
+ case TypeName(s) => s == name.toString
+ }
+ }
+
+ property("extract modifiers") = forAll { (mods: Modifiers) =>
+ val Modifiers(flags, priv, annots) = mods
+ flags == mods.flags &&
+ priv == mods.privateWithin &&
+ annots == mods.annotations
+ }
+}
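A minimal sketch, not part of the patch, of the extractor round-trips checked above, using concrete names rather than Arbitrary ones (requires scala-reflect, as the test does):

    import scala.reflect.runtime.universe._

    val TermName(s) = TermName("foo")
    assert(s == "foo")

    val TypeName(t) = TypeName("Bar")
    assert(t == "Bar")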
diff --git a/test/scalacheck/Unrolled.scala b/test/scalacheck/Unrolled.scala
new file mode 100644
index 0000000000..ad6e9d3cc8
--- /dev/null
+++ b/test/scalacheck/Unrolled.scala
@@ -0,0 +1,26 @@
+import org.scalacheck._
+import Prop._
+import Gen._
+
+import collection.mutable.UnrolledBuffer
+
+object UnrolledTest extends Properties("UnrolledBuffer") {
+
+ property("concat size") = forAll { (l1: List[Int], l2: List[Int]) =>
+ val u1 = new UnrolledBuffer[Int]
+ u1 ++= l1
+ val u2 = new UnrolledBuffer[Int]
+ u2 ++= l2
+ val totalsz = u1.size + u2.size
+ u1 concat u2
+ totalsz == u1.size
+ }
+
+ property("adding") = forAll { (l: List[Int]) =>
+ val u = new UnrolledBuffer[Int]
+ u ++= l
+ u == l
+ }
+
+}
+
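The "concat size" property relies on concat merging the second buffer's contents into the first in place. A minimal sketch with illustrative values, not part of the patch:

    import scala.collection.mutable.UnrolledBuffer

    val u1 = UnrolledBuffer(1, 2)
    val u2 = UnrolledBuffer(3, 4)
    val total = u1.size + u2.size
    u1 concat u2              // u1 takes over u2's contents
    assert(u1.size == total)  // exactly what the property asserts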
diff --git a/test/scalacheck/array-new.scala b/test/scalacheck/array-new.scala
new file mode 100644
index 0000000000..de2df68b3a
--- /dev/null
+++ b/test/scalacheck/array-new.scala
@@ -0,0 +1,37 @@
+import scala.reflect.ClassTag // new style: use ClassTag
+import org.scalacheck._
+import Prop._
+import Gen._
+import Arbitrary._
+import util._
+import Buildable._
+import scala.collection.mutable.ArraySeq
+
+object ArrayNewTest extends Properties("Array") {
+ /** At this moment the authentic scalacheck Array Builder/Arb bits are commented out.
+ */
+ implicit def arbArray[T](implicit a: Arbitrary[T], m: ClassTag[T]): Arbitrary[Array[T]] =
+ Arbitrary(containerOf[List,T](arbitrary[T]) map (_.toArray))
+
+ val arrGen: Gen[Array[_]] = oneOf(
+ arbitrary[Array[Int]],
+ arbitrary[Array[Array[Int]]],
+ arbitrary[Array[List[String]]],
+ arbitrary[Array[String]],
+ arbitrary[Array[Boolean]],
+ arbitrary[Array[AnyVal]]
+ )
+
+ // inspired by #1857 and #2352
+ property("eq/ne") = forAll(arrGen, arrGen) { (c1, c2) =>
+ (c1 eq c2) || (c1 ne c2)
+ }
+
+ // inspired by #2299
+ def smallInt = choose(1, 10)
+ property("ofDim") = forAll(smallInt, smallInt, smallInt) { (i1, i2, i3) =>
+ val arr = Array.ofDim[String](i1, i2, i3)
+ val flattened = arr flatMap (x => x) flatMap (x => x)
+ flattened.length == i1 * i2 * i3
+ }
+}
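A minimal sketch, not part of the patch, of what the "ofDim" property counts, using fixed dimensions:

    val arr = Array.ofDim[String](2, 3, 4)               // Array[Array[Array[String]]]
    val flattened = arr.flatMap(x => x).flatMap(x => x)
    assert(flattened.length == 2 * 3 * 4)                // 24 (initially null) slots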
diff --git a/test/scalacheck/array-old.scala b/test/scalacheck/array-old.scala
new file mode 100644
index 0000000000..9532636660
--- /dev/null
+++ b/test/scalacheck/array-old.scala
@@ -0,0 +1,37 @@
+import org.scalacheck._
+import Prop._
+import Gen._
+import Arbitrary._
+import util._
+import Buildable._
+import scala.collection.mutable.ArraySeq
+
+object ArrayOldTest extends Properties("Array") {
+ /** At this moment the authentic scalacheck Array Builder/Arb bits are commented out.
+ */
+ implicit def arbArray[T](implicit a: Arbitrary[T], m: Manifest[T]): Arbitrary[Array[T]] =
+ Arbitrary(containerOf[List,T](arbitrary[T]) map (_.toArray))
+
+ val arrGen: Gen[Array[_]] = oneOf(
+ arbitrary[Array[Int]],
+ arbitrary[Array[Array[Int]]],
+ arbitrary[Array[List[String]]],
+ arbitrary[Array[String]],
+ arbitrary[Array[Boolean]],
+ arbitrary[Array[AnyVal]]
+ )
+
+ // inspired by #1857 and #2352
+ property("eq/ne") = forAll(arrGen, arrGen) { (c1, c2) =>
+ (c1 eq c2) || (c1 ne c2)
+ }
+
+ // inspired by #2299
+ def smallInt = choose(1, 10)
+ property("ofDim") = forAll(smallInt, smallInt, smallInt) { (i1, i2, i3) =>
+ val arr = Array.ofDim[String](i1, i2, i3)
+ val flattened = arr flatMap (x => x) flatMap (x => x)
+ flattened.length == i1 * i2 * i3
+ }
+}
+
diff --git a/test/scalacheck/concurrent-map.scala b/test/scalacheck/concurrent-map.scala
new file mode 100644
index 0000000000..0dae7a98bd
--- /dev/null
+++ b/test/scalacheck/concurrent-map.scala
@@ -0,0 +1,70 @@
+import java.util.concurrent._
+import scala.collection._
+import scala.collection.JavaConverters._
+import org.scalacheck._
+import org.scalacheck.Prop._
+import org.scalacheck.Gen._
+
+object ConcurrentMapTest extends Properties("concurrent.TrieMap") {
+
+ case class Wrap(i: Int) {
+ override def hashCode = i * 0x9e3775cd
+ }
+
+ /* generators */
+
+ val sizes = choose(0, 20000)
+
+ val threadCounts = choose(2, 16)
+
+ val threadCountsAndSizes = for {
+ p <- threadCounts
+ sz <- sizes
+ } yield (p, sz);
+
+
+ /* helpers */
+
+ def inParallel[T](totalThreads: Int)(body: Int => T): Seq[T] = {
+ val threads = for (idx <- 0 until totalThreads) yield new Thread {
+ setName("ParThread-" + idx)
+ private var res: T = _
+ override def run() {
+ res = body(idx)
+ }
+ def result = {
+ this.join()
+ res
+ }
+ }
+
+ threads foreach (_.start())
+ threads map (_.result)
+ }
+
+ property("concurrent getOrElseUpdate insertions") = forAll(threadCounts, sizes) {
+ (p, sz) =>
+ val chm = new ConcurrentHashMap[Wrap, Int]().asScala
+
+ val results = inParallel(p) {
+ idx =>
+ for (i <- 0 until sz) yield chm.getOrElseUpdate(new Wrap(i), idx)
+ }
+
+ val resultSets = for (i <- 0 until sz) yield results.map(_(i)).toSet
+ val largerThanOne = resultSets.zipWithIndex.find(_._1.size != 1)
+ val allThreadsAgreeOnWhoInserted = {
+ largerThanOne == None
+ } :| s"$p threads agree on who inserted [disagreement (differentResults, position) = $largerThanOne]"
+
+ allThreadsAgreeOnWhoInserted
+ }
+
+
+}
+
+
+
+
+
+
diff --git a/test/scalacheck/duration.scala b/test/scalacheck/duration.scala
new file mode 100644
index 0000000000..fc861b886a
--- /dev/null
+++ b/test/scalacheck/duration.scala
@@ -0,0 +1,72 @@
+import org.scalacheck._
+import Prop._
+import Gen._
+import Arbitrary._
+import math._
+import concurrent.duration.Duration.fromNanos
+
+object DurationTest extends Properties("Division of Duration by Long") {
+
+ val weightedLong =
+ frequency(
+ 1 -> choose(-128L, 127L),
+ 1 -> (arbitrary[Byte] map (_.toLong << 8)),
+ 1 -> (arbitrary[Byte] map (_.toLong << 16)),
+ 1 -> (arbitrary[Byte] map (_.toLong << 24)),
+ 1 -> (arbitrary[Byte] map (_.toLong << 32)),
+ 1 -> (arbitrary[Byte] map (_.toLong << 40)),
+ 1 -> (arbitrary[Byte] map (_.toLong << 48)),
+ 1 -> (choose(-127L, 127L) map (_ << 56))
+ )
+
+ val genTwoSmall = for {
+ a <- weightedLong
+ b <- choose(-(Long.MaxValue / max(1, abs(a))), Long.MaxValue / max(1, abs(a)))
+ } yield (a, b)
+
+ val genTwoLarge = for {
+ a <- weightedLong
+ b <- arbitrary[Long] suchThat (b => (abs(b) > Long.MaxValue / max(1, abs(a))))
+ } yield (a, b)
+
+ val genClose = for {
+ a <- weightedLong
+ if a != 0
+ val center = Long.MaxValue / a
+ b <-
+ if (center - 10 < center + 10) choose(center - 10, center + 10)
+ else choose(center + 10, center - 10) // deal with overflow if abs(a) == 1
+ } yield (a, b)
+
+ val genBorderline =
+ frequency(
+ 1 -> (Long.MinValue, 0L),
+ 1 -> (Long.MinValue, 1L),
+ 1 -> (Long.MinValue, -1L),
+ 1 -> (0L, Long.MinValue),
+ 1 -> (1L, Long.MinValue),
+ 1 -> (-1L, Long.MinValue),
+ 90 -> genClose
+ )
+
+ def mul(a: Long, b: Long): Long = {
+ (fromNanos(a) * b).toNanos
+ }
+
+ property("without overflow") = forAll(genTwoSmall) { case (a, b) =>
+ a * b == mul(a, b)
+ }
+
+ property("with overflow") = forAll(genTwoLarge) { case (a, b) =>
+ try { mul(a, b); false } catch { case _: IllegalArgumentException => true }
+ }
+
+ property("on overflow edge cases") = forAll(genBorderline) { case (a, b) =>
+ val shouldFit =
+ a != Long.MinValue && // must fail due to illegal duration length
+ (b != Long.MinValue || a == 0) && // Long factor may only be MinValue if the duration is zero, otherwise the result will be illegal
+ (abs(b) <= Long.MaxValue / max(1, abs(a))) // check the rest against the “safe” division method
+ try { mul(a, b); shouldFit }
+ catch { case _: IllegalArgumentException => !shouldFit }
+ }
+}
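The "with overflow" property expects FiniteDuration's Long multiplication to reject results that no longer fit in a Long count of nanoseconds rather than silently wrapping. A minimal sketch, not part of the patch; the concrete operands are illustrative:

    import scala.concurrent.duration.Duration.fromNanos
    import scala.util.Try

    assert((fromNanos(2L) * 3L).toNanos == 6L)               // fits
    assert(Try(fromNanos(Long.MaxValue / 2) * 3L).isFailure) // overflows, throws IllegalArgumentException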
diff --git a/test/scalacheck/list.scala b/test/scalacheck/list.scala
new file mode 100644
index 0000000000..3531f620f9
--- /dev/null
+++ b/test/scalacheck/list.scala
@@ -0,0 +1,21 @@
+import org.scalacheck._
+import Prop._
+import Gen._
+
+object ListTest extends Properties("List") {
+ def sorted(xs: List[Int]) = xs sortWith (_ < _)
+
+ property("concat size") = forAll { (l1: List[Int], l2: List[Int]) => (l1.size + l2.size) == (l1 ::: l2).size }
+ property("reverse") = forAll { (l1: List[Int]) => l1.reverse.reverse == l1 }
+ property("toSet") = forAll { (l1: List[Int]) => sorted(l1.toSet.toList) sameElements sorted(l1).distinct }
+ // property("flatten") = forAll { (xxs: List[List[Int]]) => xxs.flatten.length == (xxs map (_.length) sum) }
+ property("startsWith/take") = forAll { (xs: List[Int], count: Int) => xs startsWith (xs take count) }
+ property("endsWith/takeRight") = forAll { (xs: List[Int], count: Int) => xs endsWith (xs takeRight count) }
+ property("fill") = forAll(choose(1, 100)) { count =>
+ forAll { (x: Int) =>
+ val xs = List.fill(count)(x)
+ (xs.length == count) && (xs.distinct == List(x))
+ }
+ }
+}
+
diff --git a/test/scalacheck/nan-ordering.scala b/test/scalacheck/nan-ordering.scala
new file mode 100644
index 0000000000..be57b27178
--- /dev/null
+++ b/test/scalacheck/nan-ordering.scala
@@ -0,0 +1,130 @@
+import org.scalacheck._
+import Gen._
+import Prop._
+
+object NanOrderingTest extends Properties("NaN-Ordering") {
+
+ val specFloats: Gen[Float] = oneOf(
+ Float.MaxValue,
+ Float.MinPositiveValue,
+ Float.MinValue,
+ Float.NaN,
+ Float.NegativeInfinity,
+ Float.PositiveInfinity,
+ -0.0f,
+ +0.0f
+ )
+
+ property("Float min") = forAll(specFloats, specFloats) { (d1, d2) => {
+ val mathmin = math.min(d1, d2)
+ val numericmin = d1 min d2
+ mathmin == numericmin || mathmin.isNaN && numericmin.isNaN
+ }
+ }
+
+ property("Float max") = forAll(specFloats, specFloats) { (d1, d2) => {
+ val mathmax = math.max(d1, d2)
+ val numericmax = d1 max d2
+ mathmax == numericmax || mathmax.isNaN && numericmax.isNaN
+ }
+ }
+
+ val numFloat = implicitly[Numeric[Float]]
+
+ property("Float lt") = forAll(specFloats, specFloats) { (d1, d2) => numFloat.lt(d1, d2) == d1 < d2 }
+
+ property("Float lteq") = forAll(specFloats, specFloats) { (d1, d2) => numFloat.lteq(d1, d2) == d1 <= d2 }
+
+ property("Float gt") = forAll(specFloats, specFloats) { (d1, d2) => numFloat.gt(d1, d2) == d1 > d2 }
+
+ property("Float gteq") = forAll(specFloats, specFloats) { (d1, d2) => numFloat.gteq(d1, d2) == d1 >= d2 }
+
+ property("Float equiv") = forAll(specFloats, specFloats) { (d1, d2) => numFloat.equiv(d1, d2) == (d1 == d2) }
+
+ property("Float reverse.min") = forAll(specFloats, specFloats) { (d1, d2) => {
+ val mathmax = math.max(d1, d2)
+ val numericmin = numFloat.reverse.min(d1, d2)
+ mathmax == numericmin || mathmax.isNaN && numericmin.isNaN
+ }
+ }
+
+ property("Float reverse.max") = forAll(specFloats, specFloats) { (d1, d2) => {
+ val mathmin = math.min(d1, d2)
+ val numericmax = numFloat.reverse.max(d1, d2)
+ mathmin == numericmax || mathmin.isNaN && numericmax.isNaN
+ }
+ }
+
+ property("Float reverse.lt") = forAll(specFloats, specFloats) { (d1, d2) => numFloat.reverse.lt(d1, d2) == d2 < d1 }
+
+ property("Float reverse.lteq") = forAll(specFloats, specFloats) { (d1, d2) => numFloat.reverse.lteq(d1, d2) == d2 <= d1 }
+
+ property("Float reverse.gt") = forAll(specFloats, specFloats) { (d1, d2) => numFloat.reverse.gt(d1, d2) == d2 > d1 }
+
+ property("Float reverse.gteq") = forAll(specFloats, specFloats) { (d1, d2) => numFloat.reverse.gteq(d1, d2) == d2 >= d1 }
+
+ property("Float reverse.equiv") = forAll(specFloats, specFloats) { (d1, d2) => numFloat.reverse.equiv(d1, d2) == (d1 == d2) }
+
+
+ val specDoubles: Gen[Double] = oneOf(
+ Double.MaxValue,
+ Double.MinPositiveValue,
+ Double.MinValue,
+ Double.NaN,
+ Double.NegativeInfinity,
+ Double.PositiveInfinity,
+ -0.0,
+ +0.0
+ )
+
+ // ticket #5104
+ property("Double min") = forAll(specDoubles, specDoubles) { (d1, d2) => {
+ val mathmin = math.min(d1, d2)
+ val numericmin = d1 min d2
+ mathmin == numericmin || mathmin.isNaN && numericmin.isNaN
+ }
+ }
+
+ property("Double max") = forAll(specDoubles, specDoubles) { (d1, d2) => {
+ val mathmax = math.max(d1, d2)
+ val numericmax = d1 max d2
+ mathmax == numericmax || mathmax.isNaN && numericmax.isNaN
+ }
+ }
+
+ val numDouble = implicitly[Numeric[Double]]
+
+ property("Double lt") = forAll(specDoubles, specDoubles) { (d1, d2) => numDouble.lt(d1, d2) == d1 < d2 }
+
+ property("Double lteq") = forAll(specDoubles, specDoubles) { (d1, d2) => numDouble.lteq(d1, d2) == d1 <= d2 }
+
+ property("Double gt") = forAll(specDoubles, specDoubles) { (d1, d2) => numDouble.gt(d1, d2) == d1 > d2 }
+
+ property("Double gteq") = forAll(specDoubles, specDoubles) { (d1, d2) => numDouble.gteq(d1, d2) == d1 >= d2 }
+
+ property("Double equiv") = forAll(specDoubles, specDoubles) { (d1, d2) => numDouble.equiv(d1, d2) == (d1 == d2) }
+
+ property("Double reverse.min") = forAll(specDoubles, specDoubles) { (d1, d2) => {
+ val mathmax = math.max(d1, d2)
+ val numericmin = numDouble.reverse.min(d1, d2)
+ mathmax == numericmin || mathmax.isNaN && numericmin.isNaN
+ }
+ }
+
+ property("Double reverse.max") = forAll(specDoubles, specDoubles) { (d1, d2) => {
+ val mathmin = math.min(d1, d2)
+ val numericmax = numDouble.reverse.max(d1, d2)
+ mathmin == numericmax || mathmin.isNaN && numericmax.isNaN
+ }
+ }
+
+ property("Double reverse.lt") = forAll(specDoubles, specDoubles) { (d1, d2) => numDouble.reverse.lt(d1, d2) == d2 < d1 }
+
+ property("Double reverse.lteq") = forAll(specDoubles, specDoubles) { (d1, d2) => numDouble.reverse.lteq(d1, d2) == d2 <= d1 }
+
+ property("Double reverse.gt") = forAll(specDoubles, specDoubles) { (d1, d2) => numDouble.reverse.gt(d1, d2) == d2 > d1 }
+
+ property("Double reverse.gteq") = forAll(specDoubles, specDoubles) { (d1, d2) => numDouble.reverse.gteq(d1, d2) == d2 >= d1 }
+
+ property("Double reverse.equiv") = forAll(specDoubles, specDoubles) { (d1, d2) => numDouble.reverse.equiv(d1, d2) == (d1 == d2) }
+}
diff --git a/test/scalacheck/primitive-eqeq.scala b/test/scalacheck/primitive-eqeq.scala
new file mode 100644
index 0000000000..fda8087bb0
--- /dev/null
+++ b/test/scalacheck/primitive-eqeq.scala
@@ -0,0 +1,37 @@
+import org.scalacheck._
+import Prop._
+import Gen._
+
+object PrimitiveEqEqTest extends Properties("==") {
+ def equalObjectsEqualHashcodes(x: Any, y: Any) = (x != y) || (x == y && x.## == y.##)
+
+ // ticket #2087
+ property("short/char") = forAll { (x: Short) => {
+ val ch: Char = x.toChar
+ (x == ch) == (ch == x)
+ }
+ }
+
+ property("symmetry") = forAll { (x: AnyVal, y: AnyVal) => (x == y) == (y == x) }
+ property("transitivity") = forAll { (x: AnyVal, y: AnyVal, z: AnyVal) => x != y || y != z || x == z }
+
+ property("##") = forAll {
+ (x: Short) => {
+ val anyvals = List(x.toByte, x.toChar, x, x.toInt, x.toLong, x.toFloat, x.toDouble, BigInt(x), BigDecimal(x))
+ val shortAndLarger = anyvals drop 2
+
+ val result = (
+ ((anyvals, anyvals).zipped forall equalObjectsEqualHashcodes) &&
+ ((shortAndLarger, shortAndLarger).zipped forall (_ == _)) &&
+ ((shortAndLarger, shortAndLarger).zipped forall ((x, y) => (x: Any) == (y: Any)))
+ )
+ result
+ }
+ }
+ property("## 2") = forAll {
+ (dv: Double) => {
+ val fv = dv.toFloat
+ (fv != dv) || (fv.## == dv.##)
+ }
+ }
+}
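The "##" properties boil down to this: values of different primitive (and boxed numeric) types that compare == must also agree on ##, even though their hashCode methods may differ. A minimal sketch with illustrative values, not part of the patch:

    val s: Short  = 42
    val i: Int    = 42
    val d: Double = 42.0

    assert(s == d && s.## == d.##)
    assert(i == d && i.## == d.##)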
diff --git a/test/scalacheck/range.scala b/test/scalacheck/range.scala
new file mode 100644
index 0000000000..bbd7de2149
--- /dev/null
+++ b/test/scalacheck/range.scala
@@ -0,0 +1,304 @@
+import org.scalacheck._
+import Prop._
+import Gen._
+import Arbitrary._
+
+class Counter(r: Range) {
+ var cnt = 0L
+ var last: Option[Int] = None
+ val str = "Range["+r.start+", "+r.end+", "+r.step+(if (r.isInclusive) "]" else ")")
+ def apply(x: Int) = {
+ cnt += 1L
+ if (cnt % 500000000L == 0L) {
+ println("Working: %s %d %d" format (str, cnt, x))
+ }
+ if (cnt > (Int.MaxValue.toLong + 1) * 2) {
+ val msg = "Count exceeds maximum possible for an Int Range: %s" format str
+ println(msg) // exception is likely to be eaten by an out of memory error
+ sys error msg
+ }
+ if ((r.step > 0 && last.exists(_ > x)) || (r.step < 0 && last.exists(_ < x))) {
+ val msg = "Range %s wrapped: %d %s" format (str, x, last.toString)
+ println(msg) // exception is likely to be eaten by an out of memory error
+ sys error msg
+ }
+ last = Some(x)
+ }
+}
+
+abstract class RangeTest(kind: String) extends Properties("Range "+kind) {
+ def myGen: Gen[Range]
+
+ def genReasonableSizeRange = oneOf(genArbitraryRange, genBoundaryRange)
+
+ def genArbitraryRange = for {
+ start <- choose(Int.MinValue, Int.MaxValue)
+ end <- choose(Int.MinValue, Int.MaxValue)
+ step <- choose(-Int.MaxValue, Int.MaxValue)
+ } yield Range(start, end, if (step == 0) 100 else step)
+
+ def genBoundaryRange = for {
+ boundary <- oneOf(Int.MinValue, -1, 0, 1, Int.MaxValue)
+ isStart <- arbitrary[Boolean]
+ size <- choose(1, 100)
+ step <- choose(1, 101)
+ } yield {
+ val signum = if (boundary == 0) 1 else boundary.signum
+ if (isStart) Range(boundary, boundary - size * boundary.signum, - step * signum)
+ else Range(boundary - size * boundary.signum, boundary, step * signum)
+ }
+
+
+ def genSmallRange = for {
+ start <- choose(-100, 100)
+ end <- choose(-100, 100)
+ step <- choose(1, 1)
+ } yield if (start < end) Range(start, end, step) else Range(start, end, -step)
+
+ def genRangeByOne = oneOf(genRangeOpenByOne, genRangeClosedByOne)
+
+ def genRangeOpenByOne = for {
+ r <- oneOf(genSmallRange, genBoundaryRange)
+ if (r.end.toLong - r.start.toLong).abs <= 10000000L
+ } yield if (r.start < r.end) Range(r.start, r.end) else Range(r.end, r.start)
+
+ def genRangeClosedByOne = for (r <- genRangeOpenByOne) yield r.start to r.end
+
+ def str(r: Range) = "Range["+r.start+", "+r.end+", "+r.step+(if (r.isInclusive) "]" else ")")
+
+ def expectedSize(r: Range): Long = if (r.isInclusive) {
+ (r.end.toLong - r.start.toLong < 0, r.step < 0) match {
+ case (true, true) | (false, false) => (r.end.toLong - r.start.toLong).abs / r.step.abs.toLong + 1L
+ case _ => if (r.start == r.end) 1L else 0L
+ }
+ } else {
+ (r.end.toLong - r.start.toLong < 0, r.step < 0) match {
+ case (true, true) | (false, false) => (
+ (r.end.toLong - r.start.toLong).abs / r.step.abs.toLong
+ + (if ((r.end.toLong - r.start.toLong).abs % r.step.abs.toLong > 0L) 1L else 0L)
+ )
+ case _ => 0L
+ }
+ }
+
+ def within(r: Range, x: Int) = if (r.step > 0)
+ r.start <= x && (if (r.isInclusive) x <= r.end else x < r.end)
+ else
+ r.start >= x && (if (r.isInclusive) x >= r.end else x > r.end)
+
+ def multiple(r: Range, x: Int) = (x.toLong - r.start) % r.step == 0
+
+ property("foreach.step") = forAllNoShrink(myGen) { r =>
+// println("foreach.step "+str(r))
+ var allValid = true
+ val cnt = new Counter(r)
+// println("--------------------")
+// println(r)
+ r foreach { x => cnt(x)
+// println(x + ", " + (x - r.start) + ", " + (x.toLong - r.start) + ", " + ((x.toLong - r.start) % r.step))
+ allValid &&= multiple(r, x)
+ }
+ allValid :| str(r)
+ }
+
+ property("foreach.inside.range") = forAll(myGen) { r =>
+// println("foreach.inside.range "+str(r))
+ var allValid = true
+ var last: Option[Int] = None
+ val cnt = new Counter(r)
+ r foreach { x => cnt(x)
+ allValid &&= within(r, x)
+ }
+ allValid :| str(r)
+ }
+
+ property("foreach.visited.size") = forAll(myGen) { r =>
+// println("foreach.visited.size "+str(r))
+ var visited = 0L
+ val cnt = new Counter(r)
+ r foreach { x => cnt(x)
+ visited += 1L
+ }
+// println("----------")
+// println(str(r))
+// println("size: " + r.size)
+// println("expected: " + expectedSize(r))
+// println("visited: " + visited)
+ (visited == expectedSize(r)) :| str(r)
+ }
+
+ property("sum") = forAll(myGen) { r =>
+// println("----------")
+// println("sum "+str(r))
+ val rSum = r.sum
+ val expected = r.length match {
+ case 0 => 0
+ case 1 => r.head
+ case x if x < 1000 =>
+ // Explicit sum, to guard against having the same mistake in both the
+ // range implementation and test implementation of sum formula.
+ // (Yes, this happened before.)
+ var i = r.head
+ var s = 0L
+ var n = x
+ while (n > 0) {
+ s += i
+ i += r.step
+ n -= 1
+ }
+ s.toInt
+ case _ =>
+ // Make sure head + last doesn't overflow!
+ ((r.head.toLong + r.last) * r.length / 2).toInt
+ }
+// println("size: " + r.length)
+// println("expected: " + expected)
+// println("obtained: " + rSum)
+
+ (rSum == expected) :| str(r)
+ }
+
+/* checks that sum respects custom Numeric */
+ property("sumCustomNumeric") = forAll(myGen) { r =>
+ val mod = 65536
+ object mynum extends Numeric[Int] {
+ def plus(x: Int, y: Int): Int = (x + y) % mod
+ override def zero = 0
+
+ def fromInt(x: Int): Int = ???
+ def minus(x: Int, y: Int): Int = ???
+ def negate(x: Int): Int = ???
+ def times(x: Int, y: Int): Int = ???
+ def toDouble(x: Int): Double = ???
+ def toFloat(x: Int): Float = ???
+ def toInt(x: Int): Int = ((x % mod) + mod * 2) % mod
+ def toLong(x: Int): Long = ???
+ def compare(x: Int, y: Int): Int = ???
+ }
+
+ val rSum = r.sum(mynum)
+ val expected = mynum.toInt(r.sum)
+
+ (rSum == expected) :| str(r)
+ }
+
+
+ property("length") = forAll(myGen suchThat (r => expectedSize(r).toInt == expectedSize(r))) { r =>
+// println("length "+str(r))
+ (r.length == expectedSize(r)) :| str(r)
+ }
+
+ property("isEmpty") = forAll(myGen suchThat (r => expectedSize(r).toInt == expectedSize(r))) { r =>
+// println("isEmpty "+str(r))
+ (r.isEmpty == (expectedSize(r) == 0L)) :| str(r)
+ }
+
+ property("contains") = forAll(myGen, arbInt.arbitrary) { (r, x) =>
+// println("contains "+str(r))
+// println("----------------")
+// println(str(r))
+// println(x)
+// println("within: " + within(r, x))
+// println("multiple: " + multiple(r, x))
+// println("contains: " + r.contains(x))
+ ((within(r, x) && multiple(r, x)) == r.contains(x)) :| str(r)+": "+x
+ }
+
+ property("take") = forAll(myGen suchThat (r => expectedSize(r).toInt == expectedSize(r)), arbInt.arbitrary) { (r, x) =>
+// println("take "+str(r))
+ val t = r take x
+ (t.size == (0 max x min r.size) && t.start == r.start && t.step == r.step) :| str(r)+" / "+str(t)+": "+x
+ }
+
+ property("init") = forAll(myGen suchThat (r => expectedSize(r).toInt == expectedSize(r))) { r =>
+// println("init "+str(r))
+ (r.size == 0) || {
+ val t = r.init
+ (t.size + 1 == r.size) && (t.isEmpty || t.head == r.head)
+ }
+ }
+
+ property("takeWhile") = forAll(myGen suchThat (r => expectedSize(r).toInt == expectedSize(r)), arbInt.arbitrary) { (r, x) =>
+// println("takeWhile "+str(r))
+ val t = (if (r.step > 0) r takeWhile (_ <= x) else r takeWhile(_ >= x))
+ if (r.size == 0) {
+ (t.size == 0) :| str(r)+" / "+str(t)+": "+x
+ } else {
+ val t2 = (if (r.step > 0) Range(r.start, x min r.last, r.step).inclusive else Range(r.start, x max r.last, r.step).inclusive)
+ (t.start == r.start && t.size == t2.size && t.step == r.step) :| str(r)+" / "+str(t)+" / "+str(t2)+": "+x
+ }
+ }
+
+ property("reverse.toSet.equal") = forAll(myGen) { r =>
+// println("reverse.toSet.equal "+str(r))
+ val reversed = r.reverse
+ val aresame = r.toSet == reversed.toSet
+ if (!aresame) {
+ println(str(r))
+ println(r)
+ println(reversed)
+ println(r.toSet)
+ println(reversed.toSet)
+ }
+ aresame :| str(r)
+ }
+}
+
+object NormalRangeTest extends RangeTest("normal") {
+ override def myGen = genReasonableSizeRange
+ def genOne = for {
+ start <- arbitrary[Int]
+ end <- arbitrary[Int]
+ if (start.toLong - end.toLong).abs < Int.MaxValue.toLong
+ } yield Range(start, end, if (start < end) 1 else - 1)
+ property("by 1.size + 1 == inclusive.size") = forAll(genOne) { r =>
+ (r.size + 1 == r.inclusive.size) :| str(r)
+ }
+}
+
+object InclusiveRangeTest extends RangeTest("inclusive") {
+ override def myGen = for (r <- genReasonableSizeRange) yield r.inclusive
+}
+
+object ByOneRangeTest extends RangeTest("byOne") {
+ override def myGen = genRangeByOne
+}
+
+object InclusiveByOneRangeTest extends RangeTest("inclusiveByOne") {
+ override def myGen = for (r <- genRangeByOne) yield r.inclusive
+}
+
+object SmallValuesRange extends RangeTest("smallValues") {
+ override def myGen = genSmallRange
+}
+
+object TooLargeRange extends Properties("Too Large Range") {
+ val genTooLargeStart = for {
+ start <- choose(-Int.MinValue, 0)
+ } yield start
+
+ property("Too large range throws exception") = forAll(genTooLargeStart) { start =>
+ try {
+ val r = Range.inclusive(start, Int.MaxValue, 1)
+ val l = r.length
+ println("how here? length = " + l + ", r = " + r.toString)
+ false
+ }
+ catch { case _: IllegalArgumentException => true }
+ }
+}
+
+/* Mini-benchmark
+def testRange(i: Int, j: Int, k: Int) = {
+ var count = 0
+ for {
+ vi <- 0 to i
+ vj <- 0 to j
+ vk <- 0 to k
+ } { count += 1 }
+}
+
+testRange(10, 1000, 10000)
+testRange(10000, 1000, 10)
+*/
+
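A minimal sketch, not part of the patch, of the size bookkeeping that expectedSize encodes, on one concrete range: for an exclusive range the count is |end - start| / |step|, plus one if there is a remainder.

    val r = Range(0, 10, 3)           // 0, 3, 6, 9
    assert(r.length == 10 / 3 + 1)    // 3 + 1 = 4, the remainder adds one element
    assert(r.inclusive.length == 4)   // 10 is not hit by the step, so inclusive adds nothing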
diff --git a/test/scalacheck/redblacktree.scala b/test/scalacheck/redblacktree.scala
new file mode 100644
index 0000000000..09c3839752
--- /dev/null
+++ b/test/scalacheck/redblacktree.scala
@@ -0,0 +1,247 @@
+package scala.collection.immutable.redblacktree
+
+import collection.immutable.{RedBlackTree => RB}
+import org.scalacheck._
+import Prop._
+import Gen._
+
+/*
+Properties of a Red & Black Tree:
+
+A node is either red or black.
+The root is black. (This rule is used in some definitions and not others. Since the
+root can always be changed from red to black but not necessarily vice-versa this
+rule has little effect on analysis.)
+All leaves are black.
+Both children of every red node are black.
+Every simple path from a given node to any of its descendant leaves contains the same number of black nodes.
+*/
+
+abstract class RedBlackTreeTest extends Properties("RedBlackTree") {
+ def minimumSize = 0
+ def maximumSize = 5
+
+ import RB._
+
+ def nodeAt[A](tree: Tree[String, A], n: Int): Option[(String, A)] = if (n < iterator(tree).size && n >= 0)
+ Some(iterator(tree).drop(n).next)
+ else
+ None
+
+ def treeContains[A](tree: Tree[String, A], key: String) = iterator(tree).map(_._1) contains key
+
+ def height(tree: Tree[_, _]): Int = if (tree eq null) 0 else (1 + math.max(height(tree.left), height(tree.right)))
+
+ def mkTree(level: Int, parentIsBlack: Boolean = false, label: String = ""): Gen[Tree[String, Int]] =
+ if (level == 0) {
+ const(null)
+ } else {
+ for {
+ oddOrEven <- choose(0, 2)
+ tryRed = oddOrEven.sample.get % 2 == 0 // work around arbitrary[Boolean] bug
+ isRed = parentIsBlack && tryRed
+ nextLevel = if (isRed) level else level - 1
+ left <- mkTree(nextLevel, !isRed, label + "L")
+ right <- mkTree(nextLevel, !isRed, label + "R")
+ } yield {
+ if (isRed)
+ RedTree(label + "N", 0, left, right)
+ else
+ BlackTree(label + "N", 0, left, right)
+ }
+ }
+
+ def genTree = for {
+ depth <- choose(minimumSize, maximumSize + 1)
+ tree <- mkTree(depth)
+ } yield tree
+
+ type ModifyParm
+ def genParm(tree: Tree[String, Int]): Gen[ModifyParm]
+ def modify(tree: Tree[String, Int], parm: ModifyParm): Tree[String, Int]
+
+ def genInput: Gen[(Tree[String, Int], ModifyParm, Tree[String, Int])] = for {
+ tree <- genTree
+ parm <- genParm(tree)
+ } yield (tree, parm, modify(tree, parm))
+}
+
+trait RedBlackTreeInvariants {
+ self: RedBlackTreeTest =>
+
+ import RB._
+
+ def rootIsBlack[A](t: Tree[String, A]) = isBlack(t)
+
+ def areAllLeavesBlack[A](t: Tree[String, A]): Boolean = t match {
+ case null => isBlack(t)
+ case ne => List(ne.left, ne.right) forall areAllLeavesBlack
+ }
+
+ def areRedNodeChildrenBlack[A](t: Tree[String, A]): Boolean = t match {
+ case RedTree(_, _, left, right) => List(left, right) forall (t => isBlack(t) && areRedNodeChildrenBlack(t))
+ case BlackTree(_, _, left, right) => List(left, right) forall areRedNodeChildrenBlack
+ case null => true
+ }
+
+ def blackNodesToLeaves[A](t: Tree[String, A]): List[Int] = t match {
+ case null => List(1)
+ case BlackTree(_, _, left, right) => List(left, right) flatMap blackNodesToLeaves map (_ + 1)
+ case RedTree(_, _, left, right) => List(left, right) flatMap blackNodesToLeaves
+ }
+
+ def areBlackNodesToLeavesEqual[A](t: Tree[String, A]): Boolean = t match {
+ case null => true
+ case ne =>
+ (
+ blackNodesToLeaves(ne).distinct.size == 1
+ && areBlackNodesToLeavesEqual(ne.left)
+ && areBlackNodesToLeavesEqual(ne.right)
+ )
+ }
+
+ def orderIsPreserved[A](t: Tree[String, A]): Boolean =
+ iterator(t) zip iterator(t).drop(1) forall { case (x, y) => x._1 < y._1 }
+
+ def heightIsBounded(t: Tree[_, _]): Boolean = height(t) <= (2 * (32 - Integer.numberOfLeadingZeros(count(t) + 2)) - 2)
+
+ def setup(invariant: Tree[String, Int] => Boolean) = forAll(genInput) { case (tree, parm, newTree) =>
+ invariant(newTree)
+ }
+
+ property("root is black") = setup(rootIsBlack)
+ property("all leaves are black") = setup(areAllLeavesBlack)
+ property("children of red nodes are black") = setup(areRedNodeChildrenBlack)
+ property("black nodes are balanced") = setup(areBlackNodesToLeavesEqual)
+ property("ordering of keys is preserved") = setup(orderIsPreserved)
+ property("height is bounded") = setup(heightIsBounded)
+}
+
+object TestInsert extends RedBlackTreeTest with RedBlackTreeInvariants {
+ import RB._
+
+ override type ModifyParm = Int
+ override def genParm(tree: Tree[String, Int]): Gen[ModifyParm] = choose(0, iterator(tree).size + 1)
+ override def modify(tree: Tree[String, Int], parm: ModifyParm): Tree[String, Int] = update(tree, generateKey(tree, parm), 0, true)
+
+ def generateKey(tree: Tree[String, Int], parm: ModifyParm): String = nodeAt(tree, parm) match {
+ case Some((key, _)) => key.init.mkString + "MN"
+ case None => nodeAt(tree, parm - 1) match {
+ case Some((key, _)) => key.init.mkString + "RN"
+ case None => "N"
+ }
+ }
+
+ property("update adds elements") = forAll(genInput) { case (tree, parm, newTree) =>
+ treeContains(newTree, generateKey(tree, parm))
+ }
+}
+
+object TestModify extends RedBlackTreeTest {
+ import RB._
+
+ def newValue = 1
+ override def minimumSize = 1
+ override type ModifyParm = Int
+ override def genParm(tree: Tree[String, Int]): Gen[ModifyParm] = choose(0, iterator(tree).size)
+ override def modify(tree: Tree[String, Int], parm: ModifyParm): Tree[String, Int] = nodeAt(tree, parm) map {
+ case (key, _) => update(tree, key, newValue, true)
+ } getOrElse tree
+
+ property("update modifies values") = forAll(genInput) { case (tree, parm, newTree) =>
+ nodeAt(tree,parm) forall { case (key, _) =>
+ iterator(newTree) contains (key, newValue)
+ }
+ }
+}
+
+object TestDelete extends RedBlackTreeTest with RedBlackTreeInvariants {
+ import RB._
+
+ override def minimumSize = 1
+ override type ModifyParm = Int
+ override def genParm(tree: Tree[String, Int]): Gen[ModifyParm] = choose(0, iterator(tree).size)
+ override def modify(tree: Tree[String, Int], parm: ModifyParm): Tree[String, Int] = nodeAt(tree, parm) map {
+ case (key, _) => delete(tree, key)
+ } getOrElse tree
+
+ property("delete removes elements") = forAll(genInput) { case (tree, parm, newTree) =>
+ nodeAt(tree, parm) forall { case (key, _) =>
+ !treeContains(newTree, key)
+ }
+ }
+}
+
+object TestRange extends RedBlackTreeTest with RedBlackTreeInvariants {
+ import RB._
+
+ override type ModifyParm = (Option[Int], Option[Int])
+ override def genParm(tree: Tree[String, Int]): Gen[ModifyParm] = for {
+ from <- choose(0, iterator(tree).size)
+ to <- choose(0, iterator(tree).size) suchThat (from <=)
+ optionalFrom <- oneOf(Some(from), None, Some(from)) // Double Some(n) to get around a bug
+ optionalTo <- oneOf(Some(to), None, Some(to)) // Double Some(n) to get around a bug
+ } yield (optionalFrom, optionalTo)
+
+ override def modify(tree: Tree[String, Int], parm: ModifyParm): Tree[String, Int] = {
+ val from = parm._1 flatMap (nodeAt(tree, _) map (_._1))
+ val to = parm._2 flatMap (nodeAt(tree, _) map (_._1))
+ rangeImpl(tree, from, to)
+ }
+
+ property("range boundaries respected") = forAll(genInput) { case (tree, parm, newTree) =>
+ val from = parm._1 flatMap (nodeAt(tree, _) map (_._1))
+ val to = parm._2 flatMap (nodeAt(tree, _) map (_._1))
+ ("lower boundary" |: (from forall ( key => keysIterator(newTree) forall (key <=)))) &&
+ ("upper boundary" |: (to forall ( key => keysIterator(newTree) forall (key >))))
+ }
+
+ property("range returns all elements") = forAll(genInput) { case (tree, parm, newTree) =>
+ val from = parm._1 flatMap (nodeAt(tree, _) map (_._1))
+ val to = parm._2 flatMap (nodeAt(tree, _) map (_._1))
+ val filteredTree = (keysIterator(tree)
+ .filter(key => from forall (key >=))
+ .filter(key => to forall (key <))
+ .toList)
+ filteredTree == keysIterator(newTree).toList
+ }
+}
+
+object TestDrop extends RedBlackTreeTest with RedBlackTreeInvariants {
+ import RB._
+
+ override type ModifyParm = Int
+ override def genParm(tree: Tree[String, Int]): Gen[ModifyParm] = choose(0, iterator(tree).size)
+ override def modify(tree: Tree[String, Int], parm: ModifyParm): Tree[String, Int] = drop(tree, parm)
+
+ property("drop") = forAll(genInput) { case (tree, parm, newTree) =>
+ iterator(tree).drop(parm).toList == iterator(newTree).toList
+ }
+}
+
+object TestTake extends RedBlackTreeTest with RedBlackTreeInvariants {
+ import RB._
+
+ override type ModifyParm = Int
+ override def genParm(tree: Tree[String, Int]): Gen[ModifyParm] = choose(0, iterator(tree).size)
+ override def modify(tree: Tree[String, Int], parm: ModifyParm): Tree[String, Int] = take(tree, parm)
+
+ property("take") = forAll(genInput) { case (tree, parm, newTree) =>
+ iterator(tree).take(parm).toList == iterator(newTree).toList
+ }
+}
+
+object TestSlice extends RedBlackTreeTest with RedBlackTreeInvariants {
+ import RB._
+
+ override type ModifyParm = (Int, Int)
+ override def genParm(tree: Tree[String, Int]): Gen[ModifyParm] = for {
+ from <- choose(0, iterator(tree).size)
+ to <- choose(from, iterator(tree).size)
+ } yield (from, to)
+ override def modify(tree: Tree[String, Int], parm: ModifyParm): Tree[String, Int] = slice(tree, parm._1, parm._2)
+
+ property("slice") = forAll(genInput) { case (tree, parm, newTree) =>
+ iterator(tree).slice(parm._1, parm._2).toList == iterator(newTree).toList
+ }
+}
diff --git a/test/scalacheck/scala/collection/mutable/MutableTreeMap.scala b/test/scalacheck/scala/collection/mutable/MutableTreeMap.scala
new file mode 100644
index 0000000000..e3c19b8841
--- /dev/null
+++ b/test/scalacheck/scala/collection/mutable/MutableTreeMap.scala
@@ -0,0 +1,337 @@
+package scala.collection.mutable
+
+import java.io._
+
+import org.scalacheck._
+import org.scalacheck.Arbitrary._
+import org.scalacheck.Prop.forAll
+
+import scala.collection.generic.CanBuildFrom
+import scala.collection.immutable
+import scala.collection.mutable
+import scala.util.Try
+import scala.collection.mutable.{RedBlackTree => RB}
+
+trait Generators {
+
+ def genRedBlackTree[A: Arbitrary: Ordering, B: Arbitrary]: Gen[RB.Tree[A, B]] = {
+ import org.scalacheck.Gen._
+ for { entries <- listOf(arbitrary[(A, B)]) } yield {
+ val tree = RB.Tree.empty[A, B]
+ entries.foreach { case (k, v) => RB.insert(tree, k, v) }
+ tree
+ }
+ }
+
+ // Note: in scalacheck 1.12.2 tree maps can be automatically generated without the need for custom
+ // machinery
+ def genTreeMap[A: Arbitrary: Ordering, B: Arbitrary]: Gen[mutable.TreeMap[A, B]] = {
+ import org.scalacheck.Gen._
+ for {
+ keys <- listOf(arbitrary[A])
+ values <- listOfN(keys.size, arbitrary[B])
+ } yield mutable.TreeMap(keys zip values: _*)
+ }
+
+ implicit def arbRedBlackTree[A: Arbitrary: Ordering, B: Arbitrary] = Arbitrary(genRedBlackTree[A, B])
+ implicit def arbTreeMap[A: Arbitrary: Ordering, B: Arbitrary] = Arbitrary(genTreeMap[A, B])
+}
+
+object RedBlackTreeProperties extends Properties("mutable.RedBlackTree") with Generators {
+ type K = String
+ type V = Int
+
+ property("initial invariants") = forAll { (tree: RB.Tree[K, V]) =>
+ RB.isValid(tree)
+ }
+
+ property("insert") = forAll { (tree: RB.Tree[K, V], entries: Seq[(K, V)]) =>
+ entries.foreach { case (k, v) => RB.insert(tree, k, v) }
+ RB.isValid(tree) && entries.toMap.forall { case (k, v) => RB.get(tree, k) == Some(v) }
+ }
+
+ property("delete") = forAll { (tree: RB.Tree[K, V], ks: Seq[K]) =>
+ ks.foreach { k => RB.delete(tree, k) }
+ RB.isValid(tree) && ks.toSet.forall { k => RB.get(tree, k) == None }
+ }
+
+ property("insert & delete") = forAll { (tree: RB.Tree[K, V], ops: Seq[Either[(K, V), K]]) =>
+ ops.foreach {
+ case Left((k, v)) => RB.insert(tree, k, v)
+ case Right(k) => RB.delete(tree, k)
+ }
+ RB.isValid(tree)
+ }
+
+ property("min") = forAll { (entries: Seq[(K, V)]) =>
+ val tree = RB.Tree.empty[K, V]
+ entries.foreach { case (k, v) => RB.insert(tree, k, v) }
+ RB.min(tree) == (if (entries.isEmpty) None else Some(entries.toMap.min))
+ }
+
+ property("max") = forAll { (entries: Seq[(K, V)]) =>
+ val tree = RB.Tree.empty[K, V]
+ entries.foreach { case (k, v) => RB.insert(tree, k, v) }
+ RB.max(tree) == (if (entries.isEmpty) None else Some(entries.toMap.max))
+ }
+}
+
+object MutableTreeMapProperties extends Properties("mutable.TreeMap") with Generators {
+ type K = String
+ type V = Int
+
+ property("get, contains") = forAll { (allEntries: Map[K, V]) =>
+ val entries = allEntries.take(allEntries.size / 2)
+
+ val map = mutable.TreeMap[K, V]()
+ map ++= entries
+
+ allEntries.forall { case (k, v) =>
+ map.contains(k) == entries.contains(k) &&
+ map.get(k) == entries.get(k)
+ }
+ }
+
+ property("size, isEmpty") = forAll { (entries: Map[K, V]) =>
+ val map = mutable.TreeMap[K, V]()
+ map ++= entries
+ map.size == entries.size && map.isEmpty == entries.isEmpty
+ }
+
+ property("+=") = forAll { (map: mutable.TreeMap[K, V], k: K, v: V) =>
+ val oldSize = map.size
+ val containedKeyBefore = map.contains(k)
+ val newExpectedSize = if(containedKeyBefore) oldSize else oldSize + 1
+
+ map += (k -> v)
+ map.contains(k) && map.get(k) == Some(v) && map.size == newExpectedSize
+ }
+
+ property("++=") = forAll { (map: mutable.TreeMap[K, V], entries: Seq[(K, V)]) =>
+ val oldEntries = map.toMap
+ map ++= entries
+ (oldEntries ++ entries).forall { case (k, v) => map.get(k) == Some(v) }
+ }
+
+ property("-=") = forAll { (map: mutable.TreeMap[K, V], k: K) =>
+ val oldSize = map.size
+ val containedKeyBefore = map.contains(k)
+ val newExpectedSize = if(containedKeyBefore) oldSize - 1 else oldSize
+
+ map -= k
+ !map.contains(k) && map.get(k) == None && map.size == newExpectedSize
+ }
+
+ property("--=") = forAll { (map: mutable.TreeMap[K, V], ks: Seq[K]) =>
+ val oldElems = map.toList
+ map --= ks
+ val deletedElems = ks.toSet
+ oldElems.forall { case (k, v) => map.get(k) == (if(deletedElems(k)) None else Some(v)) }
+ }
+
+ property("iterator") = forAll { (entries: Map[K, V]) =>
+ val map = mutable.TreeMap[K, V]()
+ map ++= entries
+
+ map.iterator.toSeq == entries.toSeq.sorted
+ }
+
+ property("iteratorFrom") = forAll { (entries: Map[K, V], k: K) =>
+ val map = mutable.TreeMap[K, V]()
+ map ++= entries
+
+ map.iteratorFrom(k).toSeq == entries.filterKeys(_ >= k).toSeq.sorted
+ }
+
+ property("keysIteratorFrom") = forAll { (entries: Map[K, V], k: K) =>
+ val map = mutable.TreeMap[K, V]()
+ map ++= entries
+
+ map.keysIteratorFrom(k).toSeq == entries.keysIterator.filter(_ >= k).toSeq.sorted
+ }
+
+ property("valuesIteratorFrom") = forAll { (entries: Map[K, V], k: K) =>
+ val map = mutable.TreeMap[K, V]()
+ map ++= entries
+
+ map.valuesIteratorFrom(k).toSeq == entries.filterKeys(_ >= k).toSeq.sorted.map(_._2)
+ }
+
+ property("headOption") = forAll { (map: mutable.TreeMap[K, V]) =>
+ map.headOption == Try(map.iterator.next()).toOption
+ }
+
+ property("lastOption") = forAll { (map: mutable.TreeMap[K, V]) =>
+ map.lastOption == Try(map.iterator.max).toOption
+ }
+
+ property("clear") = forAll { (map: mutable.TreeMap[K, V]) =>
+ map.clear()
+ map.isEmpty && map.size == 0
+ }
+
+ property("serializable") = forAll { (map: mutable.TreeMap[K, V]) =>
+ val bytesOut = new ByteArrayOutputStream()
+ val out = new ObjectOutputStream(bytesOut)
+ out.writeObject(map)
+ val bytes = bytesOut.toByteArray
+
+ val in = new ObjectInputStream(new ByteArrayInputStream(bytes))
+ val sameMap = in.readObject().asInstanceOf[mutable.TreeMap[K, V]]
+ map.iterator.toSeq == sameMap.iterator.toSeq
+ }
+
+ property("same behavior as immutable.TreeMap") = forAll { ops: Seq[Either[(K, V), K]] =>
+ var imap = immutable.TreeMap[K, V]()
+ val mmap = mutable.TreeMap[K, V]()
+
+ ops.foreach {
+ case Left((k, v)) => imap += k -> v; mmap += k -> v
+ case Right(k) => imap -= k; mmap -= k
+ }
+
+ imap.toList == mmap.toList
+ }
+}
+
+object MutableTreeMapViewProperties extends Properties("mutable.TreeMapView") with Generators {
+ type K = String
+ type V = Int
+
+ implicit val ord = implicitly[Ordering[K]]
+
+ def in(key: K, from: Option[K], until: Option[K]) =
+ from.fold(true)(_ <= key) && until.fold(true)(_ > key)
+
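+ // Entries whose keys fall in the half-open window [from, until); the CanBuildFrom keeps
+ // the result in the same collection type as the input.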
+ def entriesInView[This <: TraversableOnce[(K, V)], That](entries: This, from: Option[K], until: Option[K])(implicit bf: CanBuildFrom[This, (K, V), That]) = {
+ (bf.apply(entries) ++= entries.filter { case (k, _) => in(k, from, until) }).result()
+ }
+
+ property("get, contains") = forAll { (allEntries: Map[K, V], from: Option[K], until: Option[K]) =>
+ val entries = allEntries.take(allEntries.size / 2)
+
+ val map = mutable.TreeMap[K, V]()
+ map ++= entries
+
+ val mapView = map.rangeImpl(from, until)
+ allEntries.forall { case (k, v) =>
+ mapView.contains(k) == (in(k, from, until) && entries.contains(k)) &&
+ mapView.get(k) == (if(in(k, from, until)) entries.get(k) else None)
+ }
+ }
+
+ property("size, isEmpty") = forAll { (entries: Map[K, V], from: Option[K], until: Option[K]) =>
+ val map = mutable.TreeMap[K, V]()
+ map ++= entries
+
+ val mapView = map.rangeImpl(from, until)
+ mapView.size == entriesInView(entries, from, until).size &&
+ mapView.isEmpty == !entries.exists { kv => in(kv._1, from, until) }
+ }
+
+ property("+=") = forAll { (map: mutable.TreeMap[K, V], k: K, v: V, from: Option[K], until: Option[K]) =>
+ val oldSize = map.size
+ val containedKeyBefore = map.contains(k)
+ val newExpectedSize = if(containedKeyBefore) oldSize else oldSize + 1
+ val isInRange = in(k, from, until)
+
+ val mapView = map.rangeImpl(from, until)
+ mapView += (k -> v)
+
+ map.contains(k) && map.get(k) == Some(v) && map.size == newExpectedSize &&
+ mapView.contains(k) == isInRange &&
+ mapView.get(k) == (if(isInRange) Some(v) else None)
+ }
+
+ property("++=") = forAll { (map: mutable.TreeMap[K, V], entries: Seq[(K, V)], from: Option[K], until: Option[K]) =>
+ val mapView = map.rangeImpl(from, until)
+ mapView ++= entries
+ entries.toMap.forall { case (k, v) =>
+ map.get(k) == Some(v) &&
+ mapView.get(k) == (if (in(k, from, until)) Some(v) else None)
+ }
+ }
+
+ property("-=") = forAll { (map: mutable.TreeMap[K, V], k: K, from: Option[K], until: Option[K]) =>
+ val oldSize = map.size
+ val containedKeyBefore = map.contains(k)
+ val newExpectedSize = if(containedKeyBefore) oldSize - 1 else oldSize
+
+ val mapView = map.rangeImpl(from, until)
+ mapView -= k
+
+ !map.contains(k) && map.get(k) == None && map.size == newExpectedSize &&
+ !mapView.contains(k) &&
+ mapView.get(k) == None
+ }
+
+ property("--=") = forAll { (map: mutable.TreeMap[K, V], ks: Seq[K], from: Option[K], until: Option[K]) =>
+ val mapView = map.rangeImpl(from, until)
+ mapView --= ks
+ ks.toSet.forall { k => map.get(k) == None && mapView.get(k) == None }
+ }
+
+ property("iterator") = forAll { (entries: Map[K, V], from: Option[K], until: Option[K]) =>
+ val map = mutable.TreeMap[K, V]()
+ map ++= entries
+
+ val mapView = map.rangeImpl(from, until)
+ mapView.iterator.toSeq == entriesInView(entries, from, until).toSeq.sorted
+ }
+
+ property("iteratorFrom") = forAll { (entries: Map[K, V], k: K, from: Option[K], until: Option[K]) =>
+ val map = mutable.TreeMap[K, V]()
+ map ++= entries
+
+ val mapView = map.rangeImpl(from, until)
+ val newLower = Some(from.fold(k)(ord.max(_, k)))
+ mapView.iteratorFrom(k).toSeq == entriesInView(entries, newLower, until).toSeq.sorted
+ }
+
+ property("keysIteratorFrom") = forAll { (entries: Map[K, V], k: K, from: Option[K], until: Option[K]) =>
+ val map = mutable.TreeMap[K, V]()
+ map ++= entries
+
+ val mapView = map.rangeImpl(from, until)
+ val newLower = Some(from.fold(k)(ord.max(_, k)))
+ mapView.keysIteratorFrom(k).toSeq == entriesInView(entries, newLower, until).toSeq.sorted.map(_._1)
+ }
+
+ property("valuesIteratorFrom") = forAll { (entries: Map[K, V], k: K, from: Option[K], until: Option[K]) =>
+ val map = mutable.TreeMap[K, V]()
+ map ++= entries
+
+ val mapView = map.rangeImpl(from, until)
+ val newLower = Some(from.fold(k)(ord.max(_, k)))
+ mapView.valuesIteratorFrom(k).toSeq == entriesInView(entries, newLower, until).toSeq.sorted.map(_._2)
+ }
+
+ property("headOption") = forAll { (map: mutable.TreeMap[K, V], from: Option[K], until: Option[K]) =>
+ val mapView = map.rangeImpl(from, until)
+ mapView.headOption == Try(entriesInView(map.iterator, from, until).next()).toOption
+ }
+
+ property("lastOption") = forAll { (map: mutable.TreeMap[K, V], from: Option[K], until: Option[K]) =>
+ val mapView = map.rangeImpl(from, until)
+ mapView.lastOption == Try(entriesInView(map.iterator, from, until).max).toOption
+ }
+
+ property("clear") = forAll { (map: mutable.TreeMap[K, V], from: Option[K], until: Option[K]) =>
+ val mapView = map.rangeImpl(from, until)
+ mapView.clear()
+ map.isEmpty && mapView.isEmpty && map.size == 0 && mapView.size == 0
+ }
+
+ property("serializable") = forAll { (map: mutable.TreeMap[K, V], from: Option[K], until: Option[K]) =>
+ val mapView = map.rangeImpl(from, until)
+
+ val bytesOut = new ByteArrayOutputStream()
+ val out = new ObjectOutputStream(bytesOut)
+ out.writeObject(mapView)
+ val bytes = bytesOut.toByteArray
+
+ val in = new ObjectInputStream(new ByteArrayInputStream(bytes))
+ val sameMapView = in.readObject().asInstanceOf[mutable.TreeMap[K, V]]
+ mapView.iterator.toSeq == sameMapView.iterator.toSeq
+ }
+}
diff --git a/test/scalacheck/scala/collection/mutable/MutableTreeSet.scala b/test/scalacheck/scala/collection/mutable/MutableTreeSet.scala
new file mode 100644
index 0000000000..d2f5a238c0
--- /dev/null
+++ b/test/scalacheck/scala/collection/mutable/MutableTreeSet.scala
@@ -0,0 +1,209 @@
+package scala.collection.mutable
+
+import java.io._
+
+import org.scalacheck._
+import org.scalacheck.Arbitrary._
+import org.scalacheck.Prop.forAll
+
+import scala.collection.generic.CanBuildFrom
+import scala.collection.immutable
+import scala.collection.mutable
+import scala.util.Try
+
+object MutableTreeSetProperties extends Properties("mutable.TreeSet") {
+ type K = String
+
+ property("size, isEmpty") = forAll { (elems: Set[K]) =>
+ val set = mutable.TreeSet[K]()
+ set ++= elems
+ set.size == elems.size && set.isEmpty == elems.isEmpty
+ }
+
+ property("+=") = forAll { (set: mutable.TreeSet[K], k: K) =>
+ val oldSize = set.size
+ val containedKeyBefore = set.contains(k)
+ val newExpectedSize = if(containedKeyBefore) oldSize else oldSize + 1
+
+ set += k
+ set.contains(k) && set.size == newExpectedSize
+ }
+
+ property("++=") = forAll { (set: mutable.TreeSet[K], ks: Seq[K]) =>
+ val oldElems = set.toList
+ set ++= ks
+ (oldElems ++ ks).forall(set.contains)
+ }
+
+ property("-=") = forAll { (set: mutable.TreeSet[K], k: K) =>
+ val oldSize = set.size
+ val containedKeyBefore = set.contains(k)
+ val newExpectedSize = if(containedKeyBefore) oldSize - 1 else oldSize
+
+ set -= k
+ !set.contains(k) && set.size == newExpectedSize
+ }
+
+ property("--=") = forAll { (set: mutable.TreeSet[K], ks: Seq[K]) =>
+ val oldElems = set.toList
+ set --= ks
+ val deletedElems = ks.toSet
+ oldElems.forall { e => set.contains(e) == !deletedElems(e) }
+ }
+
+ property("iterator") = forAll { (ks: Set[K]) =>
+ val set = mutable.TreeSet[K]()
+ set ++= ks
+
+ set.iterator.toSeq == ks.toSeq.sorted
+ }
+
+ property("iteratorFrom, keysIteratorFrom") = forAll { (ks: Set[K], k: K) =>
+ val set = mutable.TreeSet[K]()
+ set ++= ks
+
+ set.iteratorFrom(k).toSeq == ks.filter(_ >= k).toSeq.sorted &&
+ set.keysIteratorFrom(k).toSeq == ks.filter(_ >= k).toSeq.sorted
+ }
+
+ property("headOption") = forAll { (set: mutable.TreeSet[K]) =>
+ set.headOption == Try(set.iterator.next()).toOption
+ }
+
+ property("lastOption") = forAll { (set: mutable.TreeSet[K]) =>
+ set.lastOption == Try(set.iterator.max).toOption
+ }
+
+ property("clear") = forAll { (set: mutable.TreeSet[K]) =>
+ set.clear()
+ set.isEmpty && set.size == 0
+ }
+
+ property("serializable") = forAll { (set: mutable.TreeSet[K]) =>
+ val bytesOut = new ByteArrayOutputStream()
+ val out = new ObjectOutputStream(bytesOut)
+ out.writeObject(set)
+ val bytes = bytesOut.toByteArray
+
+ val in = new ObjectInputStream(new ByteArrayInputStream(bytes))
+ val sameSet = in.readObject().asInstanceOf[mutable.TreeSet[K]]
+ set.iterator.toSeq == sameSet.iterator.toSeq
+ }
+
+ property("same behavior as immutable.TreeMap") = forAll { ops: Seq[Either[K, K]] =>
+ var iset = immutable.TreeSet[K]()
+ val mset = mutable.TreeSet[K]()
+
+ ops.foreach {
+ case Left(k) => iset += k; mset += k
+ case Right(k) => iset -= k; mset -= k
+ }
+
+ iset.toList == mset.toList
+ }
+}
+
+object MutableTreeSetViewProperties extends Properties("mutable.TreeSetView") {
+ type K = String
+
+ implicit val ord = implicitly[Ordering[K]]
+
+ def in(key: K, from: Option[K], until: Option[K]) =
+ from.fold(true)(_ <= key) && until.fold(true)(_ > key)
+
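+ // Keys that fall in the half-open window [from, until), in the input's collection type.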
+ def keysInView[This <: TraversableOnce[K], That](keys: This, from: Option[K], until: Option[K])(implicit bf: CanBuildFrom[This, K, That]) = {
+ (bf.apply(keys) ++= keys.filter(in(_, from, until))).result()
+ }
+
+ property("size, isEmpty") = forAll { (keys: Set[K], from: Option[K], until: Option[K]) =>
+ val map = mutable.TreeSet[K]()
+ map ++= keys
+
+ val mapView = map.rangeImpl(from, until)
+ mapView.size == keysInView(keys, from, until).size &&
+ mapView.isEmpty == !keys.exists(in(_, from, until))
+ }
+
+ property("+=") = forAll { (set: mutable.TreeSet[K], k: K, from: Option[K], until: Option[K]) =>
+ val oldSize = set.size
+ val containedKeyBefore = set.contains(k)
+ val newExpectedSize = if(containedKeyBefore) oldSize else oldSize + 1
+ val isInRange = in(k, from, until)
+
+ val setView = set.rangeImpl(from, until)
+ setView += k
+
+ set.contains(k) && set.size == newExpectedSize && setView.contains(k) == isInRange
+ }
+
+ property("++=") = forAll { (set: mutable.TreeSet[K], ks: Seq[K], from: Option[K], until: Option[K]) =>
+ val setView = set.rangeImpl(from, until)
+ setView ++= ks
+ ks.toSet.forall { k =>
+ set.contains(k) && setView.contains(k) == in(k, from, until)
+ }
+ }
+
+ property("-=") = forAll { (set: mutable.TreeSet[K], k: K, from: Option[K], until: Option[K]) =>
+ val oldSize = set.size
+ val containedKeyBefore = set.contains(k)
+ val newExpectedSize = if(containedKeyBefore) oldSize - 1 else oldSize
+
+ val setView = set.rangeImpl(from, until)
+ setView -= k
+
+ !set.contains(k) && set.size == newExpectedSize && !setView.contains(k)
+ }
+
+ property("--=") = forAll { (set: mutable.TreeSet[K], ks: Seq[K], from: Option[K], until: Option[K]) =>
+ val setView = set.rangeImpl(from, until)
+ setView --= ks
+ ks.toSet.forall { k => !set.contains(k) && !setView.contains(k) }
+ }
+
+ property("iterator") = forAll { (ks: Set[K], from: Option[K], until: Option[K]) =>
+ val set = mutable.TreeSet[K]()
+ set ++= ks
+
+ val setView = set.rangeImpl(from, until)
+ setView.iterator.toSeq == keysInView(ks, from, until).toSeq.sorted
+ }
+
+ property("iteratorFrom, keysIteratorFrom") = forAll { (ks: Set[K], k: K, from: Option[K], until: Option[K]) =>
+ val set = mutable.TreeSet[K]()
+ set ++= ks
+
+ val setView = set.rangeImpl(from, until)
+ val newLower = Some(from.fold(k)(ord.max(_, k)))
+ setView.iteratorFrom(k).toSeq == keysInView(ks, newLower, until).toSeq.sorted
+ }
+
+ property("headOption") = forAll { (set: mutable.TreeSet[K], from: Option[K], until: Option[K]) =>
+ val setView = set.rangeImpl(from, until)
+ setView.headOption == Try(keysInView(set.iterator, from, until).next()).toOption
+ }
+
+ property("lastOption") = forAll { (set: mutable.TreeSet[K], from: Option[K], until: Option[K]) =>
+ val setView = set.rangeImpl(from, until)
+ setView.lastOption == Try(keysInView(set.iterator, from, until).max).toOption
+ }
+
+ property("clear") = forAll { (set: mutable.TreeSet[K], from: Option[K], until: Option[K]) =>
+ val setView = set.rangeImpl(from, until)
+ setView.clear()
+ set.isEmpty && setView.isEmpty && set.size == 0 && setView.size == 0
+ }
+
+ property("serializable") = forAll { (set: mutable.TreeSet[K], from: Option[K], until: Option[K]) =>
+ val setView = set.rangeImpl(from, until)
+
+ val bytesOut = new ByteArrayOutputStream()
+ val out = new ObjectOutputStream(bytesOut)
+ out.writeObject(setView)
+ val bytes = bytesOut.toByteArray
+
+ val in = new ObjectInputStream(new ByteArrayInputStream(bytes))
+ val sameSetView = in.readObject().asInstanceOf[mutable.TreeSet[K]]
+ setView.iterator.toSeq == sameSetView.iterator.toSeq
+ }
+}
diff --git a/test/scalacheck/scala/collection/parallel/IntOperators.scala b/test/scalacheck/scala/collection/parallel/IntOperators.scala
new file mode 100644
index 0000000000..c7f43b6526
--- /dev/null
+++ b/test/scalacheck/scala/collection/parallel/IntOperators.scala
@@ -0,0 +1,110 @@
+package scala.collection.parallel.ops
+
+
+import scala.collection.parallel._
+
+
+trait IntOperators extends Operators[Int] {
+ def reduceOperators = List(_ + _, _ * _, math.min(_, _), math.max(_, _), _ ^ _)
+ def countPredicates = List(
+ x => true,
+ _ >= 0, _ < 0, _ < 50, _ < 500, _ < 5000, _ < 50000, _ % 2 == 0, _ == 99,
+ x => x > 50 && x < 150,
+ x => x > 350 && x < 550,
+ x => (x > 1000 && x < 1500) || (x > 400 && x < 500)
+ )
+ def forallPredicates = List(_ >= 0, _ < 0, _ % 2 == 0, _ != 55, _ != 505, _ != 5005)
+ def existsPredicates = List(_ >= 0, _ < 0, _ % 2 == 0, _ == 55, _ == 505, _ == 5005)
+ def findPredicates = List(_ >= 0, _ % 2 == 0, _ < 0, _ == 50, _ == 500, _ == 5000)
+ def mapFunctions = List(-_, math.abs(_), _ % 2, _ % 3, _ % 4, _ % 150, _ % 500)
+ def partialMapFunctions = List({case x => -x}, { case 0 => -1; case x if x > 0 => x + 1}, {case x if x % 3 == 0 => x / 3})
+ def flatMapFunctions = List(
+ (n: Int) => if (n < 0) List() else if (n % 2 == 0) List(1, 2, 3) else List(4, 5, 6),
+ (n: Int) => List[Int](),
+ (n: Int) => if (n == 0) List(1, 2, 3, 4, 5) else if (n < 0) List(1, 2, 3) else List()
+ )
+ def filterPredicates = List(
+ _ % 2 == 0, _ % 3 == 0,
+ _ % 4 != 0, _ % 17 != 0,
+ n => n > 50 && n < 100,
+ _ >= 0, _ < 0, _ == 99,
+ _ > 500, _ > 5000, _ > 50000,
+ _ < 500, _ < 50, _ < -50, _ < -5e5,
+ x => true, x => false,
+ x => x % 53 == 0 && x % 17 == 0
+ )
+ def filterNotPredicates = filterPredicates
+ def partitionPredicates = filterPredicates
+ def takeWhilePredicates = List(
+ _ != 50, _ != 500, _ != 5000, _ != 50000, _ % 2 == 0, _ % 3 == 1, _ % 47 != 0,
+ _ < 100, _ < 1000, _ < 10000, _ < 0,
+ _ < -100, _ < -1000, _ > -200, _ > -50,
+ n => -90 < n && n < -10,
+ n => 50 < n && n < 550,
+ n => 5000 < n && n < 7500,
+ n => -50 < n && n < 450
+ )
+ def dropWhilePredicates = takeWhilePredicates
+ def spanPredicates = takeWhilePredicates
+ def foldArguments = List(
+ (0, _ + _),
+ (1, _ * _),
+ (Int.MinValue, math.max(_, _)),
+ (Int.MaxValue, math.min(_, _))
+ )
+ def addAllTraversables = List(
+ List[Int](),
+ List(1),
+ List(1, 2),
+ List(1, 2, 3, 4, 5, 6, 7, 8, 9, 10),
+ Array.fill(1000)(1).toSeq
+ )
+ def newArray(sz: Int) = new Array[Int](sz)
+ def groupByFunctions = List(
+ _ % 2, _ % 3, _ % 5, _ % 10, _ % 154, _ % 3217,
+ _ * 2, _ + 1
+ )
+}
+
+
+trait IntSeqOperators extends IntOperators with SeqOperators[Int] {
+ def segmentLengthPredicates = List(
+ _ % 2 == 0, _ > 0, _ >= 0, _ < 0, _ <= 0, _ > -5000, _ > 5000, _ % 541 != 0, _ < -50, _ > 500,
+ n => -90 < n && n < -10, n => 500 < n && n < 1500
+ )
+ def indexWherePredicates = List(
+ _ % 2 == 0, _ % 11 == 0, _ % 123 == 0, _ % 901 == 0,
+ _ > 0, _ >= 0, _ < 0, _ <= 0,
+ _ > 50, _ > 500, _ > 5000,
+ _ < -10, _ < -100, _ < -1000,
+ n => n > 50 && n < 100,
+ n => n * n > 1000000 && n % 111 == 0
+ )
+ def lastIndexWherePredicates = List(
+ _ % 2 == 0, _ % 17 == 0, _ % 314 == 0, _ % 1017 == 0,
+ _ > 0, _ >= 0, _ < 0, _ <= 0,
+ _ > 50, _ > 500, _ > 5000,
+ _ < -20, _ < -200, _ < -2000,
+ _ == 0,
+ n => n > -40 && n < 40,
+ n => n > -80 && n < -10,
+ n => n > 110 && n < 150
+ )
+ def reverseMapFunctions = List(-_, n => n * n, _ + 1)
+ def sameElementsSeqs = List(
+ List[Int](),
+ List(1),
+ List(1, 2, 3, 4, 5, 6, 7, 8, 9),
+ Array.fill(150)(1).toSeq,
+ Array.fill(1000)(1).toSeq
+ )
+ def startEndSeqs = List(
+ Nil,
+ List(1),
+ List(1, 2, 3, 4, 5),
+ List(0, 1, 2, 3, 4, 5),
+ List(4, 5, 6, 7, 8, 9, 10),
+ List(4, 5, 6, 7, 8, 9, 0),
+ List(-4, -3, -2, -1)
+ )
+}
diff --git a/test/scalacheck/scala/collection/parallel/IntValues.scala b/test/scalacheck/scala/collection/parallel/IntValues.scala
new file mode 100644
index 0000000000..cab60ead76
--- /dev/null
+++ b/test/scalacheck/scala/collection/parallel/IntValues.scala
@@ -0,0 +1,29 @@
+package scala.collection.parallel.ops
+
+
+
+
+
+import org.scalacheck._
+import org.scalacheck.Gen
+import org.scalacheck.Gen._
+import org.scalacheck.Prop._
+import org.scalacheck.Properties
+import org.scalacheck.Arbitrary._
+
+
+
+
+trait IntValues {
+ def values = Seq(
+ arbitrary[Int],
+ arbitrary[Int] suchThat (_ >= 0),
+ arbitrary[Int] suchThat (_ < 0),
+ choose(0, 0),
+ choose(0, 10),
+ choose(0, 100),
+ choose(0, 1000) suchThat (_ % 2 == 0),
+ choose(0, 1000) suchThat (_ % 2 != 0),
+ choose(0, 1000) suchThat (n => (n % 2 == 0) || (n % 3 == 0))
+ )
+}
diff --git a/test/scalacheck/scala/collection/parallel/Operators.scala b/test/scalacheck/scala/collection/parallel/Operators.scala
new file mode 100644
index 0000000000..72133a5009
--- /dev/null
+++ b/test/scalacheck/scala/collection/parallel/Operators.scala
@@ -0,0 +1,36 @@
+package scala.collection.parallel
+
+
+
+
+trait Operators[T] {
+ def reduceOperators: List[(T, T) => T]
+ def countPredicates: List[T => Boolean]
+ def forallPredicates: List[T => Boolean]
+ def existsPredicates: List[T => Boolean]
+ def findPredicates: List[T => Boolean]
+ def mapFunctions: List[T => T]
+ def partialMapFunctions: List[PartialFunction[T, T]]
+ def flatMapFunctions: List[T => Traversable[T]]
+ def filterPredicates: List[T => Boolean]
+ def filterNotPredicates: List[T => Boolean]
+ def partitionPredicates: List[T => Boolean]
+ def takeWhilePredicates: List[T => Boolean]
+ def dropWhilePredicates: List[T => Boolean]
+ def spanPredicates: List[T => Boolean]
+ def foldArguments: List[(T, (T, T) => T)]
+ def addAllTraversables: List[Traversable[T]]
+ def newArray(sz: Int): Array[T]
+ def groupByFunctions: List[T => T]
+}
+
+
+
+trait SeqOperators[T] extends Operators[T] {
+ def segmentLengthPredicates: List[T => Boolean]
+ def indexWherePredicates: List[T => Boolean]
+ def lastIndexWherePredicates: List[T => Boolean]
+ def reverseMapFunctions: List[T => T]
+ def sameElementsSeqs: List[Seq[T]]
+ def startEndSeqs: List[Seq[T]]
+}
diff --git a/test/scalacheck/scala/collection/parallel/PairOperators.scala b/test/scalacheck/scala/collection/parallel/PairOperators.scala
new file mode 100644
index 0000000000..fe851114be
--- /dev/null
+++ b/test/scalacheck/scala/collection/parallel/PairOperators.scala
@@ -0,0 +1,101 @@
+package scala.collection.parallel.ops
+
+
+import scala.collection.parallel._
+
+
+trait PairOperators[K, V] extends Operators[(K, V)] {
+ def koperators: Operators[K]
+ def voperators: Operators[V]
+
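+ // Pair up key predicates with value predicates; the combined predicate on (K, V) holds
+ // only when both component predicates hold.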
+ private def zipPredicates(kps: List[K => Boolean], vps: List[V => Boolean]): List[((K, V)) => Boolean] = for {
+ (kp, vp) <- kps zip vps
+ } yield new Function1[(K, V), Boolean] {
+ def apply(kv: (K, V)) = kp(kv._1) && vp(kv._2)
+ }
+
+ /* operators */
+
+ def reduceOperators = for {
+ (kop, vop) <- koperators.reduceOperators zip voperators.reduceOperators
+ } yield new Function2[(K, V), (K, V), (K, V)] {
+ def apply(kv1: (K, V), kv2: (K, V)) = (kop(kv1._1, kv2._1), vop(kv1._2, kv2._2))
+ }
+
+ def countPredicates = zipPredicates(koperators.countPredicates, voperators.countPredicates)
+
+ def forallPredicates = zipPredicates(koperators.forallPredicates, voperators.forallPredicates)
+
+ def existsPredicates = zipPredicates(koperators.existsPredicates, voperators.existsPredicates)
+
+ def findPredicates = zipPredicates(koperators.findPredicates, voperators.findPredicates)
+
+ def mapFunctions = for {
+ (km, vm) <- koperators.mapFunctions zip voperators.mapFunctions
+ } yield new Function1[(K, V), (K, V)] {
+ def apply(kv: (K, V)) = (km(kv._1), vm(kv._2))
+ }
+
+ def partialMapFunctions = for {
+ (kpm, vpm) <- koperators.partialMapFunctions zip voperators.partialMapFunctions
+ } yield new PartialFunction[(K, V), (K, V)] {
+ def isDefinedAt(kv: (K, V)) = kpm.isDefinedAt(kv._1) && vpm.isDefinedAt(kv._2)
+ def apply(kv: (K, V)) = (kpm(kv._1), vpm(kv._2))
+ }
+
+ def flatMapFunctions = for {
+ (kfm, vfm) <- koperators.flatMapFunctions zip voperators.flatMapFunctions
+ } yield new Function1[(K, V), Traversable[(K, V)]] {
+ def apply(kv: (K, V)) = kfm(kv._1).toIterable zip vfm(kv._2).toIterable
+ }
+
+ def filterPredicates = zipPredicates(koperators.filterPredicates, voperators.filterPredicates)
+
+ def filterNotPredicates = filterPredicates
+
+ def partitionPredicates = filterPredicates
+
+ def takeWhilePredicates = zipPredicates(koperators.takeWhilePredicates, voperators.takeWhilePredicates)
+
+ def dropWhilePredicates = takeWhilePredicates
+
+ def spanPredicates = takeWhilePredicates
+
+ def foldArguments = for {
+ ((kinit, kop), (vinit, vop)) <- koperators.foldArguments zip voperators.foldArguments
+ } yield ((kinit, vinit), new Function2[(K, V), (K, V), (K, V)] {
+ def apply(kv1: (K, V), kv2: (K, V)) = (kop(kv1._1, kv2._1), vop(kv1._2, kv2._2))
+ })
+
+ def addAllTraversables = for {
+ (kt, vt) <- koperators.addAllTraversables zip voperators.addAllTraversables
+ } yield kt.toIterable zip vt.toIterable
+
+ def newArray(sz: Int) = new Array[(K, V)](sz)
+
+ def groupByFunctions = (koperators.groupByFunctions zip voperators.groupByFunctions) map {
+ opt => { (p: (K, V)) => (opt._1(p._1), opt._2(p._2)) }
+ }
+
+}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/test/scalacheck/scala/collection/parallel/PairValues.scala b/test/scalacheck/scala/collection/parallel/PairValues.scala
new file mode 100644
index 0000000000..864dad2425
--- /dev/null
+++ b/test/scalacheck/scala/collection/parallel/PairValues.scala
@@ -0,0 +1,28 @@
+package scala.collection.parallel.ops
+
+
+
+
+
+import org.scalacheck._
+import org.scalacheck.Gen
+import org.scalacheck.Gen._
+import org.scalacheck.Prop._
+import org.scalacheck.Properties
+import org.scalacheck.Arbitrary._
+
+
+
+
+trait PairValues[K, V] {
+ def kvalues: Seq[Gen[K]]
+ def vvalues: Seq[Gen[V]]
+
+ def values = for {
+ kg <- kvalues
+ vg <- vvalues
+ } yield for {
+ k <- kg
+ v <- vg
+ } yield (k, v)
+}
diff --git a/test/scalacheck/scala/collection/parallel/ParallelHashTrieCheck.scala b/test/scalacheck/scala/collection/parallel/ParallelHashTrieCheck.scala
new file mode 100644
index 0000000000..e1df95e051
--- /dev/null
+++ b/test/scalacheck/scala/collection/parallel/ParallelHashTrieCheck.scala
@@ -0,0 +1,144 @@
+package scala.collection.parallel
+package immutable
+
+
+
+import org.scalacheck._
+import org.scalacheck.Gen
+import org.scalacheck.Gen._
+import org.scalacheck.Prop._
+import org.scalacheck.Properties
+import org.scalacheck.Arbitrary._
+
+import scala.collection._
+import scala.collection.parallel.ops._
+
+
+abstract class ParallelHashMapCheck[K, V](tp: String) extends ParallelMapCheck[K, V]("immutable.ParHashMap[" + tp + "]") {
+ // ForkJoinTasks.defaultForkJoinPool.setMaximumPoolSize(Runtime.getRuntime.availableProcessors * 2)
+ // ForkJoinTasks.defaultForkJoinPool.setParallelism(Runtime.getRuntime.availableProcessors * 2)
+
+ type CollType = ParHashMap[K, V]
+
+ def isCheckingViews = false
+
+ def hasStrictOrder = false
+
+ def tasksupport: TaskSupport
+
+ def ofSize(vals: Seq[Gen[(K, V)]], sz: Int) = {
+ var hm = new immutable.HashMap[K, V]
+ val gen = vals(rnd.nextInt(vals.size))
+ for (i <- 0 until sz) hm += sample(gen)
+ hm
+ }
+
+ def fromTraversable(t: Traversable[(K, V)]) = {
+ var phm = new ParHashMap[K, V]
+ phm.tasksupport = tasksupport
+ var i = 0
+ for (kv <- t.toList) {
+ phm += kv
+ i += 1
+ }
+ phm
+ }
+
+}
+
+
+abstract class IntIntParallelHashMapCheck(val tasksupport: TaskSupport, descriptor: String) extends ParallelHashMapCheck[Int, Int](s"Int, Int ($descriptor)")
+with PairOperators[Int, Int]
+with PairValues[Int, Int]
+{
+ def intvalues = new IntValues {}
+ def kvalues = intvalues.values
+ def vvalues = intvalues.values
+
+ val intoperators = new IntOperators {}
+ def voperators = intoperators
+ def koperators = intoperators
+
+ override def printDataStructureDebugInfo(ds: AnyRef) = ds match {
+ case pm: ParHashMap[k, v] =>
+ pm.printDebugInfo
+ case _ =>
+ println("could not match data structure type: " + ds.getClass)
+ }
+}
+
+
+
+abstract class ParallelHashSetCheck[T](tp: String) extends ParallelSetCheck[T]("immutable.ParHashSet[" + tp + "]") {
+ // ForkJoinTasks.defaultForkJoinPool.setMaximumPoolSize(Runtime.getRuntime.availableProcessors * 2)
+ // ForkJoinTasks.defaultForkJoinPool.setParallelism(Runtime.getRuntime.availableProcessors * 2)
+
+ type CollType = ParHashSet[T]
+
+ def isCheckingViews = false
+
+ def hasStrictOrder = false
+
+ def tasksupport: TaskSupport
+
+ def ofSize(vals: Seq[Gen[T]], sz: Int) = {
+ var hm = new immutable.HashSet[T]
+ val gen = vals(rnd.nextInt(vals.size))
+ for (i <- 0 until sz) hm += sample(gen)
+ hm
+ }
+
+ def fromTraversable(t: Traversable[T]) = {
+ var phs = new ParHashSet[T]
+ phs.tasksupport = tasksupport
+ var i = 0
+ for (kv <- t.toList) {
+ phs += kv
+ i += 1
+ }
+ phs
+ }
+
+ override def printDataStructureDebugInfo(ds: AnyRef) = ds match {
+ case pm: ParHashSet[t] =>
+ println("Parallel hash set")
+ case _ =>
+ println("could not match data structure type: " + ds.getClass)
+ }
+
+}
+
+
+abstract class IntParallelHashSetCheck(val tasksupport: TaskSupport, descriptor: String) extends ParallelHashSetCheck[Int](s"Int ($descriptor)")
+with IntOperators
+with IntValues
+{
+ def intvalues = new IntValues {}
+ def kvalues = intvalues.values
+ def vvalues = intvalues.values
+
+ override def printDataStructureDebugInfo(ds: AnyRef) = ds match {
+ case ps: ParHashSet[t] =>
+ println("Parallel hash set")
+ case _ =>
+ println("could not match data structure type: " + ds.getClass)
+ }
+}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/test/scalacheck/scala/collection/parallel/ParallelIterableCheck.scala b/test/scalacheck/scala/collection/parallel/ParallelIterableCheck.scala
new file mode 100644
index 0000000000..7e7ef2ce1b
--- /dev/null
+++ b/test/scalacheck/scala/collection/parallel/ParallelIterableCheck.scala
@@ -0,0 +1,477 @@
+package scala.collection.parallel
+
+
+
+import org.scalacheck._
+import org.scalacheck.Gen
+import org.scalacheck.Gen._
+import org.scalacheck.Prop._
+import org.scalacheck.Properties
+
+import scala.collection._
+import scala.collection.parallel._
+
+
+
+
+abstract class ParallelIterableCheck[T](collName: String) extends Properties(collName) with Operators[T] {
+ type CollType <: ParIterable[T]
+
+ def values: Seq[Gen[T]]
+ def ofSize(vals: Seq[Gen[T]], sz: Int): Iterable[T]
+ def fromTraversable(t: Traversable[T]): CollType
+ def isCheckingViews: Boolean
+ def hasStrictOrder: Boolean
+
+
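+ // Collections are generated either at the ScalaCheck-controlled size or at a few fixed
+ // larger sizes, so that the operations are also exercised on sizeable inputs.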
+ def instances(vals: Seq[Gen[T]]): Gen[Iterable[T]] = oneOf(
+ sized(
+ sz =>
+ ofSize(vals, sz)
+ ),
+ for (sz <- choose(1000, 2000)) yield ofSize(vals, sz),
+ for (sz <- choose(4000, 4001)) yield ofSize(vals, sz),
+ for (sz <- choose(10000, 10001)) yield ofSize(vals, sz)
+ )
+
+ // used to check if constructed collection is valid
+ def checkDataStructureInvariants(orig: Traversable[T], cf: AnyRef) = {
+ // can be overridden in subclasses
+ true
+ }
+
+ def printDataStructureDebugInfo(cf: AnyRef) {
+ // can be overridden in subclasses
+ }
+
+ val rnd = new scala.util.Random
+
+ def sample(gen: Gen[T]): T = {
+ var s = gen.sample
+ while (s == None) s = gen.sample
+ s.get
+ }
+
+ def sampleValue: T = sample(values(rnd.nextInt(values.length)))
+
+ def collectionPairs = for (inst <- instances(values)) yield (inst, fromTraversable(inst))
+
+ def collectionPairsWithLengths = for (inst <- instances(values); s <- choose(0, inst.size))
+ yield (inst, fromTraversable(inst), s)
+
+ def collectionPairsWith2Indices = for (
+ inst <- instances(values);
+ f <- choose(0, inst.size);
+ s <- choose(0, inst.size))
+ yield (inst, fromTraversable(inst), f, s)
+
+ def collectionTriplets = for (inst <- instances(values);
+ updStart <- choose(0, inst.size); howMany <- choose(0, inst.size)) yield {
+ val modif = inst.toSeq.patch(updStart, inst.toSeq, howMany)
+ (inst, fromTraversable(inst), modif)
+ }
+
+ def areEqual(t1: GenTraversable[T], t2: GenTraversable[T]) = if (hasStrictOrder) {
+ t1 == t2 && t2 == t1
+ } else (t1, t2) match { // it is slightly delicate what `equal` means if the order is not strict
+ case (m1: GenMap[_, _], m2: GenMap[_, _]) => m1 == m2 && m2 == m1
+ case (i1: GenIterable[_], i2: GenIterable[_]) =>
+ val i1s = i1.toSet
+ val i2s = i2.toSet
+ i1s == i2s && i2s == i1s
+ case _ => t1 == t2 && t2 == t1
+ }
+
+ def printDebugInfo(coll: ParIterableLike[_, _, _]) {
+ println("Collection debug info: ")
+ coll.printDebugBuffer
+ println("Task debug info: ")
+ println(coll.tasksupport.debugMessages.mkString("\n"))
+ }
+
+ def printComparison(t: Traversable[_], coll: ParIterable[_], tf: Traversable[_], cf: ParIterable[_], ind: Int) {
+ printDebugInfo(coll)
+ println("Operator: " + ind)
+ println("sz: " + t.size)
+ println(t)
+ println
+ println("sz: " + coll.size)
+ println(coll)
+ println("transformed to:")
+ println
+ println("size: " + tf.size)
+ println(tf)
+ println
+ println("size: " + cf.size)
+ println(cf)
+ println
+ println("tf == cf - " + (tf == cf))
+ println("cf == tf - " + (cf == tf))
+ }
+
+ property("reductions must be equal for assoc. operators") = forAllNoShrink(collectionPairs) { case (t, coll) =>
+ if (t.size != 0) {
+ val results = for ((op, ind) <- reduceOperators.zipWithIndex) yield {
+ val tr = t.reduceLeft(op)
+ val cr = coll.reduce(op)
+ if (tr != cr) {
+ println("from: " + t)
+ println("and: " + coll)
+ println("reducing with " + ind)
+ println(tr)
+ println(cr)
+ }
+ ("op index: " + ind) |: tr == cr
+ }
+ results.reduceLeft(_ && _)
+ } else "has size 0" |: true
+ }
+
+ property("counts must be equal") = forAllNoShrink(collectionPairs) { case (t, coll) =>
+ val results = for ((pred, ind) <- countPredicates.zipWithIndex) yield {
+ val tc = t.count(pred)
+ val cc = coll.count(pred)
+ if (tc != cc) {
+ println("from: " + t + " - size: " + t.size)
+ println("and: " + coll + " - size: " + coll.toList.size)
+ println(tc)
+ println(cc)
+ printDebugInfo(coll)
+ }
+ ("op index: " + ind) |: tc == cc
+ }
+ results.reduceLeft(_ && _)
+ }
+
+ property("forall must be equal") = forAllNoShrink(collectionPairs) { case (t, coll) =>
+ val results = for ((pred, ind) <- forallPredicates.zipWithIndex)
+ yield ("op index: " + ind) |: t.forall(pred) == coll.forall(pred)
+ results.reduceLeft(_ && _)
+ }
+
+ property("exists must be equal") = forAllNoShrink(collectionPairs) { case (t, coll) =>
+ val results = for ((pred, ind) <- existsPredicates.zipWithIndex)
+ yield ("op index: " + ind) |: t.exists(pred) == coll.exists(pred)
+ results.reduceLeft(_ && _)
+ }
+
+ property("both must find or not find an element") = forAllNoShrink(collectionPairs) { case (t, coll) =>
+ val results = for ((pred, ind) <- findPredicates.zipWithIndex) yield {
+ val ft = t.find(pred)
+ val fcoll = coll.find(pred)
+ ("op index: " + ind) |: ((ft == None && fcoll == None) || (ft != None && fcoll != None))
+ }
+ results.reduceLeft(_ && _)
+ }
+
+ property("mappings must be equal") = forAllNoShrink(collectionPairs) { case (t, coll) =>
+ val results = for ((f, ind) <- mapFunctions.zipWithIndex) yield {
+ val ms = t.map(f)
+ val mp = coll.map(f)
+ val invs = checkDataStructureInvariants(ms, mp)
+ if (!areEqual(ms, mp) || !invs) {
+ println(t)
+ println(coll)
+ println("mapped to: ")
+ println(ms)
+ println(mp)
+ println("sizes: ")
+ println(ms.size)
+ println(mp.size)
+ println("valid: " + invs)
+ }
+ ("op index: " + ind) |: (areEqual(ms, mp) && invs)
+ }
+ results.reduceLeft(_ && _)
+ }
+
+ property("collects must be equal") = forAllNoShrink(collectionPairs) { case (t, coll) =>
+ val results = for ((f, ind) <- partialMapFunctions.zipWithIndex) yield {
+ val ps = t.collect(f)
+ val pp = coll.collect(f)
+ if (!areEqual(ps, pp)) {
+ println(t)
+ println(coll)
+ println("collected to: ")
+ println(ps)
+ println(pp)
+ }
+ ("op index: " + ind) |: areEqual(ps, pp)
+ }
+ results.reduceLeft(_ && _)
+ }
+
+ property("flatMaps must be equal") = forAllNoShrink(collectionPairs) { case (t, coll) =>
+ (for ((f, ind) <- flatMapFunctions.zipWithIndex)
+ yield ("op index: " + ind) |: areEqual(t.flatMap(f), coll.flatMap(f))).reduceLeft(_ && _)
+ }
+
+ property("filters must be equal") = forAllNoShrink(collectionPairs) { case (t, coll) =>
+ (for ((p, ind) <- filterPredicates.zipWithIndex) yield {
+ val tf = t.filter(p)
+ val cf = coll.filter(p)
+ val invs = checkDataStructureInvariants(tf, cf)
+ if (tf != cf || cf != tf || !invs) {
+ printDebugInfo(coll)
+ println("Operator: " + ind)
+ println("sz: " + t.size)
+ println(t)
+ println
+ println("sz: " + coll.size)
+ println(coll)
+ println
+ println("filtered to:")
+ println
+ println(cf)
+ println
+ println(tf)
+ println
+ println("tf == cf - " + (tf == cf))
+ println("cf == tf - " + (cf == tf))
+ printDataStructureDebugInfo(cf)
+ println("valid: " + invs)
+ }
+ ("op index: " + ind) |: tf == cf && cf == tf && invs
+ }).reduceLeft(_ && _)
+ }
+
+ property("filterNots must be equal") = forAllNoShrink(collectionPairs) { case (t, coll) =>
+ (for ((p, ind) <- filterNotPredicates.zipWithIndex) yield {
+ val tf = t.filterNot(p)
+ val cf = coll.filterNot(p)
+ if (tf != cf || cf != tf) printComparison(t, coll, tf, cf, ind)
+ ("op index: " + ind) |: tf == cf && cf == tf
+ }).reduceLeft(_ && _)
+ }
+
+ if (!isCheckingViews) property("partitions must be equal") = forAllNoShrink(collectionPairs) { case (t, coll) =>
+ (for ((p, ind) <- partitionPredicates.zipWithIndex) yield {
+ val tpart = t.partition(p)
+ val cpart = coll.partition(p)
+ if (tpart != cpart) {
+ println("from: " + t)
+ println("and: " + coll)
+ println(cpart)
+ println(tpart)
+ }
+ ("op index: " + ind) |: tpart == cpart
+ }).reduceLeft(_ && _)
+ }
+
+ if (hasStrictOrder) property("takes must be equal") = forAllNoShrink(collectionPairsWithLengths) { case (t, coll, n) =>
+ ("take " + n + " elements") |: t.take(n) == coll.take(n)
+ }
+
+ if (hasStrictOrder) property("drops must be equal") = forAllNoShrink(collectionPairsWithLengths) { case (t, coll, n) =>
+ ("drop " + n + " elements") |: t.drop(n) == coll.drop(n)
+ }
+
+ if (hasStrictOrder) property("slices must be equal") = forAllNoShrink(collectionPairsWith2Indices)
+ { case (t, coll, fr, slicelength) =>
+ val from = if (fr < 0) 0 else fr
+ val until = if (from + slicelength > t.size) t.size else from + slicelength
+ val tsl = t.slice(from, until)
+ val collsl = coll.slice(from, until)
+ if (tsl != collsl) {
+ println("---------------------- " + from + ", " + until)
+ println("from: " + t)
+ println("and: " + coll)
+ println(tsl)
+ println(collsl)
+ println("as list: " + collsl.toList)
+ println(collsl.iterator.hasNext)
+ println(collsl.iterator.next)
+ println(collsl.iterator.hasNext)
+ println(collsl.iterator.next)
+ println(collsl.iterator.hasNext)
+ println(collsl.iterator.next)
+ println(collsl.iterator.hasNext)
+ }
+ ("slice from " + from + " until " + until) |: tsl == collsl
+ }
+
+ if (hasStrictOrder) property("splits must be equal") = forAllNoShrink(collectionPairsWithLengths) { case (t, coll, n) =>
+ val tspl = t.splitAt(n)
+ val cspl = coll.splitAt(n)
+ if (tspl != cspl) {
+ println("at: " + n)
+ println("from: " + t)
+ println("and: " + coll)
+ println(tspl)
+ println(cspl)
+ }
+ ("splitAt " + n) |: tspl == cspl
+ }
+
+ if (hasStrictOrder) property("takeWhiles must be equal") = forAllNoShrink(collectionPairs) { case (t, coll) =>
+ (for ((pred, ind) <- takeWhilePredicates.zipWithIndex) yield {
+ val tt = t.takeWhile(pred)
+ val ct = coll.takeWhile(pred)
+ if (tt != ct) {
+ println("from: " + t)
+ println("and: " + coll)
+ println("taking while...")
+ println(tt)
+ println(ct)
+ }
+ ("operator " + ind) |: tt == ct
+ }).reduceLeft(_ && _)
+ }
+
+ if (hasStrictOrder) property("spans must be equal") = forAllNoShrink(collectionPairs) { case (t, coll) =>
+ (for ((pred, ind) <- spanPredicates.zipWithIndex) yield {
+ val tsp = t.span(pred)
+ val csp = coll.span(pred)
+ if (tsp != csp) {
+ println("from: " + t)
+ println("and: " + coll)
+ println("span with predicate " + ind)
+ println(tsp)
+ println(csp)
+ println("---------------------------------")
+ println(coll.span(pred))
+ println("---------------------------------")
+ }
+ ("operator " + ind) |: tsp == csp
+ }).reduceLeft(_ && _)
+ }
+
+ if (hasStrictOrder) property("dropWhiles must be equal") = forAllNoShrink(collectionPairs) { case (t, coll) =>
+ (for ((pred, ind) <- dropWhilePredicates.zipWithIndex) yield {
+ ("operator " + ind) |: t.dropWhile(pred) == coll.dropWhile(pred)
+ }).reduceLeft(_ && _)
+ }
+
+ property("folds must be equal for assoc. operators") = forAllNoShrink(collectionPairs) { case (t, coll) =>
+ (for (((first, op), ind) <- foldArguments.zipWithIndex) yield {
+ val tres = t.foldLeft(first)(op)
+ val cres = coll.fold(first)(op)
+ if (cres != tres) {
+ println("from: " + t)
+ println("and: " + coll)
+ println("folds are: ")
+ println(tres)
+ println(cres)
+ }
+ ("operator " + ind) |: tres == cres
+ }).reduceLeft(_ && _)
+ }
+
+ property("++s must be equal") = forAll(collectionTriplets) { case (t, coll, colltoadd) =>
+ try {
+ val toadd = colltoadd
+ val tr = t ++ toadd.iterator
+ val cr = coll ++ toadd.iterator
+ if (!areEqual(tr, cr)) {
+ println("from: " + t)
+ println("and: " + coll.iterator.toList)
+ println("adding: " + toadd)
+ println(tr.toList)
+ println(cr.iterator.toList)
+ }
+ ("adding " |: areEqual(tr, cr)) &&
+ (for ((trav, ind) <- (addAllTraversables).zipWithIndex) yield {
+ val tadded = t ++ trav
+ val cadded = coll ++ collection.parallel.mutable.ParArray(trav.toSeq: _*)
+ if (!areEqual(tadded, cadded)) {
+ println("----------------------")
+ println("from: " + t)
+ println("and: " + coll)
+ println("adding: " + trav)
+ println(tadded)
+ println(cadded)
+ }
+ ("traversable " + ind) |: areEqual(tadded, cadded)
+ }).reduceLeft(_ && _)
+ } catch {
+ case e: java.lang.Exception =>
+ throw e
+ }
+ }
+
+ if (hasStrictOrder) property("copies to array must be equal") = forAllNoShrink(collectionPairs) { case (t, coll) =>
+ val tarr = newArray(t.size)
+ val collarr = newArray(coll.size)
+ t.copyToArray(tarr, 0, t.size)
+ coll.copyToArray(collarr, 0, coll.size)
+ if (tarr.toSeq != collarr.toSeq) {
+ println("from: " + t)
+ println("and: " + coll)
+ println(tarr.toSeq)
+ println(collarr.toSeq)
+ }
+ tarr.toSeq == collarr.toSeq
+ }
+
+ if (hasStrictOrder) property("scans must be equal") = forAllNoShrink(collectionPairs) {
+ case (t, coll) =>
+ (for (((first, op), ind) <- foldArguments.zipWithIndex) yield {
+ val tscan = t.scanLeft(first)(op)
+ val cscan = coll.scan(first)(op)
+ if (tscan != cscan || cscan != tscan) {
+ println("from: " + t)
+ println("and: " + coll)
+ println("scans are: ")
+ println(tscan)
+ println(cscan)
+ }
+ ("operator " + ind) |: tscan == cscan && cscan == tscan
+ }).reduceLeft(_ && _)
+ }
+
+ property("groupBy must be equal") = forAllNoShrink(collectionPairs) {
+ case (t, coll) =>
+ (for ((f, ind) <- groupByFunctions.zipWithIndex) yield {
+ val tgroup = t.groupBy(f)
+ val cgroup = coll.groupBy(f)
+ if (tgroup != cgroup || cgroup != tgroup) {
+ println("from: " + t)
+ println("and: " + coll)
+ println("groups are: ")
+ println(tgroup)
+ println(cgroup)
+ }
+ ("operator " + ind) |: tgroup == cgroup && cgroup == tgroup
+ }).reduceLeft(_ && _)
+ }
+
+}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/test/scalacheck/scala/collection/parallel/ParallelMapCheck1.scala b/test/scalacheck/scala/collection/parallel/ParallelMapCheck1.scala
new file mode 100644
index 0000000000..50aa4ad0c7
--- /dev/null
+++ b/test/scalacheck/scala/collection/parallel/ParallelMapCheck1.scala
@@ -0,0 +1,67 @@
+package scala.collection.parallel
+
+
+
+import org.scalacheck._
+import org.scalacheck.Gen
+import org.scalacheck.Gen._
+import org.scalacheck.Prop._
+import org.scalacheck.Properties
+
+import scala.collection._
+import scala.collection.parallel._
+
+
+
+
+abstract class ParallelMapCheck[K, V](collname: String) extends ParallelIterableCheck[(K, V)](collname) {
+ type CollType <: ParMap[K, V]
+
+ property("gets iterated keys") = forAllNoShrink(collectionPairs) {
+ case (t, coll) =>
+ val containsT = for ((k, v) <- t) yield (coll.get(k) == Some(v))
+ val containsSelf = coll.map { case (k, v) => coll.get(k) == Some(v) }
+ ("Par contains elements of seq map" |: containsT.forall(_ == true)) &&
+ ("Par contains elements of itself" |: containsSelf.forall(_ == true))
+ }
+
+}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/test/scalacheck/scala/collection/parallel/ParallelRangeCheck.scala b/test/scalacheck/scala/collection/parallel/ParallelRangeCheck.scala
new file mode 100644
index 0000000000..5b783fadf2
--- /dev/null
+++ b/test/scalacheck/scala/collection/parallel/ParallelRangeCheck.scala
@@ -0,0 +1,75 @@
+package scala.collection.parallel
+package immutable
+
+
+
+
+import org.scalacheck._
+import org.scalacheck.Gen
+import org.scalacheck.Gen._
+import org.scalacheck.Prop._
+import org.scalacheck.Properties
+import org.scalacheck.Arbitrary._
+
+import scala.collection._
+import scala.collection.parallel.ops._
+
+
+
+
+abstract class ParallelRangeCheck(val tasksupport: TaskSupport, descriptor: String) extends ParallelSeqCheck[Int](s"ParallelRange[Int] ($descriptor)") with ops.IntSeqOperators {
+ // ForkJoinTasks.defaultForkJoinPool.setMaximumPoolSize(Runtime.getRuntime.availableProcessors * 2)
+ // ForkJoinTasks.defaultForkJoinPool.setParallelism(Runtime.getRuntime.availableProcessors * 2)
+
+ type CollType = collection.parallel.ParSeq[Int]
+
+ def hasStrictOrder = true
+
+ def isCheckingViews = false
+
+ def ofSize(vals: Seq[Gen[Int]], sz: Int) = throw new UnsupportedOperationException
+
+ override def instances(vals: Seq[Gen[Int]]): Gen[Seq[Int]] = sized { start =>
+ sized { end =>
+ sized { step =>
+ new Range(start, end, if (step != 0) step else 1)
+ }
+ }
+ }
+
+ def fromSeq(a: Seq[Int]) = a match {
+ case r: Range =>
+ val pr = ParRange(r.start, r.end, r.step, false)
+ pr.tasksupport = tasksupport
+ pr
+ case _ =>
+ val pa = new parallel.mutable.ParArray[Int](a.length)
+ pa.tasksupport = tasksupport
+ for (i <- 0 until a.length) pa(i) = a(i)
+ pa
+ }
+
+ override def traversable2Seq(t: Traversable[Int]): Seq[Int] = t match {
+ case r: Range => r
+ case _ => t.toSeq
+ }
+
+ def values = Seq(choose(-100, 100))
+
+}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/test/scalacheck/scala/collection/parallel/ParallelSeqCheck.scala b/test/scalacheck/scala/collection/parallel/ParallelSeqCheck.scala
new file mode 100644
index 0000000000..48c3d3f745
--- /dev/null
+++ b/test/scalacheck/scala/collection/parallel/ParallelSeqCheck.scala
@@ -0,0 +1,300 @@
+package scala.collection.parallel
+
+
+
+import org.scalacheck._
+import org.scalacheck.Gen
+import org.scalacheck.Gen._
+import org.scalacheck.Prop._
+import org.scalacheck.Properties
+
+import scala.collection._
+import scala.collection.parallel._
+
+
+
+
+
+abstract class ParallelSeqCheck[T](collName: String) extends ParallelIterableCheck[T](collName) with SeqOperators[T] {
+
+ type CollType <: collection.parallel.ParSeq[T]
+
+
+ def ofSize(vals: Seq[Gen[T]], sz: Int): Seq[T]
+ def fromSeq(s: Seq[T]): CollType
+
+ override def instances(vals: Seq[Gen[T]]): Gen[Seq[T]] = oneOf(
+ Gen.const(ofSize(vals, 1)),
+ sized(
+ sz =>
+ ofSize(vals, sz)
+ ),
+ for (sz <- choose(1000, 2000)) yield ofSize(vals, sz)
+ )
+
+
+ def fromTraversable(t: Traversable[T]) = fromSeq(traversable2Seq(t))
+ def traversable2Seq(t: Traversable[T]): Seq[T] = {
+ if (t.isInstanceOf[Iterable[_]]) t.asInstanceOf[Iterable[T]].iterator.toList else t.toList
+ }
+
+ override def collectionPairs: Gen[(Seq[T], CollType)] = for (inst <- instances(values)) yield (inst, fromSeq(inst))
+
+ override def collectionPairsWithLengths: Gen[(Seq[T], CollType, Int)] =
+ for (inst <- instances(values); s <- choose(0, inst.size)) yield (inst, fromSeq(inst), s)
+
+ def collectionPairsWithModifiedWithLengths: Gen[(Seq[T], CollType, ParSeq[T], Int)] =
+ for (inst <- instances(values); s <- choose(0, inst.size);
+ updateStart <- choose(0, inst.size); howMany <- choose(0, inst.size)) yield {
+ val parcoll = fromSeq(inst)
+ val parcollmodif = fromSeq(modifySlightly(inst, updateStart, howMany))
+ (inst, parcoll, parcollmodif, s)
+ }
+
+ def collectionPairsWithModified: Gen[(Seq[T], CollType, ParSeq[T])] =
+ for (inst <- instances(values); updateStart <- choose(0, inst.size); howMany <- choose(0, inst.size)) yield {
+ val parcoll = fromSeq(inst)
+ val parcollmodif = fromSeq(modifySlightly(inst, updateStart, howMany))
+ (inst, parcoll, parcollmodif)
+ }
+
+ def collectionPairsWithSliced: Gen[(Seq[T], CollType, ParSeq[T])] =
+ for (inst <- instances(values); sliceStart <- choose(0, inst.size); howMany <- choose(0, inst.size)) yield {
+ val parcoll = fromSeq(inst)
+ val parcollsliced = fromSeq(inst.slice(sliceStart, sliceStart + howMany))
+ (inst, parcoll, parcollsliced)
+ }
+
+ def collectionTripletsWith2Indices: Gen[(Seq[T], CollType, Seq[T], Int, Int)] =
+ for (inst <- instances(values); f <- choose(0, inst.size); s <- choose(0, inst.size - f);
+ third <- instances(values); sliceStart <- choose(0, inst.size); howMany <- choose(0, inst.size)) yield {
+ (inst, fromSeq(inst), inst.slice(sliceStart, sliceStart + howMany), f, s)
+ }
+
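+ // Produce a related sequence by patching `coll` with a copy of itself at `updateStart`,
+ // replacing `howMany` elements.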
+ private def modifySlightly(coll: Seq[T], updateStart: Int, howMany: Int) = {
+ coll.patch(updateStart, coll, howMany)
+ }
+
+ property("segmentLengths must be equal") = forAllNoShrink(collectionPairsWithLengths) { case (s, coll, len) =>
+ (for ((pred, ind) <- segmentLengthPredicates.zipWithIndex) yield {
+ val slen = s.segmentLength(pred, if (len < 0) 0 else len)
+ val clen = coll.segmentLength(pred, len)
+ if (slen != clen) {
+ println("from: " + s)
+ println("and: " + coll)
+ println(slen)
+ println(clen)
+ }
+ ("operator " + ind) |: slen == clen
+ }).reduceLeft(_ && _)
+ }
+
+ property("prefixLengths must be equal") = forAllNoShrink(collectionPairs) { case (s, coll) =>
+ (for ((pred, ind) <- segmentLengthPredicates.zipWithIndex) yield {
+ ("operator " + ind) |: s.prefixLength(pred) == coll.prefixLength(pred)
+ }).reduceLeft(_ && _)
+ }
+
+ property("indexWheres must be equal") = forAllNoShrink(collectionPairsWithLengths) { case (s, coll, len) =>
+ (for ((pred, ind) <- indexWherePredicates.zipWithIndex) yield {
+ val sind = s.indexWhere(pred, len)
+ val cind = coll.indexWhere(pred, len)
+ if (sind != cind) {
+ println("from: " + s)
+ println("and: " + coll)
+ println("at: " + len)
+ println(sind)
+ println(cind)
+ }
+ ("operator " + ind) |: sind == cind
+ }).reduceLeft(_ && _)
+ }
+
+ property("lastIndexWheres must be equal") = forAllNoShrink(collectionPairsWithLengths) { case (s, coll, len) =>
+ (for ((pred, ind) <- lastIndexWherePredicates.zipWithIndex) yield {
+ val end = if (len >= s.size) s.size - 1 else len
+ val sind = s.lastIndexWhere(pred, end)
+ val cind = coll.lastIndexWhere(pred, end)
+ ("operator " + ind) |: sind == cind
+ }).reduceLeft(_ && _)
+ }
+
+ property("reverses must be equal") = forAllNoShrink(collectionPairs) { case (s, coll) =>
+ (s.length == 0 && s.getClass == classOf[collection.immutable.Range]) ||
+ {
+ val sr = s.reverse
+ val cr = coll.reverse
+ if (sr != cr) {
+ println("from: " + s)
+ println("and: " + coll)
+ println(sr)
+ println(cr)
+ }
+ sr == cr
+ }
+ }
+
+ property("reverseMaps must be equal") = forAllNoShrink(collectionPairs) { case (s, coll) =>
+ (for ((f, ind) <- reverseMapFunctions.zipWithIndex) yield {
+ ("operator " + ind) |: s.reverseMap(f) == coll.reverseMap(f)
+ }).reduceLeft(_ && _)
+ }
+
+ property("sameElements must be equal") = forAllNoShrink(collectionPairsWithModifiedWithLengths) {
+ case (s, coll, collmodif, len) =>
+ val pos = if (len < 0) 0 else len
+ val scm = s.sameElements(collmodif)
+ val ccm = coll.sameElements(collmodif)
+ if (scm != ccm) {
+ println("Comparing: " + s)
+ println("and: " + coll)
+ println("with: " + collmodif)
+ println(scm)
+ println(ccm)
+ }
+ ("Nil" |: s.sameElements(Nil) == coll.sameElements(Nil)) &&
+ ("toList" |: s.sameElements(s.toList) == coll.sameElements(coll.toList)) &&
+ ("identity" |: s.sameElements(s.map(e => e)) == coll.sameElements(coll.map(e => e))) &&
+ ("vice-versa" |: s.sameElements(coll) == coll.sameElements(s)) &&
+ ("equal" |: s.sameElements(coll)) &&
+ ("modified" |: scm == ccm) &&
+ (for ((it, ind) <- sameElementsSeqs.zipWithIndex) yield {
+ val sres = s.sameElements(it)
+ val pres = coll.sameElements(it)
+ if (sres != pres) {
+ println("Comparing: " + s)
+ println("and: " + coll)
+ println("with: " + it)
+ println(sres)
+ println(pres)
+ }
+ ("collection " + ind) |: sres == pres
+ }).reduceLeft(_ && _)
+ }
+
+ property("startsWiths must be equal") = forAllNoShrink(collectionPairsWithModifiedWithLengths) {
+ case (s, coll, collmodif, len) =>
+ val pos = if (len < 0) 0 else len
+ ("start with self" |: s.startsWith(s) == coll.startsWith(coll)) &&
+ ("tails correspond" |: (s.length == 0 || s.startsWith(s.tail, 1) == coll.startsWith(coll.tail, 1))) &&
+ ("with each other" |: coll.startsWith(s)) &&
+ ("modified" |: s.startsWith(collmodif) == coll.startsWith(collmodif)) &&
+ ("modified2" |: s.startsWith(collmodif, pos) == coll.startsWith(collmodif, pos)) &&
+ (for (sq <- startEndSeqs) yield {
+ val ss = s.startsWith(sq, pos)
+ val cs = coll.startsWith(fromSeq(sq), pos)
+ if (ss != cs) {
+ println("from: " + s)
+ println("and: " + coll)
+ println("test seq: " + sq)
+ println("from pos: " + pos)
+ println(ss)
+ println(cs)
+ println(coll.iterator.psplit(pos, coll.length - pos)(1).toList)
+ }
+ ("seq " + sq) |: ss == cs
+ }).reduceLeft(_ && _)
+ }
+
+ property("endsWiths must be equal") = forAllNoShrink(collectionPairsWithModified) {
+ case (s, coll, collmodif) =>
+ ("ends with self" |: s.endsWith(s) == coll.endsWith(s)) &&
+ ("ends with tail" |: (s.length == 0 || s.endsWith(s.tail) == coll.endsWith(coll.tail))) &&
+ ("with each other" |: coll.endsWith(s)) &&
+ ("modified" |: s.startsWith(collmodif) == coll.endsWith(collmodif)) &&
+ (for (sq <- startEndSeqs) yield {
+ val sew = s.endsWith(sq)
+ val cew = coll.endsWith(fromSeq(sq))
+ if (sew != cew) {
+ println("from: " + s)
+ println("and: " + coll)
+ println(sew)
+ println(cew)
+ }
+ ("seq " + sq) |: sew == cew
+ }).reduceLeft(_ && _)
+ }
+
+ property("unions must be equal") = forAllNoShrink(collectionPairsWithModified) { case (s, coll, collmodif) =>
+ ("modified" |: s.union(collmodif.seq) == coll.union(collmodif)) &&
+ ("empty" |: s.union(Nil) == coll.union(fromSeq(Nil)))
+ }
+
+ // This is failing with my views patch: array index out of bounds in the array iterator.
+  // Couldn't see why this and only this was impacted; it could use a second pair of eyes.
+ //
+  // This was failing because some corner cases weren't handled in the patch method in ParSeqLike.
+ // Curiously, this wasn't detected before.
+ //
+ if (!isCheckingViews) property("patches must be equal") = forAll(collectionTripletsWith2Indices) {
+ case (s, coll, pat, from, repl) =>
+ ("with seq" |: s.patch(from, pat, repl) == coll.patch(from, pat, repl)) &&
+ ("with par" |: s.patch(from, pat, repl) == coll.patch(from, fromSeq(pat), repl)) &&
+ ("with empty" |: s.patch(from, Nil, repl) == coll.patch(from, fromSeq(Nil), repl)) &&
+ ("with one" |: (s.length == 0 || s.patch(from, List(s(0)), 1) == coll.patch(from, fromSeq(List(coll(0))), 1)))
+ }
+
+ if (!isCheckingViews) property("updates must be equal") = forAllNoShrink(collectionPairsWithLengths) { case (s, coll, len) =>
+ val pos = if (len >= s.length) s.length - 1 else len
+ if (s.length > 0) {
+ val supd = s.updated(pos, s(0))
+ val cupd = coll.updated(pos, coll(0))
+ if (supd != cupd) {
+ println("from: " + s)
+ println("and: " + coll)
+ println(supd)
+ println(cupd)
+ }
+ "from first" |: (supd == cupd)
+ } else "trivially" |: true
+ }
+
+ property("prepends must be equal") = forAllNoShrink(collectionPairs) { case (s, coll) =>
+ s.length == 0 || s(0) +: s == coll(0) +: coll
+ }
+
+ property("appends must be equal") = forAllNoShrink(collectionPairs) { case (s, coll) =>
+ s.length == 0 || s :+ s(0) == coll :+ coll(0)
+ }
+
+ property("padTos must be equal") = forAllNoShrink(collectionPairsWithLengths) { case (s, coll, len) =>
+ val someValue = sampleValue
+ val sdoub = s.padTo(len * 2, someValue)
+ val cdoub = coll.padTo(len * 2, someValue)
+ if (sdoub != cdoub) {
+ println("from: " + s)
+ println("and: " + coll)
+ println(sdoub)
+ println(cdoub)
+ }
+ ("smaller" |: s.padTo(len / 2, someValue) == coll.padTo(len / 2, someValue)) &&
+ ("bigger" |: sdoub == cdoub)
+ }
+
+ property("corresponds must be equal") = forAllNoShrink(collectionPairsWithModified) { case (s, coll, modified) =>
+ val modifcut = modified.toSeq.slice(0, modified.length)
+ ("self" |: s.corresponds(s)(_ == _) == coll.corresponds(coll)(_ == _)) &&
+ ("modified" |: s.corresponds(modified.seq)(_ == _) == coll.corresponds(modified)(_ == _)) &&
+ ("modified2" |: s.corresponds(modifcut)(_ == _) == coll.corresponds(modifcut)(_ == _))
+ }
+
+}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
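The properties in this file all instantiate one pattern: generate a sequential collection alongside its parallel counterpart, apply the same operation to both, and require the results to agree. As a point of reference, a minimal stand-alone sketch of that pattern could look like the following; it is not part of the patch, the ParVector choice, generator bounds and property names are illustrative, and it assumes the 2.12-era standard-library parallel collections plus ScalaCheck's Properties API.

import org.scalacheck.{Gen, Prop, Properties}
import scala.collection.parallel.immutable.ParVector

// Illustrative stand-alone suite: compare a sequential Vector with the
// equivalent ParVector for two of the operations exercised above.
object MiniSeqParEquivalence extends Properties("mini seq/par equivalence") {
  val ints: Gen[Vector[Int]] = Gen.containerOf[Vector, Int](Gen.choose(-100, 100))

  property("indexWhere agrees") = Prop.forAll(ints) { xs =>
    val par = ParVector(xs: _*)
    xs.indexWhere(_ > 0) == par.indexWhere(_ > 0)
  }

  property("segmentLength agrees") = Prop.forAll(ints) { xs =>
    val par = ParVector(xs: _*)
    xs.segmentLength(_ >= 0, 0) == par.segmentLength(_ >= 0, 0)
  }
}

The abstract checks above generalise exactly this shape: the concrete collection, the generators and the operation lists are supplied by subclasses and by mixins such as IntSeqOperators.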
diff --git a/test/scalacheck/scala/collection/parallel/ParallelSetCheck.scala b/test/scalacheck/scala/collection/parallel/ParallelSetCheck.scala
new file mode 100644
index 0000000000..c22dddf96d
--- /dev/null
+++ b/test/scalacheck/scala/collection/parallel/ParallelSetCheck.scala
@@ -0,0 +1,62 @@
+package scala.collection.parallel
+
+
+
+import org.scalacheck._
+import org.scalacheck.Gen
+import org.scalacheck.Gen._
+import org.scalacheck.Prop._
+import org.scalacheck.Properties
+
+import scala.collection._
+import scala.collection.parallel._
+
+
+
+
+abstract class ParallelSetCheck[T](collname: String) extends ParallelIterableCheck[T](collname) {
+ type CollType <: ParSet[T]
+
+ property("gets iterated keys") = forAllNoShrink(collectionPairs) {
+ case (t, coll) =>
+ val containsT = for (elem <- t) yield (coll.contains(elem))
+ val containsSelf = for (elem <- coll) yield (coll.contains(elem))
+ ("Par contains elements of seq map" |: containsT.forall(_ == true)) &&
+ ("Par contains elements of itself" |: containsSelf.forall(_ == true))
+ }
+
+}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/test/scalacheck/scala/collection/parallel/immutable/ParallelVectorCheck.scala b/test/scalacheck/scala/collection/parallel/immutable/ParallelVectorCheck.scala
new file mode 100644
index 0000000000..1afcf2ce4c
--- /dev/null
+++ b/test/scalacheck/scala/collection/parallel/immutable/ParallelVectorCheck.scala
@@ -0,0 +1,67 @@
+package scala.collection
+package parallel.immutable
+
+
+
+import org.scalacheck._
+import org.scalacheck.Gen
+import org.scalacheck.Gen._
+import org.scalacheck.Prop._
+import org.scalacheck.Properties
+import org.scalacheck.Arbitrary._
+
+import scala.collection._
+import scala.collection.parallel.ops._
+
+
+import immutable.Vector
+import immutable.VectorBuilder
+
+import scala.collection.parallel.TaskSupport
+
+
+
+
+abstract class ParallelVectorCheck[T](tp: String) extends collection.parallel.ParallelSeqCheck[T]("ParVector[" + tp + "]") {
+ // ForkJoinTasks.defaultForkJoinPool.setMaximumPoolSize(Runtime.getRuntime.availableProcessors * 2)
+ // ForkJoinTasks.defaultForkJoinPool.setParallelism(Runtime.getRuntime.availableProcessors * 2)
+
+ type CollType = ParVector[T]
+
+ def isCheckingViews = false
+
+ def hasStrictOrder = true
+
+ def tasksupport: TaskSupport
+
+ def ofSize(vals: Seq[Gen[T]], sz: Int) = {
+ val vb = new immutable.VectorBuilder[T]()
+ val gen = vals(rnd.nextInt(vals.size))
+ for (i <- 0 until sz) vb += sample(gen)
+ vb.result
+ }
+
+ def fromSeq(a: Seq[T]) = {
+ val pc = ParVector.newCombiner[T]
+ for (elem <- a.toList) pc += elem
+ val pv = pc.result
+ pv.tasksupport = tasksupport
+ pv
+ }
+
+}
+
+
+
+abstract class IntParallelVectorCheck(val tasksupport: TaskSupport, descriptor: String) extends ParallelVectorCheck[Int](s"Int ($descriptor)") with IntSeqOperators with IntValues {
+ override def instances(vals: Seq[Gen[Int]]) = oneOf(super.instances(vals), sized { sz =>
+ (0 until sz).toArray.toSeq
+ }, sized { sz =>
+ (-sz until 0).toArray.toSeq
+ })
+}
+
+
+
+
+
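fromSeq above builds the ParVector element by element through its combiner and then pins it to the task support under test. Outside the harness the same effect is commonly obtained by converting an existing Vector with .par and assigning tasksupport directly. The sketch below is illustrative only (the pool size and names are arbitrary) and reuses the ExecutionContextTaskSupport that pc.scala later in this patch also relies on.

import java.util.concurrent.Executors
import scala.concurrent.ExecutionContext
import scala.collection.parallel.ExecutionContextTaskSupport

object TaskSupportSketch extends App {
  val pool = Executors.newFixedThreadPool(4)
  val ts   = new ExecutionContextTaskSupport(ExecutionContext.fromExecutorService(pool))

  val pv = (1 to 1000).toVector.par   // scala.collection.parallel.immutable.ParVector[Int]
  pv.tasksupport = ts                 // bulk operations on pv now run on `pool`

  val doubled = pv.map(_ * 2)         // this map executes on the explicit task support
  println(doubled.sum)

  pool.shutdown()
}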
diff --git a/test/scalacheck/scala/collection/parallel/mutable/ParallelArrayCheck.scala b/test/scalacheck/scala/collection/parallel/mutable/ParallelArrayCheck.scala
new file mode 100644
index 0000000000..39370f8c38
--- /dev/null
+++ b/test/scalacheck/scala/collection/parallel/mutable/ParallelArrayCheck.scala
@@ -0,0 +1,62 @@
+package scala.collection.parallel
+package mutable
+
+
+
+import org.scalacheck._
+import org.scalacheck.Gen
+import org.scalacheck.Gen._
+import org.scalacheck.Prop._
+import org.scalacheck.Properties
+import org.scalacheck.Arbitrary._
+
+import scala.collection._
+import scala.collection.parallel.ops._
+
+
+abstract class ParallelArrayCheck[T](tp: String) extends ParallelSeqCheck[T]("ParArray[" + tp + "]") {
+ // ForkJoinTasks.defaultForkJoinPool.setMaximumPoolSize(Runtime.getRuntime.availableProcessors * 2)
+ // ForkJoinTasks.defaultForkJoinPool.setParallelism(Runtime.getRuntime.availableProcessors * 2)
+
+ type CollType = ParArray[T]
+
+ def isCheckingViews = false
+
+ def hasStrictOrder = true
+
+ def tasksupport: TaskSupport
+
+ def ofSize(vals: Seq[Gen[T]], sz: Int) = {
+ val a = new mutable.ArrayBuffer[T](sz)
+ val gen = vals(rnd.nextInt(vals.size))
+ for (i <- 0 until sz) a += sample(gen)
+ a
+ }
+
+ def fromSeq(a: Seq[T]) = {
+ val pa = new ParArray[T](a.size)
+ pa.tasksupport = tasksupport
+ var i = 0
+ for (elem <- a.toList) {
+ pa(i) = elem
+ i += 1
+ }
+ pa
+ }
+
+ property("array mappings must be equal") = forAllNoShrink(collectionPairs) { case (t, coll) =>
+ val results = for ((f, ind) <- mapFunctions.zipWithIndex)
+ yield ("op index: " + ind) |: t.map(f) == coll.map(f)
+ results.reduceLeft(_ && _)
+ }
+
+}
+
+
+abstract class IntParallelArrayCheck(val tasksupport: TaskSupport, descriptor: String) extends ParallelArrayCheck[Int](s"Int ($descriptor)") with IntSeqOperators with IntValues {
+ override def instances(vals: Seq[Gen[Int]]) = oneOf(super.instances(vals), sized { sz =>
+ (0 until sz).toArray.toSeq
+ }, sized { sz =>
+ (-sz until 0).toArray.toSeq
+ })
+}
diff --git a/test/scalacheck/scala/collection/parallel/mutable/ParallelArrayTest.scala b/test/scalacheck/scala/collection/parallel/mutable/ParallelArrayTest.scala
new file mode 100644
index 0000000000..db2b1ea01e
--- /dev/null
+++ b/test/scalacheck/scala/collection/parallel/mutable/ParallelArrayTest.scala
@@ -0,0 +1,112 @@
+// package test.scala.collection.parallel.mutable
+
+// import org.scalatest.FunSuite
+// import collection.parallel.mutable.ParallelArray
+
+// /**
+// * Notes:
+// */
+// class ParallelArrayTest extends FunSuite {
+
+// test("create new parallel array with a bad initial capacity"){
+// intercept[IllegalArgumentException]{
+// new ParallelArray(-5)
+// }
+
+// /**
+// * this currently passes, but do we want it to?
+//     * is it meaningful to have an empty parallel array?
+// */
+// new ParallelArray(0)
+// ()
+// }
+
+// test("compare identical ParallelArrays"){
+// assert(new ParallelArray(5) === new ParallelArray(5))
+// assert(ParallelArray(1,2,3,4,5) === ParallelArray(1,2,3,4,5))
+// }
+
+// /**
+//  * This test needs attention. How is equality defined on ParallelArrays?
+//  * Well, the same way it is for normal collections, I guess. For normal arrays it's reference equality.
+//  * I do not think it should be that way in the case of ParallelArrays. I'll check this with Martin.
+// */
+// test("compare non-identical ParallelArrays"){
+// assert(ParallelArray(1,2,3,4,5) != ParallelArray(1,2,3,4),
+// "compared PA's that I expect to not be identical, but they were!")
+// }
+
+// test("creation via PA object [String]"){
+// val paFromApply: ParallelArray[String] = ParallelArray("x", "1", "true", "etrijwejiorwer")
+// val paFromHandoff: ParallelArray[String] = ParallelArray.handoff(Array("x", "1", "true", "etrijwejiorwer"))
+// val paFromCopy: ParallelArray[String] = ParallelArray.createFromCopy(Array("x", "1", "true", "etrijwejiorwer"))
+// assert( paFromApply === paFromCopy )
+// assert( paFromApply === paFromCopy )
+// }
+
+//  // // handoffs don't work for primitive types...
+// // test("creation via PA object [Boolean]"){
+// // val paFromApply: ParallelArray[Boolean] = ParallelArray(true, false, true, false)
+// // val paFromCopy: ParallelArray[Boolean] = ParallelArray.createFromCopy(Array(true, false, true, false))
+// // assert( paFromApply === paFromCopy )
+// // }
+// //
+//  // // handoffs don't work for primitive types...
+// // test("creation via PA object [Int]"){
+// // val paFromApply: ParallelArray[Int] = ParallelArray(1, 2, 4, 3)
+// // val paFromCopy: ParallelArray[Int] = ParallelArray.createFromCopy(Array(1, 2, 4, 3))
+// // assert( paFromApply === paFromCopy )
+// // }
+
+// /**
+// * This fails because handoff is really doing a copy.
+// * TODO: look at handoff
+// */
+// test("Handoff Is Really A Handoff"){
+// val arrayToHandOff = Array("a", "x", "y", "z")
+// val paFromHandoff: ParallelArray[String] = ParallelArray.handoff(arrayToHandOff)
+// arrayToHandOff(0) = "w"
+// assert(paFromHandoff(0) === "w")
+// }
+
+// test("simple reduce"){
+// assert( ParallelArray(1,2,3,4,5).reduce(_+_) === 15 )
+// }
+
+// test("simple count"){
+// assert( ParallelArray[Int]().count(_ > 7) === 0 )
+// assert( ParallelArray(1,2,3).count(_ > 7) === 0 )
+// assert( ParallelArray(1,2,3).count(_ <= 3) === 3 )
+// assert( ParallelArray(1,2,3,4,5,6,7,8,9,10).count(_ > 7 ) === 3 )
+// }
+
+// test("simple forall"){
+// assert( ParallelArray[Int]().forall(_ > 7) === true )
+// assert( ParallelArray(1,2,3).forall(_ > 3) === false )
+// assert( ParallelArray(1,2,3).forall(_ <= 3) === true )
+// assert( ParallelArray(1,2,3,4,5,6,7,8,9,10).forall(_ > 0) === true )
+// assert( ParallelArray(1,2,3,4,5,6,7,8,9,10).forall(_ < 5) === false )
+// }
+
+// /**
+// */
+// test("simple foreach"){
+// val buf = new java.util.concurrent.ArrayBlockingQueue[Int](10000)
+// ParallelArray((1 to 10000):_*).foreach(buf add _)
+// (1 to 10000).foreach(i => assert( buf contains i, "buf should have contained:" + i ))
+// }
+
+// test("simple exists"){
+// assert( ParallelArray[Int]().exists(_ => true) === false )
+// assert( ParallelArray(1,2,3).forall(_ > 3) === false )
+// assert( ParallelArray(1,2,3,4,5,6,7,8,9,10).exists(_ > 7) === true )
+// }
+
+// test("simple filter"){
+// assert(ParallelArray(1,2,3,4,5).filter( _ < 4 ) === ParallelArray(1,2,3))
+// }
+
+// test("simple map test"){
+// assert(ParallelArray(1,2,3,4,5).map( (_:Int) * 10 ) === ParallelArray(10,20,30,40,50))
+// }
+// }
diff --git a/test/scalacheck/scala/collection/parallel/mutable/ParallelArrayViewCheck.scala b/test/scalacheck/scala/collection/parallel/mutable/ParallelArrayViewCheck.scala
new file mode 100644
index 0000000000..fb09a5bbb7
--- /dev/null
+++ b/test/scalacheck/scala/collection/parallel/mutable/ParallelArrayViewCheck.scala
@@ -0,0 +1,122 @@
+// package scala.collection.parallel
+// package mutable
+
+
+
+
+
+
+// import org.scalacheck._
+// import org.scalacheck.Gen
+// import org.scalacheck.Gen._
+// import org.scalacheck.Prop._
+// import org.scalacheck.Properties
+// import org.scalacheck.Arbitrary._
+
+// import scala.collection.TraversableView
+// import scala.collection.mutable.ArrayBuffer
+// import scala.collection.parallel.ops._
+// import scala.collection.mutable.ArraySeq
+
+
+
+// abstract class ParallelArrayViewCheck[T](tp: String)
+// extends ParallelSeqCheck[T]("ParallelSeqView[" + tp + ", ParallelArray[" + tp + "]]") {
+// // ForkJoinTasks.defaultForkJoinPool.setMaximumPoolSize(Runtime.getRuntime.availableProcessors * 2)
+// // ForkJoinTasks.defaultForkJoinPool.setParallelism(Runtime.getRuntime.availableProcessors * 2)
+
+// type CollType = ParallelSeqView[T, ParallelArray[T], ArraySeq[T]]
+
+// def isCheckingViews = true
+
+// def instances(vals: Seq[Gen[T]]): Gen[Seq[T]] = sized { sz =>
+// val a = new ArrayBuffer[T](sz)
+// val gen = vals(rnd.nextInt(vals.size))
+// for (i <- 0 until sz) a += sample(gen)
+// a
+// }
+
+// def fromSeq(a: Seq[T]) = {
+// val pa = new ParallelArray[T](a.size)
+// var i = 0
+// for (elem <- a) {
+// pa(i) = elem
+// i += 1
+// }
+// pa.view
+// }
+
+// property("forces must be equal") = forAllNoShrink(collectionPairs) { case (s, coll) =>
+// val smodif = (s ++ s).reverse.take(s.length).reverse.zip(s).drop(s.length / 2)
+// val cmodif = (coll ++ s).reverse.take(s.length).reverse.zip(s).drop(s.length / 2).force
+// smodif == cmodif
+// }
+
+// }
+
+
+// object IntParallelArrayViewCheck extends ParallelArrayViewCheck[Int]("Int") with IntSeqOperators with IntValues {
+// override def instances(vals: Seq[Gen[Int]]) = oneOf(super.instances(vals), sized { sz =>
+// (0 until sz).toArray.toSeq
+// }, sized { sz =>
+// (-sz until 0).toArray.toSeq
+// })
+// }
+
+
+// abstract class ParallelArrayViewComposedCheck[T](tp: String)
+// extends ParallelSeqCheck[T]("ParallelSeqView[" + tp + "], ParallelArray[" + tp + "].++.patch.reverse.take.reverse") {
+// ForkJoinTasks.defaultForkJoinPool.setMaximumPoolSize(Runtime.getRuntime.availableProcessors * 2)
+// ForkJoinTasks.defaultForkJoinPool.setParallelism(Runtime.getRuntime.availableProcessors * 2)
+
+// type CollType = collection.parallel.ParallelSeq[T]
+
+// def isCheckingViews = true
+
+// def instances(vals: Seq[Gen[T]]): Gen[Seq[T]] = sized { sz =>
+// val a = new ArrayBuffer[T](sz)
+// val gen = vals(rnd.nextInt(vals.size))
+// for (i <- 0 until sz) a += sample(gen)
+// a
+// }
+
+// def fromSeq(a: Seq[T]) = {
+// val pa = new ParallelArray[T](a.size)
+// var i = 0
+// for (elem <- a) {
+// pa(i) = elem
+// i += 1
+// }
+// val modified = (pa.view ++ a).patch(0, a, a.length).reverse
+// val original = modified.take(modified.length / 2).reverse
+// original
+// }
+
+// }
+
+
+// object IntParallelArrayViewComposedCheck extends ParallelArrayViewComposedCheck[Int]("Int") with IntSeqOperators with IntValues {
+// override def instances(vals: Seq[Gen[Int]]) = oneOf(super.instances(vals), sized { sz =>
+// (0 until sz).toArray.toSeq
+// }, sized { sz =>
+// (-sz until 0).toArray.toSeq
+// })
+// }
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/test/scalacheck/scala/collection/parallel/mutable/ParallelCtrieCheck.scala b/test/scalacheck/scala/collection/parallel/mutable/ParallelCtrieCheck.scala
new file mode 100644
index 0000000000..ebdcf78bea
--- /dev/null
+++ b/test/scalacheck/scala/collection/parallel/mutable/ParallelCtrieCheck.scala
@@ -0,0 +1,101 @@
+package scala.collection.parallel
+package mutable
+
+
+
+import org.scalacheck._
+import org.scalacheck.Gen
+import org.scalacheck.Gen._
+import org.scalacheck.Prop._
+import org.scalacheck.Properties
+import org.scalacheck.Arbitrary._
+
+import scala.collection._
+import scala.collection.parallel.ops._
+
+
+
+abstract class ParallelConcurrentTrieMapCheck[K, V](tp: String) extends ParallelMapCheck[K, V]("mutable.ParConcurrentTrieMap[" + tp + "]") {
+ // ForkJoinTasks.defaultForkJoinPool.setMaximumPoolSize(Runtime.getRuntime.availableProcessors * 2)
+ // ForkJoinTasks.defaultForkJoinPool.setParallelism(Runtime.getRuntime.availableProcessors * 2)
+
+ type CollType = ParTrieMap[K, V]
+
+ def isCheckingViews = false
+
+ def hasStrictOrder = false
+
+ def tasksupport: TaskSupport
+
+ def ofSize(vals: Seq[Gen[(K, V)]], sz: Int) = {
+ val ct = new concurrent.TrieMap[K, V]
+ val gen = vals(rnd.nextInt(vals.size))
+ for (i <- 0 until sz) ct += sample(gen)
+ ct
+ }
+
+ def fromTraversable(t: Traversable[(K, V)]) = {
+ val pct = new ParTrieMap[K, V]
+ pct.tasksupport = tasksupport
+ var i = 0
+ for (kv <- t.toList) {
+ pct += kv
+ i += 1
+ }
+ pct
+ }
+
+}
+
+
+abstract class IntIntParallelConcurrentTrieMapCheck(val tasksupport: TaskSupport, descriptor: String) extends ParallelConcurrentTrieMapCheck[Int, Int](s"Int, Int ($descriptor)")
+with PairOperators[Int, Int]
+with PairValues[Int, Int]
+{
+ def intvalues = new IntValues {}
+ def kvalues = intvalues.values
+ def vvalues = intvalues.values
+
+ val intoperators = new IntOperators {}
+ def voperators = intoperators
+ def koperators = intoperators
+
+ override def printDataStructureDebugInfo(ds: AnyRef) = ds match {
+ case pm: ParTrieMap[k, v] =>
+ println("Mutable parallel ctrie")
+ case _ =>
+ println("could not match data structure type: " + ds.getClass)
+ }
+
+ override def checkDataStructureInvariants(orig: Traversable[(Int, Int)], ds: AnyRef) = ds match {
+ // case pm: ParHashMap[k, v] if 1 == 0 => // disabled this to make tests faster
+ // val invs = pm.brokenInvariants
+
+ // val containsall = (for ((k, v) <- orig) yield {
+ // if (pm.asInstanceOf[ParHashMap[Int, Int]].get(k) == Some(v)) true
+ // else {
+ // println("Does not contain original element: " + (k, v))
+ // false
+ // }
+ // }).foldLeft(true)(_ && _)
+
+
+ // if (invs.isEmpty) containsall
+ // else {
+ // println("Invariants broken:\n" + invs.mkString("\n"))
+ // false
+ // }
+ case _ => true
+ }
+
+}
+
+
+
+
+
+
+
+
+
+
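In the check above, ofSize populates a plain concurrent.TrieMap while fromTraversable builds the ParTrieMap that the properties operate on. Outside the harness the two sides are usually connected with .par; a rough sketch follows (names are illustrative, assuming the 2.12 standard library where TrieMap.par yields a ParTrieMap).

import scala.collection.concurrent.TrieMap
import scala.collection.parallel.mutable.ParTrieMap

object ParTrieMapSketch extends App {
  val seqMap = TrieMap((1 to 100).map(i => i -> i.toString): _*)

  // `.par` gives a parallel wrapper over the same underlying concurrent trie.
  val parMap: ParTrieMap[Int, String] = seqMap.par

  // A parallel bulk operation whose result agrees with the sequential one.
  println(parMap.count(_._1 % 2 == 0) == seqMap.count(_._1 % 2 == 0))
}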
diff --git a/test/scalacheck/scala/collection/parallel/mutable/ParallelHashMapCheck.scala b/test/scalacheck/scala/collection/parallel/mutable/ParallelHashMapCheck.scala
new file mode 100644
index 0000000000..06fdb66080
--- /dev/null
+++ b/test/scalacheck/scala/collection/parallel/mutable/ParallelHashMapCheck.scala
@@ -0,0 +1,100 @@
+package scala.collection.parallel
+package mutable
+
+
+
+import org.scalacheck._
+import org.scalacheck.Gen
+import org.scalacheck.Gen._
+import org.scalacheck.Prop._
+import org.scalacheck.Properties
+import org.scalacheck.Arbitrary._
+
+import scala.collection._
+import scala.collection.parallel.ops._
+
+
+abstract class ParallelHashMapCheck[K, V](tp: String) extends ParallelMapCheck[K, V]("mutable.ParHashMap[" + tp + "]") {
+ // ForkJoinTasks.defaultForkJoinPool.setMaximumPoolSize(Runtime.getRuntime.availableProcessors * 2)
+ // ForkJoinTasks.defaultForkJoinPool.setParallelism(Runtime.getRuntime.availableProcessors * 2)
+
+ type CollType = ParHashMap[K, V]
+
+ def isCheckingViews = false
+
+ def hasStrictOrder = false
+
+ def tasksupport: TaskSupport
+
+ def ofSize(vals: Seq[Gen[(K, V)]], sz: Int) = {
+ val hm = new mutable.HashMap[K, V]
+ val gen = vals(rnd.nextInt(vals.size))
+ for (i <- 0 until sz) hm += sample(gen)
+ hm
+ }
+
+ def fromTraversable(t: Traversable[(K, V)]) = {
+ val phm = new ParHashMap[K, V]
+ phm.tasksupport = tasksupport
+ var i = 0
+ for (kv <- t.toList) {
+ phm += kv
+ i += 1
+ }
+ phm
+ }
+
+}
+
+
+abstract class IntIntParallelHashMapCheck(val tasksupport: TaskSupport, descriptor: String) extends ParallelHashMapCheck[Int, Int](s"Int, Int ($descriptor)")
+with PairOperators[Int, Int]
+with PairValues[Int, Int]
+{
+ def intvalues = new IntValues {}
+ def kvalues = intvalues.values
+ def vvalues = intvalues.values
+
+ val intoperators = new IntOperators {}
+ def voperators = intoperators
+ def koperators = intoperators
+
+ override def printDataStructureDebugInfo(ds: AnyRef) = ds match {
+ case pm: ParHashMap[k, v] =>
+ println("Mutable parallel hash map\n" + pm.hashTableContents.debugInformation)
+ case _ =>
+ println("could not match data structure type: " + ds.getClass)
+ }
+
+ override def checkDataStructureInvariants(orig: Traversable[(Int, Int)], ds: AnyRef) = ds match {
+ // case pm: ParHashMap[k, v] if 1 == 0 => // disabled this to make tests faster
+ // val invs = pm.brokenInvariants
+
+ // val containsall = (for ((k, v) <- orig) yield {
+ // if (pm.asInstanceOf[ParHashMap[Int, Int]].get(k) == Some(v)) true
+ // else {
+ // println("Does not contain original element: " + (k, v))
+ // false
+ // }
+ // }).foldLeft(true)(_ && _)
+
+
+ // if (invs.isEmpty) containsall
+ // else {
+ // println("Invariants broken:\n" + invs.mkString("\n"))
+ // false
+ // }
+ case _ => true
+ }
+
+}
+
+
+
+
+
+
+
+
+
+
diff --git a/test/scalacheck/scala/collection/parallel/mutable/ParallelHashSetCheck.scala b/test/scalacheck/scala/collection/parallel/mutable/ParallelHashSetCheck.scala
new file mode 100644
index 0000000000..a968ed053f
--- /dev/null
+++ b/test/scalacheck/scala/collection/parallel/mutable/ParallelHashSetCheck.scala
@@ -0,0 +1,97 @@
+package scala.collection.parallel
+package mutable
+
+
+
+import org.scalacheck._
+import org.scalacheck.Gen
+import org.scalacheck.Gen._
+import org.scalacheck.Prop._
+import org.scalacheck.Properties
+import org.scalacheck.Arbitrary._
+
+import scala.collection._
+import scala.collection.parallel.ops._
+
+
+abstract class ParallelHashSetCheck[T](tp: String) extends ParallelSetCheck[T]("mutable.ParHashSet[" + tp + "]") {
+ // ForkJoinTasks.defaultForkJoinPool.setMaximumPoolSize(Runtime.getRuntime.availableProcessors * 2)
+ // ForkJoinTasks.defaultForkJoinPool.setParallelism(Runtime.getRuntime.availableProcessors * 2)
+
+ type CollType = ParHashSet[T]
+
+ def isCheckingViews = false
+
+ def hasStrictOrder = false
+
+ def tasksupport: TaskSupport
+
+ def ofSize(vals: Seq[Gen[T]], sz: Int) = {
+ val hm = new mutable.HashSet[T]
+ val gen = vals(rnd.nextInt(vals.size))
+ for (i <- 0 until sz) hm += sample(gen)
+ hm
+ }
+
+ def fromTraversable(t: Traversable[T]) = {
+ val phs = new ParHashSet[T]
+ phs.tasksupport = tasksupport
+ var i = 0
+ for (kv <- t.toList) {
+ phs += kv
+ i += 1
+ }
+ phs
+ }
+
+}
+
+
+abstract class IntParallelHashSetCheck(val tasksupport: TaskSupport, descriptor: String) extends ParallelHashSetCheck[Int](s"Int ($descriptor)")
+with IntOperators
+with IntValues
+{
+ override def printDataStructureDebugInfo(ds: AnyRef) = ds match {
+ case pm: ParHashSet[t] =>
+ println("Mutable parallel hash set")
+ case _ =>
+ println("could not match data structure type: " + ds.getClass)
+ }
+
+ override def checkDataStructureInvariants(orig: Traversable[Int], ds: AnyRef) = ds match {
+ // case pm: ParHashSet[t] if 1 == 0 =>
+ // // for an example of how not to write code proceed below
+ // val invs = pm.brokenInvariants
+
+ // val containsall = (for (elem <- orig) yield {
+ // if (pm.asInstanceOf[ParHashSet[Int]](elem) == true) true
+ // else {
+ // println("Does not contain original element: " + elem)
+ // println(pm.hashTableContents.table.find(_ == elem))
+ // println(pm.hashTableContents.table.indexOf(elem))
+ // false
+ // }
+ // }).foldLeft(true)(_ && _)
+
+
+ // if (invs.isEmpty) {
+ // if (!containsall) println(pm.debugInformation)
+ // containsall
+ // } else {
+ // println("Invariants broken:\n" + invs.mkString("\n"))
+ // false
+ // }
+ case _ => true
+ }
+
+}
+
+
+
+
+
+
+
+
+
+
diff --git a/test/scalacheck/scala/pc.scala b/test/scalacheck/scala/pc.scala
new file mode 100644
index 0000000000..10d0643be8
--- /dev/null
+++ b/test/scalacheck/scala/pc.scala
@@ -0,0 +1,61 @@
+// package here to be able to access the package-private implementation and shut down the pool
+package scala
+
+import org.scalacheck._
+import scala.collection.parallel._
+
+class ParCollProperties extends Properties("Parallel collections") {
+
+ def includeAllTestsWith(support: TaskSupport, descriptor: String) {
+    // parallel arrays
+ include(new mutable.IntParallelArrayCheck(support, descriptor) { })
+
+ // parallel ranges
+ include(new immutable.ParallelRangeCheck(support, descriptor) { })
+
+ // parallel immutable hash maps (tries)
+ include(new immutable.IntIntParallelHashMapCheck(support, descriptor) { })
+
+ // parallel immutable hash sets (tries)
+ include(new immutable.IntParallelHashSetCheck(support, descriptor) { })
+
+ // parallel mutable hash maps (tables)
+ include(new mutable.IntIntParallelHashMapCheck(support, descriptor) { })
+
+ // parallel ctrie
+ include(new mutable.IntIntParallelConcurrentTrieMapCheck(support, descriptor) { })
+
+ // parallel mutable hash sets (tables)
+ include(new mutable.IntParallelHashSetCheck(support, descriptor) { })
+
+ // parallel vectors
+ include(new immutable.IntParallelVectorCheck(support, descriptor) { })
+ }
+
+ includeAllTestsWith(defaultTaskSupport, "defaultTaskSupport")
+
+ val ec = scala.concurrent.ExecutionContext.fromExecutorService(java.util.concurrent.Executors.newFixedThreadPool(5))
+ val ectasks = new collection.parallel.ExecutionContextTaskSupport(ec)
+ includeAllTestsWith(ectasks, "ectasks")
+
+  // ScalaCheck has no post-test hooks, so we cannot call:
+ // ec.shutdown()
+
+}
+
+/*
+def main(args: Array[String]) {
+ val pc = new ParCollProperties
+ org.scalacheck.Test.checkProperties(
+ org.scalacheck.Test.Params(
+ rng = new java.util.Random(5134L),
+ testCallback = new ConsoleReporter(0),
+ workers = 1,
+ minSize = 0,
+ maxSize = 4000,
+ minSuccessfulTests = 5
+ ),
+ pc
+ )
+}
+*/
diff --git a/test/scalacheck/scala/reflect/quasiquotes/ArbitraryTreesAndNames.scala b/test/scalacheck/scala/reflect/quasiquotes/ArbitraryTreesAndNames.scala
new file mode 100644
index 0000000000..2f2be70403
--- /dev/null
+++ b/test/scalacheck/scala/reflect/quasiquotes/ArbitraryTreesAndNames.scala
@@ -0,0 +1,297 @@
+package scala.reflect.quasiquotes
+
+import org.scalacheck._, Prop._, Gen._, Arbitrary._
+import scala.reflect.runtime.universe._, internal._, Flag._
+
+trait ArbitraryTreesAndNames {
+ def smallList[T](size: Int, g: Gen[T]) = {
+ val n: Int = choose(0, size / 2 + 1).sample match {
+ case Some(i) => i
+ case None => 0
+ }
+ containerOfN[List, T](n, g)
+ }
+
+ def shortIdent(len: Int) =
+ for(name <- identifier)
+ yield if(name.length <= len) name
+ else name.substring(0, len - 1)
+
+ def genTermName = for(name <- shortIdent(8)) yield TermName(name)
+ def genTypeName = for(name <- shortIdent(8)) yield TypeName(name)
+ def genName = oneOf(genTermName, genTypeName)
+
+ def genFlagSet = oneOf(
+ TRAIT, INTERFACE, MUTABLE, MACRO,
+ DEFERRED, ABSTRACT, FINAL, SEALED,
+ IMPLICIT, LAZY, OVERRIDE, PRIVATE,
+ PROTECTED, LOCAL, CASE, ABSOVERRIDE,
+ BYNAMEPARAM, PARAM, COVARIANT, CONTRAVARIANT,
+ DEFAULTPARAM, PRESUPER, DEFAULTINIT
+ )
+
+ def genModifiers = for(flagset <- genFlagSet) yield Modifiers(flagset)
+
+ def genConstant =
+ for(value <- oneOf(arbitrary[Byte], arbitrary[Short], arbitrary[Char],
+ arbitrary[Int], arbitrary[Long], arbitrary[Float],
+ arbitrary[Double], arbitrary[Boolean], arbitrary[String]))
+ yield Constant(value)
+
+ def genAnnotated(size: Int, argGen: Int => Gen[Tree]) =
+ for(annot <- genTree(size - 1); arg <- argGen(size - 1))
+ yield Annotated(annot, arg)
+
+ def genAlternative(size: Int): Gen[Alternative] =
+ for(trees <- smallList(size, genTree(size - 1)))
+ yield Alternative(trees)
+
+ def genAppliedTypeTree(size: Int) =
+ for(tpt <- genTree(size - 1) if tpt.isType;
+ args <- smallList(size, genTree(size - 1)))
+ yield AppliedTypeTree(tpt, args)
+
+ def genApply(size: Int) =
+ for(fun <- genTree(size - 1);
+ args <- smallList(size, genTree(size - 1)))
+ yield Apply(fun, args)
+
+ def genAssign(size: Int) =
+ for(lhs <- genTree(size - 1); rhs <- genTree(size - 1))
+ yield Assign(lhs, rhs)
+
+ def genAssignOrNamedArg(size: Int) =
+ for(lhs <- genTree(size - 1); rhs <- genTree(size - 1))
+ yield AssignOrNamedArg(lhs, rhs)
+
+ def genBind(size: Int, nameGen: Gen[Name]) =
+ for(name <- nameGen; body <- genTree(size - 1))
+ yield Bind(name, body)
+
+ def genBlock(size: Int) =
+ for(stats <- smallList(size, genTree(size - 1)); expr <- genTree(size - 1))
+ yield Block(stats, expr)
+
+ def genCaseDef(size: Int) =
+ for(pat <- genTree(size - 1); guard <- genTree(size - 1); body <- genTree(size - 1))
+ yield CaseDef(pat, guard, body)
+
+ def genClassDef(size: Int) =
+ for(mods <- genModifiers; name <- genTypeName;
+ tparams <- smallList(size, genTypeDef(size - 1));
+ impl <- genTemplate(size - 1))
+ yield ClassDef(mods, name, tparams, impl)
+
+ def genCompoundTypeTree(size: Int) =
+ for(templ <- genTemplate(size - 1))
+ yield CompoundTypeTree(templ)
+
+ def genDefDef(size: Int) =
+ for(mods <- genModifiers; name <- genTermName;
+ tpt <- genTree(size -1); rhs <- genTree(size - 1);
+ tparams <- smallList(size, genTypeDef(size - 1));
+ vparamss <- smallList(size, smallList(size, genValDef(size - 1))))
+ yield DefDef(mods, name, tparams, vparamss, tpt, rhs)
+
+ def genExistentialTypeTree(size: Int) =
+ for(tpt <- genTree(size - 1); where <- smallList(size, oneOf(genValDef(size - 1), genTypeDef(size - 1))))
+ yield ExistentialTypeTree(tpt, where)
+
+ def genFunction(size: Int) =
+ for(vparams <- smallList(size, genValDef(size - 1)); body <- genTree(size - 1))
+ yield Function(vparams, body)
+
+ def genIdent(nameGen: Gen[Name] = genName) =
+ for(name <- nameGen) yield Ident(name)
+
+ def genIf(size: Int) =
+ for(cond <- genTree(size - 1); thenp <- genTree(size - 1); elsep <- genTree(size - 1))
+ yield If(cond, thenp, elsep)
+
+ def genImport(size: Int) =
+ for(expr <- genTree(size - 1); selectors <- smallList(size, genImportSelector(size - 1)))
+ yield Import(expr, selectors)
+
+ def genImportSelector(size: Int) =
+ for(name <- genName; namePos <- arbitrary[Int]; rename <- genName; renamePos <- arbitrary[Int])
+ yield ImportSelector(name, namePos, rename, renamePos)
+
+ def genTemplate(size: Int) =
+ for(parents <- smallList(size, genTree(size - 1));
+ self <- genValDef(size - 1);
+ body <- smallList(size, genTree(size - 1)))
+ yield Template(parents, self, body)
+
+ def genLabelDef(size: Int) =
+ for(name <- genTermName; params <- smallList(size, genIdent()); rhs <- genTree(size - 1))
+ yield LabelDef(name, params, rhs)
+
+ def genLiteral =
+ for(const <- genConstant) yield Literal(const)
+
+ def genMatch(size: Int) =
+ for(selector <- genTree(size - 1); cases <- smallList(size, genCaseDef(size - 1)))
+ yield Match(selector, cases)
+
+ def genModuleDef(size: Int) =
+ for(mods <- genModifiers; name <- genTermName; impl <- genTemplate(size - 1))
+ yield ModuleDef(mods, name, impl)
+
+ def genNew(size: Int) =
+ for(tpt <- genTree(size - 1))
+ yield New(tpt)
+
+ def genRefTree(size: Int) =
+ oneOf(genSelect(size), genIdent(), genSelectFromTypeTree(size))
+
+ def genPackageDef(size: Int) =
+ for(reftree <- genRefTree(size - 1); stats <- smallList(size, genTree(size - 1)))
+ yield PackageDef(reftree, stats)
+
+ def genTypeSelect(size: Int) =
+ for(qual <- genTree(size - 1); name <- genTypeName)
+ yield Select(qual, name)
+
+ def genSelect(size: Int, nameGen: Gen[Name] = genName) =
+ for(qual <- genTree(size - 1); name <- nameGen)
+ yield Select(qual, name)
+
+ def genSelectFromTypeTree(size: Int) =
+ for(qual <- genTreeIsType(size - 1); name <- genTypeName)
+ yield SelectFromTypeTree(qual, name)
+
+ def genReferenceToBoxed(size: Int) =
+ for(ident <- genIdent())
+ yield ReferenceToBoxed(ident)
+
+ def genReturn(size: Int) =
+ for(expr <- genTree(size - 1))
+ yield Return(expr)
+
+ def genSingletonTypeTree(size: Int) =
+ for(expr <- genTree(size - 1))
+ yield SingletonTypeTree(expr)
+
+ def genStar(size: Int) =
+ for(expr <- genTree(size - 1))
+ yield Star(expr)
+
+ def genSuper(size: Int) =
+ for(qual <- genTree(size - 1); mix <- genTypeName)
+ yield Super(qual, mix)
+
+ def genThis(size: Int) =
+ for(qual <- genTypeName)
+ yield This(qual)
+
+ def genThrow(size: Int) =
+ for(expr <- genTree(size - 1))
+ yield Throw(expr)
+
+ def genTry(size: Int) =
+ for(block <- genTree(size - 1);
+ catches <- smallList(size, genCaseDef(size - 1));
+ finalizer <- genTree(size - 1))
+ yield Try(block, catches, finalizer)
+
+ def genTypeApply(size: Int) =
+ for(fun <- genTreeIsTerm(size - 1); args <- smallList(size, genTree(size - 1)))
+ yield TypeApply(fun, args)
+
+ def genTypeBoundsTree(size: Int) =
+ for(lo <- genTree(size - 1); hi <- genTree(size - 1))
+ yield TypeBoundsTree(lo, hi)
+
+ def genTypeDef(size: Int): Gen[TypeDef] =
+ for(mods <- genModifiers; name <- genTypeName;
+ tparams <- smallList(size, genTypeDef(size - 1)); rhs <- genTree(size - 1))
+ yield TypeDef(mods, name, tparams, rhs)
+
+ def genTypeTree: Gen[TypeTree] = TypeTree()
+
+ def genTyped(size: Int) =
+ for(expr <- genTree(size - 1); tpt <- genTree(size - 1))
+ yield Typed(expr, tpt)
+
+ def genUnApply(size: Int) =
+ for(fun <- genTree(size - 1); args <- smallList(size, genTree(size - 1)))
+ yield UnApply(fun, args)
+
+ def genValDef(size: Int) =
+ for(mods <- genModifiers; name <- genTermName;
+ tpt <- genTree(size - 1); rhs <- genTree(size - 1))
+ yield ValDef(mods, name, tpt, rhs)
+
+ def genTree(size: Int): Gen[Tree] =
+ if (size <= 1) oneOf(EmptyTree: Gen[Tree], genTreeIsTerm(size), genTreeIsType(size))
+ else oneOf(genTree(1),
+ // these trees are neither terms nor types
+ genPackageDef(size - 1), genModuleDef(size - 1),
+ genCaseDef(size - 1), genDefDef(size - 1),
+ genTypeDef(size - 1), genTemplate(size - 1),
+ genClassDef(size - 1), genValDef(size - 1),
+ genImport(size - 1))
+
+ def genTreeIsTerm(size: Int): Gen[Tree] =
+ if (size <= 1) oneOf(genLiteral, genIdent(genTermName))
+ else oneOf(genTreeIsTerm(1), genBind(size - 1, genTermName),
+ genAnnotated(size - 1, genTreeIsTerm), genSelect(size - 1, genTermName),
+ genAlternative(size - 1), genApply(size - 1), genAssign(size - 1),
+ genAssignOrNamedArg(size - 1), genBlock(size - 1), genFunction(size - 1),
+ genIf(size - 1), genLabelDef(size - 1), genMatch(size - 1), genNew(size - 1),
+ genReturn(size - 1), genStar(size - 1), genSuper(size - 1), genThis(size - 1),
+ genThrow(size - 1), genTry(size - 1), genTypeApply(size - 1),
+ genTyped(size - 1), genUnApply(size - 1))
+
+ def genTreeIsType(size: Int): Gen[Tree] =
+ if (size <= 1) genIdent(genTypeName)
+ else oneOf(genTreeIsType(1), genAnnotated(size - 1, genTreeIsType),
+ genBind(size - 1, genTypeName), genSelect(size - 1, genTypeName),
+ genSingletonTypeTree(size - 1), genSelectFromTypeTree(size - 1),
+ genExistentialTypeTree(size - 1), genCompoundTypeTree(size - 1),
+ genAppliedTypeTree(size - 1), genTypeBoundsTree(size - 1))
+
+  /* These are marker types that make it possible to write tests
+   * that depend specifically on Trees that are terms or types.
+   * They are transparently converted to trees through
+   * implicit conversions and Liftable instances for quasiquotes.
+ */
+
+ case class TreeIsTerm(tree: Tree) { require(tree.isTerm, showRaw(tree)) }
+ case class TreeIsType(tree: Tree) { require(tree.isType, showRaw(tree)) }
+
+ def genTreeIsTermWrapped(size: Int) =
+ for(tit <- genTreeIsTerm(size)) yield TreeIsTerm(tit)
+
+ def genTreeIsTypeWrapped(size: Int) =
+ for(tit <- genTreeIsType(size)) yield TreeIsType(tit)
+
+ implicit val liftTreeIsTerm = Liftable[TreeIsTerm] { _.tree }
+ implicit val liftTreeIsType = Liftable[TreeIsType] { _.tree }
+ implicit def treeIsTerm2tree(tit: TreeIsTerm): Tree = tit.tree
+ implicit def treeIsType2tree(tit: TreeIsType): Tree = tit.tree
+
+ implicit val arbConstant: Arbitrary[Constant] = Arbitrary(genConstant)
+ implicit val arbModifiers: Arbitrary[Modifiers] = Arbitrary(genModifiers)
+ implicit val arbTermName: Arbitrary[TermName] = Arbitrary(genTermName)
+ implicit val arbTypeName: Arbitrary[TypeName] = Arbitrary(genTypeName)
+ implicit val arbName: Arbitrary[Name] = Arbitrary(genName)
+
+  // Tree generators are bounded by this size to keep
+  // generation times short and memory consumption low.
+ // TODO: is there any better solution?
+ val maxTreeSize = 5
+
+ def arbitrarySized[T](gen: Int => Gen[T]) =
+ Arbitrary(sized(s => gen(s.min(maxTreeSize))))
+
+ implicit val arbLiteral: Arbitrary[Literal] = Arbitrary(genLiteral)
+ implicit val arbIdent: Arbitrary[Ident] = Arbitrary(genIdent())
+ implicit val arbValDef: Arbitrary[ValDef] = arbitrarySized(genValDef)
+ implicit val arbDefDef: Arbitrary[DefDef] = arbitrarySized(genDefDef)
+ implicit val arbTypeDef: Arbitrary[TypeDef] = arbitrarySized(genTypeDef)
+ implicit val arbBind: Arbitrary[Bind] = arbitrarySized(genBind(_, genName))
+ implicit val arbTree: Arbitrary[Tree] = arbitrarySized(genTree)
+ implicit val arbTreeIsTerm: Arbitrary[TreeIsTerm] = arbitrarySized(genTreeIsTermWrapped)
+ implicit val arbTreeIsType: Arbitrary[TreeIsType] = arbitrarySized(genTreeIsTypeWrapped)
+}
\ No newline at end of file
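The block comment above explains why TreeIsTerm and TreeIsType exist: a property can ask ScalaCheck specifically for term or type trees and still unquote them directly, thanks to the Liftable instances. A hypothetical property using them could look like the sketch below; it is not part of this suite, and the surrounding trait and the shape being matched are illustrative.

import org.scalacheck.Prop.forAll
import scala.reflect.runtime.universe._

// Hypothetical usage of the arbitraries and Liftables defined above.
trait ExampleTreeIsTermProps { self: ArbitraryTreesAndNames =>
  val unquotingPreservesTheTerm =
    forAll { (t: TreeIsTerm) =>
      val wrapped = q"f($t)"              // t is lifted via liftTreeIsTerm
      wrapped match {
        case q"f($arg)" => arg.equalsStructure(t.tree)
        case _          => false
      }
    }
}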
diff --git a/test/scalacheck/scala/reflect/quasiquotes/DefinitionConstructionProps.scala b/test/scalacheck/scala/reflect/quasiquotes/DefinitionConstructionProps.scala
new file mode 100644
index 0000000000..9d35c9229d
--- /dev/null
+++ b/test/scalacheck/scala/reflect/quasiquotes/DefinitionConstructionProps.scala
@@ -0,0 +1,455 @@
+package scala.reflect.quasiquotes
+
+import org.scalacheck._, Prop._, Gen._, Arbitrary._
+import scala.reflect.runtime.universe._, Flag._, internal.reificationSupport.ScalaDot
+
+object DefinitionConstructionProps
+ extends QuasiquoteProperties("definition construction")
+ with ClassConstruction
+ with TraitConstruction
+ with TypeDefConstruction
+ with ValDefConstruction
+ with PatDefConstruction
+ with DefConstruction
+ with PackageConstruction
+ with ImportConstruction {
+
+ val x: Tree = q"val x: Int"
+ property("SI-6842 a1") = test { assertEqAst(q"def f($x) = 0", "def f(x: Int) = 0") }
+ property("SI-6842 a2") = test { assertEqAst(q"class C($x)", "class C(val x: Int)") }
+ property("SI-6842 a3") = test { assertEqAst(q"class C { $x => }", "class C { x: Int => }") }
+ property("SI-6842 a4") = test { assertEqAst(q"trait B { $x => }", "trait B { x: Int => }") }
+ property("SI-6842 a5") = test { assertEqAst(q"object A { $x => }", "object A { x: Int => }") }
+
+ val t: Tree = q"type T"
+ property("SI-6842 b1") = test { assertEqAst(q"def f[$t] = 0", "def f[T] = 0") }
+ property("SI-6842 b2") = test { assertEqAst(q"class C[$t]", "class C[T]") }
+ property("SI-6842 b3") = test { assertEqAst(q"trait B[$t]", "trait B[T]") }
+}
+
+trait ClassConstruction { self: QuasiquoteProperties =>
+ val anyRef = ScalaDot(TypeName("AnyRef"))
+  val emptyConstructor =
+ DefDef(Modifiers(), termNames.CONSTRUCTOR, List(),
+ List(List()), TypeTree(), Block(List(pendingSuperCall), Literal(Constant(()))))
+ def classWith(name: TypeName, parents: List[Tree] = List(anyRef), body: List[DefDef] = Nil) =
+ ClassDef(
+ Modifiers(), name, List(),
+      Template(parents, emptyValDef, emptyConstructor :: body))
+
+ property("construct case class") = test {
+ val params = q"val x: Int" :: q"val y: Int" :: Nil
+ val name = TypeName("Point")
+ assertEqAst(q"$CASE class $name(..$params)", "case class Point(x: Int, y: Int)")
+ }
+
+ property("case class bare param") = test {
+ assertEqAst(q"$CASE class Point(x: Int, y: Int)", "case class Point(private[this] val x: Int, private[this] val y: Int)")
+ }
+
+ property("generate default constructors automatically") = test {
+ val parents = List.empty[Tree]
+ assertEqAst(q"class Foo extends ..$parents", "class Foo")
+ }
+
+ property("unquote term name into class") = forAll { (rname: TypeName) =>
+    // add a prefix to avoid failure in case rname is a keyword
+ val name = TypeName("prefix$" + rname)
+ eqAst(q"class $name", "class " + name.toString)
+ }
+
+ property("unquote method into class") = forAll { (name: TypeName, method: DefDef) =>
+ q"class $name { $method }" ≈ classWith(name, body = List(method))
+ }
+
+ property("unquote members into class") = forAll { (name: TypeName, defs: List[DefDef], extra: DefDef) =>
+ q"""class $name {
+ ..$defs
+ $extra
+ }""" ≈ classWith(name, body = defs :+ extra)
+ }
+
+ property("unquote type name into class parents") = forAll { (name: TypeName, parent: TypeName) =>
+ q"class $name extends $parent" ≈ classWith(name, parents = List(Ident(parent)))
+ }
+
+ property("param flags are consistent with raw code") = test {
+ val pubx = q"val x: Int"
+ val privx = q"private[this] val x: Int"
+ assertEqAst(q" class C(x: Int)", " class C(x: Int) ")
+ assertEqAst(q"case class C(x: Int)", "case class C(x: Int) ")
+ assertEqAst(q" class C($pubx) ", " class C(val x: Int) ")
+ assertEqAst(q"case class C($pubx) ", "case class C(x: Int) ")
+ assertEqAst(q" class C($privx)", " class C(x: Int) ")
+ assertEqAst(q"case class C($privx)", "case class C(private[this] val x: Int)")
+ }
+
+ property("SI-8333") = test {
+ assertEqAst(q"{ $NoMods class C }", "{ class C }")
+ }
+
+ property("SI-8332") = test {
+ val args = q"val a: Int; val b: Int"
+ assertEqAst(q"class C(implicit ..$args)", "class C(implicit val a: Int, val b: Int)")
+ }
+
+ property("SI-8451: inline secondary constructors") = test {
+ assertEqAst(q"class C(x: Int) { def this() = this(0) }", "class C(x: Int) { def this() = this(0) }")
+ }
+
+ property("SI-8451: unquoted secondary constructors") = test {
+ val secondaryCtor = q"def this() = this(0)"
+ assertEqAst(q"class C(x: Int) { $secondaryCtor }", "class C(x: Int) { def this() = this(0) }")
+ }
+}
+
+trait TraitConstruction { self: QuasiquoteProperties =>
+ property("unquote name into trait def") = test {
+ val Foo = TypeName("Foo")
+ assert(q"trait $Foo" ≈ q"trait Foo")
+ }
+
+ property("unquote type params into trait def") = test {
+ val tparams = q"type A" :: q"type B" :: Nil
+ assert(q"trait Foo[..$tparams]" ≈ q"trait Foo[A, B]")
+ }
+
+ property("unquote defs into trait body") = test {
+ val body = q"def foo" :: q"val bar: Baz" :: Nil
+ assert(q"trait Foo { ..$body }" ≈ q"trait Foo { def foo; val bar: Baz }")
+ }
+
+ property("unquote parents into trait") = test {
+ val parents = tq"A" :: tq"B" :: Nil
+ assert(q"trait Foo extends ..$parents" ≈ q"trait Foo extends A with B")
+ }
+
+ property("unquote early valdef into trait") = test {
+ val x = q"val x: Int = 1"
+ assertEqAst(q"trait T extends { $x } with Any", "trait T extends { val x: Int = 1} with Any")
+ }
+
+ property("construct trait with early valdef") = test {
+ assertEqAst(q"trait T extends { val x: Int = 1 } with Any", "trait T extends { val x: Int = 1 } with Any")
+ }
+
+ property("unquote defs into early block") = test {
+ val defs = q"val x: Int = 0" :: q"type Foo = Bar" :: Nil
+ assert(q"trait T extends { ..$defs } with Bippy" ≈
+ q"trait T extends { val x: Int = 0; type Foo = Bar} with Bippy")
+ }
+
+ property("fail on splicing of non-valid early tree") = test {
+ val defn = q"def x: Int = 0"
+ assertThrows[IllegalArgumentException] { q"trait T extends { $defn } with Bar" }
+ }
+}
+
+trait TypeDefConstruction { self: QuasiquoteProperties =>
+ property("unquote type name into typedef") = forAll { (name1: TypeName, name2: TypeName) =>
+ q"type $name1 = $name2" ≈ TypeDef(Modifiers(), name1, List(), Ident(name2))
+ }
+
+ property("unquote type names into type bounds") = forAll { (T1: TypeName, T2: TypeName, T3: TypeName) =>
+ q"type $T1 >: $T2 <: $T3" ≈
+ TypeDef(
+ Modifiers(DEFERRED), T1, List(),
+ TypeBoundsTree(Ident(T2), Ident(T3)))
+ }
+
+ property("unquote trees names into type bounds") = forAll { (T: TypeName, t1: Tree, t2: Tree) =>
+ q"type $T >: $t1 <: $t2" ≈
+ TypeDef(
+ Modifiers(DEFERRED), T, List(),
+ TypeBoundsTree(t1, t2))
+ }
+
+ property("unquote tparams into typedef (1)") = forAll { (T: TypeName, targs: List[TypeDef], t: Tree) =>
+ q"type $T[..$targs] = $t" ≈ TypeDef(Modifiers(), T, targs, t)
+ }
+
+ property("unquote tparams into typedef (2)") = forAll { (T: TypeName, targs1: List[TypeDef], targs2: List[TypeDef], t: Tree) =>
+ q"type $T[..$targs1, ..$targs2] = $t" ≈ TypeDef(Modifiers(), T, targs1 ++ targs2, t)
+ }
+
+ property("unquote tparams into typedef (3)") = forAll { (T: TypeName, targ: TypeDef, targs: List[TypeDef], t: Tree) =>
+ q"type $T[$targ, ..$targs] = $t" ≈ TypeDef(Modifiers(), T, targ :: targs, t)
+ }
+
+ property("unquote typename into typedef with default bounds") = forAll { (T1: TypeName, T2: TypeName, t: Tree) =>
+ q"type $T1[$T2 >: Any <: Nothing] = $t" ≈
+ TypeDef(
+ Modifiers(), T1,
+ List(TypeDef(
+ Modifiers(PARAM), T2,
+ List(),
+ TypeBoundsTree(
+ Ident(TypeName("Any")),
+ Ident(TypeName("Nothing"))))),
+ t)
+ }
+
+ property("unquote type names into compound type tree") = forAll { (T: TypeName, A: TypeName, B: TypeName) =>
+ q"type $T = $A with $B" ≈
+ TypeDef(
+ Modifiers(), T, List(),
+ CompoundTypeTree(
+ Template(List(Ident(A), Ident(B)), ValDef(Modifiers(PRIVATE), termNames.WILDCARD, TypeTree(), EmptyTree), List())))
+ }
+
+ property("unquote trees into existential type tree") = forAll {
+ (T1: TypeName, T2: TypeName, X: TypeName, Lo: TypeName, Hi: TypeName) =>
+
+ q"type $T1 = $T2[$X] forSome { type $X >: $Lo <: $Hi }" ≈
+ TypeDef(
+ Modifiers(), T1, List(),
+ ExistentialTypeTree(
+ AppliedTypeTree(Ident(T2), List(Ident(X))),
+ List(
+ TypeDef(Modifiers(DEFERRED), X, List(), TypeBoundsTree(Ident(Lo), Ident(Hi))))))
+ }
+
+ property("unquote tree into singleton type tree") = forAll { (name: TypeName, t: Tree) =>
+ q"type $name = $t.type" ≈ q"type $name = ${SingletonTypeTree(t)}"
+ }
+
+ property("unquote into applied type tree") = forAll { (T1: TypeName, T2: TypeName, args: List[Tree]) =>
+ q"type $T1 = $T2[..$args]" ≈
+ TypeDef(Modifiers(), T1, List(),
+ if(args.nonEmpty) AppliedTypeTree(Ident(T2), args) else Ident(T2))
+ }
+}
+
+trait ValDefConstruction { self: QuasiquoteProperties =>
+ property("unquote into val") = forAll { (name: TermName, tpt: Tree, rhs: Tree) =>
+ q"val $name: $tpt = $rhs" ≈ ValDef(Modifiers(), name, tpt, rhs)
+ }
+
+ property("unquote into var") = forAll { (name: TermName, tpt: Tree, rhs: Tree) =>
+ q"var $name: $tpt = $rhs" ≈ ValDef(Modifiers(MUTABLE), name, tpt, rhs)
+ }
+
+  // left tree is not a pattern due to SI-8211
+ property("SI-8202") = test {
+ assertEqAst(q"val (x: Int) = 1", "val x: Int = 1")
+ }
+}
+
+trait PatDefConstruction { self: QuasiquoteProperties =>
+ property("unquote pattern into pat def") = test {
+ val pat = pq"(a, b)"
+ assertEqAst(q"val $pat = (1, 2)", "val (a, b) = (1, 2)")
+ val tpt = tq"(Int, Int)"
+ assertEqAst(q"val $pat: $tpt = (1, 2)", "val (a, b): (Int, Int) = (1, 2)")
+ }
+
+ property("unquote pattern into pat def within other pattern (1)") = test {
+ val pat = pq"(a, b)"
+ assertEqAst(q"val Foo($pat) = Foo((1, 2))", "val Foo((a, b)) = Foo((1, 2))")
+ val tpt = tq"Foo"
+ assertEqAst(q"val Foo($pat): $tpt = Foo((1, 2))", "val Foo((a, b)): Foo = Foo((1, 2))")
+ }
+
+ property("unquote patterns into pat def within other pattern (2)") = test {
+ val pat1 = pq"(a, b)"; val pat2 = pq"(c, d)"
+ assertEqAst(q"val ($pat1, $pat2) = ((1, 2), (3, 4))", "val ((a, b), (c, d)) = ((1, 2), (3, 4))")
+ val tpt = tq"((Int, Int), (Int, Int))"
+ assertEqAst(q"val ($pat1, $pat2): $tpt = ((1, 2), (3, 4))", "val ((a, b), (c, d)): ((Int, Int), (Int, Int)) = ((1, 2), (3, 4))")
+ }
+
+ property("unquote pattern without free vars into pat def") = test {
+ val pat = pq"((1, 2), 3)"
+ assertEqAst(q"val $pat = ((1, 2), 3)", "{ val ((1, 2), 3) = ((1, 2), 3) }")
+ val tpt = tq"((Int, Int), Int)"
+ assertEqAst(q"val $pat: $tpt = ((1, 2), 3)","{ val ((1, 2), 3): ((Int, Int), Int) = ((1, 2), 3) }")
+ }
+
+  // won't result in a pattern match due to SI-8211
+ property("unquote typed pat into pat def") = test {
+ val pat = pq"x: Int"
+ assertEqAst(q"val $pat = 2", "{ val x: Int = 2 }")
+ }
+}
+
+trait MethodConstruction { self: QuasiquoteProperties =>
+ property("unquote paramss into defdef") = test {
+ val paramss = List(q"val x: Int") :: List(q"val y: Int = 1") :: Nil
+ assert(q"def foo(...$paramss)" ≈ parse("def foo(x: Int)(y: Int = 1)"))
+ }
+
+ property("unquote tparams into defdef") = test {
+ val tparams = q"type A" :: q"type B <: Bippy" :: Nil
+ assert(q"def foo[..$tparams]" ≈ parse("def foo[A, B <: Bippy]"))
+ }
+
+ def assertSameAnnots(tree: {def mods: Modifiers}, annots: List[Tree]) =
+ assert(tree.mods.annotations ≈ annots,
+ s"${tree.mods.annotations} =/= ${annots}")
+
+ def assertSameAnnots(tree1: {def mods: Modifiers}, tree2: {def mods: Modifiers}) =
+ assert(tree1.mods.annotations ≈ tree2.mods.annotations,
+ s"${tree1.mods.annotations} =/= ${tree2.mods.annotations}")
+
+ property("unquote type name into annotation") = test {
+ val name = TypeName("annot")
+ assertSameAnnots(q"@$name def foo", List(q"new $name"))
+ }
+
+ property("unquote ident into annotation") = test {
+ val name = TypeName("annot")
+ val ident = Ident(name)
+ assertSameAnnots(q"@$ident def foo", List(q"new $name"))
+ }
+
+ property("unquote idents into annotation") = test {
+ val idents = List(Ident(TypeName("annot1")), Ident(TypeName("annot2")))
+ assertSameAnnots(q"@..$idents def foo",
+ idents.map { ident => Apply(Select(New(ident), termNames.CONSTRUCTOR), List()) })
+ }
+
+ property("unquote constructor calls into annotation") = test {
+ val ctorcalls = List(q"new a1", q"new a2")
+ assertSameAnnots(q"@..$ctorcalls def foo", ctorcalls)
+ }
+
+ property("unquote multiple annotations (1)") = test {
+ val annot1 = q"new a1"
+ val annot2 = q"new a2"
+ val res = q"@$annot1 @$annot2 def foo"
+ assertSameAnnots(res, List(annot1, annot2))
+ }
+
+ property("unquote multiple annotations (2)") = test {
+ val annot1 = q"new a1"
+ val annots = List(q"new a2", q"new a3")
+ val res = q"@$annot1 @..$annots def foo"
+ assertSameAnnots(res, annot1 :: annots)
+ }
+
+ property("unquote annotations with arguments (1)") = test {
+ val a = q"new a(x)"
+ assertSameAnnots(q"@$a def foo", q"@a(x) def foo")
+ }
+
+ property("unquote annotations with arguments (2)") = test {
+ val a = TypeName("a")
+ assertSameAnnots(q"@$a(x) def foo", q"@a(x) def foo")
+ }
+
+ property("unquote annotations with arguments (3") = test {
+ val a = Ident(TypeName("a"))
+ assertSameAnnots(q"@$a(x) def foo", q"@a(x) def foo")
+ }
+
+ property("unquote improper tree into annot") = test {
+ val t = tq"Foo[Baz]"
+ assertThrows[IllegalArgumentException] {
+ q"@$t def foo"
+ }
+ }
+
+ property("can't unquote annotations with arguments specified twice") = test {
+ val a = q"new a(x)"
+ assertThrows[IllegalArgumentException] {
+ q"@$a(y) def foo"
+ }
+ }
+
+ property("unquote annotation with targs") = test {
+ val a = q"new Foo[A, B]"
+ assertEqAst(q"@$a def foo", "@Foo[A,B] def foo")
+ }
+
+ property("unquote annotation with multiple argument lists") = test {
+ val a = q"new Foo(a)(b)"
+ assertEqAst(q"@$a def foo", "@Foo(a)(b) def foo")
+ }
+}
+
+trait PackageConstruction { self: QuasiquoteProperties =>
+ property("unquote select into package name") = test {
+ val name = q"foo.bar"
+ assertEqAst(q"package $name { }", "package foo.bar { }")
+ }
+
+ property("splice name into package name") = test{
+ val name = TermName("bippy")
+ assertEqAst(q"package $name { }", "package bippy { }")
+ }
+
+ property("unquote members into package body") = test {
+ val members = q"class C" :: q"object O" :: Nil
+ assertEqAst(q"package foo { ..$members }", "package foo { class C; object O }")
+ }
+
+ property("unquote illegal members into package body") = test {
+ val f = q"def f"
+ assertThrows[IllegalArgumentException] { q"package foo { $f }" }
+ val v = q"val v = 0"
+ assertThrows[IllegalArgumentException] { q"package foo { $v }" }
+ val expr = q"x + 1"
+ assertThrows[IllegalArgumentException] { q"package foo { $expr }" }
+ }
+
+ property("unquote name into package object") = test {
+ val foo = TermName("foo")
+ assertEqAst(q"package object $foo", "package object foo")
+ }
+
+ property("unquote parents into package object") = test {
+ val parents = tq"a" :: tq"b" :: Nil
+ assertEqAst(q"package object foo extends ..$parents",
+ "package object foo extends a with b")
+ }
+
+ property("unquote members into package object") = test {
+ val members = q"def foo" :: q"val x = 1" :: Nil
+ assertEqAst(q"package object foo { ..$members }",
+ "package object foo { def foo; val x = 1 }")
+ }
+
+ property("unquote early def into package object") = test {
+ val edefs = q"val x = 1" :: q"type I = Int" :: Nil
+ assertEqAst(q"package object foo extends { ..$edefs } with Any",
+ "package object foo extends { val x = 1; type I = Int } with Any")
+ }
+}
+
+trait DefConstruction { self: QuasiquoteProperties =>
+ property("construct implicit args (1)") = test {
+ val x = q"val x: Int"
+ assertEqAst(q"def foo(implicit $x) = x", "def foo(implicit x: Int) = x")
+ }
+
+ property("construct implicit args (2)") = test {
+ val xs = q"val x1: Int" :: q"val x2: Long" :: Nil
+ assertEqAst(q"def foo(implicit ..$xs) = x1 + x2", "def foo(implicit x1: Int, x2: Long) = x1 + x2")
+ }
+}
+
+trait ImportConstruction { self: QuasiquoteProperties =>
+ property("construct wildcard import") = test {
+ val sel = pq"_"
+ assert(q"import foo.$sel" ≈ q"import foo._")
+ }
+
+ property("construct named import") = test {
+ val sel = pq"bar"
+ assert(q"import foo.$sel" ≈ q"import foo.bar")
+ }
+
+ property("construct renaming import") = test {
+ val sel = pq"bar -> baz"
+ assert(q"import foo.$sel" ≈ q"import foo.{bar => baz}")
+ }
+
+ property("construct unimport import") = test {
+ val sels = pq"poison -> _" :: pq"_" :: Nil
+ assert(q"import foo.{..$sels}" ≈ q"import foo.{poison => _, _}")
+ }
+
+ property("construct mixed import") = test {
+ val sels = pq"a -> b" :: pq"c -> _" :: pq"_" :: Nil
+ assert(q"import foo.{..$sels}" ≈ q"import foo.{a => b, c => _, _}")
+ }
+}
diff --git a/test/scalacheck/scala/reflect/quasiquotes/DefinitionDeconstructionProps.scala b/test/scalacheck/scala/reflect/quasiquotes/DefinitionDeconstructionProps.scala
new file mode 100644
index 0000000000..54ec966836
--- /dev/null
+++ b/test/scalacheck/scala/reflect/quasiquotes/DefinitionDeconstructionProps.scala
@@ -0,0 +1,292 @@
+package scala.reflect.quasiquotes
+
+import org.scalacheck._, Prop._, Gen._, Arbitrary._
+import scala.reflect.runtime.universe._, Flag._, internal.reificationSupport.SyntacticClassDef
+
+object DefinitionDeconstructionProps
+ extends QuasiquoteProperties("definition deconstruction")
+ with TraitDeconstruction
+ with ClassDeconstruction
+ with ObjectDeconstruction
+ with ModsDeconstruction
+ with ValVarDeconstruction
+ with DefDeconstruction
+ with PackageDeconstruction
+ with ImportDeconstruction
+
+trait TraitDeconstruction { self: QuasiquoteProperties =>
+ property("exhaustive trait matcher") = test {
+ def matches(line: String) {
+ val q"""$mods trait $name[..$targs]
+ extends { ..$early } with ..$parents { $self => ..$body }""" = parse(line)
+ }
+ matches("trait Foo")
+ matches("trait Foo[T]")
+ matches("trait Foo { def bar }")
+ matches("trait Foo extends Bar with Baz")
+ matches("trait Foo { self: Bippy => val x: Int = 1}")
+ matches("trait Foo extends { val early: Int = 1 } with Bar { val late = early }")
+ matches("private[Gap] trait Foo")
+ }
+}
+
+trait ObjectDeconstruction { self: QuasiquoteProperties =>
+ property("exhaustive object matcher") = test {
+ def matches(line: String) = {
+ val q"""$mods object $name extends { ..$early } with ..$parents { $self => ..$body }""" = parse(line)
+ }
+ matches("object Foo")
+ matches("object Foo extends Bar[T]")
+ matches("object Foo extends { val early: T = v } with Bar")
+ matches("object Foo extends Foo { selfy => body }")
+ matches("private[Bippy] object Foo extends Bar with Baz")
+ }
+}
+
+trait ClassDeconstruction { self: QuasiquoteProperties =>
+ property("class without params") = test {
+ val q"class $name { ..$body }" = q"class Foo { def bar = 3 }"
+ assert(body ≈ List(q"def bar = 3"))
+ }
+
+ property("class constructor") = test {
+ val q"class $name(...$argss)" = q"class Foo(x: Int)(y: Int)"
+ assert(argss.length == 2)
+ }
+
+ property("class parents") = test {
+ val q"class $name extends ..$parents" = q"class Foo extends Bar with Blah"
+ assert(parents ≈ List(tq"Bar", tq"Blah"))
+ }
+
+ property("class selfdef") = test {
+ val q"class $name { $self => }" = q"class Foo { self: T => }"
+ assert(self.name ≈ TermName("self") && self.tpt ≈ tq"T")
+ }
+
+ property("class tparams") = test {
+ val q"class $name[..$tparams]" = q"class Foo[A, B]"
+ assert(tparams.map { _.name } == List(TypeName("A"), TypeName("B")))
+ }
+
+ property("deconstruct bare case class") = test {
+ val q"$mods class $name(..$args) extends ..$parents" = q"case class Foo(x: Int)"
+ }
+
+ property("exhaustive class matcher") = test {
+ def matches(line: String) {
+ val tree = parse(line)
+ val q"""$classMods0 class $name0[..$targs0] $ctorMods0(...$argss0)
+ extends { ..$early0 } with ..$parents0 { $self0 => ..$body0 }""" = tree
+ val q"""$classMods1 class $name1[..$targs1] $ctorMods1(...$argss1)(implicit ..$impl)
+ extends { ..$early1 } with ..$parents1 { $self1 => ..$body1 }""" = tree
+ }
+ matches("class Foo")
+ matches("class Foo[T]")
+ matches("class Foo[T] @annot")
+ matches("class Foo extends Bar with Baz")
+ matches("class Foo { body }")
+ matches("class Foo extends { val early = 0 } with Any")
+ matches("abstract class Foo")
+ matches("private[Baz] class Foo")
+ matches("class Foo(first: A)(second: B)")
+ matches("class Foo(first: A) extends Bar(first) with Baz")
+ matches("class Foo private (first: A) { def bar }")
+ matches("class Foo { self => bar(self) }")
+ matches("case class Foo(x: Int)")
+ }
+
+ property("SI-7979") = test {
+ val PARAMACCESSOR = (1 << 29).toLong.asInstanceOf[FlagSet]
+ assertThrows[MatchError] {
+ val SyntacticClassDef(_, _, _, _, _, _, _, _, _) =
+ ClassDef(
+ Modifiers(), TypeName("Foo"), List(),
+ Template(
+ List(Select(Ident(TermName("scala")), TypeName("AnyRef"))),
+ noSelfType,
+ List(
+ //ValDef(Modifiers(PRIVATE | LOCAL | PARAMACCESSOR), TermName("x"), Ident(TypeName("Int")), EmptyTree),
+ DefDef(Modifiers(), termNames.CONSTRUCTOR, List(), List(List(ValDef(Modifiers(PARAM | PARAMACCESSOR), TermName("x"),
+ Ident(TypeName("Int")), EmptyTree))), TypeTree(), Block(List(pendingSuperCall), Literal(Constant(())))))))
+ }
+ }
+
+ property("SI-8332") = test {
+ val q"class C(implicit ..$args)" = q"class C(implicit i: I, j: J)"
+ val q"$imods val i: I" :: q"$jmods val j: J" :: Nil = args
+ assert(imods.hasFlag(IMPLICIT))
+ assert(jmods.hasFlag(IMPLICIT))
+ }
+}
+
+trait ModsDeconstruction { self: QuasiquoteProperties =>
+ property("deconstruct mods") = test {
+ val mods = Modifiers(IMPLICIT | PRIVATE, TermName("foobar"), Nil)
+ val q"$mods0 def foo" = q"$mods def foo"
+ assert(mods0 ≈ mods)
+ }
+
+ property("@$annot def foo") = forAll { (annotName: TypeName) =>
+ val q"@$annot def foo" = q"@$annotName def foo"
+ annot ≈ Apply(Select(New(Ident(annotName)), termNames.CONSTRUCTOR), List())
+ }
+
+ property("@$annot(..$args) def foo") = forAll { (annotName: TypeName, tree: Tree) =>
+ val q"@$annot(..$args) def foo" = q"@$annotName($tree) def foo"
+ annot ≈ Ident(annotName) && args ≈ List(tree)
+ }
+
+ property("@..$annots def foo") = test {
+ val a = q"new a"
+ val b = q"new b"
+ val q"@..$annots def foo" = q"@$a @$b def foo"
+ annots ≈ List(a, b)
+ }
+
+ property("@$annot @..$annots def foo") = test {
+ val a = q"new a"
+ val b = q"new b"
+ val c = q"new c"
+ val q"@$first @..$rest def foo" = q"@$a @$b @$c def foo"
+ assert(first ≈ a)
+ assert(rest ≈ List(b, c))
+ }
+
+  property("@..$annots @$annot def foo") = test {
+ val a = q"new a"
+ val b = q"new b"
+ val c = q"new c"
+ val q"@..$init @$last def foo" = q"@$a @$b @$c def foo"
+ assert(init ≈ List(a, b))
+ assert(last ≈ c)
+ }
+}
+
+trait ValVarDeconstruction { self: QuasiquoteProperties =>
+ property("exhaustive val matcher") = test {
+ def matches(line: String) { val q"$mods val $name: $tpt = $rhs" = parse(line) }
+ matches("val x: Int")
+ matches("val x: Int = 1")
+ matches("lazy val x: Int = 1")
+ matches("implicit val x = 1")
+ assertThrows[MatchError] { matches("var x = 1") }
+ }
+
+ property("exhaustive var matcher") = test {
+ def matches(line: String) { val q"$mods var $name: $tpt = $rhs" = parse(line) }
+ matches("var x: Int")
+ matches("var x: Int = 1")
+ matches("var x = 1")
+ assertThrows[MatchError] { matches("val x = 1") }
+ }
+}
+
+trait PackageDeconstruction { self: QuasiquoteProperties =>
+ property("exhaustive package matcher") = test {
+ def matches(line: String) { val q"package $name { ..$body }" = parse(line) }
+ matches("package foo { }")
+ matches("package foo { class C }")
+ matches("package foo.bar { }")
+ matches("package bippy.bongo { object A; object B }")
+ matches("package bippy { package bongo { object O } }")
+ }
+
+ property("exhaustive package object matcher") = test {
+ def matches(line: String) {
+ val q"package object $name extends { ..$early } with ..$parents { $self => ..$body }" = parse(line)
+ }
+ matches("package object foo")
+ matches("package object foo { def baz }")
+ matches("package object foo { self => }")
+ matches("package object foo extends mammy with daddy { def baz }")
+ matches("package object foo extends { val early = 1 } with daddy")
+ assertThrows[MatchError] { matches("object foo") }
+ }
+}
+
+trait DefDeconstruction { self: QuasiquoteProperties =>
+ property("exhaustive def matcher") = test {
+ def matches(line: String) = {
+ val t = parse(line)
+ val q"$mods0 def $name0[..$targs0](...$argss0): $restpe0 = $body0" = t
+ val q"$mods1 def $name1[..$targs1](...$argss1)(implicit ..$impl1): $restpe1 = $body1" = t
+ }
+ matches("def foo = foo")
+ matches("implicit def foo: Int = 2")
+ matches("def foo[T](x: T): T = x")
+ matches("def foo[A: B] = implicitly[B[A]]")
+ matches("private def foo = 0")
+ matches("def foo[A <% B] = null")
+ matches("def foo(one: One)(two: Two) = (one, two)")
+ matches("def foo[T](args: T*) = args.toList")
+ }
+
+ property("extract implicit arg list (1)") = test {
+ val q"def foo(...$argss)(implicit ..$impl)" = q"def foo(x: Int)(implicit y: Int)"
+ assert(impl ≈ List(q"${Modifiers(IMPLICIT | PARAM)} val y: Int"))
+ }
+
+ property("extract implicit arg list (2)") = test {
+ val q"def foo(...$argss)(implicit ..$impl)" = q"def foo(x: Int)"
+ assert(impl.isEmpty)
+ }
+
+ property("SI-8451") = test {
+ val q"def this(..$params) = this(..$args)" = q"def this(x: Int) = this(0)"
+ assert(params ≈ List(q"${Modifiers(PARAM)} val x: Int"))
+ assert(args ≈ List(q"0"))
+ }
+}
+
+trait ImportDeconstruction { self: QuasiquoteProperties =>
+ property("exhaustive import matcher") = test {
+ def matches(line: String) = {
+ val q"import $ref.{..$sels}" = parse(line)
+ }
+ matches("import foo.bar")
+ matches("import foo.{bar, baz}")
+ matches("import foo.{a => b, c => d}")
+    matches("import foo.{poison => _, _}")
+ matches("import foo.bar.baz._")
+ }
+
+ property("extract import binding") = test {
+ val q"import $_.$sel" = q"import foo.bar"
+ val pq"bar" = sel
+ }
+
+ property("extract import wildcard") = test {
+ val q"import $_.$sel" = q"import foo._"
+ val pq"_" = sel
+ }
+
+ property("extract import rename") = test {
+ val q"import $_.$sel" = q"import foo.{bar => baz}"
+ val pq"bar -> baz" = sel
+ val pq"$left -> $right" = sel
+ val pq"bar" = left
+ val pq"baz" = right
+ }
+
+ property("extract import unimport") = test {
+ val q"import $_.$sel" = q"import foo.{bar => _}"
+ val pq"bar -> _" = sel
+ val pq"$left -> $right" = sel
+ val pq"bar" = left
+ val pq"_" = right
+ }
+
+ property("unquote names into import selector") = forAll {
+ (expr: Tree, plain: TermName, oldname: TermName, newname: TermName, discard: TermName) =>
+
+ val Import(expr1, List(
+ ImportSelector(plain11, _, plain12, _),
+ ImportSelector(oldname1, _, newname1, _),
+ ImportSelector(discard1, _, wildcard, _))) =
+ q"import $expr.{$plain, $oldname => $newname, $discard => _}"
+
+ expr1 ≈ expr && plain11 == plain12 && plain12 == plain &&
+ oldname1 == oldname && newname1 == newname && discard1 == discard && wildcard == termNames.WILDCARD
+ }
+}
diff --git a/test/scalacheck/scala/reflect/quasiquotes/DeprecationProps.scala b/test/scalacheck/scala/reflect/quasiquotes/DeprecationProps.scala
new file mode 100644
index 0000000000..9662586aef
--- /dev/null
+++ b/test/scalacheck/scala/reflect/quasiquotes/DeprecationProps.scala
@@ -0,0 +1,54 @@
+package scala.reflect.quasiquotes
+
+import org.scalacheck._, Prop._, Gen._, Arbitrary._
+import scala.reflect.runtime.universe._
+
+object DeprecationProps extends QuasiquoteProperties("deprecation") {
+ val tname = TypeName("Foo")
+ val tpt = tq"Foo"
+ val tpe = typeOf[Int]
+ val sym = tpe.typeSymbol.asType
+ val argss = List(q"x") :: List(q"y") :: Nil
+ val args = q"x" :: q"y" :: Nil
+
+ property("new tpt argss") = test {
+ assert(q"new $tpt(...$argss)" ≈ New(tpt, argss))
+ }
+
+ property("new tpe args") = test {
+ assert(q"new $tpe(..$args)" ≈ New(tpe, args: _*))
+ }
+
+ property("new tpe args") = test {
+ assert(q"new ${sym.toType}(..$args)" ≈ New(sym, args: _*))
+ }
+
+ property("apply sym args") = test {
+ assert(q"$sym(..$args)" ≈ Apply(sym, args: _*))
+ }
+
+ property("applyconstructor") = test {
+ assert(q"new $tpt(..$args)" ≈ ApplyConstructor(tpt, args))
+ }
+
+ property("super sym name") = test {
+ assert(q"$sym.super[$tname].x".qualifier ≈ Super(sym, tname))
+ }
+
+ property("throw tpe args") = test {
+ assert(q"throw new $tpe(..$args)" ≈ Throw(tpe, args: _*))
+ }
+
+ property("casedef pat body") = test {
+ val pat = pq"foo"
+ val body = q"bar"
+ assert(cq"$pat => $body" ≈ CaseDef(pat, body))
+ }
+
+ property("try body cases") = test {
+ val cases = (pq"a", q"b") :: (pq"c", q"d") :: Nil
+ val newcases = cases.map { case (pat, body) => cq"$pat => $body" }
+ val body = q"foo"
+ assert(q"try $body catch { case ..$newcases }" ≈ Try(body, cases: _*))
+ }
+}
\ No newline at end of file
diff --git a/test/scalacheck/scala/reflect/quasiquotes/ErrorProps.scala b/test/scalacheck/scala/reflect/quasiquotes/ErrorProps.scala
new file mode 100644
index 0000000000..4f1c61eeff
--- /dev/null
+++ b/test/scalacheck/scala/reflect/quasiquotes/ErrorProps.scala
@@ -0,0 +1,215 @@
+package scala.reflect.quasiquotes
+
+import org.scalacheck._, Prop._, Gen._, Arbitrary._
+
+object ErrorProps extends QuasiquoteProperties("errors") {
+  property("can't extract two .. ranks in a row") = fails(
+ "Can't extract with .. here",
+ """
+ val xs = List(q"x1", q"x2")
+ val q"f(..$xs1, ..$xs2)" = xs
+ """)
+
+ property("can't unquote with given rank") = fails(
+ "Can't unquote List[StringBuilder], consider using .. or providing an implicit instance of Liftable[List[StringBuilder]]",
+ """
+ import java.lang.StringBuilder
+ val xs: List[StringBuilder] = Nil
+ q"$xs"
+ """)
+
+ property("unquote typename into typedef with default bounds") = fails(
+ "reflect.runtime.universe.Name expected but reflect.runtime.universe.TypeDef found",
+ """
+ val T1 = TypeName("T1")
+ val T2 = q"type T"
+ val t = EmptyTree
+ q"type $T1[$T2 >: _root_.scala.Any <: _root_.scala.Nothing] = $t" ≈
+ TypeDef(Modifiers(), T1, List(T2), t)
+ """)
+
+ property("can't unquote annotations with ... rank") = fails(
+ "Can't unquote with ... here",
+ """
+ val annots = List(List(q"Foo"))
+ q"@...$annots def foo"
+ """)
+
+ property("only literal string arguments") = fails(
+ "Quasiquotes can only be used with literal strings",
+ """
+ val s: String = "foo"
+ StringContext(s).q()
+ """)
+
+ property("don't know how to unquote inside of strings") = fails(
+ "Don't know how to unquote here",
+ """
+ val x: Tree = EmptyTree
+ StringContext("\"", "\"").q(x)
+ """)
+
+ property("non-liftable type ..") = fails(
+ "Can't unquote List[StringBuilder] with .., consider omitting the dots or providing an implicit instance of Liftable[StringBuilder]",
+ """
+ import java.lang.StringBuilder
+ val bazs = List(new StringBuilder)
+ q"f(..$bazs)"
+ """)
+
+ property("non-liftable type ...") = fails(
+ "Can't unquote List[List[StringBuilder]] with .., consider using ... or providing an implicit instance of Liftable[StringBuilder]",
+ """
+ import java.lang.StringBuilder
+ val bazs = List(List(new StringBuilder))
+ q"f(..$bazs)"
+ """)
+
+ property("use .. rank or provide liftable") = fails(
+ "Can't unquote List[StringBuilder], consider using .. or providing an implicit instance of Liftable[List[StringBuilder]]",
+ """
+ import java.lang.StringBuilder
+ val lst: List[StringBuilder] = Nil
+ q"f($lst)"
+ """)
+
+ property("use ... rank or provide liftable") = fails(
+ "Can't unquote List[List[StringBuilder]], consider using ... or providing an implicit instance of Liftable[List[List[StringBuilder]]]",
+ """
+ import java.lang.StringBuilder
+ val xs: List[List[StringBuilder]] = Nil
+ q"$xs"
+ """)
+
+ property("not liftable or natively supported") = fails(
+ "Can't unquote StringBuilder, consider providing an implicit instance of Liftable[StringBuilder]",
+ """
+ import java.lang.StringBuilder
+ val sb = new StringBuilder
+ q"f($sb)"
+ """)
+
+ property("can't unquote with ... rank here") = fails(
+ "Can't unquote with ... here",
+ """
+ val lst: List[List[Tree]] = Nil; val t = EmptyTree
+ q"f(...$lst, $t)"
+ """)
+
+ property("name expected") = fails(
+ "reflect.runtime.universe.Name expected but reflect.runtime.universe.Tree found",
+ """
+ val t = EmptyTree
+ q"class $t"
+ """)
+
+ property("flags or mods expected") = fails(
+ "reflect.runtime.universe.FlagSet or reflect.runtime.universe.Modifiers expected but reflect.runtime.universe.Tree found",
+ """
+ val t = EmptyTree
+ q"$t def foo"
+ """)
+
+ property("cant unquote flags together with mods") = fails(
+ "Can't unquote flags together with modifiers, consider merging flags into modifiers",
+ """
+ val f = Flag.IMPLICIT; val m = NoMods
+ q"$f $m def foo"
+ """)
+
+ property("can't unquote mods with annots") = fails(
+ "Can't unquote modifiers together with annotations, consider merging annotations into modifiers",
+ """
+ val m = NoMods
+ q"@annot $m def foo"
+ """)
+
+ property("can't unquote modifiers with inline flags") = fails(
+ "Can't unquote modifiers together with flags, consider merging flags into modifiers",
+ """
+ val m = NoMods
+ q"$m implicit def foo"
+ """)
+
+ property("can't unquote multiple mods") = fails(
+ "Can't unquote multiple modifiers, consider merging them into a single modifiers instance",
+ """
+ val m1 = NoMods; val m2 = NoMods
+ q"$m1 $m2 def foo"
+ """)
+
+ property("can't extract mods with annots") = fails(
+ "Can't extract modifiers together with annotations, consider extracting just modifiers",
+ """
+ val q"@$annot $mods def foo" = EmptyTree
+ """)
+
+ property("can't extract multiple mods") = fails(
+ "Can't extract multiple modifiers together, consider extracting a single modifiers instance",
+ """
+ val q"$m1 $m2 def foo" = EmptyTree
+ """)
+
+ property("can't unquote values of Null") = fails(
+ "Can't unquote Null, bottom type values often indicate programmer mistake",
+ """
+ val n = null
+ q"$n"
+ """)
+
+ property("can't unquote values of Nothing") = fails(
+ "Can't unquote Nothing, bottom type values often indicate programmer mistake",
+ """
+ def n = ???
+ q"$n"
+ """)
+
+ property("SI-8211: check unbound placeholder parameters") = fails(
+ "unbound placeholder parameter",
+ """
+ q"_"
+ """)
+
+ property("SI-8211: check unbound wildcard types") = fails(
+ "unbound wildcard type",
+ """
+ tq"_"
+ """)
+
+ property("SI-8420: don't crash on splicing of non-unliftable native type (1)") = fails(
+ "Can't unquote List[reflect.runtime.universe.Symbol] with .., consider omitting the dots or providing an implicit instance of Liftable[reflect.runtime.universe.Symbol]",
+ """
+ val l: List[Symbol] = Nil
+ q"f(..$l)"
+ """)
+
+ property("SI-8420: don't crash on splicing of non-unliftable native type (2)") = fails(
+ "Can't unquote List[reflect.runtime.universe.FlagSet] with .., consider omitting the dots or providing an implicit instance of Liftable[reflect.runtime.universe.FlagSet]",
+ """
+ val l: List[FlagSet] = Nil
+ q"f(..$l)"
+ """)
+
+ property("SI-8420: don't crash on splicing of non-unliftable native type (3)") = fails(
+ "Can't unquote List[reflect.runtime.universe.Modifiers] with .., consider omitting the dots or providing an implicit instance of Liftable[reflect.runtime.universe.Modifiers]",
+ """
+ val l: List[Modifiers] = Nil
+ q"f(..$l)"
+ """)
+
+ property("SI-8451 construction: disallow everything except for constructor calls in secondary constructor bodies") = fails(
+ "'this' expected but unquotee found",
+ """
+ val rhs1 = q"this(0)"
+ val ctor1 = q"def this(x: Int) = $rhs1"
+ """)
+
+ property("SI-8451 deconstruction: disallow everything except for constructor calls in secondary constructor bodies") = fails(
+ "'this' expected but unquotee found",
+ """
+ val q"def this(..$params) = $rhs2" = q"def this(x: Int) = this(0)"
+ """)
+
+ // // Make sure a nice error is reported in this case
+ // { import Flag._; val mods = NoMods; q"lazy $mods val x: Int" }
+}
diff --git a/test/scalacheck/scala/reflect/quasiquotes/ForProps.scala b/test/scalacheck/scala/reflect/quasiquotes/ForProps.scala
new file mode 100644
index 0000000000..d19ead8792
--- /dev/null
+++ b/test/scalacheck/scala/reflect/quasiquotes/ForProps.scala
@@ -0,0 +1,72 @@
+package scala.reflect.quasiquotes
+
+import org.scalacheck._, Prop._, Gen._, Arbitrary._
+import scala.reflect.runtime.universe._, Flag._, internal.reificationSupport._
+
+object ForProps extends QuasiquoteProperties("for") {
+ case class ForEnums(val value: List[Tree])
+
+ def genSimpleBind: Gen[Bind] =
+ for(name <- genTermName)
+ yield pq"$name @ _"
+
+ def genForFilter: Gen[Tree] =
+ for(cond <- genIdent(genTermName))
+ yield fq"if $cond"
+
+ def genForFrom: Gen[Tree] =
+ for(lhs <- genSimpleBind; rhs <- genIdent(genTermName))
+ yield fq"$lhs <- $rhs"
+
+ def genForEq: Gen[Tree] =
+ for(lhs <- genSimpleBind; rhs <- genIdent(genTermName))
+ yield fq"$lhs = $rhs"
+
+ def genForEnums(size: Int): Gen[ForEnums] =
+ for(first <- genForFrom; rest <- listOfN(size, oneOf(genForFrom, genForFilter, genForEq)))
+ yield new ForEnums(first :: rest)
+
+ implicit val arbForEnums: Arbitrary[ForEnums] = arbitrarySized(genForEnums)
+
+ property("construct-reconstruct for") = forAll { (enums: ForEnums, body: Tree) =>
+ val SyntacticFor(recoveredEnums, recoveredBody) = SyntacticFor(enums.value, body)
+ recoveredEnums ≈ enums.value && recoveredBody ≈ body
+ }
+
+ property("construct-reconstruct for-yield") = forAll { (enums: ForEnums, body: Tree) =>
+ val SyntacticForYield(recoveredEnums, recoveredBody) = SyntacticForYield(enums.value, body)
+ recoveredEnums ≈ enums.value && recoveredBody ≈ body
+ }
+
+  val abcde = List(fq"a <- b", fq"if c", fq"d = e")
+ val foobarbaz = pq"foo @ Bar(baz)"
+ val fv = q"f(v)"
+
+ property("construct/deconstruct for loop with fq") = test {
+ val for0 = q"for(..$abcde) $fv"
+ assertEqAst(for0, "for(a <- b; if c; d = e) f(v)")
+ val q"for(..$enums) $body" = for0
+ assert(enums ≈ abcde)
+ assert(body ≈ fv)
+ }
+
+ property("construct/deconstruct valfrom with fq") = test {
+ assert(fq"$foobarbaz <- $fv" ≈ fq"foo @ Bar(baz) <- f(v)")
+ val fq"$lhs <- $rhs" = fq"$foobarbaz <- $fv"
+ assert(lhs ≈ foobarbaz)
+ assert(rhs ≈ fv)
+ }
+
+ property("construct/deconstruct valeq with fq") = test {
+ assert(fq"$foobarbaz = $fv" ≈ fq"foo @ Bar(baz) = f(v)")
+ val fq"$lhs = $rhs" = fq"$foobarbaz = $fv"
+ assert(lhs ≈ foobarbaz)
+ assert(rhs ≈ fv)
+ }
+
+ property("construct/deconstruct filter with fq") = test {
+ assert(fq"if $fv" ≈ fq"if f(v)")
+ val fq"if $cond" = fq"if $fv"
+ assert(cond ≈ fv)
+ }
+}
\ No newline at end of file
diff --git a/test/scalacheck/scala/reflect/quasiquotes/LiftableProps.scala b/test/scalacheck/scala/reflect/quasiquotes/LiftableProps.scala
new file mode 100644
index 0000000000..90e5adba58
--- /dev/null
+++ b/test/scalacheck/scala/reflect/quasiquotes/LiftableProps.scala
@@ -0,0 +1,176 @@
+package scala.reflect.quasiquotes
+
+import org.scalacheck._, Prop._, Gen._, Arbitrary._
+import scala.reflect.runtime.universe._, Flag._
+
+object LiftableProps extends QuasiquoteProperties("liftable") {
+ property("unquote byte") = test {
+ val c: Byte = 0
+ assert(q"$c" ≈ Literal(Constant(c)))
+ assert(q"${0: Byte}" ≈ Literal(Constant(c)))
+ }
+
+ property("unquote short") = test {
+ val c: Short = 0
+ assert(q"$c" ≈ Literal(Constant(c)))
+ assert(q"${0: Short}" ≈ Literal(Constant(c)))
+ }
+
+ property("unquote char") = test {
+ val c: Char = 'c'
+ assert(q"$c" ≈ Literal(Constant(c)))
+ assert(q"${'c'}" ≈ Literal(Constant(c)))
+ }
+
+ property("unquote int") = test {
+ val c: Int = 0
+ assert(q"$c" ≈ Literal(Constant(c)))
+ assert(q"${0: Int}" ≈ Literal(Constant(c)))
+ }
+
+ property("unquote long") = test {
+ val c: Long = 0
+ assert(q"$c" ≈ Literal(Constant(c)))
+ assert(q"${0: Long}" ≈ Literal(Constant(c)))
+ }
+
+ property("unquote float") = test {
+ val c: Float = 0.0f
+ assert(q"$c" ≈ Literal(Constant(c)))
+ assert(q"${0.0f: Float}" ≈ Literal(Constant(c)))
+ }
+
+ property("unquote double") = test {
+ val c: Double = 0.0
+ assert(q"$c" ≈ Literal(Constant(c)))
+ assert(q"${0.0: Double}" ≈ Literal(Constant(c)))
+ }
+
+ property("unquote boolean") = test {
+ val c: Boolean = false
+ assert(q"$c" ≈ Literal(Constant(c)))
+ assert(q"${true}" ≈ Literal(Constant(true)))
+ assert(q"${false}" ≈ Literal(Constant(false)))
+ }
+
+ property("unquote string") = test {
+ val c: String = "s"
+ assert(q"$c" ≈ Literal(Constant(c)))
+ assert(q"${"s"}" ≈ Literal(Constant(c)))
+ }
+
+ property("unquote unit") = test {
+ val c: Unit = ()
+ assert(q"$c" ≈ Literal(Constant(c)))
+ assert(q"${()}" ≈ Literal(Constant(c)))
+ }
+
+ property("lift symbol") = test {
+ val s = rootMirror.staticClass("scala.Int")
+ assert(q"$s" ≈ Ident(s))
+ }
+
+ property("lift type") = test {
+ val tpe = rootMirror.staticClass("scala.Int").toType
+ assert(q"$tpe" ≈ TypeTree(tpe))
+ }
+
+ property("lift type tag") = test {
+ val tag = TypeTag.Int
+ assert(q"$tag" ≈ TypeTree(tag.tpe))
+ }
+
+ property("lift weak type tag") = test {
+ val tag = WeakTypeTag.Int
+ assert(q"$tag" ≈ TypeTree(tag.tpe))
+ }
+
+ property("lift constant") = test {
+ val const = Constant(0)
+ assert(q"$const" ≈ q"0")
+ }
+
+ val immutable = q"$scalapkg.collection.immutable"
+
+ property("lift list variants") = test {
+ val lst = List(1, 2)
+ assert(q"$lst" ≈ q"$immutable.List(1, 2)")
+ assert(q"f(..$lst)" ≈ q"f(1, 2)")
+ val llst = List(List(1), List(2))
+ assert(q"f(..$llst)" ≈ q"f($immutable.List(1), $immutable.List(2))")
+ assert(q"f(...$llst)" ≈ q"f(1)(2)")
+ }
+
+ property("lift list of tree") = test {
+ val lst = List(q"a", q"b")
+ assert(q"$lst" ≈ q"$immutable.List(a, b)")
+ }
+
+ property("lift tuple") = test {
+ assert(q"${(1, 2)}" ≈ q"(1, 2)")
+ assert(q"${(1, 2, 3)}" ≈ q"(1, 2, 3)")
+ assert(q"${(1, 2, 3, 4)}" ≈ q"(1, 2, 3, 4)")
+ assert(q"${(1, 2, 3, 4, 5)}" ≈ q"(1, 2, 3, 4, 5)")
+ assert(q"${(1, 2, 3, 4, 5, 6)}" ≈ q"(1, 2, 3, 4, 5, 6)")
+ assert(q"${(1, 2, 3, 4, 5, 6, 7)}" ≈ q"(1, 2, 3, 4, 5, 6, 7)")
+ assert(q"${(1, 2, 3, 4, 5, 6, 7, 8)}" ≈ q"(1, 2, 3, 4, 5, 6, 7, 8)")
+ assert(q"${(1, 2, 3, 4, 5, 6, 7, 8, 9)}" ≈ q"(1, 2, 3, 4, 5, 6, 7, 8, 9)")
+ assert(q"${(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)}" ≈ q"(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)")
+ assert(q"${(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11)}" ≈ q"(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11)")
+ assert(q"${(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12)}" ≈ q"(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12)")
+ assert(q"${(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13)}" ≈ q"(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13)")
+ assert(q"${(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14)}" ≈ q"(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14)")
+ assert(q"${(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15)}" ≈ q"(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15)")
+ assert(q"${(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16)}" ≈ q"(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16)")
+ assert(q"${(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17)}" ≈ q"(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17)")
+ assert(q"${(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18)}" ≈ q"(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18)")
+ assert(q"${(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19)}" ≈ q"(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19)")
+ assert(q"${(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20)}" ≈ q"(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20)")
+ assert(q"${(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21)}" ≈ q"(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21)")
+ assert(q"${(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22)}" ≈ q"(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22)")
+ }
+
+ property("lift nil") = test {
+ val nil = Nil
+ assert(q"$nil" ≈ q"scala.collection.immutable.Nil")
+ }
+
+ property("lift some") = test {
+ val some1 = Some(1)
+ assert(q"$some1" ≈ q"scala.Some(1)")
+ val some2: Option[Int] = Some(1)
+ assert(q"$some2" ≈ q"scala.Some(1)")
+ }
+
+ property("lift none") = test {
+ val none1 = None
+ assert(q"$none1" ≈ q"scala.None")
+ val none2: Option[Int] = None
+ assert(q"$none2" ≈ q"scala.None")
+ }
+
+ property("lift left") = test {
+ val left1 = Left(1)
+ assert(q"$left1" ≈ q"scala.util.Left(1)")
+ val left2: Left[Int, Int] = Left(1)
+ assert(q"$left2" ≈ q"scala.util.Left(1)")
+ val left3: Either[Int, Int] = Left(1)
+ assert(q"$left3" ≈ q"scala.util.Left(1)")
+ }
+
+ property("lift right") = test {
+ val right1 = Right(1)
+ assert(q"$right1" ≈ q"scala.util.Right(1)")
+ val right2: Right[Int, Int] = Right(1)
+ assert(q"$right2" ≈ q"scala.util.Right(1)")
+ val right3: Either[Int, Int] = Right(1)
+ assert(q"$right3" ≈ q"scala.util.Right(1)")
+ }
+
+ property("lift xml comment") = test {
+ implicit val liftXmlComment = Liftable[xml.Comment] { comment =>
+ q"new _root_.scala.xml.Comment(${comment.commentText})"
+ }
+ assert(q"${xml.Comment("foo")}" ≈ q"<!--foo-->")
+ }
+}
diff --git a/test/scalacheck/scala/reflect/quasiquotes/PatternConstructionProps.scala b/test/scalacheck/scala/reflect/quasiquotes/PatternConstructionProps.scala
new file mode 100644
index 0000000000..e62a004adc
--- /dev/null
+++ b/test/scalacheck/scala/reflect/quasiquotes/PatternConstructionProps.scala
@@ -0,0 +1,38 @@
+package scala.reflect.quasiquotes
+
+import org.scalacheck._, Prop._, Gen._, Arbitrary._
+import scala.reflect.runtime.universe._, Flag._
+
+object PatternConstructionProps extends QuasiquoteProperties("pattern construction") {
+ property("unquote bind") = forAll { (bind: Bind) =>
+ pq"$bind" ≈ bind
+ }
+
+ property("unquote name into bind") = forAll { (name: TermName) =>
+ pq"$name" ≈ Bind(name, Ident(termNames.WILDCARD))
+ }
+
+ property("unquote name and tree into bind") = forAll { (name: TermName, tree: Tree) =>
+ pq"$name @ $tree" ≈ Bind(name, tree)
+ }
+
+ property("unquote type name into typed") = forAll { (name: TypeName) =>
+ pq"_ : $name" ≈ Typed(Ident(termNames.WILDCARD), Ident(name))
+ }
+
+ property("unquote tree into typed") = forAll { (typ: Tree) =>
+ pq"_ : $typ" ≈ Typed(Ident(termNames.WILDCARD), typ)
+ }
+
+ property("unquote into apply") = forAll { (pat: Tree, subpat: Tree) =>
+ pq"$pat($subpat)" ≈ Apply(pat, List(subpat))
+ }
+
+ property("unquote into casedef") = forAll { (pat: Tree, cond: Tree, body: Tree) =>
+ cq"$pat if $cond => $body" ≈ CaseDef(pat, cond, body)
+ }
+
+ property("unquote into alternative") = forAll { (first: Tree, rest: List[Tree]) =>
+ pq"$first | ..$rest" ≈ Alternative(first :: rest)
+ }
+}
diff --git a/test/scalacheck/scala/reflect/quasiquotes/PatternDeconstructionProps.scala b/test/scalacheck/scala/reflect/quasiquotes/PatternDeconstructionProps.scala
new file mode 100644
index 0000000000..182e905c04
--- /dev/null
+++ b/test/scalacheck/scala/reflect/quasiquotes/PatternDeconstructionProps.scala
@@ -0,0 +1,46 @@
+package scala.reflect.quasiquotes
+
+import org.scalacheck._, Prop._, Gen._, Arbitrary._
+import scala.reflect.runtime.universe._, Flag._
+
+object PatternDeconstructionProps extends QuasiquoteProperties("pattern deconstruction") {
+ property("extract bind") = forAll { (bind: Bind) =>
+ val pq"$bind0" = pq"$bind"
+ bind0 ≈ bind
+ }
+
+ property("extract bind and subpattern") = forAll { (name: TermName, subp: Tree) =>
+ val pq"$name0 @ $subp0" = pq"$name @ $subp"
+ name0 ≈ name && subp0 ≈ subp
+ }
+
+ property("extract typed") = forAll { (typ: Tree) =>
+ val pq"_ : $typ0" = pq"_ : $typ"
+ typ0 ≈ typ
+ }
+
+ property("extract apply") = forAll { (pat: Tree, subpat: Tree) =>
+ val pq"$pat0($subpat0)" = pq"$pat($subpat)"
+ pat0 ≈ pat && subpat0 ≈ subpat
+ }
+
+ property("extract apply many") = forAll { (pat: Tree, subpats: List[Tree]) =>
+ val pq"$pat0(..$subpats0)" = pq"$pat(..$subpats)"
+ pat0 ≈ pat && subpats0 ≈ subpats
+ }
+
+ property("extract apply last") = forAll { (pat: Tree, subpats: List[Tree], subpatlast: Tree) =>
+ val pq"$pat0(..$subpats0, $subpatlast0)" = pq"$pat(..$subpats, $subpatlast)"
+ pat0 ≈ pat && subpats0 ≈ subpats && subpatlast0 ≈ subpatlast
+ }
+
+ property("extract casedef") = forAll { (pat: Tree, cond: Tree, body: Tree) =>
+ val cq"$pat0 if $cond0 => $body0" = cq"$pat if $cond => $body"
+ pat0 ≈ pat && cond0 ≈ cond && body0 ≈ body
+ }
+
+ property("extract alternative") = forAll { (first: Tree, rest: List[Tree]) =>
+ val pq"$first1 | ..$rest1" = pq"$first | ..$rest"
+ first1 ≈ first && rest1 ≈ rest
+ }
+}
diff --git a/test/scalacheck/scala/reflect/quasiquotes/QuasiquoteProperties.scala b/test/scalacheck/scala/reflect/quasiquotes/QuasiquoteProperties.scala
new file mode 100644
index 0000000000..13e231891d
--- /dev/null
+++ b/test/scalacheck/scala/reflect/quasiquotes/QuasiquoteProperties.scala
@@ -0,0 +1,122 @@
+package scala.reflect.quasiquotes
+
+import org.scalacheck._, Prop._, Gen._, Arbitrary._
+import scala.tools.reflect.{ToolBox, ToolBoxError}
+import scala.reflect.runtime.currentMirror
+import scala.reflect.runtime.universe._, Flag._, internal.reificationSupport.setSymbol
+
+abstract class QuasiquoteProperties(name: String) extends Properties(name) with ArbitraryTreesAndNames with Helpers
+
+trait Helpers {
+  /** Runs a code block and returns a proof result
+   * if no exception is thrown while executing the
+   * block. This is useful for simple one-off tests.
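+   *
+   * Hypothetical usage:
+   * {{{
+   *   property("does not throw") = test { q"f(x)" }
+   * }}}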
+ */
+ def test[T](block: => T) =
+ Prop { params =>
+ block
+ Result(Prop.Proof)
+ }
+
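+  /** Rewrites compiler-generated fresh names back to the prefixes they were
+   * created from, so that structural comparison via `≈` ignores fresh-name suffixes.
+   */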
+ object simplify extends Transformer {
+ object SimplifiedName {
+ val st = scala.reflect.runtime.universe.asInstanceOf[scala.reflect.internal.SymbolTable]
+ val FreshName = new st.FreshNameExtractor
+ def unapply[T <: Name](name: T): Option[T] = name.asInstanceOf[st.Name] match {
+ case FreshName(prefix) =>
+ Some((if (name.isTermName) TermName(prefix) else TypeName(prefix)).asInstanceOf[T])
+ }
+ }
+
+ override def transform(tree: Tree): Tree = tree match {
+ case Ident(SimplifiedName(name)) => Ident(name)
+ case ValDef(mods, SimplifiedName(name), tpt, rhs) => ValDef(mods, name, transform(tpt), transform(rhs))
+ case Bind(SimplifiedName(name), rhs) => Bind(name, rhs)
+ case _ =>
+ super.transform(tree)
+ }
+
+ def apply(tree: Tree): Tree = transform(tree)
+ }
+
+ implicit class TestSimilarTree(tree1: Tree) {
+ def ≈(tree2: Tree) = simplify(tree1).equalsStructure(simplify(tree2))
+ }
+
+ implicit class TestSimilarListTree(lst: List[Tree]) {
+ def ≈(other: List[Tree]) = (lst.length == other.length) && lst.zip(other).forall { case (t1, t2) => t1 ≈ t2 }
+ }
+
+ implicit class TestSimilarListListTree(lst: List[List[Tree]]) {
+ def ≈(other: List[List[Tree]]) = (lst.length == other.length) && lst.zip(other).forall { case (l1, l2) => l1 ≈ l2 }
+ }
+
+ implicit class TestSimilarName(name: Name) {
+ def ≈(other: Name) = name == other
+ }
+
+ implicit class TestSimilarMods(mods: Modifiers) {
+ def ≈(other: Modifiers) = (mods.flags == other.flags) && (mods.privateWithin ≈ other.privateWithin) && (mods.annotations ≈ other.annotations)
+ }
+
+ def assertThrows[T <: AnyRef](f: => Any)(implicit manifest: Manifest[T]): Unit = {
+ val clazz = manifest.runtimeClass.asInstanceOf[Class[T]]
+ val thrown =
+ try {
+ f
+ false
+ } catch {
+ case u: Throwable =>
+ if (!clazz.isAssignableFrom(u.getClass))
+ assert(false, s"wrong exception: $u")
+ true
+ }
+ if(!thrown)
+ assert(false, "exception wasn't thrown")
+ }
+
+ def assertEqAst(tree: Tree, code: String) = assert(eqAst(tree, code))
+ def eqAst(tree: Tree, code: String) = tree ≈ parse(code)
+
+ val toolbox = currentMirror.mkToolBox()
+ val parse = toolbox.parse(_)
+ val compile = toolbox.compile(_)
+ val eval = toolbox.eval(_)
+
+ def typecheck(tree: Tree) = toolbox.typecheck(tree)
+
+ def typecheckTyp(tree: Tree) = {
+ val q"type $_ = $res" = typecheck(q"type T = $tree")
+ res
+ }
+
+ def typecheckPat(tree: Tree) = {
+ val q"$_ match { case $res => }" = typecheck(q"((): Any) match { case $tree => }")
+ res
+ }
+
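+  /** Compiles the given code block with the toolbox and holds only if
+   * compilation fails with an error message that contains `msg`.
+   */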
+ def fails(msg: String, block: String) = {
+ def result(ok: Boolean, description: String = "") = {
+ val status = if (ok) Prop.Proof else Prop.False
+ val labels = if (description != "") Set(description) else Set.empty[String]
+ Prop { new Prop.Result(status, Nil, Set.empty, labels) }
+ }
+ try {
+ compile(parse(s"""
+ object Wrapper extends Helpers {
+ import scala.reflect.runtime.universe._
+ $block
+ }
+ """))
+ result(false, "given code doesn't fail to typecheck")
+ } catch {
+ case ToolBoxError(emsg, _) =>
+ if (!emsg.contains(msg))
+ result(false, s"error message '${emsg}' is not the same as expected '$msg'")
+ else
+ result(true)
+ }
+ }
+
+ val scalapkg = setSymbol(Ident(TermName("scala")), definitions.ScalaPackage)
+}
diff --git a/test/scalacheck/scala/reflect/quasiquotes/RuntimeErrorProps.scala b/test/scalacheck/scala/reflect/quasiquotes/RuntimeErrorProps.scala
new file mode 100644
index 0000000000..4e389f1560
--- /dev/null
+++ b/test/scalacheck/scala/reflect/quasiquotes/RuntimeErrorProps.scala
@@ -0,0 +1,77 @@
+package scala.reflect.quasiquotes
+
+import org.scalacheck._, Prop._, Gen._, Arbitrary._
+import scala.reflect.runtime.universe._, Flag._
+
+object RuntimeErrorProps extends QuasiquoteProperties("errors") {
+ def testFails[T](block: =>T) = test {
+ assertThrows[IllegalArgumentException] {
+ block
+ }
+ }
+
+ property("default param anon function") = testFails {
+ val param = q"val x: Int = 1"
+ q"{ $param => x + 1 }"
+ }
+
+ property("non-casedef case") = testFails {
+ val x = q"x"
+ q"foo match { case $x }"
+ }
+
+ property("non-new annotation") = testFails {
+ val annot = q"foo"
+ q"@$annot def foo"
+ }
+
+ property("non-valdef param") = testFails {
+ val param = q"foo"
+ q"def foo($param)"
+ }
+
+ property("non-valdef class param") = testFails {
+ val param = q"foo"
+ q"class Foo($param)"
+ }
+
+ property("non-typedef type param") = testFails {
+ val tparam = tq"T"
+ q"class C[$tparam]"
+ }
+
+ property("non-definition refine stat") = testFails {
+ val stat = q"foo"
+ tq"Foo { $stat }"
+ }
+
+ property("non-definition early def") = testFails {
+ val stat = q"foo"
+ q"class Foo extends { $stat } with Bar"
+ }
+
+ property("type apply for definition") = testFails {
+ val defn = q"def foo"
+ q"$defn[foo]"
+ }
+
+ property("non-val selftype") = testFails {
+ val foo = q"foo"
+ q"class Foo { $foo => }"
+ }
+
+ property("for empty enums") = testFails {
+ val enums = List.empty[Tree]
+ q"for(..$enums) 0"
+ }
+
+ property("for starts with non-from enum") = testFails {
+ val enums = fq"foo = bar" :: Nil
+ q"for(..$enums) 0"
+ }
+
+ property("for invalid enum") = testFails {
+ val enums = q"foo" :: Nil
+ q"for(..$enums) 0"
+ }
+}
diff --git a/test/scalacheck/scala/reflect/quasiquotes/TermConstructionProps.scala b/test/scalacheck/scala/reflect/quasiquotes/TermConstructionProps.scala
new file mode 100644
index 0000000000..61faaefe51
--- /dev/null
+++ b/test/scalacheck/scala/reflect/quasiquotes/TermConstructionProps.scala
@@ -0,0 +1,327 @@
+package scala.reflect.quasiquotes
+
+import org.scalacheck._, Prop._, Gen._, Arbitrary._
+import scala.reflect.runtime.universe._, Flag._
+
+object TermConstructionProps extends QuasiquoteProperties("term construction") {
+ property("unquote single tree return tree itself") = forAll { (t: Tree) =>
+ q"$t" ≈ t
+ }
+
+ property("unquote trees into if expression") = forAll { (t1: Tree, t2: Tree, t3: Tree) =>
+ q"if($t1) $t2 else $t3" ≈ If(t1, t2, t3)
+ }
+
+  property("unquote trees into ascription") = forAll { (t1: Tree, t2: Tree) =>
+ q"$t1 : $t2" ≈ Typed(t1, t2)
+ }
+
+ property("unquote trees into apply") = forAll { (t1: Tree, t2: Tree, t3: Tree) =>
+ q"$t1($t2, $t3)" ≈ Apply(t1, List(t2, t3))
+ }
+
+ property("unquote trees with .. rank into apply") = forAll { (ts: List[Tree]) =>
+ q"f(..$ts)" ≈ Apply(q"f", ts)
+ }
+
+ property("unquote iterable into apply") = forAll { (trees: List[Tree]) =>
+ val itrees: Iterable[Tree] = trees
+ q"f(..$itrees)" ≈ Apply(q"f", trees)
+ }
+
+ property("unquote trees with ... rank into apply") = forAll { (ts1: List[Tree], ts2: List[Tree]) =>
+ val argss = List(ts1, ts2)
+ q"f(...$argss)" ≈ Apply(Apply(q"f", ts1), ts2)
+ }
+
+ property("unquote term name into assign") = forAll { (name: TermName, t: Tree) =>
+ q"$name = $t" ≈ Assign(Ident(name), t)
+ }
+
+ property("unquote trees into block") = forAll { (t1: Tree, t2: Tree, t3: Tree) =>
+ blockInvariant(q"""{
+ $t1
+ $t2
+ $t3
+ }""", List(t1, t2, t3))
+ }
+
+
+ property("unquote tree into new") = forAll { (tree: Tree) =>
+ q"new $tree" ≈ Apply(Select(New(tree), termNames.CONSTRUCTOR), List())
+ }
+
+ property("unquote tree into return") = forAll { (tree: Tree) =>
+ q"return $tree" ≈ Return(tree)
+ }
+
+ property("unquote a list of arguments") = forAll { (fun: Tree, args: List[Tree]) =>
+ q"$fun(..$args)" ≈ Apply(fun, args)
+ }
+
+ property("unquote list and non-list fun arguments") = forAll { (fun: Tree, arg1: Tree, arg2: Tree, args: List[Tree]) =>
+ q"$fun(..$args, $arg1, $arg2)" ≈ Apply(fun, args ++ List(arg1) ++ List(arg2)) &&
+ q"$fun($arg1, ..$args, $arg2)" ≈ Apply(fun, List(arg1) ++ args ++ List(arg2)) &&
+ q"$fun($arg1, $arg2, ..$args)" ≈ Apply(fun, List(arg1) ++ List(arg2) ++ args)
+ }
+
+ property("unquote into new") = forAll { (name: TypeName, body: List[Tree]) =>
+ q"new $name { ..$body }" ≈
+ q"""{
+ final class $$anon extends $name {
+ ..$body
+ }
+ new $$anon
+ }"""
+ }
+
+ property("unquote type name into this") = forAll { (T: TypeName) =>
+ q"$T.this" ≈ This(T)
+ }
+
+ property("unquote tree into throw") = forAll { (t: Tree) =>
+ q"throw $t" ≈ Throw(t)
+ }
+
+ property("unquote trees into type apply") = forAll { (fun: TreeIsTerm, types: List[Tree]) =>
+ q"$fun[..$types]" ≈ (if (types.nonEmpty) TypeApply(fun, types) else fun)
+ }
+
+ property("unquote trees into while loop") = forAll { (cond: Tree, body: Tree) =>
+ val LabelDef(_, List(), If(cond1, Block(List(body1), Apply(_, List())), Literal(Constant(())))) = q"while($cond) $body"
+ body1 ≈ body && cond1 ≈ cond
+ }
+
+ property("unquote trees into do while loop") = forAll { (cond: Tree, body: Tree) =>
+ val LabelDef(_, List(), Block(List(body1), If(cond1, Apply(_, List()), Literal(Constant(()))))) = q"do $body while($cond)"
+ body1 ≈ body && cond1 ≈ cond
+ }
+
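+  // Expected shape of a block built from `trees`: an empty list yields an empty
+  // block, a single expression is returned unwrapped, and otherwise a Block is
+  // built, with a synthetic `()` appended when the last statement is not a term.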
+ def blockInvariant(quote: Tree, trees: List[Tree]) =
+ quote ≈ (trees match {
+ case Nil => q"{}"
+ case _ :+ last if !last.isTerm => Block(trees, q"()")
+ case head :: Nil => head
+ case init :+ last => Block(init, last)
+ })
+
+ property("unquote list of trees into block (1)") = forAll { (trees: List[Tree]) =>
+ blockInvariant(q"{ ..$trees }", trees)
+ }
+
+ property("unquote list of trees into block (2)") = forAll { (trees1: List[Tree], trees2: List[Tree]) =>
+ blockInvariant(q"{ ..$trees1 ; ..$trees2 }", trees1 ++ trees2)
+ }
+
+ property("unquote list of trees into block (3)") = forAll { (trees: List[Tree], tree: Tree) =>
+ blockInvariant(q"{ ..$trees; $tree }", trees :+ tree)
+ }
+
+ property("unquote term into brackets") = test {
+ val a = q"a"
+ assert(q"($a)" ≈ a)
+ }
+
+ property("unquote terms into tuple") = test {
+ val a1 = q"a1"
+ val a2 = q"a2"
+ val as = List(a1, a2)
+ assert(q"(..$as)" ≈ q"scala.Tuple2($a1, $a2)")
+ assert(q"(a0, ..$as)" ≈ q"scala.Tuple3(a0, $a1, $a2)")
+ }
+
+ property("unquote empty list into tuple") = test {
+ val empty = List[Tree]()
+ assert(q"(..$empty)" ≈ q"()")
+ }
+
+ property("unquote single element list into tuple") = test {
+ val xs = q"x" :: Nil
+ assert(q"(..$xs)" ≈ xs.head)
+ }
+
+ property("function param flags are the same") = test {
+ val xy = q"val x: A" :: q"val y: B" :: Nil
+ assertEqAst(q"(..$xy) => x + y", "(x: A, y: B) => x + y")
+ }
+
+ property("anonymous functions don't support default values") = test {
+ val x = q"val x: Int = 1"
+ assertThrows[IllegalArgumentException] { q"($x) => x" }
+ }
+
+ property("assign variable") = test {
+ val v = q"v"
+ val value = q"foo"
+ assertEqAst(q"$v = $value", "v = foo")
+ }
+
+ property("assign update 1") = test {
+ val v = q"v"
+ val args = q"1" :: q"2" :: Nil
+ val value = q"foo"
+ assertEqAst(q"$v(..$args) = $value", "v(1, 2) = foo")
+ }
+
+ property("assign update 2") = test {
+ val a = q"v(0)"
+ val value = q"foo"
+ assertEqAst(q"$a = $value", "v(0) = foo")
+ }
+
+ property("assign or named arg") = test {
+ val assignx = q"x = 1"
+ assertEqAst(q"f($assignx)", "f(x = 1)")
+ }
+
+ property("fresh names are regenerated at each evaluation") = test {
+ def plusOne = q"{ _ + 1 }"
+ assert(!plusOne.equalsStructure(plusOne))
+ def whileTrue = q"while(true) false"
+ assert(!whileTrue.equalsStructure(whileTrue))
+ def withEvidence = q"def foo[T: X]"
+ assert(!withEvidence.equalsStructure(withEvidence))
+ }
+
+  property("make sure inference doesn't infer Any") = test {
+ val l1 = List(q"foo")
+ val l2 = List(q"bar")
+ val baz = q"baz"
+ assert(q"f(..${l1 ++ l2})" ≈ q"f(foo, bar)")
+ assert(q"f(..${l1 ++ l2}, $baz)" ≈ q"f(foo, bar, baz)")
+ assert(q"f(${if (true) q"a" else q"b"})" ≈ q"f(a)")
+ }
+
+ property("unquote iterable of non-parametric type") = test {
+ object O extends Iterable[Tree] { def iterator = List(q"foo").iterator }
+ q"f(..$O)"
+ }
+
+ property("SI-8016") = test {
+ val xs = q"1" :: q"2" :: Nil
+ assertEqAst(q"..$xs", "{1; 2}")
+ assertEqAst(q"{..$xs}", "{1; 2}")
+ }
+
+ property("SI-6842") = test {
+ val cases: List[Tree] = cq"a => b" :: cq"_ => c" :: Nil
+ assertEqAst(q"1 match { case ..$cases }", "1 match { case a => b case _ => c }")
+ assertEqAst(q"try 1 catch { case ..$cases }", "try 1 catch { case a => b case _ => c }")
+ }
+
+ property("SI-8009") = test {
+ q"`foo`".asInstanceOf[reflect.internal.SymbolTable#Ident].isBackquoted
+ }
+
+ property("SI-8148") = test {
+ val q"($a, $b) => $_" = q"_ + _"
+ assert(a.name != b.name)
+ }
+
+ property("SI-7275 a") = test {
+ val t = q"stat1; stat2"
+ assertEqAst(q"..$t", "{stat1; stat2}")
+ }
+
+ property("SI-7275 b") = test {
+ def f(t: Tree) = q"..$t"
+ assertEqAst(f(q"stat1; stat2"), "{stat1; stat2}")
+ }
+
+ property("SI-7275 c1") = test {
+ object O
+ implicit val liftO = Liftable[O.type] { _ => q"foo; bar" }
+ assertEqAst(q"f(..$O)", "f(foo, bar)")
+ }
+
+ property("SI-7275 c2") = test {
+ object O
+ implicit val liftO = Liftable[O.type] { _ => q"{ foo; bar }; { baz; bax }" }
+ assertEqAst(q"f(...$O)", "f(foo, bar)(baz, bax)")
+ }
+
+ property("SI-7275 d") = test {
+ val l = q"a; b" :: q"c; d" :: Nil
+ assertEqAst(q"f(...$l)", "f(a, b)(c, d)")
+ val l2: Iterable[Tree] = l
+ assertEqAst(q"f(...$l2)", "f(a, b)(c, d)")
+ }
+
+ property("SI-7275 e") = test {
+ val t = q"{ a; b }; { c; d }"
+ assertEqAst(q"f(...$t)", "f(a, b)(c, d)")
+ }
+
+ property("SI-7275 e2") = test {
+ val t = q"{ a; b }; c; d"
+ assertEqAst(q"f(...$t)", "f(a, b)(c)(d)")
+ }
+
+ property("remove synthetic unit") = test {
+ val q"{ ..$stats1 }" = q"{ def x = 2 }"
+ assert(stats1 ≈ List(q"def x = 2"))
+ val q"{ ..$stats2 }" = q"{ class X }"
+ assert(stats2 ≈ List(q"class X"))
+ val q"{ ..$stats3 }" = q"{ type X = Int }"
+ assert(stats3 ≈ List(q"type X = Int"))
+ val q"{ ..$stats4 }" = q"{ val x = 2 }"
+ assert(stats4 ≈ List(q"val x = 2"))
+ }
+
+ property("don't remove user-defined unit") = test {
+ val q"{ ..$stats }" = q"{ def x = 2; () }"
+ assert(stats ≈ List(q"def x = 2", q"()"))
+ }
+
+ property("empty-tree is not a block") = test {
+ assertThrows[MatchError] {
+ val q"{ ..$stats1 }" = q" "
+ }
+ }
+
+ property("empty block is synthetic unit") = test {
+ val q"()" = q"{}"
+ val q"{..$stats}" = q"{}"
+ assert(stats.isEmpty)
+ assertEqAst(q"{..$stats}", "{}")
+ assertEqAst(q"{..$stats}", "()")
+ }
+
+ property("consistent variable order") = test {
+ val q"$a = $b = $c = $d = $e = $f = $g = $h = $k = $l" = q"a = b = c = d = e = f = g = h = k = l"
+    assert(a ≈ q"a" && b ≈ q"b" && c ≈ q"c" && d ≈ q"d" && e ≈ q"e" && f ≈ q"f" && g ≈ q"g" && h ≈ q"h" && k ≈ q"k" && l ≈ q"l")
+ }
+
+ property("SI-8385 a") = test {
+ assertEqAst(q"(foo.x = 1)(2)", "(foo.x = 1)(2)")
+ }
+
+ property("SI-8385 b") = test {
+ assertEqAst(q"(() => ())()", "(() => ())()")
+ }
+
+ property("match scrutinee may not be empty") = test {
+ assertThrows[IllegalArgumentException] {
+ val scrutinee = q""
+ val cases = List(cq"_ =>")
+ q"$scrutinee match { case ..$cases }"
+ }
+ }
+
+ property("construct partial function") = test {
+ val cases = List(cq"a => b", cq"c => d")
+ assertEqAst(q"{ case ..$cases }", "{ case a => b case c => d }")
+ }
+
+ property("SI-8609 a") = test {
+ val q1 = q"val x = 1"
+ val q2 = q"..$q1; val y = 2"
+ assert(q2 ≈ q"{ val x = 1; val y = 2 }")
+ }
+
+ property("SI-8609 b") = test {
+ val q1 = q"import foo.bar"
+ val q2 = q"..$q1; val y = 2"
+ assert(q2 ≈ q"{ import foo.bar; val y = 2 }")
+ }
+}
diff --git a/test/scalacheck/scala/reflect/quasiquotes/TermDeconstructionProps.scala b/test/scalacheck/scala/reflect/quasiquotes/TermDeconstructionProps.scala
new file mode 100644
index 0000000000..73bfba41bc
--- /dev/null
+++ b/test/scalacheck/scala/reflect/quasiquotes/TermDeconstructionProps.scala
@@ -0,0 +1,258 @@
+package scala.reflect.quasiquotes
+
+import org.scalacheck._, Prop._, Gen._, Arbitrary._
+import scala.reflect.runtime.universe._, Flag._
+
+object TermDeconstructionProps extends QuasiquoteProperties("term deconstruction") {
+ property("f(..x) = f") = test {
+ // see SI-8008
+ assertThrows[MatchError] {
+ val q"f(..$args)" = q"f"
+ }
+ }
+
+ property("f(x)") = forAll { (x: Tree) =>
+ val q"f($x1)" = q"f($x)"
+ x1 ≈ x
+ }
+
+ property("f(..xs)") = forAll { (x1: Tree, x2: Tree) =>
+ val q"f(..$xs)" = q"f($x1, $x2)"
+ xs ≈ List(x1, x2)
+ }
+
+ property("f(y, ..ys)") = forAll { (x1: Tree, x2: Tree, x3: Tree) =>
+ val q"f($y, ..$ys)" = q"f($x1, $x2, $x3)"
+ y ≈ x1 && ys ≈ List(x2, x3)
+ }
+
+ property("f(y1, y2, ..ys)") = forAll { (x1: Tree, x2: Tree, x3: Tree) =>
+ val q"f($y1, $y2, ..$ys)" = q"f($x1, $x2, $x3)"
+ y1 ≈ x1 && y2 ≈ x2 && ys ≈ List(x3)
+ }
+
+ property("f(y1, ..ys, yn)") = forAll { (x1: Tree, x2: Tree, x3: Tree, x4: Tree) =>
+ val q"f($y1, ..$ys, $yn)" = q"f($x1, $x2, $x3, $x4)"
+ y1 ≈ x1 && ys ≈ List(x2, x3) && yn ≈ x4
+ }
+
+ property("f(..ys, y_{n-1}, y_n)") = forAll { (x1: Tree, x2: Tree, x3: Tree, x4: Tree) =>
+ val q"f(..$ys, $yn1, $yn)" = q"f($x1, $x2, $x3, $x4)"
+ ys ≈ List(x1, x2) && yn1 ≈ x3 && yn ≈ x4
+ }
+
+ property("f(...xss)") = forAll { (x1: Tree, x2: Tree) =>
+ val q"f(...$xss)" = q"f($x1)($x2)"
+ xss ≈ List(List(x1), List(x2))
+ }
+
+ property("f(...$xss)(..$last)") = forAll { (x1: Tree, x2: Tree, x3: Tree) =>
+ val q"f(...$xss)(..$last)" = q"f($x1)($x2)($x3)"
+ xss ≈ List(List(x1), List(x2)) && last ≈ List(x3)
+ }
+
+ property("f(...$xss)(..$lastinit, $lastlast)") = forAll { (x1: Tree, x2: Tree, x3: Tree, x4: Tree) =>
+ val q"f(...$xss)(..$lastinit, $lastlast)" = q"f($x1)($x2, $x3, $x4)"
+ xss ≈ List(List(x1)) && lastinit ≈ List(x2, x3) && lastlast ≈ x4
+ }
+
+ property("f(...xss) = f") = forAll { (x1: Tree, x2: Tree) =>
+ val q"f(...$xss)" = q"f"
+ xss ≈ List()
+ }
+
+ property("deconstruct unit as tuple") = test {
+ val q"(..$xs)" = q"()"
+ assert(xs.isEmpty)
+ }
+
+ property("deconstruct tuple") = test {
+ val q"(..$xs)" = q"(a, b)"
+ assert(xs ≈ List(q"a", q"b"))
+ }
+
+ property("deconstruct tuple mixed") = test {
+ val q"($first, ..$rest)" = q"(a, b, c)"
+ assert(first ≈ q"a")
+ assert(rest ≈ List(q"b", q"c"))
+ }
+
+ property("deconstruct tuple last element") = test {
+ val q"($first, ..$rest, $last)" = q"(a, b, c, d)"
+ assert(first ≈ q"a")
+ assert(rest ≈ List(q"b", q"c"))
+ assert(last ≈ q"d")
+ }
+
+ property("deconstruct expr as tuple") = test {
+ val q"(..$elems)" = q"foo"
+ assert(elems ≈ List(q"foo"))
+ }
+
+ property("deconstruct cases") = test {
+ val q"$x match { case ..$cases }" = q"x match { case 1 => case 2 => }"
+ assert(x ≈ q"x")
+ assert(cases ≈ List(cq"1 =>", cq"2 =>"))
+ }
+
+ property("deconstruct splitting last case") = test {
+ val q"$_ match { case ..$cases case $last }" = q"x match { case 1 => case 2 => case 3 => }"
+ assert(cases ≈ List(cq"1 =>", cq"2 =>"))
+ assert(last ≈ cq"3 =>")
+ }
+
+ property("deconstruct block") = test {
+ val q"{ ..$xs }" = q"{ x1; x2; x3 }"
+ assert(xs ≈ List(q"x1", q"x2", q"x3"))
+ }
+
+ property("deconstruct last element of a block") = test {
+ val q"{ ..$xs; $x }" = q"x1; x2; x3; x4"
+ assert(xs ≈ List(q"x1", q"x2", q"x3"))
+ assert(x ≈ q"x4")
+ }
+
+ property("exhaustive function matcher") = test {
+ def matches(line: String) { val q"(..$args) => $body" = parse(line) }
+ matches("() => bippy")
+ matches("(y: Y) => y oh y")
+ matches("(x: X, y: Y) => x and y")
+ }
+
+ property("exhaustive new pattern") = test {
+ def matches(line: String) {
+ val q"new { ..$early } with $name[..$targs](...$vargss) with ..$mixin { $self => ..$body }" = parse(line)
+ }
+ matches("new foo")
+ matches("new foo { body }")
+ matches("new foo[t]")
+ matches("new foo(x)")
+ matches("new foo[t](x)")
+ matches("new foo[t](x) { body }")
+ matches("new foo with bar")
+ matches("new foo with bar { body }")
+ matches("new { anonymous }")
+ matches("new { val early = 1 } with Parent[Int] { body }")
+ matches("new Foo { selfie => }")
+ }
+
+ property("exhaustive assign pattern") = test {
+ def matches(tree: Tree) { val q"$rhs = $lhs" = tree }
+ matches(parse("left = right"))
+ matches(parse("arr(1) = 2"))
+ matches(AssignOrNamedArg(EmptyTree, EmptyTree))
+ }
+
+ property("deconstruct update 1") = test {
+ val q"$obj(..$args) = $value" = q"foo(bar) = baz"
+ assert(obj ≈ q"foo")
+ assert(args ≈ List(q"bar"))
+ assert(value ≈ q"baz")
+ }
+
+ property("deconstruct update 2") = test {
+ val q"$left = $value" = q"foo(bar) = baz"
+ assert(left ≈ q"foo(bar)")
+ assert(value ≈ q"baz")
+ }
+
+ property("deconstruct while loop") = test {
+ val q"while($cond) $body" = parse("while(cond) body")
+ assert(cond ≈ q"cond")
+ assert(body ≈ q"body")
+ }
+
+ property("deconstruct do while loop") = test {
+ val q"do $body while($cond)" = parse("do body while(cond)")
+ assert(cond ≈ q"cond")
+ assert(body ≈ q"body")
+ }
+
+ property("deconstruct anonymous function with placeholders") = test {
+ val q"{ $f(_) }" = q"{ foo(_) }"
+ assert(f ≈ q"foo")
+ val q"{ _.$member }" = q"{ _.foo }"
+ assert(member ≈ TermName("foo"))
+ val q"{ _ + $x }" = q"{ _ + x }"
+ assert(x ≈ q"x")
+ val q"{ _ * _ }" = q"{ _ * _ }"
+ }
+
+  property("SI-8275 a") = test {
+ val cq"_ => ..$stats" = cq"_ => foo; bar"
+ assert(stats ≈ List(q"foo", q"bar"))
+ }
+
+  property("SI-8275 b") = test {
+ val cq"_ => ..$init; $last" = cq"_ => a; b; c"
+ assert(init ≈ List(q"a", q"b"))
+ assert(last ≈ q"c")
+ }
+
+  property("SI-8275 c") = test {
+ val cq"_ => ..$stats" = cq"_ =>"
+ assert(stats.isEmpty)
+ assertEqAst(q"{ case _ => ..$stats }", "{ case _ => }")
+ }
+
+ property("can't flatten type into block") = test {
+ assertThrows[IllegalArgumentException] {
+ val tpt = tq"List[Int]"
+ q"..$tpt; ()"
+ }
+ }
+
+ property("term select doesn't match type select") = test {
+ assertThrows[MatchError] {
+ val q"$qual.$name" = tq"foo.bar"
+ }
+ }
+
+ property("type application doesn't match applied type") = test {
+ assertThrows[MatchError] {
+ val q"$f[..$targs]" = tq"foo[bar]"
+ }
+ }
+
+ property("match doesn't match partial function") = test {
+ assertThrows[MatchError] {
+ val q"$_ match { case ..$_ }" = q"{ case _ => }"
+ }
+ }
+
+ property("deconstruct partial function") = test {
+ val q"{ case ..$cases }" = q"{ case a => b case c => d }"
+ val List(cq"a => b", cq"c => d") = cases
+ }
+
+ property("SI-8350 `new C` and `new C()` are equivalent") = test {
+ val q"new C" = q"new C()"
+ val q"new C()" = q"new C"
+ }
+
+  property("SI-8350 new applications extracted only for non-empty ctor calls") = test {
+ val q"new $c1" = q"new C()"
+ assert(c1 ≈ tq"C")
+ val q"new $c2" = q"new C(x)"
+ assert(c2 ≈ q"${tq"C"}(x)")
+ }
+
+ property("SI-8350 original test case") = test {
+ val q"new ..$parents" = q"new Foo with Bar"
+ assert(parents ≈ List(tq"Foo", tq"Bar"))
+ }
+
+ property("SI-8387 new is not an application") = test {
+ val `new` = q"new F(x)"
+ val q"$f(...$argss)" = `new`
+ assert(f ≈ `new`)
+ assert(argss.isEmpty)
+ }
+
+ property("SI-8703 extract block with single expression") = test {
+ val q"{ $a }" = Block(Nil, q"1")
+ val Literal(Constant(1)) = a
+ val q"{ $b }" = q"2"
+ val Literal(Constant(2)) = b
+ }
+}
diff --git a/test/scalacheck/scala/reflect/quasiquotes/TypeConstructionProps.scala b/test/scalacheck/scala/reflect/quasiquotes/TypeConstructionProps.scala
new file mode 100644
index 0000000000..c96018b317
--- /dev/null
+++ b/test/scalacheck/scala/reflect/quasiquotes/TypeConstructionProps.scala
@@ -0,0 +1,44 @@
+package scala.reflect.quasiquotes
+
+import org.scalacheck._, Prop._, Gen._, Arbitrary._
+import scala.reflect.runtime.universe._, Flag._, internal.reificationSupport.ScalaDot
+
+object TypeConstructionProps extends QuasiquoteProperties("type construction") {
+ property("bare idents contain type names") = test {
+ tq"x" ≈ Ident(TypeName("x"))
+ }
+
+ property("unquote type names into AppliedTypeTree") = forAll { (name1: TypeName, name2: TypeName) =>
+ tq"$name1[$name2]" ≈ AppliedTypeTree(Ident(name1), List(Ident(name2)))
+ }
+
+ property("tuple type") = test {
+ val empty = List[Tree]()
+ val ts = List(tq"t1", tq"t2")
+ assert(tq"(..$empty)" ≈ ScalaDot(TypeName("Unit")))
+ assert(tq"(..$ts)" ≈ tq"scala.Tuple2[t1, t2]")
+ assert(tq"(t0, ..$ts)" ≈ tq"scala.Tuple3[t0, t1, t2]")
+ }
+
+ property("single-element tuple type") = test {
+ val ts = q"T" :: Nil
+ assert(tq"(..$ts)" ≈ ts.head)
+ }
+
+ property("refined type") = test {
+ val stats = q"def foo" :: q"val x: Int" :: q"type Y = String" :: Nil
+ assert(tq"T { ..$stats }" ≈ tq"T { def foo; val x: Int; type Y = String }")
+ }
+
+ property("function type") = test {
+ val argtpes = tq"A" :: tq"B" :: Nil
+ val restpe = tq"C"
+ assert(tq"..$argtpes => $restpe" ≈ tq"(A, B) => C")
+ }
+
+ property("empty tq") = test {
+ val tt: TypeTree = tq""
+ assert(tt.tpe == null)
+ assert(tt.original == null)
+ }
+}
diff --git a/test/scalacheck/scala/reflect/quasiquotes/TypeDeconstructionProps.scala b/test/scalacheck/scala/reflect/quasiquotes/TypeDeconstructionProps.scala
new file mode 100644
index 0000000000..fc8554d61f
--- /dev/null
+++ b/test/scalacheck/scala/reflect/quasiquotes/TypeDeconstructionProps.scala
@@ -0,0 +1,80 @@
+package scala.reflect.quasiquotes
+
+import org.scalacheck._, Prop._, Gen._, Arbitrary._
+import scala.reflect.runtime.universe._, Flag._
+
+object TypeDeconstructionProps extends QuasiquoteProperties("type deconstruction") {
+ property("ident(type name)") = forAll { (name: TypeName) =>
+ val t = Ident(name)
+ val tq"$t1" = t
+ t1 ≈ t
+ }
+
+ property("applied type tree") = forAll { (name1: TypeName, name2: TypeName) =>
+ val tq"$a[$b]" = AppliedTypeTree(Ident(name1), List(Ident(name2)))
+ a ≈ Ident(name1) && b ≈ Ident(name2)
+ }
+
+ property("tuple type (1)") = test {
+ val tq"(..$empty)" = tq"_root_.scala.Unit"
+ assert(empty.isEmpty)
+ }
+
+ property("tuple type (2)") = test {
+ val tq"(..$ts)" = tq"(t1, t2)"
+ assert(ts ≈ List(tq"t1", tq"t2"))
+ }
+
+ property("tuple type (3)") = test {
+ val tq"($head, ..$tail)" = tq"(t0, t1, t2)"
+ assert(head ≈ tq"t0")
+ assert(tail ≈ List(tq"t1", tq"t2"))
+ }
+
+ property("tuple type (4)") = test {
+ val tq"(..$init, $last)" = tq"(t0, t1, t2)"
+ assert(init ≈ List(tq"t0", tq"t1"))
+ assert(last ≈ tq"t2")
+ }
+
+ property("tuple type (5)") = test {
+ val tq"(..$ts)" = tq"T"
+ assert(ts ≈ List(tq"T"))
+ }
+
+ property("refined type") = test {
+ val tq"T { ..$stats }" = tq"T { def foo; val x: Int; type Y = String }"
+ assert(stats ≈ List(q"def foo", q"val x: Int", q"type Y = String"))
+ }
+
+ property("function type (1)") = test {
+ val tq"..$argtpes => $restpe" = tq"(A, B) => C"
+ assert(argtpes ≈ List(tq"A", tq"B"))
+ assert(restpe ≈ tq"C")
+ }
+
+ property("function type (2)") = test {
+ val tq"(..$argtpes, $arglast) => $restpe" = tq"(A, B, C) => D"
+ assert(argtpes ≈ List(tq"A", tq"B"))
+ assert(arglast ≈ tq"C")
+ assert(restpe ≈ tq"D")
+ }
+
+ property("match empty type tree") = test {
+ val tq"" = TypeTree()
+    // also matches: a TypeTree without an original has no syntactic representation
+ val tq"" = tq"${typeOf[Int]}"
+ }
+
+ property("type select doesn't match term select") = test {
+ assertThrows[MatchError] {
+ val tq"$qual.$name" = q"foo.bar"
+ }
+ }
+
+  property("applied type doesn't match type application") = test {
+ assertThrows[MatchError] {
+ val tq"$tpt[..$tpts]" = q"foo[bar]"
+ }
+ }
+}
diff --git a/test/scalacheck/scala/reflect/quasiquotes/TypecheckedProps.scala b/test/scalacheck/scala/reflect/quasiquotes/TypecheckedProps.scala
new file mode 100644
index 0000000000..4646388c86
--- /dev/null
+++ b/test/scalacheck/scala/reflect/quasiquotes/TypecheckedProps.scala
@@ -0,0 +1,217 @@
+package scala.reflect.quasiquotes
+
+import org.scalacheck._, Prop._, Gen._, Arbitrary._
+import scala.reflect.runtime.universe._, Flag._, internal.reificationSupport._
+
+object TypecheckedProps extends QuasiquoteProperties("typechecked")
+ with TypecheckedTypes {
+ property("tuple term") = test {
+ val q"(..$elements)" = typecheck(q"(1, 2)")
+ assert(elements ≈ List(q"1", q"2"))
+ }
+
+ property("for/for-yield") = test {
+ val enums = fq"x <- xs" :: fq"x1 = x + 1" :: fq"if x1 % 2 == 0" :: Nil
+ val body = q"x1"
+ val xs = q"val xs = List(1, 2, 3)"
+ val q"$_; for(..$enums0) yield $body0" = typecheck(q"$xs; for(..$enums) yield $body")
+ assert(enums0 ≈ enums)
+ assert(body0 ≈ body)
+ val q"$_; for(..$enums1) $body1" = typecheck(q"$xs; for(..$enums) $body")
+ assert(enums1 ≈ enums)
+ assert(body1 ≈ body)
+ }
+
+ property("for .filter instead of .withFilter") = test {
+ val enums = fq"foo <- new Foo" :: fq"if foo != null" :: Nil
+ val body = q"foo"
+ val q"$_; for(..$enums1) yield $body1" = typecheck(q"""
+ class Foo { def map(f: Any => Any) = this; def withFilter(cond: Any => Boolean) = this }
+ for(..$enums) yield $body
+ """)
+ assert(enums1 ≈ enums)
+ assert(body1 ≈ body)
+ }
+
+ property("extract UnApply (1)") = test {
+ val q"object $_ { $_; $_; $m }" = typecheck(q"""
+ object Test {
+ class Cell(val x: Int)
+ object Cell { def unapply(c: Cell) = Some(c.x) }
+ new Cell(0) match { case Cell(v) => v }
+ }
+ """)
+ val q"$_ match { case $f(..$args) => $_ }" = m
+ assert(f ≈ pq"Test.this.Cell")
+ assert(args ≈ List(pq"v"))
+ }
+
+ property("extract UnApply (2)") = test {
+ val q"object $_ { $_; $m }" = typecheck(q"""
+ object Test {
+ case class Cell(val x: Int)
+ new Cell(0) match { case Cell(v) => v }
+ }
+ """)
+ val q"$_ match { case ${f: TypeTree}(..$args) => $_ }" = m
+ assert(f.original ≈ pq"Test.this.Cell")
+ assert(args ≈ List(pq"v"))
+ }
+
+ property("extract inferred val type") = test {
+ val typechecked = typecheck(q"val x = 42")
+ val q"val x = 42" = typechecked
+ val q"val x: ${tq""} = 42" = typechecked
+ val q"val x: ${t: Type} = 42" = typechecked
+ }
+
+ property("class with param (1)") = test {
+ val paramName = TermName("x")
+ val q"class $_($param)" = typecheck(q"class Test(val $paramName: Int)")
+
+ assert(param.name == paramName)
+ }
+
+ property("class with param (2)") = test {
+ val paramName = TermName("y")
+ val q"{class $_($param)}" = typecheck(q"class Test(val $paramName: Int = 3)")
+
+ assert(param.name == paramName)
+ assert(param.rhs ≈ q"3")
+ }
+
+ property("class with params") = test {
+ val pName1 = TermName("x1")
+ val pName2 = TermName("x2")
+ val q"{class $_($param1)(..$params2)}" = typecheck(q"class Test(val x0: Float)(val $pName1: Int = 3, $pName2: String)")
+
+ val List(p1, p2, _*) = params2
+
+ assert(p1.name == pName1)
+ assert(p2.name == pName2)
+ assert(params2.size == 2)
+ }
+
+ property("implicit class") = test {
+ val clName = TypeName("Test")
+ val paramName = TermName("x")
+ val q"{implicit class $name($param)}" = typecheck(q"implicit class $clName(val $paramName: String)")
+
+ assert(name == clName)
+ assert(param.name == paramName)
+ }
+
+ property("block with lazy") = test {
+ val lazyName = TermName("x")
+ val lazyRhsVal = 42
+ val lazyRhs = Literal(Constant(lazyRhsVal))
+ val q"{ $mods val $pname: $_ = $rhs }" = typecheck(q"{lazy val $lazyName = $lazyRhsVal}")
+
+ assert(pname == lazyName)
+ assert(rhs ≈ lazyRhs)
+ }
+
+ property("class with lazy") = test {
+ val clName = TypeName("Test")
+ val paramName = TermName("x")
+ val q"class $name{lazy val $pname = $_}" = typecheck(q"class $clName {lazy val $paramName = 42}")
+
+ assert(name == clName)
+ assert(pname == paramName)
+ }
+
+ property("case class with object") = test {
+ val defName = TermName("z")
+ val defRhsVal = 42
+ val defRhs = Literal(Constant(defRhsVal))
+ val q"object $_{ $_; object $_ extends ..$_ {def $name = $rhs} }" =
+ typecheck(q"""
+ object Test{
+ case class C(x: Int) { def y = x };
+ object C { def $defName = $defRhsVal }
+ }""")
+
+ assert(name == defName)
+ assert(rhs ≈ defRhs)
+ }
+
+ property("partial function") = test {
+ val q"{ case ..$cases }: $ascr" = typecheck(q"{ case 1 => () }: PartialFunction[Int, Unit]")
+ assert(cases ≈ q"{ case 1 => () }".cases)
+ }
+}
+
+trait TypecheckedTypes { self: QuasiquoteProperties =>
+ property("type ident") = test {
+ val q"$_; type $_ = $tpt" = typecheck(q"class C; type T = C")
+ val tq"C" = tpt
+ }
+
+ property("type select") = test {
+ val tq"scala.Int" = typecheckTyp(tq"Int")
+ }
+
+ property("this type select") = test {
+ val q"class $_ { $_; type $_ = $tpt }" = typecheck(q"class C { type A = Int; type B = this.A }")
+ val tq"this.$name" = tpt
+ val TypeName("A") = name
+ }
+
+ property("super type select") = test {
+ val q"$_; class $_ extends $_ { type $_ = $tpt }" =
+ typecheck(q"class C1 { type A = Int }; class C2 extends C1 { type B = super[C1].A }")
+ val tq"$empty.super[$c1].$a" = tpt
+ val TypeName("") = empty
+ val TypeName("C1") = c1
+ val TypeName("A") = a
+ }
+
+ property("applied type") = test {
+ val tt = typecheckTyp(tq"Map[Int, Int]")
+ val tq"$tpt[..$tpts]" = tt
+ val tq"scala.Predef.Map" = tpt
+ val List(tq"scala.Int", tq"scala.Int") = tpts
+ }
+
+ property("tuple type") = test {
+ val tq"(..$els0)" = typecheckTyp(tq"Unit")
+ assert(els0.isEmpty)
+ val tq"(..$els1)" = typecheckTyp(tq"(Int, Int)")
+ val List(tq"scala.Int", tq"scala.Int") = els1
+ }
+
+ property("function type") = test {
+ val tq"(..$argtpes) => $restpe" = typecheckTyp(tq"(Int, Int) => Int")
+ val List(tq"scala.Int", tq"scala.Int") = argtpes
+ val tq"scala.Int" = restpe
+ }
+
+ property("compound type") = test {
+ val tq"..$parents { ..$defns }" = typecheckTyp(tq"Int { def x: Int }")
+ val List(tq"Int") = parents
+ val List(q"def x: Int") = defns
+ }
+
+ property("singleton type") = test {
+ val tq"$ref.type" = typecheckTyp(tq"scala.Predef.type")
+ val q"scala.Predef" = ref
+ }
+
+ property("type projection") = test {
+ val tq"$tpt#$name" = typecheckTyp(tq"({ type T = Int })#T")
+ val TypeName("T") = name
+ val tq"{ type T = Int }" = tpt
+ }
+
+ property("annotated type") = test {
+ val tq"$tpt @$annot" = typecheckTyp(tq"Int @unchecked")
+ val tq"scala.Int" = tpt
+ val tq"unchecked" = annot
+ }
+
+ property("existential type") = test {
+ val tq"$tpt forSome { ..$defns }" = typecheckTyp(tq"T forSome { type T }")
+ val tq"T" = tpt
+ val q"type T" :: Nil = defns
+ }
+}
diff --git a/test/scalacheck/scala/reflect/quasiquotes/UnliftableProps.scala b/test/scalacheck/scala/reflect/quasiquotes/UnliftableProps.scala
new file mode 100644
index 0000000000..4c2f2280ca
--- /dev/null
+++ b/test/scalacheck/scala/reflect/quasiquotes/UnliftableProps.scala
@@ -0,0 +1,168 @@
+package scala.reflect.quasiquotes
+
+import org.scalacheck._, Prop._, Gen._, Arbitrary._
+import scala.reflect.runtime.universe._, Flag._
+
+object UnliftableProps extends QuasiquoteProperties("unliftable") {
+ property("unlift name") = test {
+ val termname0 = TermName("foo")
+ val typename0 = TypeName("foo")
+ val q"${termname1: TermName}" = Ident(termname0)
+ assert(termname1 == termname0)
+ val q"${typename1: TypeName}" = Ident(typename0)
+ assert(typename1 == typename0)
+ val q"${name1: Name}" = Ident(termname0)
+ assert(name1 == termname0)
+ val q"${name2: Name}" = Ident(typename0)
+ assert(name2 == typename0)
+ }
+
+ property("unlift type") = test {
+ val q"${tpe: Type}" = TypeTree(typeOf[Int])
+ assert(tpe =:= typeOf[Int])
+ }
+
+ property("unlift constant") = test {
+ val q"${const: Constant}" = Literal(Constant("foo"))
+ assert(const == Constant("foo"))
+ }
+
+ property("unlift char") = test {
+ val q"${c: Char}" = Literal(Constant('0'))
+ assert(c.isInstanceOf[Char] && c == '0')
+ }
+
+ property("unlift byte") = test {
+ val q"${b: Byte}" = Literal(Constant(0: Byte))
+ assert(b.isInstanceOf[Byte] && b == 0)
+ }
+
+ property("unlift short") = test {
+ val q"${s: Short}" = Literal(Constant(0: Short))
+ assert(s.isInstanceOf[Short] && s == 0)
+ }
+
+ property("unlift int") = test {
+ val q"${i: Int}" = Literal(Constant(0: Int))
+ assert(i.isInstanceOf[Int] && i == 0)
+ }
+
+ property("unlift long") = test {
+ val q"${l: Long}" = Literal(Constant(0L: Long))
+ assert(l.isInstanceOf[Long] && l == 0L)
+ }
+
+ property("unlift float") = test {
+ val q"${f: Float}" = Literal(Constant(0.0f: Float))
+ assert(f.isInstanceOf[Float] && f == 0.0f)
+ }
+
+ property("unlift double") = test {
+ val q"${d: Double}" = Literal(Constant(0.0: Double))
+ assert(d.isInstanceOf[Double] && d == 0.0)
+ }
+
+ property("unlift bool") = test {
+ val q"${b: Boolean}" = q"true"
+ assert(b.isInstanceOf[Boolean] && b == true)
+ }
+
+ property("unlift string") = test {
+ val q"${s: String}" = q""" "foo" """
+ assert(s.isInstanceOf[String] && s == "foo")
+ }
+
+ property("unlift scala.symbol") = test {
+ val q"${s: scala.Symbol}" = q"'foo"
+ assert(s.isInstanceOf[scala.Symbol] && s == 'foo)
+ }
+
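+  // Unlifts a `scala.collection.immutable.List(..)` application tree into a List[T],
+  // succeeding only when every argument tree itself unlifts to a T.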
+ implicit def unliftList[T: Unliftable]: Unliftable[List[T]] = Unliftable {
+ case q"scala.collection.immutable.List(..$args)" if args.forall { implicitly[Unliftable[T]].unapply(_).nonEmpty } =>
+ val ut = implicitly[Unliftable[T]]
+ args.flatMap { ut.unapply(_) }
+ }
+
+ property("unlift list (1)") = test {
+ val orig = List(1, 2)
+ val q"${l1: List[Int]}" = q"$orig" // q"List(1, 2)"
+ assert(l1 == orig)
+    val q"f(..${l2: List[Int]})" = q"f(..$orig)" // q"f(1, 2)"
+ assert(l2 == orig)
+ }
+
+ property("unlift list (2)") = test {
+ val orig2 = List(List(1, 2), List(3))
+    val q"f(${l3: List[List[Int]]})" = q"f($orig2)" // q"f(List(List(1, 2), List(3)))"
+ assert(l3 == orig2)
+ val q"f(..${l4: List[List[Int]]})" = q"f(..$orig2)" // q"f(List(1, 2), List(3))"
+ assert(l4 == orig2)
+    val q"f(...${l5: List[List[Int]]})" = q"f(...$orig2)" // q"f(1, 2)(3)"
+ assert(l5 == orig2)
+ }
+
+ property("don't unlift non-tree unquotee (1)") = test {
+ val q"${a: TermName}.${b: TermName}" = q"a.b"
+ assert(a == TermName("a"))
+ assert(b == TermName("b"))
+ }
+
+ property("don't unlift non-tree unquotee (2)") = test {
+ val q"${mods: Modifiers} def foo" = q"def foo"
+ assert(mods == Modifiers(DEFERRED))
+ }
+
+ property("unlift tuple") = test {
+ val q"${t2: (Int, Int)}" = q"(1, 2)"
+ val q"${t3: (Int, Int, Int)}" = q"(1, 2, 3)"
+ val q"${t4: (Int, Int, Int, Int)}" = q"(1, 2, 3, 4)"
+ val q"${t5: (Int, Int, Int, Int, Int)}" = q"(1, 2, 3, 4, 5)"
+ val q"${t6: (Int, Int, Int, Int, Int, Int)}" = q"(1, 2, 3, 4, 5, 6)"
+ val q"${t7: (Int, Int, Int, Int, Int, Int, Int)}" = q"(1, 2, 3, 4, 5, 6, 7)"
+ val q"${t8: (Int, Int, Int, Int, Int, Int, Int, Int)}" = q"(1, 2, 3, 4, 5, 6, 7, 8)"
+ val q"${t9: (Int, Int, Int, Int, Int, Int, Int, Int, Int)}" = q"(1, 2, 3, 4, 5, 6, 7, 8, 9)"
+ val q"${t10: (Int, Int, Int, Int, Int, Int, Int, Int, Int, Int)}" = q"(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)"
+ val q"${t11: (Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int)}" = q"(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11)"
+ val q"${t12: (Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int)}" = q"(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12)"
+ val q"${t13: (Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int)}" = q"(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13)"
+ val q"${t14: (Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int)}" = q"(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14)"
+ val q"${t15: (Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int)}" = q"(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15)"
+ val q"${t16: (Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int)}" = q"(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16)"
+ val q"${t17: (Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int)}" = q"(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17)"
+ val q"${t18: (Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int)}" = q"(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18)"
+ val q"${t19: (Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int)}" = q"(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19)"
+ val q"${t20: (Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int)}" = q"(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20)"
+ val q"${t21: (Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int)}" = q"(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21)"
+ val q"${t22: (Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int)}" = q"(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22)"
+ // assert(t1 == Tuple1(1))
+ assert(t2 == (1, 2))
+ assert(t3 == (1, 2, 3))
+ assert(t4 == (1, 2, 3, 4))
+ assert(t5 == (1, 2, 3, 4, 5))
+ assert(t6 == (1, 2, 3, 4, 5, 6))
+ assert(t7 == (1, 2, 3, 4, 5, 6, 7))
+ assert(t8 == (1, 2, 3, 4, 5, 6, 7, 8))
+ assert(t9 == (1, 2, 3, 4, 5, 6, 7, 8, 9))
+ assert(t10 == (1, 2, 3, 4, 5, 6, 7, 8, 9, 10))
+ assert(t11 == (1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11))
+ assert(t12 == (1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12))
+ assert(t13 == (1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13))
+ assert(t14 == (1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14))
+ assert(t15 == (1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15))
+ assert(t16 == (1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16))
+ assert(t17 == (1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17))
+ assert(t18 == (1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18))
+ assert(t19 == (1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19))
+ assert(t20 == (1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20))
+ assert(t21 == (1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21))
+ assert(t22 == (1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22))
+ }
+
+ property("unlift xml comment") = test {
+ implicit val unliftXmlComment = Unliftable[xml.Comment] {
+ case q"new _root_.scala.xml.Comment(${value: String})" => xml.Comment(value)
+ }
+ val q"${comment: xml.Comment}" = q"<!--foo-->"
+ assert(comment.commentText == "foo")
+ }
+}
diff --git a/test/scalacheck/scala/tools/nsc/scaladoc/CommentFactoryTest.scala b/test/scalacheck/scala/tools/nsc/scaladoc/CommentFactoryTest.scala
new file mode 100644
index 0000000000..afee3eed86
--- /dev/null
+++ b/test/scalacheck/scala/tools/nsc/scaladoc/CommentFactoryTest.scala
@@ -0,0 +1,189 @@
+package scala.tools.nsc.scaladoc
+
+import org.scalacheck._
+import org.scalacheck.Prop._
+
+import scala.tools.nsc.Global
+import scala.tools.nsc.doc
+import scala.tools.nsc.doc.base.comment._
+import scala.tools.nsc.doc.model._
+import scala.tools.nsc.doc.model.diagram._
+
+class Factory(val g: Global, val s: doc.Settings)
+ extends doc.model.ModelFactory(g, s) {
+ thisFactory: Factory
+ with ModelFactoryImplicitSupport
+ with ModelFactoryTypeSupport
+ with DiagramFactory
+ with CommentFactory
+ with doc.model.TreeFactory
+ with MemberLookup =>
+
+ def strip(c: Comment): Option[Inline] = {
+ c.body match {
+ case Body(List(Paragraph(Chain(List(Summary(inner)))))) => Some(inner)
+ case _ => None
+ }
+ }
+
+ def getComment(s: String): Comment =
+ parse(s, "", scala.tools.nsc.util.NoPosition, null)
+
+ def parseComment(s: String): Option[Inline] =
+ strip(getComment(s))
+
+ def createBody(s: String) =
+ parse(s, "", scala.tools.nsc.util.NoPosition, null).body
+}
+
+object CommentFactoryTest extends Properties("CommentFactory") {
+ val factory = {
+ val settings = new doc.Settings((str: String) => {})
+ val reporter = new scala.tools.nsc.reporters.ConsoleReporter(settings)
+ val g = new Global(settings, reporter)
+ (new Factory(g, settings)
+ with ModelFactoryImplicitSupport
+ with ModelFactoryTypeSupport
+ with DiagramFactory
+ with CommentFactory
+ with doc.model.TreeFactory
+ with MemberLookup)
+ }
+
+ def parse(src: String, dst: Inline): Boolean = {
+ factory.parseComment(src) match {
+ case Some(inline) =>
+ inline == dst
+ case _ =>
+ false
+ }
+ }
+
+ property("parse") = parse(
+ "/** One two three */",
+ Text("One two three")
+ )
+ property("parse") = parse(
+ "/** One `two` three */",
+ Chain(List(Text("One "), Monospace(Text("two")), Text(" three")))
+ )
+
+ property("parse") = parse(
+ """
+/** One two
+ * three */""",
+ Text("One two\nthree")
+ )
+ property("parse") = parse(
+ """
+/** One `two`
+ * three */""",
+ Chain(List(Text("One "), Monospace(Text("two")), Text("\n"), Text("three")))
+ )
+
+ property("parse") = parse(
+ """
+/** One `two`
+ * three */""",
+ Chain(List(Text("One "), Monospace(Text("two")), Text("\n"), Text(" three")))
+ )
+
+ property("parse") = parse(
+ """
+/** One
+ * `two` three */""",
+ Chain(List(Text("One"), Text("\n"), Monospace(Text("two")), Text(" three")))
+ )
+
+ property("Trac #4361 - ^...^") = parse(
+ """
+/**
+ * hello ^world^ */""",
+ Chain(List(Text("hello "), Superscript(Text("world"))))
+ )
+
+ property("Trac #4361 - single ^ symbol") = parse(
+ """
+/**
+ * <pre>
+ * hello ^world
+ * </pre>
+ *
+ */""",
+    Chain(List(Text(""), Text("\n"),
+      HtmlTag("<pre>\nhello ^world\n</pre>")))
+ )
+
+ property("Trac #4366 - body") = {
+ val body = factory.createBody(
+ """
+ /**
+ * <strong><code>foo</code> has been deprecated and will be removed in a future version. Please call <code>bar</code> instead.</strong>
+ */
+ """
+ )
+
+ body == Body(List(Paragraph(Chain(List(
+ Summary(Chain(List(HtmlTag("<strong><code>foo</code> has been deprecated and will be removed in a future version. Please call <code>bar</code> instead.</strong>"), Text("\n"), Text(""))))
+ )))))
+ }
+
+ property("Trac #4366 - summary") = {
+ val body = factory.createBody(
+ """
+ /**
+ * <strong><code>foo</code> has been deprecated and will be removed in a future version. Please call <code>bar</code> instead.</strong>
+ */
+ """
+ )
+ body.summary == Some(Chain(List(HtmlTag("<strong><code>foo</code> has been deprecated and will be removed in a future version. Please call <code>bar</code> instead.</strong>"), Text("\n"), Text(""))))
+ }
+
+ property("Trac #4358 - body") = {
+ factory.createBody(
+ """
+ /**
+ * Implicit conversion that invokes the <code>expect</code> method on the <code>EasyMock</code> companion object (<em>i.e.</em>, the
+ * static <code>expect</code> method in Java class <code>org.easymock.EasyMock</code>).
+ */
+ """
+ ) match {
+ case Body(List(Paragraph(Chain(List(Summary(Chain(List(Chain(List(
+ Text("Implicit conversion that invokes the "),
+ HtmlTag("<code>expect</code>"),
+ Text(" method on the "),
+ HtmlTag("<code>EasyMock</code>"),
+ Text(" companion object ("),
+ HtmlTag("<em>i.e.</em>"),
+ Text(", the\nstatic "),
+ HtmlTag("<code>expect</code>"),
+ Text(" method in Java class "),
+ HtmlTag("<code>org.easymock.EasyMock</code>"),
+ Text(")")
+ )), Text(".")))), Text("\n")))))) =>
+ true
+ case other => {
+ println(other)
+ false
+ }
+ }
+ }
+
+ property("Empty parameter text should be empty") = {
+ // used to fail with
+ // body == Body(List(Paragraph(Chain(List(Summary(Text('\n')))))))
+ factory.getComment(
+ """
+/**
+ * @deprecated
+ */
+ """).deprecated match {
+ case Some(Body(l)) if l.isEmpty => true
+ case other =>
+ println(other)
+ false
+ }
+ }
+}
diff --git a/test/scalacheck/scala/tools/nsc/scaladoc/HtmlFactoryTest.scala b/test/scalacheck/scala/tools/nsc/scaladoc/HtmlFactoryTest.scala
new file mode 100644
index 0000000000..740eb68d99
--- /dev/null
+++ b/test/scalacheck/scala/tools/nsc/scaladoc/HtmlFactoryTest.scala
@@ -0,0 +1,786 @@
+package scala.tools.nsc.scaladoc
+
+import org.scalacheck._
+import org.scalacheck.Prop._
+
+import java.net.{URLClassLoader, URLDecoder}
+import scala.collection.mutable
+import scala.xml.NodeSeq
+
+object XMLUtil {
+ import scala.xml._
+
+ def stripGroup(seq: Node): Node = {
+ seq match {
+ case group: Group => {
+ <div class="group">{ group.nodes.map(stripGroup _) }</div>
+ }
+ case e: Elem => {
+ val child = e.child.map(stripGroup _)
+ Elem(e.prefix, e.label, e.attributes, e.scope, child : _*)
+ }
+ case _ => seq
+ }
+ }
+}
+
+object HtmlFactoryTest extends Properties("HtmlFactory") {
+
+ final val RESOURCES = "test/scaladoc/resources/"
+
+ import scala.tools.nsc.ScalaDocReporter
+ import scala.tools.nsc.doc.{DocFactory, Settings}
+ import scala.tools.nsc.doc.html.HtmlFactory
+
+ def getClasspath = {
+    // These things can be tricky.
+    // This test previously relied on the assumption that the current thread's classloader is a URL classloader and contains all the classpath entries.
+    // Does partest actually guarantee this? To quote Leonard Nimoy: the answer, of course, is no.
+    // This test _will_ fail again some time in the future.
+ // Footnote: java.lang.ClassCastException: org.apache.tools.ant.loader.AntClassLoader5 cannot be cast to java.net.URLClassLoader
+ val loader = Thread.currentThread.getContextClassLoader.asInstanceOf[URLClassLoader]
+ val paths = loader.getURLs.map(u => URLDecoder.decode(u.getPath))
+ paths mkString java.io.File.pathSeparator
+ }
+
+ def createFactory = {
+ val settings = new Settings({Console.err.println(_)})
+ settings.scaladocQuietRun = true
+ settings.nowarn.value = true
+ settings.classpath.value = getClasspath
+ settings.docAuthor.value = true
+
+ val reporter = new scala.tools.nsc.reporters.ConsoleReporter(settings)
+ new DocFactory(reporter, settings)
+ }
+
+ def createTemplates(basename: String): collection.Map[String, NodeSeq] = {
+ val result = mutable.Map[String, NodeSeq]()
+
+ createFactory.makeUniverse(Left(List(RESOURCES+basename))) match {
+ case Some(universe) => {
+ new HtmlFactory(universe, new ScalaDocReporter(universe.settings)).writeTemplates((page) => {
+ result += (page.absoluteLinkTo(page.path) -> page.body)
+ })
+ }
+ case _ =>
+ }
+
+ result
+ }
+
+ def createTemplate(scala: String) = {
+ val html = scala.stripSuffix(".scala") + ".html"
+ createTemplates(scala)(html)
+ }
+
+ /**
+ * This tests the text without the markup - ex:
+ *
+ * <h4 class="signature">
+ * <span class="modifier_kind">
+ * <span class="modifier">implicit</span>
+ * <span class="kind">def</span>
+ * </span>
+ * <span class="symbol">
+ * <span class="name">test</span><span class="params">()</span><span class="result">: <span name="scala.Int" class="extype">Int</span></span>
+ * </span>
+ * </h4>
+ *
+ * becomes:
+ *
+ * implicit def test(): Int
+ *
+ * and is required to contain the text in the given checks
+ *
+ * NOTE: Comparison is done ignoring all whitespace
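+   *
+   * Example call (hypothetical source file and signatures, shown only to illustrate the
+   * (page hint, expected text, should-be-present) shape of each check):
+   * {{{
+   * checkText("Example.scala")(
+   *   (None,          "def test(): Int", true),   // must appear in Example.html
+   *   (Some("Other"), "private val x",   false))  // must NOT appear in Other.html
+   * }}}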
+ */
+ def checkText(scalaFile: String, debug: Boolean = true)(checks: (Option[String], String, Boolean)*): Boolean = {
+ val htmlFile = scalaFile.stripSuffix(".scala") + ".html"
+ val htmlAllFiles = createTemplates(scalaFile)
+ var result = true
+
+ for ((fileHint, check, expected) <- checks) {
+ // resolve the file to be checked
+ val fileName = fileHint match {
+ case Some(file) =>
+ if (file endsWith ".html")
+ file
+ else
+ file + ".html"
+ case None =>
+ htmlFile
+ }
+ val fileTextPretty = htmlAllFiles(fileName).text.replace('→',' ').replaceAll("\\s+"," ")
+ val fileText = fileTextPretty.replaceAll(" ", "")
+
+ val checkTextPretty = check.replace('→',' ').replaceAll("\\s+"," ")
+ val checkText = checkTextPretty.replaceAll(" ", "")
+
+ val checkValue = fileText.contains(checkText) == expected
+ if (debug && (!checkValue)) {
+ Console.err.println("")
+ Console.err.println("HTML Check failed for resource file " + scalaFile + ":")
+ Console.err.println("Could not match: \n" + checkTextPretty)
+ Console.err.println("In the extracted HTML text: \n" + fileTextPretty)
+ Console.err.println("NOTE: The whitespaces are eliminated before matching!")
+ Console.err.println("")
+ }
+ result &&= checkValue
+ }
+
+ result
+ }
+
+ def shortComments(root: scala.xml.Node) =
+ XMLUtil.stripGroup(root).descendant.flatMap {
+ case e: scala.xml.Elem => {
+ if (e.attribute("class").toString.contains("shortcomment")) {
+ Some(e)
+ } else {
+ None
+ }
+ }
+ case _ => None
+ }
+
+ property("Trac #3790") = {
+ createTemplate("Trac3790.scala") match {
+ case node: scala.xml.Node => {
+ val comments = shortComments(node)
+
+ comments.exists { _.toString.contains(">A lazy String\n</p>") } &&
+ comments.exists { _.toString.contains(">A non-lazy String\n</p>") }
+ }
+ case _ => false
+ }
+ }
+
+ property("Trac #4306") = {
+ val files = createTemplates("Trac4306.scala")
+ files("com/example/trac4306/foo/package$$Bar.html") != None
+ }
+
+ property("Trac #4366") = {
+ createTemplate("Trac4366.scala") match {
+ case node: scala.xml.Node => {
+ shortComments(node).exists { n => {
+ val str = n.toString
+ str.contains("<code>foo</code>") && str.contains("</strong>")
+ } }
+ }
+ case _ => false
+ }
+ }
+
+ property("Trac #4358") = {
+ createTemplate("Trac4358.scala") match {
+ case node: scala.xml.Node =>
+ ! shortComments(node).exists {
+ _.toString.contains("<em>i.</em>")
+ }
+ case _ => false
+ }
+ }
+
+ property("Trac #4180") = {
+ createTemplate("Trac4180.scala") != None
+ }
+
+ property("Trac #4372") = {
+ createTemplate("Trac4372.scala") match {
+ case node: scala.xml.Node => {
+ val html = node.toString
+ html.contains("<span title=\"gt4s: $plus$colon\" class=\"name\">+:</span>") &&
+ html.contains("<span title=\"gt4s: $minus$colon\" class=\"name\">-:</span>") &&
+ html.contains("""<span class="params">(<span name="n">n: <span class="extype" name="scala.Int">Int</span></span>)</span><span class="result">: <span class="extype" name="scala.Int">Int</span></span>""")
+ }
+ case _ => false
+ }
+ }
+
+ property("Trac #4374 - public") = {
+ val files = createTemplates("Trac4374.scala")
+ files("WithPublic.html") match {
+ case node: scala.xml.Node => {
+ val s = node.toString
+ s.contains("""href="WithPublic$.html"""") &&
+ files.get("WithPublic$.html") != None
+ }
+ case _ => false
+ }
+ }
+
+ property("Trac #4374 - private") = {
+ val files = createTemplates("Trac4374.scala")
+ files("WithPrivate.html") match {
+ case node: scala.xml.Node => {
+ val s = node.toString
+ ! s.contains("""href="WithPrivate$.html"""") &&
+ files.get("WithPrivate$.html") == None
+ }
+ case _ => false
+ }
+ }
+
+ property("Trac #4325 - files") = {
+ val files = createTemplates("Trac4325.scala")
+
+ files.get("WithSynthetic.html") != None &&
+ files.get("WithSynthetic$.html") == None &&
+ files.get("WithObject.html") != None &&
+ files.get("WithObject$.html") != None
+ }
+
+  property("Trac #4325 - Don't link to synthetic companion") = {
+ val files = createTemplates("Trac4325.scala")
+
+ files("WithSynthetic.html") match {
+ case node: scala.xml.Node => {
+ val s = node.toString
+ ! s.contains("""href="WithSynthetic$.html"""")
+ }
+ case _ => false
+ }
+ }
+
+ property("Trac #4325 - Link to companion") = {
+ val files = createTemplates("Trac4325.scala")
+
+ files("WithObject.html") match {
+ case node: scala.xml.Node => {
+ val s = node.toString
+ s.contains("""href="WithObject$.html"""")
+ }
+ case _ => false
+ }
+ }
+
+ property("Trac #4420 - no whitespace at end of line") = {
+ val files = createTemplates("Trac4420.scala")
+
+ files("TestA.html") match {
+ case node: scala.xml.Node => {
+ val s = node.toString
+ s.contains("""See YYY for more details""")
+ }
+ case _ => false
+ }
+ }
+ //
+ // property("Trac #484 - refinements and existentials") = {
+ // val files = createTemplates("Trac484.scala")
+ // val lines = """
+ // |type Bar = AnyRef { type Dingus <: T forSome { type T <: String } }
+ // |type Foo = AnyRef { ... /* 3 definitions in type refinement */ }
+ // |def g(x: T forSome { type T <: String }): String
+ // |def h(x: Float): AnyRef { def quux(x: Int,y: Int): Int }
+ // |def hh(x: Float): AnyRef { def quux(x: Int,y: Int): Int }
+ // |def j(x: Int): Bar
+ // |def k(): AnyRef { type Dingus <: T forSome { type T <: String } }
+ // """.stripMargin.trim.lines map (_.trim)
+ //
+ // files("RefinementAndExistentials.html") match {
+ // case node: scala.xml.Node => {
+ // val s = node.text.replaceAll("\\s+", " ")
+ // lines forall (s contains _)
+ // }
+ // case _ => false
+ // }
+ // }
+
+ property("Trac #4289") = {
+ val files = createTemplates("Trac4289.scala")
+
+ files("Subclass.html") match {
+ case node: scala.xml.Node => {
+ node.toString.contains {
+ """<dt>returns</dt><dd class="cmt"><p>123</p></dd>"""
+ }
+ }
+ case _ => false
+ }
+ }
+
+ property("Trac #4409") = {
+ createTemplate("Trac4409.scala") match {
+ case node: scala.xml.Node => {
+ ! node.toString.contains("""<div class="block"><ol>since""")
+ }
+ case _ => false
+ }
+ }
+
+ property("Trac #4452") = {
+ createTemplate("Trac4452.scala") match {
+ case node: scala.xml.Node =>
+ ! node.toString.contains(">*")
+ case _ => false
+ }
+ }
+
+ property("SI-4421") = {
+ createTemplate("SI_4421.scala") match {
+ case node: scala.xml.Node => {
+ val html = node.toString
+ html.contains(">Example:") && html.contains(">Note<")
+ }
+ case _ => false
+ }
+ }
+
+ property("SI-4589") = {
+ createTemplate("SI_4589.scala") match {
+ case node: scala.xml.Node => {
+ val html = node.toString
+ html.contains(">x0123456789: <") &&
+ html.contains(">x012345678901234567890123456789: <")
+ }
+ case _ => false
+ }
+ }
+
+ property("SI-4714: Should decode symbolic type alias name.") = {
+ createTemplate("SI_4715.scala") match {
+ case node: scala.xml.Node => {
+ val html = node.toString
+ html.contains(">:+:<")
+ }
+ case _ => false
+ }
+ }
+
+ property("SI-4287: Default arguments of synthesized constructor") = {
+ val files = createTemplates("SI_4287.scala")
+
+ files("ClassWithSugar.html") match {
+ case node: scala.xml.Node => {
+ node.toString.contains(">123<")
+ }
+ case _ => false
+ }
+ }
+
+ property("SI-4507: Default arguments of synthesized constructor") = {
+ createTemplate("SI_4507.scala") match {
+ case node: scala.xml.Node =>
+ ! node.toString.contains("<li>returns silently when evaluating true and true</li>")
+ case _ => false
+ }
+ }
+
+ property("SI-4898: Use cases and links should not crash scaladoc") = {
+ createTemplate("SI_4898.scala")
+ true
+ }
+
+ property("SI-5054: Use cases should override their original members") =
+ checkText("SI_5054_q1.scala")(
+ (None,"""def test(): Int""", true)
+ //Disabled because the full signature is now displayed
+ //(None, """def test(implicit lost: Int): Int""", false)
+ )
+
+ property("SI-5054: Use cases should keep their flags - final should not be lost") =
+ checkText("SI_5054_q2.scala")((None, """final def test(): Int""", true))
+
+ property("SI-5054: Use cases should keep their flags - implicit should not be lost") =
+ checkText("SI_5054_q3.scala")((None, """implicit def test(): Int""", true))
+
+ property("SI-5054: Use cases should keep their flags - real abstract should not be lost") =
+ checkText("SI_5054_q4.scala")((None, """abstract def test(): Int""", true))
+
+ property("SI-5054: Use cases should keep their flags - traits should not be affected") =
+ checkText("SI_5054_q5.scala")((None, """def test(): Int""", true))
+
+ property("SI-5054: Use cases should keep their flags - traits should not be affected") =
+ checkText("SI_5054_q6.scala")((None, """abstract def test(): Int""", true))
+
+ property("SI-5054: Use case individual signature test") =
+ checkText("SI_5054_q7.scala")(
+ (None, """abstract def test2(explicit: Int): Int [use case] This takes the explicit value passed.""", true),
+ (None, """abstract def test1(): Int [use case] This takes the implicit value in scope.""", true)
+ )
+
+ property("SI-5287: Display correct \"Definition classes\"") =
+ checkText("SI_5287.scala")(
+ (None,
+ """def method(): Int
+ [use case] The usecase explanation
+ [use case] The usecase explanation
+ Definition Classes SI_5287 SI_5287_B SI_5287_A""", true)
+ ) // the explanation appears twice, as small comment and full comment
+
+ property("Comment inheritance: Correct comment inheritance for overriding") =
+ checkText("implicit-inheritance-override.scala")(
+ (Some("Base"),
+ """def function[T](arg1: T, arg2: String): Double
+ The base comment.
+ The base comment. And another sentence...
+ T the type of the first argument
+ arg1 The T term comment
+ arg2 The string comment
+ returns The return comment
+ """, true),
+ (Some("DerivedA"),
+ """def function[T](arg1: T, arg2: String): Double
+ Overriding the comment, the params and returns comments should stay the same.
+ Overriding the comment, the params and returns comments should stay the same.
+ T the type of the first argument
+ arg1 The T term comment
+ arg2 The string comment
+ returns The return comment
+ """, true),
+ (Some("DerivedB"),
+ """def function[T](arg1: T, arg2: String): Double
+ T the type of the first argument
+ arg1 The overridden T term comment
+ arg2 The overridden string comment
+ returns The return comment
+ """, true),
+ (Some("DerivedC"),
+ """def function[T](arg1: T, arg2: String): Double
+ T the type of the first argument
+ arg1 The T term comment
+ arg2 The string comment
+ returns The overridden return comment
+ """, true),
+ (Some("DerivedD"),
+ """def function[T](arg1: T, arg2: String): Double
+ T The overridden type parameter comment
+ arg1 The T term comment
+ arg2 The string comment
+ returns The return comment
+ """, true)
+ )
+
+ for (useCaseFile <- List("UseCaseInheritance", "UseCaseOverrideInheritance")) {
+ property("Comment inheritance: Correct comment inheritance for usecases") =
+ checkText("implicit-inheritance-usecase.scala")(
+ (Some(useCaseFile),
+ """def missing_arg[T](arg1: T): Double
+ [use case]
+ [use case]
+ T The type parameter
+ arg1 The T term comment
+ returns The return comment
+ """, true),
+ (Some(useCaseFile),
+ """def missing_targ(arg1: Int, arg2: String): Double
+ [use case]
+ [use case]
+ arg1 The T term comment
+ arg2 The string comment
+ returns The return comment
+ """, true),
+ (Some(useCaseFile),
+ """def overridden_arg1[T](implicit arg1: T, arg2: String): Double
+ [use case]
+ [use case]
+ T The type parameter
+ arg1 The overridden T term comment
+ arg2 The string comment
+ returns The return comment
+ """, true),
+ (Some(useCaseFile),
+ """def overridden_targ[T](implicit arg1: T, arg2: String): Double
+ [use case]
+ [use case]
+ T The overridden type parameter comment
+ arg1 The T term comment
+ arg2 The string comment
+ returns The return comment
+ """, true),
+ (Some(useCaseFile),
+ """def overridden_return[T](implicit arg1: T, arg2: String): Double
+ [use case]
+ [use case]
+ T The type parameter
+ arg1 The T term comment
+ arg2 The string comment
+ returns The overridden return comment
+ """, true),
+ (Some(useCaseFile),
+ """def added_arg[T](implicit arg1: T, arg2: String, arg3: Float): Double
+ [use case]
+ [use case]
+ T The type parameter
+ arg1 The T term comment
+ arg2 The string comment
+ arg3 The added float comment
+ returns The return comment
+ """, true),
+ (Some(useCaseFile),
+ """def overridden_comment[T](implicit arg1: T, arg2: String): Double
+ [use case] The overridden comment.
+ [use case] The overridden comment.
+ T The type parameter
+ arg1 The T term comment
+ arg2 The string comment
+ returns The return comment
+ """, true)
+ )
+ }
+
+ property("Comment inheritance: Correct explicit inheritance for override") =
+ checkText("explicit-inheritance-override.scala")(
+ (Some("InheritDocDerived"),
+ """def function[T](arg1: T, arg2: String): Double
+ Starting line
+ Starting line
+ The base comment. And another sentence...
+ The base comment. And another sentence...
+ Ending line
+ Author: StartAuthor a Scala developer EndAuthor
+ T StartT the type of the first argument EndT
+ arg1 Start1 The T term comment End1
+ arg2 Start2 The string comment End2
+ returns StartRet The return comment EndRet""", true),
+ (Some("InheritDocDerived"),
+ """Definition Classes InheritDocDerived → InheritDocBase
+ Example: StartExample function[Int](3, "something") EndExample
+ Version StartVer 0.0.2 EndVer
+ Since StartSince 0.0.1 EndSince
+ Exceptions thrown
+ SomeException StartEx if the function is not called with correct parameters EndEx
+ SomeOtherException StartSOE Should Warn <invalid inheritdoc annotation> EndSOE
+ To do StartTodo Call mom. And dad! EndTodo
+ Note StartNote Be careful! EndNote
+ See also StartSee The Manual EndSee
+ """, true))
+
+ property("Comment inheritance: Correct explicit inheritance for usecase") =
+ checkText("explicit-inheritance-usecase.scala")(
+ (Some("UseCaseInheritDoc"),
+ """def function[T](arg1: T, arg2: String): Double
+ [use case] Starting line
+ [use case] Starting line
+ The base comment. And another sentence...
+ The base comment. And another sentence...
+ Ending line
+ Author: StartAuthor a Scala developer EndAuthor
+ T StartT the type of the first argument EndT
+ arg1 Start1 The T term comment End1
+ arg2 Start2 The string comment End2
+ returns StartRet The return comment EndRet""", true),
+ (Some("UseCaseInheritDoc"),
+ """Example: StartExample function[Int](3,"something") EndExample
+ Version StartVer 0.0.2 EndVer
+ Since StartSince 0.0.1 EndSince
+ Exceptions thrown
+ SomeException StartEx if the function is not called with correct parameters EndEx
+ SomeOtherException StartSOE Should Warn <invalid inheritdoc annotation> EndSOE
+ To do StartTodo Call mom. And dad! EndTodo
+ Note StartNote Be careful! EndNote
+ See also StartSee The Manual EndSee
+ """, true))
+
+ property("Comment inheritance: Correct explicit inheritance in corner cases") =
+ checkText("inheritdoc-corner-cases.scala")(
+ (Some("D"),
+ """def hello1: Int
+ Inherited: Hello 1 comment
+ Inherited: Hello 1 comment
+ Definition Classes D → A
+ """, true),
+ (Some("D"),
+ """def hello2: Int
+ Inherited: Hello 2 comment
+ Inherited: Hello 2 comment
+ Definition Classes D → B
+ """, true),
+ (Some("G"),
+ """def hello1: Int
+ Inherited: Hello 1 comment
+ Inherited: Hello 1 comment
+ Definition Classes G → D → A
+ """, true),
+ (Some("G"),
+ """def hello2: Int
+ Inherited: Hello 2 comment
+ Inherited: Hello 2 comment
+ Definition Classes G → D → B
+ """, true),
+ (Some("I"),
+ """def hello1(i: Int): Unit
+ [use case] Inherited: Hello 1 comment
+ [use case] Inherited: Hello 1 comment
+ Definition Classes I → G → D → A
+ """, true)
+ // traits E, F and H shouldn't crash scaladoc but we don't need to check the output
+ )
+
+ property("Indentation normalization for code blocks") = {
+ val files = createTemplates("code-indent.scala")
+
+ files("C.html") match {
+ case node: scala.xml.Node => {
+ val s = node.toString
+ s.contains("<pre>a typicial indented\ncomment on multiple\ncomment lines</pre>") &&
+ s.contains("<pre>one liner</pre>") &&
+ s.contains("<pre>two lines, one useful</pre>") &&
+ s.contains("<pre>line1\nline2\nline3\nline4</pre>") &&
+ s.contains("<pre>a ragged example\na (condition)\n the t h e n branch\nan alternative\n the e l s e branch</pre>") &&
+ s.contains("<pre>Trait example {\n Val x = a\n Val y = b\n}</pre>") &&
+ s.contains("<pre>l1\n\nl2\n\nl3\n\nl4\n\nl5</pre>")
+ }
+ case _ => false
+ }
+ }
+
+ property("SI-4014: Scaladoc omits @author: no authors") = {
+ val noAuthors = createTemplates("SI-4014_0.scala")("Foo.html")
+
+ noAuthors match {
+ case node: scala.xml.Node => {
+ val s = node.toString
+ ! s.contains("Author")
+ }
+ case _ => false
+ }
+ }
+
+ property("SI-4014: Scaladoc omits @author: one author") = {
+ val oneAuthor = createTemplates("SI-4014_1.scala")("Foo.html")
+
+ oneAuthor match {
+ case node: scala.xml.Node => {
+ val s = node.toString
+ s.contains("<h6>Author:</h6>") &&
+ s.contains("<p>The Only Author</p>")
+ }
+ case _ => false
+ }
+ }
+
+ property("SI-4014: Scaladoc omits @author: two authors") = {
+ val twoAuthors = createTemplates("SI-4014_2.scala")("Foo.html")
+
+ twoAuthors match {
+ case node: scala.xml.Node => {
+ val s = node.toString
+ s.contains("<h6>Authors:</h6>") &&
+ s.contains("<p>The First Author</p>") &&
+ s.contains("<p>The Second Author</p>")
+ }
+ case _ => false
+ }
+ }
+
+ {
+ val files = createTemplates("basic.scala")
+ //println(files)
+
+ property("class") = files.get("com/example/p1/Clazz.html") match {
+ case Some(node: scala.xml.Node) => {
+ property("implicit conversion") =
+ node.toString contains "<span class=\"modifier\">implicit </span>"
+
+ property("gt4s") =
+ node.toString contains "title=\"gt4s: $colon$colon\""
+
+ property("gt4s of a deprecated method") =
+ node.toString contains "title=\"gt4s: $colon$colon$colon$colon. Deprecated: "
+ true
+ }
+ case _ => false
+ }
+ property("package") = files.get("com/example/p1/index.html") != None
+
+ property("package object") = files("com/example/p1/index.html") match {
+ case node: scala.xml.Node =>
+ node.toString contains "com.example.p1#packageObjectMethod"
+ case _ => false
+ }
+
+ property("lower bound") = files("com/example/p1/LowerBound.html") match {
+ case node: scala.xml.Node => true
+ case _ => false
+ }
+
+ property("upper bound") = files("com/example/p1/UpperBound.html") match {
+ case node: scala.xml.Node => true
+ case _ => false
+ }
+
+ property("SI-8514: No inconsistencies") =
+ checkText("SI-8514.scala")(
+ (Some("a/index"),
+ """class A extends AnyRef
+ Some doc here
+ Some doc here
+ Annotations @DeveloperApi()
+ """, true),
+ (Some("a/index"),
+ """class B extends AnyRef
+ Annotations @DeveloperApi()
+ """, true)
+ )
+ }
+
+ // SI-8144
+ {
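+    // Attribute-selection helpers for the permalink assertions below:
+    // `node \@ name` reads an attribute's text, `node \@ (name, value)` keeps
+    // only the nodes whose attribute equals the given value.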
+ implicit class AttributesAwareNode(val node: NodeSeq) {
+
+ def \@(attrName: String): String =
+ node \ ("@" + attrName) text
+
+ def \@(attrName: String, attrValue: String): NodeSeq =
+ node filter { _ \ ("@" + attrName) exists (_.text == attrValue) }
+ }
+
+ implicit class AssertionAwareNode(node: scala.xml.NodeSeq) {
+
+ def assertTypeLink(expectedUrl: String): Boolean = {
+ val linkElement: NodeSeq = node \\ "div" \@ ("id", "definition") \\ "span" \@ ("class", "permalink") \ "a"
+ linkElement \@ "href" == expectedUrl
+ }
+
+ def assertMemberLink(group: String)(memberName: String, expectedUrl: String): Boolean = {
+ val linkElement: NodeSeq = node \\ "div" \@ ("id", group) \\ "li" \@ ("name", memberName) \\ "span" \@ ("class", "permalink") \ "a"
+ linkElement \@ "href" == expectedUrl
+ }
+
+ def assertValuesLink(memberName: String, expectedUrl: String): Boolean = {
+ val linkElement: NodeSeq = node \\ "div" \@ ("class", "values members") \\ "li" \@ ("name", memberName) \\ "span" \@ ("class", "permalink") \ "a"
+ linkElement \@ "href" == expectedUrl
+ }
+
+ }
+
+ val files = createTemplates("SI-8144.scala")
+
+ def check(pagePath: String)(f: NodeSeq => org.scalacheck.Prop): org.scalacheck.Prop =
+ files(pagePath) match {
+ case node: scala.xml.Node => f(XMLUtil.stripGroup(node))
+ case _ => false
+ }
+
+ property("SI-8144: Members' permalink - inner package") = check("some/pack/index.html") { node =>
+ ("type link" |: node.assertTypeLink("../../some/pack/index.html")) &&
+ ("member: SomeType (object)" |: node.assertValuesLink("some.pack.SomeType", "../../some/pack/index.html#SomeType")) &&
+ ("member: SomeType (class)" |: node.assertMemberLink("types")("some.pack.SomeType", "../../some/pack/index.html#SomeTypeextendsAnyRef"))
+ }
+
+ property("SI-8144: Members' permalink - companion object") = check("some/pack/SomeType$.html") { node =>
+ ("type link" |: node.assertTypeLink("../../some/pack/SomeType$.html")) &&
+ ("member: someVal" |: node.assertMemberLink("allMembers")("some.pack.SomeType#someVal", "../../some/pack/SomeType$.html#someVal:String"))
+ }
+
+ property("SI-8144: Members' permalink - class") = check("some/pack/SomeType.html") { node =>
+ ("type link" |: node.assertTypeLink("../../some/pack/SomeType.html")) &&
+ ("constructor " |: node.assertMemberLink("constructors")("some.pack.SomeType#<init>", "../../some/pack/SomeType.html#<init>(arg:String):some.pack.SomeType")) &&
+ ( "member: type TypeAlias" |: node.assertMemberLink("types")("some.pack.SomeType.TypeAlias", "../../some/pack/SomeType.html#TypeAlias=String")) &&
+ ( "member: def >#<():Int " |: node.assertValuesLink("some.pack.SomeType#>#<", "../../some/pack/SomeType.html#>#<():Int")) &&
+ ( "member: def >@<():TypeAlias " |: node.assertValuesLink("some.pack.SomeType#>@<", "../../some/pack/SomeType.html#>@<():SomeType.this.TypeAlias"))
+ }
+
+ }
+
+ property("SI-9599 Multiple @todo formatted with comma on separate line") = {
+ createTemplates("SI-9599.scala")("X.html") match {
+ case node: scala.xml.Node => node.text.contains("todo3todo2todo1")
+ case _ => false
+ }
+ }
+}
diff --git a/test/scalacheck/scala/tools/nsc/scaladoc/IndexScriptTest.scala b/test/scalacheck/scala/tools/nsc/scaladoc/IndexScriptTest.scala
new file mode 100644
index 0000000000..fb4dc55c98
--- /dev/null
+++ b/test/scalacheck/scala/tools/nsc/scaladoc/IndexScriptTest.scala
@@ -0,0 +1,57 @@
+package scala.tools.nsc.scaladoc
+
+import org.scalacheck._
+import org.scalacheck.Prop._
+
+import scala.tools.nsc.doc
+import scala.tools.nsc.doc.html.page.IndexScript
+import java.net.{URLClassLoader, URLDecoder}
+
+object IndexScriptTest extends Properties("IndexScript") {
+
+ def getClasspath = {
+    // These things can be tricky.
+    // This test previously relied on the assumption that the current thread's classloader is a URL classloader and contains all the classpath entries.
+    // Does partest actually guarantee this? To quote Leonard Nimoy: the answer, of course, is no.
+    // This test _will_ fail again some time in the future.
+ // Footnote: java.lang.ClassCastException: org.apache.tools.ant.loader.AntClassLoader5 cannot be cast to java.net.URLClassLoader
+ val loader = Thread.currentThread.getContextClassLoader.asInstanceOf[URLClassLoader]
+ val paths = loader.getURLs.map(u => URLDecoder.decode(u.getPath))
+ paths mkString java.io.File.pathSeparator
+ }
+
+ val docFactory = {
+ val settings = new doc.Settings({Console.err.println(_)})
+ settings.scaladocQuietRun = true
+ settings.nowarn.value = true
+ settings.classpath.value = getClasspath
+ val reporter = new scala.tools.nsc.reporters.ConsoleReporter(settings)
+ new doc.DocFactory(reporter, settings)
+ }
+
+ val indexModelFactory = doc.model.IndexModelFactory
+
+ def createIndexScript(path: String) =
+ docFactory.makeUniverse(Left(List(path))) match {
+ case Some(universe) =>
+ Some(new IndexScript(universe))
+ case _ =>
+ None
+ }
+
+ property("allPackages") = {
+ createIndexScript("src/scaladoc/scala/tools/nsc/doc/html/page/IndexScript.scala") match {
+ case Some(index) =>
+ index.allPackages.map(_.toString) == List(
+ "scala",
+ "scala.tools",
+ "scala.tools.nsc",
+ "scala.tools.nsc.doc",
+ "scala.tools.nsc.doc.html",
+ "scala.tools.nsc.doc.html.page"
+ )
+ case None =>
+ false
+ }
+ }
+}
diff --git a/test/scalacheck/scan.scala b/test/scalacheck/scan.scala
new file mode 100644
index 0000000000..4d2abafdef
--- /dev/null
+++ b/test/scalacheck/scan.scala
@@ -0,0 +1,16 @@
+import org.scalacheck._
+import Prop._
+import Gen._
+
+object ScanTest extends Properties("TraversableLike.scanLeft") {
+ property("scanLeft") = forAll { (xs: List[Int], z: Int) => {
+ val sums = xs.scanLeft(z)(_ + _)
+ (xs.size == 0) || sums.zip(sums.tail).map(x => x._2 - x._1) == xs
+ }}
+}
+
+
+
+
+
+
diff --git a/test/scalacheck/substringTests.scala b/test/scalacheck/substringTests.scala
new file mode 100644
index 0000000000..df3d18be0b
--- /dev/null
+++ b/test/scalacheck/substringTests.scala
@@ -0,0 +1,19 @@
+import org.scalacheck._
+
+object SubstringTest extends Properties("String") {
+ property("startsWith") = Prop.forAll((a: String, b: String) => (a+b).startsWith(a))
+
+ property("endsWith") = Prop.forAll((a: String, b: String) => (a+b).endsWith(b))
+
+ property("concat") = Prop.forAll((a: String, b: String) =>
+ (a+b).length >= a.length && (a+b).length >= b.length
+ )
+
+ property("substring") = Prop.forAll((a: String, b: String) =>
+ (a+b).substring(a.length) == b
+ )
+
+ property("substring") = Prop.forAll((a: String, b: String, c: String) =>
+ (a+b+c).substring(a.length, a.length+b.length) == b
+ )
+}
diff --git a/test/scalacheck/t2460.scala b/test/scalacheck/t2460.scala
new file mode 100644
index 0000000000..42ff3ecfe6
--- /dev/null
+++ b/test/scalacheck/t2460.scala
@@ -0,0 +1,27 @@
+import org.scalacheck.Prop.forAll
+import org.scalacheck.Properties
+import org.scalacheck.{Test => SCTest}
+import org.scalacheck.Gen
+
+object SI2460Test extends Properties("Regex : Ticket 2460") {
+
+ val vowel = Gen.oneOf("a", "z")
+
+ val numberOfMatch = forAll(vowel) {
+ (s: String) => "\\s*([a-z])\\s*".r("data").findAllMatchIn((1 to 20).map(_ => s).mkString).size == 20
+ }
+
+ val numberOfGroup = forAll(vowel) {
+ (s: String) => "\\s*([a-z])\\s*([a-z])\\s*".r("data").findAllMatchIn((1 to 20).map(_ => s).mkString).next.groupCount == 2
+ }
+
+ val nameOfGroup = forAll(vowel) {
+ (s: String) => "([a-z])".r("data").findAllMatchIn(s).next.group("data") == s
+ }
+
+ val tests = List(
+ ("numberOfMatch", numberOfMatch),
+ ("numberOfGroup", numberOfGroup),
+ ("nameOfGroup", nameOfGroup)
+ )
+}
diff --git a/test/scalacheck/t4147.scala b/test/scalacheck/t4147.scala
new file mode 100644
index 0000000000..c58abb99f0
--- /dev/null
+++ b/test/scalacheck/t4147.scala
@@ -0,0 +1,68 @@
+import org.scalacheck.Prop.{forAll, throws}
+import org.scalacheck.Properties
+import org.scalacheck.Gen
+
+
+import collection.mutable
+
+
+object SI4147Test extends Properties("Mutable TreeSet") {
+
+ val generator = Gen.listOfN(1000, Gen.chooseNum(0, 1000))
+
+ val denseGenerator = Gen.listOfN(1000, Gen.chooseNum(0, 200))
+
+  property("Insertion doesn't allow duplicate values.") = forAll(generator) { (s: List[Int]) =>
+ {
+ val t = mutable.TreeSet[Int](s: _*)
+ t == s.toSet
+ }
+ }
+
+  property("size is 0 after all inserted elements are removed") = forAll(generator) { (s: List[Int]) =>
+ {
+ val t = mutable.TreeSet[Int](s: _*)
+ for (a <- s) {
+ t -= a
+ }
+ t.size == 0
+ }
+ }
+
+ property("All inserted elements are removed") = forAll(generator) { (s: List[Int]) =>
+ {
+ val t = mutable.TreeSet[Int](s: _*)
+ for (a <- s) {
+ t -= a
+ }
+ t == Set()
+ }
+ }
+
+ property("Elements are sorted.") = forAll(generator) { (s: List[Int]) =>
+ {
+ val t = mutable.TreeSet[Int](s: _*)
+ t.toList == s.distinct.sorted
+ }
+ }
+
+  property("Implicit CanBuildFrom resolution succeeds and the \"same-result-type\" principle holds.") =
+ forAll(generator) { (s: List[Int]) =>
+ {
+ val t = mutable.TreeSet[Int](s: _*)
+ val t2 = t.map(_ * 2)
+ t2.isInstanceOf[collection.mutable.TreeSet[Int]]
+ }
+ }
+
+  property("A view doesn't expose out-of-bounds elements") = forAll(denseGenerator) { (s: List[Int]) =>
+ {
+ val t = mutable.TreeSet[Int](s: _*)
+ val view = t.rangeImpl(Some(50), Some(150))
+ view.filter(_ < 50) == Set[Int]() && view.filter(_ >= 150) == Set[Int]()
+ }
+ }
+
+ property("ordering must not be null") =
+ throws(classOf[NullPointerException])(mutable.TreeSet.empty[Int](null))
+}
diff --git a/test/scalacheck/treemap.scala b/test/scalacheck/treemap.scala
new file mode 100644
index 0000000000..6978ca3145
--- /dev/null
+++ b/test/scalacheck/treemap.scala
@@ -0,0 +1,154 @@
+import collection.immutable._
+import org.scalacheck._
+import Prop._
+import Gen._
+import Arbitrary._
+import util._
+import Buildable._
+
+object TreeMapTest extends Properties("TreeMap") {
+ def genTreeMap[A: Arbitrary: Ordering, B: Arbitrary]: Gen[TreeMap[A, B]] =
+ for {
+ keys <- listOf(arbitrary[A])
+ values <- listOfN(keys.size, arbitrary[B])
+ } yield TreeMap(keys zip values: _*)
+ implicit def arbTreeMap[A : Arbitrary : Ordering, B : Arbitrary] = Arbitrary(genTreeMap[A, B])
+
+ property("foreach/iterator consistency") = forAll { (subject: TreeMap[Int, String]) =>
+ val it = subject.iterator
+ var consistent = true
+ subject.foreach { element =>
+ consistent &&= it.hasNext && element == it.next
+ }
+ consistent
+ }
+
+  property("iterator handles worst-case tree height") = forAll(choose(0, 10), arbitrary[Boolean]) { (n: Int, even: Boolean) =>
+ /*
+ * According to "Ralf Hinze. Constructing red-black trees" [http://www.cs.ox.ac.uk/ralf.hinze/publications/#P5]
+ * you can construct a skinny tree of height 2n by inserting the elements [1 .. 2^(n+1) - 2] and a tree of height
+ * 2n+1 by inserting the elements [1 .. 3 * 2^n - 2], both in reverse order.
+ *
+ * Since we allocate a fixed size buffer in the iterator (based on the tree size) we need to ensure
+ * it is big enough for these worst-case trees.
+ */
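+    // For instance (just the formula above evaluated for n = 2):
+    //   even => elements 1 .. 2^3 - 2 = 6    give a skinny tree of height 4
+    //   odd  => elements 1 .. 3*2^2 - 2 = 10 give a skinny tree of height 5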
+ val highest = if (even) (1 << (n+1)) - 2 else 3*(1 << n) - 2
+ val values = (1 to highest).reverse
+ val subject = TreeMap(values zip values: _*)
+ val it = subject.iterator
+ try { while (it.hasNext) it.next; true } catch { case _: Throwable => false }
+ }
+
+ property("sorted") = forAll { (subject: TreeMap[Int, String]) => (subject.size >= 3) ==> {
+ subject.zip(subject.tail).forall { case (x, y) => x._1 < y._1 }
+ }}
+
+ property("contains all") = forAll { (arr: List[(Int, String)]) =>
+ val subject = TreeMap(arr: _*)
+ arr.map(_._1).forall(subject.contains(_))
+ }
+
+ property("size") = forAll { (elements: List[(Int, Int)]) =>
+ val subject = TreeMap(elements: _*)
+ elements.map(_._1).distinct.size == subject.size
+ }
+
+ property("toSeq") = forAll { (elements: List[(Int, Int)]) =>
+ val subject = TreeMap(elements: _*)
+ elements.map(_._1).distinct.sorted == subject.toSeq.map(_._1)
+ }
+
+ property("head") = forAll { (elements: List[Int]) => elements.nonEmpty ==> {
+ val subject = TreeMap(elements zip elements: _*)
+ elements.min == subject.head._1
+ }}
+
+ property("last") = forAll { (elements: List[Int]) => elements.nonEmpty ==> {
+ val subject = TreeMap(elements zip elements: _*)
+ elements.max == subject.last._1
+ }}
+
+ property("head/tail identity") = forAll { (subject: TreeMap[Int, String]) => subject.nonEmpty ==> {
+ subject == (subject.tail + subject.head)
+ }}
+
+ property("init/last identity") = forAll { (subject: TreeMap[Int, String]) => subject.nonEmpty ==> {
+ subject == (subject.init + subject.last)
+ }}
+
+ property("take") = forAll { (subject: TreeMap[Int, String]) =>
+ val n = choose(0, subject.size).sample.get
+ n == subject.take(n).size && subject.take(n).forall(elt => subject.get(elt._1) == Some(elt._2))
+ }
+
+ property("drop") = forAll { (subject: TreeMap[Int, String]) =>
+ val n = choose(0, subject.size).sample.get
+ (subject.size - n) == subject.drop(n).size && subject.drop(n).forall(elt => subject.get(elt._1) == Some(elt._2))
+ }
+
+ property("take/drop identity") = forAll { (subject: TreeMap[Int, String]) =>
+ val n = choose(-1, subject.size + 1).sample.get
+ subject == subject.take(n) ++ subject.drop(n)
+ }
+
+ property("splitAt") = forAll { (subject: TreeMap[Int, String]) =>
+ val n = choose(-1, subject.size + 1).sample.get
+ val (prefix, suffix) = subject.splitAt(n)
+ prefix == subject.take(n) && suffix == subject.drop(n)
+ }
+
+ def genSliceParms = for {
+ tree <- genTreeMap[Int, String]
+ from <- choose(0, tree.size)
+ until <- choose(from, tree.size)
+ } yield (tree, from, until)
+
+ property("slice") = forAll(genSliceParms) { case (subject, from, until) =>
+ val slice = subject.slice(from, until)
+ slice.size == until - from && subject.toSeq == subject.take(from).toSeq ++ slice ++ subject.drop(until)
+ }
+
+ property("takeWhile") = forAll { (subject: TreeMap[Int, String]) =>
+ val result = subject.takeWhile(_._1 < 0)
+ result.forall(_._1 < 0) && result == subject.take(result.size)
+ }
+
+ property("dropWhile") = forAll { (subject: TreeMap[Int, String]) =>
+ val result = subject.dropWhile(_._1 < 0)
+ result.forall(_._1 >= 0) && result == subject.takeRight(result.size)
+ }
+
+ property("span identity") = forAll { (subject: TreeMap[Int, String]) =>
+ val (prefix, suffix) = subject.span(_._1 < 0)
+ prefix.forall(_._1 < 0) && suffix.forall(_._1 >= 0) && subject == prefix ++ suffix
+ }
+
+ property("from is inclusive") = forAll { (subject: TreeMap[Int, String]) => subject.nonEmpty ==> {
+ val n = choose(0, subject.size - 1).sample.get
+ val from = subject.drop(n).firstKey
+ subject.from(from).firstKey == from && subject.from(from).forall(_._1 >= from)
+ }}
+
+ property("to is inclusive") = forAll { (subject: TreeMap[Int, String]) => subject.nonEmpty ==> {
+ val n = choose(0, subject.size - 1).sample.get
+ val to = subject.drop(n).firstKey
+ subject.to(to).lastKey == to && subject.to(to).forall(_._1 <= to)
+ }}
+
+ property("until is exclusive") = forAll { (subject: TreeMap[Int, String]) => subject.size > 1 ==> {
+ val n = choose(1, subject.size - 1).sample.get
+ val until = subject.drop(n).firstKey
+ subject.until(until).lastKey == subject.take(n).lastKey && subject.until(until).forall(_._1 < until)
+ }}
+
+ property("remove single") = forAll { (subject: TreeMap[Int, String]) => subject.nonEmpty ==> {
+ val key = oneOf(subject.keys.toSeq).sample.get
+ val removed = subject - key
+ subject.contains(key) && !removed.contains(key) && subject.size - 1 == removed.size
+ }}
+
+ property("remove all") = forAll { (subject: TreeMap[Int, String]) =>
+ val result = subject.foldLeft(subject)((acc, elt) => acc - elt._1)
+ result.isEmpty
+ }
+}
diff --git a/test/scalacheck/treeset.scala b/test/scalacheck/treeset.scala
new file mode 100644
index 0000000000..ec6de40693
--- /dev/null
+++ b/test/scalacheck/treeset.scala
@@ -0,0 +1,155 @@
+import collection.immutable._
+import org.scalacheck._
+import Prop._
+import Gen._
+import Arbitrary._
+import util._
+
+object TreeSetTest extends Properties("TreeSet") {
+ def genTreeSet[A: Arbitrary: Ordering]: Gen[TreeSet[A]] =
+ for {
+ elements <- listOf(arbitrary[A])
+ } yield TreeSet(elements: _*)
+ implicit def arbTreeSet[A : Arbitrary : Ordering]: Arbitrary[TreeSet[A]] = Arbitrary(genTreeSet)
+
+ property("foreach/iterator consistency") = forAll { (subject: TreeSet[Int]) =>
+ val it = subject.iterator
+ var consistent = true
+ subject.foreach { element =>
+ consistent &&= it.hasNext && element == it.next
+ }
+ consistent
+ }
+
+ property("worst-case tree height is iterable") = forAll(choose(0, 10), arbitrary[Boolean]) { (n: Int, even: Boolean) =>
+ /*
+ * According to "Ralf Hinze. Constructing red-black trees" [http://www.cs.ox.ac.uk/ralf.hinze/publications/#P5]
+ * you can construct a skinny tree of height 2n by inserting the elements [1 .. 2^(n+1) - 2] and a tree of height
+ * 2n+1 by inserting the elements [1 .. 3 * 2^n - 2], both in reverse order.
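+ * For example, n = 2 gives a tree of height 4 from the 2^3 - 2 = 6 elements and a tree of height 5 from the 3 * 2^2 - 2 = 10 elements.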
+ *
+ * Since we allocate a fixed size buffer in the iterator (based on the tree size) we need to ensure
+ * it is big enough for these worst-case trees.
+ */
+ val highest = if (even) (1 << (n+1)) - 2 else 3*(1 << n) - 2
+ val values = (1 to highest).reverse
+ val subject = TreeSet(values: _*)
+ val it = subject.iterator
+ try { while (it.hasNext) it.next; true } catch { case _: Throwable => false }
+ }
+
+ property("sorted") = forAll { (subject: TreeSet[Int]) => (subject.size >= 3) ==> {
+ subject.zip(subject.tail).forall { case (x, y) => x < y }
+ }}
+
+ property("contains all") = forAll { (elements: List[Int]) =>
+ val subject = TreeSet(elements: _*)
+ elements.forall(subject.contains)
+ }
+
+ property("size") = forAll { (elements: List[Int]) =>
+ val subject = TreeSet(elements: _*)
+ elements.distinct.size == subject.size
+ }
+
+ property("toSeq") = forAll { (elements: List[Int]) =>
+ val subject = TreeSet(elements: _*)
+ elements.distinct.sorted == subject.toSeq
+ }
+
+ property("head") = forAll { (elements: List[Int]) => elements.nonEmpty ==> {
+ val subject = TreeSet(elements: _*)
+ elements.min == subject.head
+ }}
+
+ property("last") = forAll { (elements: List[Int]) => elements.nonEmpty ==> {
+ val subject = TreeSet(elements: _*)
+ elements.max == subject.last
+ }}
+
+ property("head/tail identity") = forAll { (subject: TreeSet[Int]) => subject.nonEmpty ==> {
+ subject == (subject.tail + subject.head)
+ }}
+
+ property("init/last identity") = forAll { (subject: TreeSet[Int]) => subject.nonEmpty ==> {
+ subject == (subject.init + subject.last)
+ }}
+
+ property("take") = forAll { (subject: TreeSet[Int]) =>
+ val n = choose(0, subject.size).sample.get
+ n == subject.take(n).size && subject.take(n).forall(subject.contains)
+ }
+
+ property("drop") = forAll { (subject: TreeSet[Int]) =>
+ val n = choose(0, subject.size).sample.get
+ (subject.size - n) == subject.drop(n).size && subject.drop(n).forall(subject.contains)
+ }
+
+ property("take/drop identity") = forAll { (subject: TreeSet[Int]) =>
+ val n = choose(-1, subject.size + 1).sample.get
+ subject == subject.take(n) ++ subject.drop(n)
+ }
+
+ property("splitAt") = forAll { (subject: TreeSet[Int]) =>
+ val n = choose(-1, subject.size + 1).sample.get
+ val (prefix, suffix) = subject.splitAt(n)
+ prefix == subject.take(n) && suffix == subject.drop(n)
+ }
+
+ def genSliceParms = for {
+ tree <- genTreeSet[Int]
+ from <- choose(0, tree.size)
+ until <- choose(from, tree.size)
+ } yield (tree, from, until)
+
+ property("slice") = forAll(genSliceParms) { case (subject, from, until) =>
+ val slice = subject.slice(from, until)
+ slice.size == until - from && subject.toSeq == subject.take(from).toSeq ++ slice ++ subject.drop(until)
+ }
+
+ property("takeWhile") = forAll { (subject: TreeSet[Int]) =>
+ val result = subject.takeWhile(_ < 0)
+ result.forall(_ < 0) && result == subject.take(result.size)
+ }
+
+ property("dropWhile") = forAll { (subject: TreeSet[Int]) =>
+ val result = subject.dropWhile(_ < 0)
+ result.forall(_ >= 0) && result == subject.takeRight(result.size)
+ }
+
+ property("span identity") = forAll { (subject: TreeSet[Int]) =>
+ val (prefix, suffix) = subject.span(_ < 0)
+ prefix.forall(_ < 0) && suffix.forall(_ >= 0) && subject == prefix ++ suffix
+ }
+
+ property("from is inclusive") = forAll { (subject: TreeSet[Int]) => subject.nonEmpty ==> {
+ val n = choose(0, subject.size - 1).sample.get
+ val from = subject.drop(n).firstKey
+ subject.from(from).firstKey == from && subject.from(from).forall(_ >= from)
+ }}
+
+ property("to is inclusive") = forAll { (subject: TreeSet[Int]) => subject.nonEmpty ==> {
+ val n = choose(0, subject.size - 1).sample.get
+ val to = subject.drop(n).firstKey
+ subject.to(to).lastKey == to && subject.to(to).forall(_ <= to)
+ }}
+
+ property("until is exclusive") = forAll { (subject: TreeSet[Int]) => subject.size > 1 ==> {
+ val n = choose(1, subject.size - 1).sample.get
+ val until = subject.drop(n).firstKey
+ subject.until(until).lastKey == subject.take(n).lastKey && subject.until(until).forall(_ < until)
+ }}
+
+ property("remove single") = forAll { (subject: TreeSet[Int]) => subject.nonEmpty ==> {
+ val element = oneOf(subject.toSeq).sample.get
+ val removed = subject - element
+ subject.contains(element) && !removed.contains(element) && subject.size - 1 == removed.size
+ }}
+
+ property("remove all") = forAll { (subject: TreeSet[Int]) =>
+ val result = subject.foldLeft(subject)((acc, elt) => acc - elt)
+ result.isEmpty
+ }
+
+ property("ordering must not be null") =
+ throws(classOf[NullPointerException])(TreeSet.empty[Int](null))
+}