Diffstat (limited to 'src')
-rw-r--r--  src/compiler/scala/tools/nsc/ast/parser/Scanners.scala | 45
-rw-r--r--  src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala | 13
-rw-r--r--  src/compiler/scala/tools/nsc/backend/jvm/BCodeSkelBuilder.scala | 1
-rw-r--r--  src/compiler/scala/tools/nsc/backend/jvm/BCodeSyncAndTry.scala | 48
-rw-r--r--  src/compiler/scala/tools/nsc/javac/JavaScanners.scala | 23
-rw-r--r--  src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala | 46
-rw-r--r--  src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala | 225
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/RefChecks.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Typers.scala | 21
-rw-r--r--  src/library/scala/Array.scala | 3
-rw-r--r--  src/library/scala/collection/Iterator.scala | 23
-rw-r--r--  src/library/scala/collection/immutable/Stream.scala | 6
-rw-r--r--  src/library/scala/collection/immutable/Vector.scala | 9
-rw-r--r--  src/library/scala/collection/mutable/ArrayBuffer.scala | 2
-rw-r--r--  src/library/scala/collection/mutable/PriorityQueue.scala | 2
-rw-r--r--  src/library/scala/collection/mutable/ResizableArray.scala | 2
-rw-r--r--  src/library/scala/collection/parallel/immutable/ParRange.scala | 1
-rw-r--r--  src/library/scala/concurrent/impl/Promise.scala | 2
-rw-r--r--  src/partest-extras/scala/org/scalacheck/Arbitrary.scala | 433
-rw-r--r--  src/partest-extras/scala/org/scalacheck/Commands.scala | 146
-rw-r--r--  src/partest-extras/scala/org/scalacheck/Commands2.scala | 150
-rw-r--r--  src/partest-extras/scala/org/scalacheck/Gen.scala | 813
-rw-r--r--  src/partest-extras/scala/org/scalacheck/Prop.scala | 953
-rw-r--r--  src/partest-extras/scala/org/scalacheck/Properties.scala | 82
-rw-r--r--  src/partest-extras/scala/org/scalacheck/ScalaCheckFramework.scala | 93
-rw-r--r--  src/partest-extras/scala/org/scalacheck/Shrink.scala | 215
-rw-r--r--  src/partest-extras/scala/org/scalacheck/Test.scala | 372
-rw-r--r--  src/partest-extras/scala/org/scalacheck/util/Buildable.scala | 77
-rw-r--r--  src/partest-extras/scala/org/scalacheck/util/CmdLineParser.scala | 41
-rw-r--r--  src/partest-extras/scala/org/scalacheck/util/ConsoleReporter.scala | 44
-rw-r--r--  src/partest-extras/scala/org/scalacheck/util/FreqMap.scala | 65
-rw-r--r--  src/partest-extras/scala/org/scalacheck/util/Pretty.scala | 129
-rw-r--r--  src/reflect/scala/reflect/internal/BaseTypeSeqs.scala | 2
-rw-r--r--  src/reflect/scala/reflect/internal/Mirrors.scala | 22
-rw-r--r--  src/reflect/scala/reflect/internal/Names.scala | 4
-rw-r--r--  src/reflect/scala/reflect/internal/Symbols.scala | 12
-rw-r--r--  src/reflect/scala/reflect/internal/pickling/UnPickler.scala | 43
-rw-r--r--  src/reflect/scala/reflect/runtime/JavaMirrors.scala | 2
-rw-r--r--  src/reflect/scala/reflect/runtime/SymbolLoaders.scala | 4
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/ScaladocAnalyzer.scala | 106
40 files changed, 321 insertions(+), 3961 deletions(-)
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
index 891858ba7b..3a659fd0f0 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
@@ -38,6 +38,27 @@ trait ScannersCommon {
def deprecationWarning(off: Offset, msg: String, since: String): Unit
}
+ // Hooks for ScaladocUnitScanner and ScaladocJavaUnitScanner
+ trait DocScanner {
+ protected def beginDocComment(prefix: String): Unit = {}
+ protected def processCommentChar(): Unit = {}
+ protected def finishDocComment(): Unit = {}
+
+ private var lastDoc: DocComment = null
+ // get last doc comment
+ def flushDoc(): DocComment = try lastDoc finally lastDoc = null
+ def registerDocComment(raw: String, pos: Position) = {
+ lastDoc = DocComment(raw, pos)
+ signalParsedDocComment(raw, pos)
+ }
+
+ /** To prevent doc comments attached to expressions from leaking out of scope
+ * onto the next documentable entity, they are discarded upon passing a right
+ * brace, bracket, or parenthesis.
+ */
+ def discardDocBuffer(): Unit = {}
+ }
+
def createKeywordArray(keywords: Seq[(Name, Token)], defaultToken: Token): (Token, Array[Token]) = {
val names = keywords sortBy (_._1.start) map { case (k, v) => (k.start, v) }
val low = names.head._1
@@ -103,11 +124,11 @@ trait Scanners extends ScannersCommon {
}
}
- abstract class Scanner extends CharArrayReader with TokenData with ScannerData with ScannerCommon {
+ abstract class Scanner extends CharArrayReader with TokenData with ScannerData with ScannerCommon with DocScanner {
private def isDigit(c: Char) = java.lang.Character isDigit c
private var openComments = 0
- protected def putCommentChar(): Unit = nextChar()
+ final protected def putCommentChar(): Unit = { processCommentChar(); nextChar() }
@tailrec private def skipLineComment(): Unit = ch match {
case SU | CR | LF =>
@@ -134,8 +155,6 @@ trait Scanners extends ScannersCommon {
case SU => incompleteInputError("unclosed comment")
case _ => putCommentChar() ; skipNestedComments()
}
- def skipDocComment(): Unit = skipNestedComments()
- def skipBlockComment(): Unit = skipNestedComments()
private def skipToCommentEnd(isLineComment: Boolean): Unit = {
nextChar()
@@ -147,27 +166,23 @@ trait Scanners extends ScannersCommon {
// Check for the amazing corner case of /**/
if (ch == '/')
nextChar()
- else
- skipDocComment()
+ else {
+ beginDocComment("/**")
+ skipNestedComments()
+ }
}
- else skipBlockComment()
+ else skipNestedComments()
}
}
/** @pre ch == '/'
* Returns true if a comment was skipped.
*/
- def skipComment(): Boolean = ch match {
- case '/' | '*' => skipToCommentEnd(isLineComment = ch == '/') ; true
+ final def skipComment(): Boolean = ch match {
+ case '/' | '*' => skipToCommentEnd(isLineComment = ch == '/') ; finishDocComment(); true
case _ => false
}
- def flushDoc(): DocComment = null
- /** To prevent doc comments attached to expressions from leaking out of scope
- * onto the next documentable entity, they are discarded upon passing a right
- * brace, bracket, or parenthesis.
- */
- def discardDocBuffer(): Unit = ()
def isAtEnd = charOffset >= buf.length
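
Note: a standalone sketch of the comment-collection protocol the new DocScanner hooks enable (illustrative only; DocHookSketch and its explicit Char parameter are not part of the patch — the real hooks take no parameters and read the scanner's current character, reporting the result via registerDocComment):

    // Illustrative sketch, not part of the patch: buffering doc-comment text through
    // hooks shaped like DocScanner's begin/process/finish protocol.
    object DocHookSketch {
      private val buf = new StringBuilder
      private var collecting = false

      def beginDocComment(prefix: String): Unit = { collecting = true; buf ++= prefix }
      def processCommentChar(c: Char): Unit = if (collecting) buf += c
      def finishDocComment(): String = {
        val raw = buf.result(); collecting = false; buf.clear(); raw
      }

      def main(args: Array[String]): Unit = {
        beginDocComment("/**")
        " a doc comment */".foreach(processCommentChar)
        println(finishDocComment())  // prints: /** a doc comment */
      }
    }
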
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala
index 0b07e12917..b0815b0008 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala
@@ -488,16 +488,11 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder {
bc emitRETURN returnType
case nextCleanup :: rest =>
if (saveReturnValue) {
- if (insideCleanupBlock) {
- reporter.warning(r.pos, "Return statement found in finally-clause, discarding its return-value in favor of that of a more deeply nested return.")
- bc drop returnType
- } else {
- // regarding return value, the protocol is: in place of a `return-stmt`, a sequence of `adapt, store, jump` are inserted.
- if (earlyReturnVar == null) {
- earlyReturnVar = locals.makeLocal(returnType, "earlyReturnVar")
- }
- locals.store(earlyReturnVar)
+ // regarding return value, the protocol is: in place of a `return-stmt`, a sequence of `adapt, store, jump` are inserted.
+ if (earlyReturnVar == null) {
+ earlyReturnVar = locals.makeLocal(returnType, "earlyReturnVar")
}
+ locals.store(earlyReturnVar)
}
bc goTo nextCleanup
shouldEmitCleanup = true
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeSkelBuilder.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeSkelBuilder.scala
index dbad37cd5b..fdb5687311 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeSkelBuilder.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeSkelBuilder.scala
@@ -255,7 +255,6 @@ abstract class BCodeSkelBuilder extends BCodeHelpers {
// used by genLoadTry() and genSynchronized()
var earlyReturnVar: Symbol = null
var shouldEmitCleanup = false
- var insideCleanupBlock = false
// line numbers
var lastEmittedLineNr = -1
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeSyncAndTry.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeSyncAndTry.scala
index 466793010f..add2c5ffe6 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeSyncAndTry.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeSyncAndTry.scala
@@ -36,7 +36,7 @@ abstract class BCodeSyncAndTry extends BCodeBodyBuilder {
// if the synchronized block returns a result, store it in a local variable.
// Just leaving it on the stack is not valid in MSIL (stack is cleaned when leaving try-blocks).
val hasResult = (expectedType != UNIT)
- val monitorResult: Symbol = if (hasResult) locals.makeLocal(tpeTK(args.head), "monitorResult") else null;
+ val monitorResult: Symbol = if (hasResult) locals.makeLocal(tpeTK(args.head), "monitorResult") else null
/* ------ (1) pushing and entering the monitor, also keeping a reference to it in a local var. ------ */
genLoadQualifier(fun)
@@ -215,7 +215,7 @@ abstract class BCodeSyncAndTry extends BCodeBodyBuilder {
* please notice `tmp` has type tree.tpe, while `earlyReturnVar` has the method return type.
* Because those two types can be different, dedicated vars are needed.
*/
- val tmp = if (guardResult) locals.makeLocal(tpeTK(tree), "tmp") else null;
+ val tmp = if (guardResult) locals.makeLocal(tpeTK(tree), "tmp") else null
/*
* upon early return from the try-body or one of its EHs (but not the EH-version of the finally-clause)
@@ -238,6 +238,34 @@ abstract class BCodeSyncAndTry extends BCodeBodyBuilder {
val endTryBody = currProgramPoint()
bc goTo postHandlers
+ /**
+ * A return within a `try` or `catch` block where a `finally` is present ("early return")
+ * emits a store of the result to a local, a jump to a "cleanup" version of the `finally` block,
+ * and sets `shouldEmitCleanup = true` (see [[PlainBodyBuilder.genReturn]]).
+ *
+ * If the try-catch is nested, outer `finally` blocks need to be emitted in a cleanup version
+ * as well, so the `shouldEmitCleanup` variable remains `true` until the outermost `finally`.
+ * Nested cleanup `finally` blocks jump to the next enclosing one. For the outermost, we emit
+ * a read of the local variable, a return, and we set `shouldEmitCleanup = false` (see
+ * [[pendingCleanups]]).
+ *
+ * Now, assume we have
+ *
+ * try { return 1 } finally {
+ * try { println() } finally { println() }
+ * }
+ *
+ * Here, the outer `finally` needs a cleanup version, but the inner one does not. The method
+ * here makes sure that `shouldEmitCleanup` is only propagated outwards, not inwards to
+ * nested `finally` blocks.
+ */
+ def withFreshCleanupScope(body: => Unit) = {
+ val savedShouldEmitCleanup = shouldEmitCleanup
+ shouldEmitCleanup = false
+ body
+ shouldEmitCleanup = savedShouldEmitCleanup || shouldEmitCleanup
+ }
+
/* ------ (2) One EH for each case-clause (this does not include the EH-version of the finally-clause)
* An EH in (2) is reached upon abrupt termination of (1).
* An EH in (2) is protected by:
@@ -246,8 +274,7 @@ abstract class BCodeSyncAndTry extends BCodeBodyBuilder {
* ------
*/
- for (ch <- caseHandlers) {
-
+ for (ch <- caseHandlers) withFreshCleanupScope {
// (2.a) emit case clause proper
val startHandler = currProgramPoint()
var endHandler: asm.Label = null
@@ -277,9 +304,13 @@ abstract class BCodeSyncAndTry extends BCodeBodyBuilder {
protect(startTryBody, endTryBody, startHandler, excType)
// (2.c) emit jump to the program point where the finally-clause-for-normal-exit starts, or in effect `after` if no finally-clause was given.
bc goTo postHandlers
-
}
+ // Need to save the state of `shouldEmitCleanup` at this point: while emitting the first
+ // version of the `finally` block below, the variable may become true. But this does not mean
+ // that we need a cleanup version for the current block, only for the enclosing ones.
+ val currentFinallyBlockNeedsCleanup = shouldEmitCleanup
+
/* ------ (3.A) The exception-handler-version of the finally-clause.
* Reached upon abrupt termination of (1) or one of the EHs in (2).
* Protected only by whatever protects the whole try-catch-finally expression.
@@ -288,7 +319,7 @@ abstract class BCodeSyncAndTry extends BCodeBodyBuilder {
// a note on terminology: this is not "postHandlers", despite appearances.
// "postHandlers" as in the source-code view. And from that perspective, both (3.A) and (3.B) are invisible implementation artifacts.
- if (hasFinally) {
+ if (hasFinally) withFreshCleanupScope {
nopIfNeeded(startTryBody)
val finalHandler = currProgramPoint() // version of the finally-clause reached via unhandled exception.
protect(startTryBody, finalHandler, finalHandler, null)
@@ -316,14 +347,11 @@ abstract class BCodeSyncAndTry extends BCodeBodyBuilder {
// this is not "postHandlers" either.
// `shouldEmitCleanup` can be set, and at the same time this try expression may lack a finally-clause.
// In other words, all combinations of (hasFinally, shouldEmitCleanup) are valid.
- if (hasFinally && shouldEmitCleanup) {
- val savedInsideCleanup = insideCleanupBlock
- insideCleanupBlock = true
+ if (hasFinally && currentFinallyBlockNeedsCleanup) {
markProgramPoint(finCleanup)
// regarding return value, the protocol is: in place of a `return-stmt`, a sequence of `adapt, store, jump` are inserted.
emitFinalizer(finalizer, null, isDuplicate = true)
pendingCleanups()
- insideCleanupBlock = savedInsideCleanup
}
/* ------ (4) finally-clause-for-normal-nonEarlyReturn-exit
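
Note: the save/reset/merge pattern used by withFreshCleanupScope above can be seen in isolation in the following sketch (illustrative only; CleanupScopeSketch is not compiler code). A flag set inside a scope propagates outwards to enclosing scopes, but a nested scope always starts clean:

    // Illustrative sketch, not part of the patch: a mutable flag that propagates
    // outwards but never inwards, mirroring withFreshCleanupScope.
    object CleanupScopeSketch {
      private var shouldEmitCleanup = false

      def withFreshCleanupScope(body: => Unit): Unit = {
        val saved = shouldEmitCleanup
        shouldEmitCleanup = false                      // nested scope starts clean
        body                                           // the body may set the flag
        shouldEmitCleanup = saved || shouldEmitCleanup // merge outwards
      }

      def main(args: Array[String]): Unit = {
        withFreshCleanupScope {
          shouldEmitCleanup = true                           // e.g. an early return was emitted
          withFreshCleanupScope {
            println(s"inner scope sees: $shouldEmitCleanup") // false
          }
        }
        println(s"after outer scope: $shouldEmitCleanup")    // true
      }
    }
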
diff --git a/src/compiler/scala/tools/nsc/javac/JavaScanners.scala b/src/compiler/scala/tools/nsc/javac/JavaScanners.scala
index e11ac94041..f77e53c54b 100644
--- a/src/compiler/scala/tools/nsc/javac/JavaScanners.scala
+++ b/src/compiler/scala/tools/nsc/javac/JavaScanners.scala
@@ -215,7 +215,7 @@ trait JavaScanners extends ast.parser.ScannersCommon {
*
* @author Martin Odersky
*/
- abstract class JavaScanner extends AbstractJavaScanner with JavaTokenData with Cloneable with ScannerCommon {
+ abstract class JavaScanner extends AbstractJavaScanner with JavaTokenData with Cloneable with ScannerCommon with DocScanner {
override def intVal = super.intVal// todo: needed?
override def floatVal = super.floatVal
def currentPos: Position = g2p(pos - 1)
@@ -577,27 +577,32 @@ trait JavaScanners extends ast.parser.ScannersCommon {
}
}
- protected def putCommentChar(): Unit = in.next()
+ final protected def putCommentChar(): Unit = { processCommentChar(); in.next() }
- protected def skipBlockComment(isDoc: Boolean): Unit = in.ch match {
- case SU => incompleteInputError("unclosed comment")
- case '*' => putCommentChar() ; if (in.ch == '/') putCommentChar() else skipBlockComment(isDoc)
- case _ => putCommentChar() ; skipBlockComment(isDoc)
+ @tailrec final protected def skipBlockComment(isDoc: Boolean): Unit = {
+ if (isDoc) beginDocComment("/*") // the second '*' is the current character
+
+ in.ch match {
+ case SU => incompleteInputError("unclosed comment")
+ case '*' => putCommentChar() ; if (in.ch == '/') putCommentChar() else skipBlockComment(isDoc)
+ case _ => putCommentChar() ; skipBlockComment(isDoc)
+ }
}
- protected def skipLineComment(): Unit = in.ch match {
+ @tailrec final protected def skipLineComment(): Unit = in.ch match {
case CR | LF | SU =>
case _ => putCommentChar() ; skipLineComment()
}
- protected def skipComment(): Boolean = in.ch match {
- case '/' => putCommentChar() ; skipLineComment() ; true
+ final protected def skipComment(): Boolean = in.ch match {
+ case '/' => putCommentChar() ; skipLineComment() ; finishDocComment() ; true
case '*' =>
putCommentChar()
in.ch match {
case '*' => skipBlockComment(isDoc = true)
case _ => skipBlockComment(isDoc = false)
}
+ finishDocComment()
true
case _ => false
}
diff --git a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala
index b36d5d4ef1..d948d151a6 100644
--- a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala
+++ b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala
@@ -52,20 +52,28 @@ abstract class SymbolLoaders {
})
}
+ def newClass(owner: Symbol, name: String): ClassSymbol = owner.newClass(newTypeName(name))
+
/** Enter class with given `name` into scope of `root`
* and give them `completer` as type.
*/
- def enterClass(owner: Symbol, name: String, completer: SymbolLoader): Symbol = {
- val clazz = owner.newClass(newTypeName(name))
+ def enterClass(owner: Symbol, name: String, completer: SymbolLoader): Symbol =
+ enterClass(owner, newClass(owner, name), completer)
+
+ def enterClass(owner: Symbol, clazz: ClassSymbol, completer: SymbolLoader): Symbol = {
clazz setInfo completer
enterIfNew(owner, clazz, completer)
}
+ def newModule(owner: Symbol, name: String): ModuleSymbol = owner.newModule(newTermName(name))
+
/** Enter module with given `name` into scope of `root`
* and give them `completer` as type.
*/
- def enterModule(owner: Symbol, name: String, completer: SymbolLoader): Symbol = {
- val module = owner.newModule(newTermName(name))
+ def enterModule(owner: Symbol, name: String, completer: SymbolLoader): Symbol =
+ enterModule(owner, newModule(owner, name), completer)
+
+ def enterModule(owner: Symbol, module: ModuleSymbol, completer: SymbolLoader): Symbol = {
module setInfo completer
module.moduleClass setInfo moduleClassLoader
enterIfNew(owner, module, completer)
@@ -113,9 +121,12 @@ abstract class SymbolLoaders {
/** Enter class and module with given `name` into scope of `root`
* and give them `completer` as type.
*/
- def enterClassAndModule(root: Symbol, name: String, completer: SymbolLoader) {
- val clazz = enterClass(root, name, completer)
- val module = enterModule(root, name, completer)
+ def enterClassAndModule(root: Symbol, name: String, getCompleter: (ClassSymbol, ModuleSymbol) => SymbolLoader) {
+ val clazz = newClass(root, name)
+ val module = newModule(root, name)
+ val completer = getCompleter(clazz, module)
+ enterClass(root, clazz, completer)
+ enterModule(root, module, completer)
if (!clazz.isAnonymousClass) {
// Diagnostic for SI-7147
def msg: String = {
@@ -136,7 +147,7 @@ abstract class SymbolLoaders {
* (overridden in interactive.Global).
*/
def enterToplevelsFromSource(root: Symbol, name: String, src: AbstractFile) {
- enterClassAndModule(root, name, new SourcefileLoader(src))
+ enterClassAndModule(root, name, (_, _) => new SourcefileLoader(src))
}
/** The package objects of scala and scala.reflect should always
@@ -162,17 +173,10 @@ abstract class SymbolLoaders {
if (settings.verbose) inform("[symloader] no class, picked up source file for " + src.path)
enterToplevelsFromSource(owner, classRep.name, src)
case (Some(bin), _) =>
- enterClassAndModule(owner, classRep.name, newClassLoader(bin))
+ enterClassAndModule(owner, classRep.name, new ClassfileLoader(bin, _, _))
}
}
- /** Create a new loader from a binary classfile.
- * This is intended as a hook allowing to support loading symbols from
- * files other than .class files.
- */
- protected def newClassLoader(bin: AbstractFile): SymbolLoader =
- new ClassfileLoader(bin)
-
/**
* A lazy type that completes itself by calling parameter doComplete.
* Any linked modules/classes or module classes are also initialized.
@@ -277,7 +281,7 @@ abstract class SymbolLoaders {
}
}
- class ClassfileLoader(val classfile: AbstractFile) extends SymbolLoader with FlagAssigningCompleter {
+ class ClassfileLoader(val classfile: AbstractFile, clazz: ClassSymbol, module: ModuleSymbol) extends SymbolLoader with FlagAssigningCompleter {
private object classfileParser extends {
val symbolTable: SymbolLoaders.this.symbolTable.type = SymbolLoaders.this.symbolTable
} with ClassfileParser {
@@ -304,13 +308,7 @@ abstract class SymbolLoaders {
protected def doComplete(root: Symbol) {
val start = if (Statistics.canEnable) Statistics.startTimer(classReadNanos) else null
-
- // Running the classfile parser after refchecks can lead to "illegal class file dependency"
- // errors. More concretely, the classfile parser calls "sym.companionModule", which calls
- // "isModuleNotMethod" on the companion. After refchecks, this method forces the info, which
- // may run the classfile parser. This produces the error.
- enteringPhase(phaseBeforeRefchecks)(classfileParser.parse(classfile, root))
-
+ classfileParser.parse(classfile, clazz, module)
if (root.associatedFile eq NoAbstractFile) {
root match {
// In fact, the ModuleSymbol forwards its setter to the module class
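
Note: the refactoring above changes enterClassAndModule to take a completer factory so that a single ClassfileLoader can be constructed with both freshly created symbols before either is entered. A hedged, self-contained sketch of that shape (all names below are stand-ins, not the compiler's):

    // Illustrative sketch, not part of the patch: create both symbols first, build
    // one completer that knows about both, then assign it to each of them.
    object EnterSketch {
      final case class Sym(name: String) { var info: String = "<uncompleted>" }

      def newClass(name: String): Sym  = Sym(name + "#CLASS")
      def newModule(name: String): Sym = Sym(name + "#MODULE")

      def enterClassAndModule(name: String, getCompleter: (Sym, Sym) => String): (Sym, Sym) = {
        val clazz     = newClass(name)
        val module    = newModule(name)
        val completer = getCompleter(clazz, module)   // one loader sees both symbols
        clazz.info  = completer
        module.info = completer
        (clazz, module)
      }

      def main(args: Array[String]): Unit = {
        val (c, m) = enterClassAndModule("Foo",
          (cls, mod) => s"ClassfileLoader(Foo.class, ${cls.name}, ${mod.name})")
        println(c.info)
        println(m.info)
      }
    }
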
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
index f8c1a0d082..7e81fad606 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
@@ -10,6 +10,7 @@ package classfile
import java.io.{File, IOException}
import java.lang.Integer.toHexString
+
import scala.collection.{immutable, mutable}
import scala.collection.mutable.{ArrayBuffer, ListBuffer}
import scala.annotation.switch
@@ -18,6 +19,7 @@ import scala.reflect.internal.pickling.{ByteCodecs, PickleBuffer}
import scala.reflect.io.NoAbstractFile
import scala.tools.nsc.util.ClassPath
import scala.tools.nsc.io.AbstractFile
+import scala.util.control.NonFatal
/** This abstract class implements a class file parser.
*
@@ -53,18 +55,18 @@ abstract class ClassfileParser {
protected type ThisConstantPool <: ConstantPool
protected def newConstantPool: ThisConstantPool
- protected var file: AbstractFile = _ // the class file
- protected var in: AbstractFileReader = _ // the class file reader
- protected var clazz: Symbol = _ // the class symbol containing dynamic members
- protected var staticModule: Symbol = _ // the module symbol containing static members
- protected var instanceScope: Scope = _ // the scope of all instance definitions
- protected var staticScope: Scope = _ // the scope of all static definitions
- protected var pool: ThisConstantPool = _ // the classfile's constant pool
- protected var isScala: Boolean = _ // does class file describe a scala class?
- protected var isScalaAnnot: Boolean = _ // does class file describe a scala class with its pickled info in an annotation?
- protected var isScalaRaw: Boolean = _ // this class file is a scala class with no pickled info
- protected var busy: Symbol = _ // lock to detect recursive reads
- protected var currentClass: Name = _ // JVM name of the current class
+ protected var file: AbstractFile = _ // the class file
+ protected var in: AbstractFileReader = _ // the class file reader
+ protected var clazz: ClassSymbol = _ // the class symbol containing dynamic members
+ protected var staticModule: ModuleSymbol = _ // the module symbol containing static members
+ protected var instanceScope: Scope = _ // the scope of all instance definitions
+ protected var staticScope: Scope = _ // the scope of all static definitions
+ protected var pool: ThisConstantPool = _ // the classfile's constant pool
+ protected var isScala: Boolean = _ // does class file describe a scala class?
+ protected var isScalaAnnot: Boolean = _ // does class file describe a scala class with its pickled info in an annotation?
+ protected var isScalaRaw: Boolean = _ // this class file is a scala class with no pickled info
+ protected var busy: Symbol = _ // lock to detect recursive reads
+ protected var currentClass: Name = _ // JVM name of the current class
protected var classTParams = Map[Name,Symbol]()
protected var srcfile0 : Option[AbstractFile] = None
protected def moduleClass: Symbol = staticModule.moduleClass
@@ -132,17 +134,21 @@ abstract class ClassfileParser {
finally loaders.parentsLevel -= 1
}
- def parse(file: AbstractFile, root: Symbol): Unit = {
- debuglog("[class] >> " + root.fullName)
-
+ /**
+ * `clazz` and `module` are the class and module symbols corresponding to the classfile being
+ * parsed. Note that the ClassfileLoader unconditionally creates both of these symbols, even
+ * though they may get invalidated later on (.exists).
+ *
+ * Note that using `companionModule` / `companionClass` does not always work to navigate between
+ * those two symbols, namely when they are shadowed by a type / value in a package object
+ * (scala-dev#248).
+ */
+ def parse(file: AbstractFile, clazz: ClassSymbol, module: ModuleSymbol): Unit = {
this.file = file
- pushBusy(root) {
+ pushBusy(clazz) {
this.in = new AbstractFileReader(file)
- this.clazz = if (root.isModule) root.companionClass else root
- // WARNING! do no use clazz.companionModule to find staticModule.
- // In a situation where root can be defined, but its companionClass not,
- // this would give incorrect results (see SI-5031 in separate compilation scenario)
- this.staticModule = if (root.isModule) root else root.companionModule
+ this.clazz = clazz
+ this.staticModule = module
this.isScala = false
parseHeader()
@@ -271,7 +277,7 @@ abstract class ClassfileParser {
* arrays are considered to be class types, they might
* appear as entries in 'newarray' or 'cast' opcodes.
*/
- def getClassOrArrayType(index: Int): Type = (
+ def getClassOrArrayType(index: Int): Type = {
if (index <= 0 || len <= index) errorBadIndex(index)
else values(index) match {
case tp: Type => tp
@@ -283,7 +289,7 @@ abstract class ClassfileParser {
case _ => recordAtIndex(classNameToSymbol(name), index).tpe_*
}
}
- )
+ }
def getType(index: Int): Type = getType(null, index)
def getType(sym: Symbol, index: Int): Type = sigToType(sym, getExternalName(index))
@@ -356,63 +362,43 @@ abstract class ClassfileParser {
abort(s"bad constant pool tag ${in.buf(start)} at byte $start")
}
- private def loadClassSymbol(name: Name): Symbol = {
- val file = classPath findClassFile name.toString getOrElse {
- // SI-5593 Scaladoc's current strategy is to visit all packages in search of user code that can be documented
- // therefore, it will rummage through the classpath triggering errors whenever it encounters package objects
- // that are not in their correct place (see bug for details)
-
- // TODO More consistency with use of stub symbols in `Unpickler`
- // - better owner than `NoSymbol`
- // - remove eager warning
- val msg = s"Class $name not found - continuing with a stub."
- if ((!settings.isScaladoc) && (settings.verbose || settings.developer)) warning(msg)
- return NoSymbol.newStubSymbol(name.toTypeName, msg)
- }
- val completer = new loaders.ClassfileLoader(file)
- var owner: Symbol = rootMirror.RootClass
- var sym: Symbol = NoSymbol
- var ss: Name = null
- var start = 0
- var end = name indexOf '.'
-
- while (end > 0) {
- ss = name.subName(start, end)
- sym = owner.info.decls lookup ss
- if (sym == NoSymbol) {
- sym = owner.newPackage(ss.toTermName) setInfo completer
- sym.moduleClass setInfo completer
- owner.info.decls enter sym
- }
- owner = sym.moduleClass
- start = end + 1
- end = name.indexOf('.', start)
- }
- ss = name.subName(0, start)
- owner.info.decls lookup ss orElse {
- sym = owner.newClass(ss.toTypeName) setInfoAndEnter completer
- debuglog("loaded "+sym+" from file "+file)
- sym
- }
+ def stubClassSymbol(name: Name): Symbol = {
+ // SI-5593 Scaladoc's current strategy is to visit all packages in search of user code that can be documented
+ // therefore, it will rummage through the classpath triggering errors whenever it encounters package objects
+ // that are not in their correct place (see bug for details)
+
+ // TODO More consistency with use of stub symbols in `Unpickler`
+ // - better owner than `NoSymbol`
+ // - remove eager warning
+ val msg = s"Class $name not found - continuing with a stub."
+ if ((!settings.isScaladoc) && (settings.verbose || settings.developer)) warning(msg)
+ NoSymbol.newStubSymbol(name.toTypeName, msg)
}
- /** FIXME - we shouldn't be doing ad hoc lookups in the empty package.
- * The method called "getClassByName" should either return the class or not.
- */
- private def lookupClass(name: Name) = (
+ private def lookupClass(name: Name) = try {
if (name containsChar '.')
- rootMirror getClassByName name // see tickets #2464, #3756
+ rootMirror getClassByName name
else
+ // FIXME - we shouldn't be doing ad hoc lookups in the empty package, getClassByName should return the class
definitions.getMember(rootMirror.EmptyPackageClass, name.toTypeName)
- )
+ } catch {
+ // The handler
+ // - prevents crashes with deficient InnerClassAttributes (SI-2464, 0ce0ad5)
+ // - was referenced in the bugfix commit for SI-3756 (4fb0d53), not sure why
+ // - covers the case when a type alias in a package object shadows a class symbol,
+ // getClassByName throws a MissingRequirementError (scala-dev#248)
+ case _: FatalError =>
+ // getClassByName can throw a MissingRequirementError (which extends FatalError)
+ // definitions.getMember can throw a FatalError, for example in pos/t5165b
+ stubClassSymbol(name)
+ }
/** Return the class symbol of the given name. */
def classNameToSymbol(name: Name): Symbol = {
if (innerClasses contains name)
innerClasses innerSymbol name
else
- try lookupClass(name)
- catch { case _: FatalError => loadClassSymbol(name) }
+ lookupClass(name)
}
def parseClass() {
@@ -441,13 +427,10 @@ abstract class ClassfileParser {
}
val isTopLevel = !(currentClass containsChar '$') // Java class name; *don't* try to to use Scala name decoding (SI-7532)
-
- val c = if (isTopLevel) pool.getClassSymbol(nameIdx) else clazz
if (isTopLevel) {
- if (c != clazz) {
- if ((clazz eq NoSymbol) && (c ne NoSymbol)) clazz = c
- else mismatchError(c)
- }
+ val c = pool.getClassSymbol(nameIdx)
+ // scala-dev#248: when a type alias (in a package object) shadows a class symbol, getClassSymbol returns a stub
+ if (!c.isInstanceOf[StubSymbol] && c != clazz) mismatchError(c)
}
addEnclosingTParams(clazz)
@@ -848,16 +831,19 @@ abstract class ClassfileParser {
// Java annotations on classes / methods / fields with RetentionPolicy.RUNTIME
case tpnme.RuntimeAnnotationATTR =>
if (isScalaAnnot || !isScala) {
- val scalaSigAnnot = parseAnnotations(attrLen)
- if (isScalaAnnot)
- scalaSigAnnot match {
- case Some(san: AnnotationInfo) =>
- val bytes =
- san.assocs.find({ _._1 == nme.bytes }).get._2.asInstanceOf[ScalaSigBytes].bytes
- unpickler.unpickle(bytes, 0, clazz, staticModule, in.file.name)
- case None =>
- throw new RuntimeException("Scala class file does not contain Scala annotation")
- }
+ // For Scala classfiles we are only interested in the scala signature annotations. Other
+ // annotations should be skipped (the pickle contains the symbol's annotations).
+ // Skipping them also prevents some spurious warnings / errors related to SI-7014,
+ // SI-7551, pos/5165b
+ val scalaSigAnnot = parseAnnotations(onlyScalaSig = isScalaAnnot)
+ if (isScalaAnnot) scalaSigAnnot match {
+ case Some(san: AnnotationInfo) =>
+ val bytes =
+ san.assocs.find({ _._1 == nme.bytes }).get._2.asInstanceOf[ScalaSigBytes].bytes
+ unpickler.unpickle(bytes, 0, clazz, staticModule, in.file.name)
+ case None =>
+ throw new RuntimeException("Scala class file does not contain Scala annotation")
+ }
debuglog("[class] << " + sym.fullName + sym.annotationsString)
}
else
@@ -891,6 +877,24 @@ abstract class ClassfileParser {
}
}
+ def skipAnnotArg(): Unit = {
+ u1 match {
+ case STRING_TAG | BOOL_TAG | BYTE_TAG | CHAR_TAG | SHORT_TAG |
+ INT_TAG | LONG_TAG | FLOAT_TAG | DOUBLE_TAG | CLASS_TAG =>
+ in.skip(2)
+
+ case ENUM_TAG =>
+ in.skip(4)
+
+ case ARRAY_TAG =>
+ val num = u2
+ for (i <- 0 until num) skipAnnotArg()
+
+ case ANNOTATION_TAG =>
+ parseAnnotation(u2, onlyScalaSig = true)
+ }
+ }
+
def parseAnnotArg: Option[ClassfileAnnotArg] = {
val tag = u1
val index = u2
@@ -924,7 +928,7 @@ abstract class ClassfileParser {
if (hasError) None
else Some(ArrayAnnotArg(arr.toArray))
case ANNOTATION_TAG =>
- parseAnnotation(index) map (NestedAnnotArg(_))
+ parseAnnotation(index, onlyScalaSig = false) map (NestedAnnotArg(_))
}
}
@@ -951,7 +955,7 @@ abstract class ClassfileParser {
/* Parse and return a single annotation. If it is malformed,
* return None.
*/
- def parseAnnotation(attrNameIndex: Int): Option[AnnotationInfo] = try {
+ def parseAnnotation(attrNameIndex: Int, onlyScalaSig: Boolean): Option[AnnotationInfo] = try {
val attrType = pool.getType(attrNameIndex)
val nargs = u2
val nvpairs = new ListBuffer[(Name, ClassfileAnnotArg)]
@@ -972,18 +976,17 @@ abstract class ClassfileParser {
case None => hasError = true
}
else
- parseAnnotArg match {
+ if (onlyScalaSig) skipAnnotArg()
+ else parseAnnotArg match {
case Some(c) => nvpairs += ((name, c))
case None => hasError = true
}
}
if (hasError) None
else Some(AnnotationInfo(attrType, List(), nvpairs.toList))
- }
- catch {
- case f: FatalError => throw f // don't eat fatal errors, they mean a class was not found
- case ex: java.lang.Error => throw ex
- case ex: Throwable =>
+ } catch {
+ case f: FatalError => throw f // don't eat fatal errors, they mean a class was not found
+ case NonFatal(ex) =>
// We want to be robust when annotations are unavailable, so the very least
// we can do is warn the user about the exception
// There was a reference to ticket 1135, but that is outdated: a reference to a class not on
@@ -992,7 +995,6 @@ abstract class ClassfileParser {
// and that should never be swallowed silently.
warning(s"Caught: $ex while parsing annotations in ${in.file}")
if (settings.debug) ex.printStackTrace()
-
None // ignore malformed annotations
}
@@ -1014,19 +1016,18 @@ abstract class ClassfileParser {
/* Parse a sequence of annotations and attaches them to the
* current symbol sym, except for the ScalaSignature annotation that it returns, if it is available. */
- def parseAnnotations(len: Int): Option[AnnotationInfo] = {
+ def parseAnnotations(onlyScalaSig: Boolean): Option[AnnotationInfo] = {
val nAttr = u2
var scalaSigAnnot: Option[AnnotationInfo] = None
- for (n <- 0 until nAttr)
- parseAnnotation(u2) match {
- case Some(scalaSig) if (scalaSig.atp == ScalaSignatureAnnotation.tpe) =>
- scalaSigAnnot = Some(scalaSig)
- case Some(scalaSig) if (scalaSig.atp == ScalaLongSignatureAnnotation.tpe) =>
- scalaSigAnnot = Some(scalaSig)
- case Some(annot) =>
- sym.addAnnotation(annot)
- case None =>
- }
+ for (n <- 0 until nAttr) parseAnnotation(u2, onlyScalaSig) match {
+ case Some(scalaSig) if scalaSig.atp == ScalaSignatureAnnotation.tpe =>
+ scalaSigAnnot = Some(scalaSig)
+ case Some(scalaSig) if scalaSig.atp == ScalaLongSignatureAnnotation.tpe =>
+ scalaSigAnnot = Some(scalaSig)
+ case Some(annot) =>
+ sym.addAnnotation(annot)
+ case None =>
+ }
scalaSigAnnot
}
@@ -1043,7 +1044,6 @@ abstract class ClassfileParser {
def enterClassAndModule(entry: InnerClassEntry, file: AbstractFile) {
def jflags = entry.jflags
- val completer = new loaders.ClassfileLoader(file)
val name = entry.originalName
val sflags = jflags.toScalaFlags
val owner = ownerForFlags(jflags)
@@ -1054,8 +1054,11 @@ abstract class ClassfileParser {
val (innerClass, innerModule) = if (file == NoAbstractFile) {
(newStub(name.toTypeName), newStub(name.toTermName))
} else {
- val cls = owner.newClass(name.toTypeName, NoPosition, sflags) setInfo completer
- val mod = owner.newModule(name.toTermName, NoPosition, sflags) setInfo completer
+ val cls = owner.newClass(name.toTypeName, NoPosition, sflags)
+ val mod = owner.newModule(name.toTermName, NoPosition, sflags)
+ val completer = new loaders.ClassfileLoader(file, cls, mod)
+ cls setInfo completer
+ mod setInfo completer
mod.moduleClass setInfo loaders.moduleClassLoader
List(cls, mod.moduleClass) foreach (_.associatedFile = file)
(cls, mod)
@@ -1098,8 +1101,6 @@ abstract class ClassfileParser {
val attrName = readTypeName()
val attrLen = u4
attrName match {
- case tpnme.SignatureATTR =>
- in.skip(attrLen)
case tpnme.ScalaSignatureATTR =>
isScala = true
val pbuf = new PickleBuffer(in.buf, in.bp, in.bp + attrLen)
@@ -1166,10 +1167,10 @@ abstract class ClassfileParser {
private def innerSymbol(entry: InnerClassEntry): Symbol = {
val name = entry.originalName.toTypeName
val enclosing = entry.enclosing
- val member = (
+ val member = {
if (enclosing == clazz) entry.scope lookup name
else lookupMemberAtTyperPhaseIfPossible(enclosing, name)
- )
+ }
def newStub = enclosing.newStubSymbol(name, s"Unable to locate class corresponding to inner class entry for $name in owner ${entry.outerName}")
member.orElse(newStub)
}
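
Note: skipAnnotArg above walks the classfile element_value layout (JVMS 4.7.16.1). A standalone sketch of that layout follows, with the buffer and offsets standing in for the compiler's AbstractFileReader (none of these names are the compiler's):

    // Illustrative sketch, not part of the patch: skipping one element_value in a
    // raw byte buffer. Constant- and class-valued entries carry one 2-byte
    // constant-pool index, enum entries carry two, arrays and nested annotations recurse.
    object AnnotArgSkipSketch {
      def skip(buf: Array[Byte], start: Int): Int = {
        var bp = start
        def u1(): Int = { val b = buf(bp) & 0xff; bp += 1; b }
        def u2(): Int = { val hi = u1(); (hi << 8) | u1() }
        u1().toChar match {
          case 'B' | 'C' | 'D' | 'F' | 'I' | 'J' | 'S' | 'Z' | 's' | 'c' =>
            bp += 2                                        // one constant-pool index
          case 'e' =>
            bp += 4                                        // enum: type name + const name
          case '[' =>
            val n = u2()
            (0 until n).foreach(_ => bp = skip(buf, bp))   // nested element_values
          case '@' =>
            bp += 2                                        // annotation type index
            val pairs = u2()
            (0 until pairs).foreach { _ => bp += 2; bp = skip(buf, bp) } // name index + value
          case other =>
            sys.error(s"bad element_value tag: $other")
        }
        bp
      }

      def main(args: Array[String]): Unit = {
        val intValued = Array[Byte]('I'.toByte, 0x00, 0x2A) // tag 'I' + 2-byte cp index
        println(skip(intValued, 0))                          // 3
      }
    }
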
diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
index 106b076eef..116c932365 100644
--- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
@@ -472,7 +472,7 @@ abstract class RefChecks extends Transform {
checkOverrideTypes()
checkOverrideDeprecated()
if (settings.warnNullaryOverride) {
- if (other.paramss.isEmpty && !member.paramss.isEmpty) {
+ if (other.paramss.isEmpty && !member.paramss.isEmpty && !member.isJavaDefined) {
reporter.warning(member.pos, "non-nullary method overrides nullary method")
}
}
diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
index 7d48c548a1..cca6f280e3 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
@@ -3139,10 +3139,25 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
val initElems = scope.elems
// SI-5877 The decls of a package include decls of the package object. But we don't want to add
// the corresponding synthetics to the package class, only to the package object class.
- def shouldAdd(sym: Symbol) =
- inBlock || !context.isInPackageObject(sym, context.owner)
+ // SI-6734 Locality test below is meaningless if we're not even in the correct tree.
+ // For modules that are synthetic case companions, check that the case class is defined here.
+ def shouldAdd(sym: Symbol): Boolean = {
+ def shouldAddAsModule: Boolean =
+ sym.moduleClass.attachments.get[ClassForCaseCompanionAttachment] match {
+ case Some(att) =>
+ val cdef = att.caseClass
+ stats.exists {
+ case t @ ClassDef(_, _, _, _) => t.symbol == cdef.symbol // cdef ne t
+ case _ => false
+ }
+ case _ => true
+ }
+
+ (!sym.isModule || shouldAddAsModule) && (inBlock || !context.isInPackageObject(sym, context.owner))
+ }
for (sym <- scope)
- for (tree <- context.unit.synthetics get sym if shouldAdd(sym)) { // OPT: shouldAdd is usually true. Call it here, rather than in the outer loop
+ // OPT: shouldAdd is usually true. Call it here, rather than in the outer loop
+ for (tree <- context.unit.synthetics.get(sym) if shouldAdd(sym)) {
newStats += typedStat(tree) // might add even more synthetics to the scope
context.unit.synthetics -= sym
}
diff --git a/src/library/scala/Array.scala b/src/library/scala/Array.scala
index 6d829a9e5d..5d1c25732c 100644
--- a/src/library/scala/Array.scala
+++ b/src/library/scala/Array.scala
@@ -11,7 +11,6 @@ package scala
import scala.collection.generic._
import scala.collection.{ mutable, immutable }
import mutable.{ ArrayBuilder, ArraySeq }
-import scala.compat.Platform.arraycopy
import scala.reflect.ClassTag
import scala.runtime.ScalaRunTime.{ array_apply, array_update }
@@ -102,7 +101,7 @@ object Array extends FallbackArrayBuilding {
def copy(src: AnyRef, srcPos: Int, dest: AnyRef, destPos: Int, length: Int) {
val srcClass = src.getClass
if (srcClass.isArray && dest.getClass.isAssignableFrom(srcClass))
- arraycopy(src, srcPos, dest, destPos, length)
+ java.lang.System.arraycopy(src, srcPos, dest, destPos, length)
else
slowcopy(src, srcPos, dest, destPos, length)
}
diff --git a/src/library/scala/collection/Iterator.scala b/src/library/scala/collection/Iterator.scala
index 1426278954..66d7493217 100644
--- a/src/library/scala/collection/Iterator.scala
+++ b/src/library/scala/collection/Iterator.scala
@@ -686,15 +686,15 @@ trait Iterator[+A] extends TraversableOnce[A] {
* handling of structural calls. It's not what's intended here.
*/
class Leading extends AbstractIterator[A] {
- var lookahead: mutable.Queue[A] = null
- var hd: A = _
+ private[this] var lookahead: mutable.Queue[A] = null
+ private[this] var hd: A = _
/* Status is kept with magic numbers
* 1 means next element is in hd and we're still reading into this iterator
* 0 means we're still reading but haven't found a next element
* -1 means we are done reading into the iterator, so we must rely on lookahead
* -2 means we are done but have saved hd for the other iterator to use as its first element
*/
- var status = 0
+ private[this] var status = 0
private def store(a: A) {
if (lookahead == null) lookahead = new mutable.Queue[A]
lookahead += a
@@ -718,26 +718,23 @@ trait Iterator[+A] extends TraversableOnce[A] {
}
else empty.next()
}
- def finish(): Boolean = {
- if (status == -1) false
- else if (status == -2) {
+ def finish(): Boolean = status match {
+ case -2 => status = -1 ; true
+ case -1 => false
+ case 1 => store(hd) ; status = 0 ; finish()
+ case 0 =>
status = -1
- true
- }
- else {
- if (status == 1) store(hd)
while (self.hasNext) {
val a = self.next()
if (p(a)) store(a)
else {
hd = a
- status = -1
return true
}
}
false
- }
}
+ def trailer: A = hd
}
val leading = new Leading
@@ -770,7 +767,7 @@ trait Iterator[+A] extends TraversableOnce[A] {
if (status > 0) self.next()
else {
status = 1
- val ans = myLeading.hd
+ val ans = myLeading.trailer
myLeading = null
ans
}
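
Note: the Leading rework above preserves Iterator.span's contract — the first iterator yields the longest prefix satisfying the predicate, and the second begins with the first failing element (the value the new trailer accessor exposes). A small usage sketch:

    // Illustrative usage of Iterator.span (standard library behaviour, not new code).
    object SpanSketch {
      def main(args: Array[String]): Unit = {
        val (leading, trailing) = Iterator(1, 2, 3, 10, 4).span(_ < 5)
        println(leading.toList)   // List(1, 2, 3)
        println(trailing.toList)  // List(10, 4)
      }
    }
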
diff --git a/src/library/scala/collection/immutable/Stream.scala b/src/library/scala/collection/immutable/Stream.scala
index db19df315f..3d4e32971c 100644
--- a/src/library/scala/collection/immutable/Stream.scala
+++ b/src/library/scala/collection/immutable/Stream.scala
@@ -23,7 +23,7 @@ import scala.language.implicitConversions
* import scala.math.BigInt
* object Main extends App {
*
- * val fibs: Stream[BigInt] = BigInt(0) #:: BigInt(1) #:: fibs.zip(fibs.tail).map { n => n._1 + n._2 }
+ * lazy val fibs: Stream[BigInt] = BigInt(0) #:: BigInt(1) #:: fibs.zip(fibs.tail).map { n => n._1 + n._2 }
*
* fibs take 5 foreach println
* }
@@ -46,7 +46,7 @@ import scala.language.implicitConversions
* import scala.math.BigInt
* object Main extends App {
*
- * val fibs: Stream[BigInt] = BigInt(0) #:: BigInt(1) #:: fibs.zip(
+ * lazy val fibs: Stream[BigInt] = BigInt(0) #:: BigInt(1) #:: fibs.zip(
* fibs.tail).map(n => {
* println("Adding %d and %d".format(n._1, n._2))
* n._1 + n._2
@@ -162,7 +162,7 @@ import scala.language.implicitConversions
* // The first time we try to access the tail we're going to need more
* // information which will require us to recurse, which will require us to
* // recurse, which...
- * val sov: Stream[Vector[Int]] = Vector(0) #:: sov.zip(sov.tail).map { n => n._1 ++ n._2 }
+ * lazy val sov: Stream[Vector[Int]] = Vector(0) #:: sov.zip(sov.tail).map { n => n._1 ++ n._2 }
* }}}
*
* The definition of `fibs` above creates a larger number of objects than
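
Note: one reading of the val-to-lazy-val changes in the examples above is that fibs refers to itself on its right-hand side; that compiles as a plain val in a template body, but is rejected as a forward reference when written as a local definition, whereas lazy val works in both positions. A minimal sketch (assuming Scala 2.12's Stream):

    // Illustrative sketch, not part of the patch: a self-referential Stream defined
    // locally needs `lazy val`; with a plain `val` scalac reports a forward reference.
    object FibsSketch {
      def main(args: Array[String]): Unit = {
        lazy val fibs: Stream[BigInt] =
          BigInt(0) #:: BigInt(1) #:: fibs.zip(fibs.tail).map { n => n._1 + n._2 }

        println(fibs.take(8).toList)  // List(0, 1, 1, 2, 3, 5, 8, 13)
      }
    }
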
diff --git a/src/library/scala/collection/immutable/Vector.scala b/src/library/scala/collection/immutable/Vector.scala
index a162fdaaf8..d9d925705f 100644
--- a/src/library/scala/collection/immutable/Vector.scala
+++ b/src/library/scala/collection/immutable/Vector.scala
@@ -11,7 +11,6 @@ package collection
package immutable
import scala.annotation.unchecked.uncheckedVariance
-import scala.compat.Platform
import scala.collection.generic._
import scala.collection.mutable.{Builder, ReusableBuilder}
import scala.collection.parallel.immutable.ParVector
@@ -478,12 +477,12 @@ override def companion: GenericCompanion[Vector] = Vector
// if (array eq null)
// println("OUCH!!! " + right + "/" + depth + "/"+startIndex + "/" + endIndex + "/" + focus)
val a2 = new Array[AnyRef](array.length)
- Platform.arraycopy(array, 0, a2, 0, right)
+ java.lang.System.arraycopy(array, 0, a2, 0, right)
a2
}
private def copyRight(array: Array[AnyRef], left: Int): Array[AnyRef] = {
val a2 = new Array[AnyRef](array.length)
- Platform.arraycopy(array, left, a2, left, a2.length - left)
+ java.lang.System.arraycopy(array, left, a2, left, a2.length - left)
a2
}
@@ -955,7 +954,7 @@ private[immutable] trait VectorPointer[T] {
private[immutable] final def copyOf(a: Array[AnyRef]) = {
val b = new Array[AnyRef](a.length)
- Platform.arraycopy(a, 0, b, 0, a.length)
+ java.lang.System.arraycopy(a, 0, b, 0, a.length)
b
}
@@ -1119,7 +1118,7 @@ private[immutable] trait VectorPointer[T] {
private[immutable] final def copyRange(array: Array[AnyRef], oldLeft: Int, newLeft: Int) = {
val elems = new Array[AnyRef](32)
- Platform.arraycopy(array, oldLeft, elems, newLeft, 32 - math.max(newLeft,oldLeft))
+ java.lang.System.arraycopy(array, oldLeft, elems, newLeft, 32 - math.max(newLeft,oldLeft))
elems
}
diff --git a/src/library/scala/collection/mutable/ArrayBuffer.scala b/src/library/scala/collection/mutable/ArrayBuffer.scala
index 167e04ccbd..23d386f729 100644
--- a/src/library/scala/collection/mutable/ArrayBuffer.scala
+++ b/src/library/scala/collection/mutable/ArrayBuffer.scala
@@ -67,7 +67,7 @@ class ArrayBuffer[A](override protected val initialSize: Int)
override def sizeHint(len: Int) {
if (len > size && len >= 1) {
val newarray = new Array[AnyRef](len)
- scala.compat.Platform.arraycopy(array, 0, newarray, 0, size0)
+ java.lang.System.arraycopy(array, 0, newarray, 0, size0)
array = newarray
}
}
diff --git a/src/library/scala/collection/mutable/PriorityQueue.scala b/src/library/scala/collection/mutable/PriorityQueue.scala
index 107a2bfa0e..ed43ef6db9 100644
--- a/src/library/scala/collection/mutable/PriorityQueue.scala
+++ b/src/library/scala/collection/mutable/PriorityQueue.scala
@@ -331,8 +331,8 @@ sealed class PriorityQueue[A](implicit val ord: Ordering[A])
val pq = new PriorityQueue[A]
val n = resarr.p_size0
pq.resarr.p_ensureSize(n)
+ java.lang.System.arraycopy(resarr.p_array, 1, pq.resarr.p_array, 1, n-1)
pq.resarr.p_size0 = n
- scala.compat.Platform.arraycopy(resarr.p_array, 1, pq.resarr.p_array, 1, n-1)
pq
}
}
diff --git a/src/library/scala/collection/mutable/ResizableArray.scala b/src/library/scala/collection/mutable/ResizableArray.scala
index 85a299216e..50d3513784 100644
--- a/src/library/scala/collection/mutable/ResizableArray.scala
+++ b/src/library/scala/collection/mutable/ResizableArray.scala
@@ -101,7 +101,7 @@ trait ResizableArray[A] extends IndexedSeq[A]
if (newSize > Int.MaxValue) newSize = Int.MaxValue
val newArray: Array[AnyRef] = new Array(newSize.toInt)
- scala.compat.Platform.arraycopy(array, 0, newArray, 0, size0)
+ java.lang.System.arraycopy(array, 0, newArray, 0, size0)
array = newArray
}
}
diff --git a/src/library/scala/collection/parallel/immutable/ParRange.scala b/src/library/scala/collection/parallel/immutable/ParRange.scala
index 8fd5382ce9..de2b53a6c0 100644
--- a/src/library/scala/collection/parallel/immutable/ParRange.scala
+++ b/src/library/scala/collection/parallel/immutable/ParRange.scala
@@ -107,6 +107,7 @@ self =>
}
}
+ override def toString = s"Par$range"
}
object ParRange {
diff --git a/src/library/scala/concurrent/impl/Promise.scala b/src/library/scala/concurrent/impl/Promise.scala
index 626540425f..7fcc8c9f2d 100644
--- a/src/library/scala/concurrent/impl/Promise.scala
+++ b/src/library/scala/concurrent/impl/Promise.scala
@@ -384,7 +384,7 @@ private[concurrent] object Promise {
private[this] final def thisAs[S]: Future[S] = future.asInstanceOf[Future[S]]
override def onSuccess[U](pf: PartialFunction[T, U])(implicit executor: ExecutionContext): Unit = ()
- override def failed: Future[Throwable] = thisAs[Throwable]
+ override def failed: Future[Throwable] = KeptPromise(Success(result.exception)).future
override def foreach[U](f: T => U)(implicit executor: ExecutionContext): Unit = ()
override def map[S](f: T => S)(implicit executor: ExecutionContext): Future[S] = thisAs[S]
override def flatMap[S](f: T => Future[S])(implicit executor: ExecutionContext): Future[S] = thisAs[S]
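
Note: the `failed` change above replaces a cast (`thisAs[Throwable]`) with a genuinely successful future holding the exception, which matches Future.failed's documented contract. A small sketch of the observable behaviour using only the public API:

    // Illustrative sketch of the failed projection's contract (public API only).
    import scala.concurrent.{Await, Future}
    import scala.concurrent.duration._

    object FailedProjectionSketch {
      def main(args: Array[String]): Unit = {
        val boom = new RuntimeException("boom")
        val f: Future[Int] = Future.failed(boom)

        // The failed projection completes successfully with the original exception.
        val projected: Future[Throwable] = f.failed
        println(Await.result(projected, 1.second) eq boom)  // true
      }
    }
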
diff --git a/src/partest-extras/scala/org/scalacheck/Arbitrary.scala b/src/partest-extras/scala/org/scalacheck/Arbitrary.scala
deleted file mode 100644
index 1cbd668f0c..0000000000
--- a/src/partest-extras/scala/org/scalacheck/Arbitrary.scala
+++ /dev/null
@@ -1,433 +0,0 @@
-/*-------------------------------------------------------------------------*\
-** ScalaCheck **
-** Copyright (c) 2007-2014 Rickard Nilsson. All rights reserved. **
-** http://www.scalacheck.org **
-** **
-** This software is released under the terms of the Revised BSD License. **
-** There is NO WARRANTY. See the file LICENSE for the full text. **
-\*------------------------------------------------------------------------ */
-
-package org.scalacheck
-
-import util.{FreqMap, Buildable, Buildable2}
-
-
-sealed abstract class Arbitrary[T] {
- val arbitrary: Gen[T]
-}
-
-/** Defines implicit [[org.scalacheck.Arbitrary]] instances for common types.
- * <p>
- * ScalaCheck
- * uses implicit [[org.scalacheck.Arbitrary]] instances when creating properties
- * out of functions with the `Prop.property` method, and when
- * the `Arbitrary.arbitrary` method is used. For example, the
- * following code requires that there exists an implicit
- * `Arbitrary[MyClass]` instance:
- * </p>
- *
- * {{{
- * val myProp = Prop.forAll { myClass: MyClass =>
- * ...
- * }
- *
- * val myGen = Arbitrary.arbitrary[MyClass]
- * }}}
- *
- * <p>
- * The required implicit definition could look like this:
- * </p>
- *
- * {{{
- * implicit val arbMyClass: Arbitrary[MyClass] = Arbitrary(...)
- * }}}
- *
- * <p>
- * The factory method `Arbitrary(...)` takes a generator of type
- * `Gen[T]` and returns an instance of `Arbitrary[T]`.
- * </p>
- *
- * <p>
- * The `Arbitrary` module defines implicit [[org.scalacheck.Arbitrary]]
- * instances for common types, for convenient use in your properties and
- * generators.
- * </p>
- */
-object Arbitrary {
-
- import Gen.{const, choose, sized, frequency, oneOf, containerOf, resize}
- import collection.{immutable, mutable}
- import java.util.Date
-
- /** Creates an Arbitrary instance */
- def apply[T](g: => Gen[T]): Arbitrary[T] = new Arbitrary[T] {
- lazy val arbitrary = g
- }
-
- /** Returns an arbitrary generator for the type T. */
- def arbitrary[T](implicit a: Arbitrary[T]): Gen[T] = a.arbitrary
-
- /**** Arbitrary instances for each AnyVal ****/
-
- /** Arbitrary AnyVal */
- implicit lazy val arbAnyVal: Arbitrary[AnyVal] = Arbitrary(oneOf(
- arbitrary[Unit], arbitrary[Boolean], arbitrary[Char], arbitrary[Byte],
- arbitrary[Short], arbitrary[Int], arbitrary[Long], arbitrary[Float],
- arbitrary[Double]
- ))
-
- /** Arbitrary instance of Boolean */
- implicit lazy val arbBool: Arbitrary[Boolean] =
- Arbitrary(oneOf(true, false))
-
- /** Arbitrary instance of Int */
- implicit lazy val arbInt: Arbitrary[Int] = Arbitrary(
- Gen.chooseNum(Int.MinValue, Int.MaxValue)
- )
-
- /** Arbitrary instance of Long */
- implicit lazy val arbLong: Arbitrary[Long] = Arbitrary(
- Gen.chooseNum(Long.MinValue, Long.MaxValue)
- )
-
- /** Arbitrary instance of Float */
- implicit lazy val arbFloat: Arbitrary[Float] = Arbitrary(
- Gen.chooseNum(
- Float.MinValue, Float.MaxValue
- // I find that including these by default is a little TOO testy.
- // Float.Epsilon, Float.NaN, Float.PositiveInfinity, Float.NegativeInfinity
- )
- )
-
- /** Arbitrary instance of Double */
- implicit lazy val arbDouble: Arbitrary[Double] = Arbitrary(
- Gen.chooseNum(
- Double.MinValue / 2, Double.MaxValue / 2
- // As above. Perhaps behind some option?
- // Double.Epsilon, Double.NaN, Double.PositiveInfinity, Double.NegativeInfinity
- )
- )
-
- /** Arbitrary instance of Char */
- implicit lazy val arbChar: Arbitrary[Char] = Arbitrary(
- Gen.frequency(
- (0xD800-Char.MinValue, Gen.choose[Char](Char.MinValue,0xD800-1)),
- (Char.MaxValue-0xDFFF, Gen.choose[Char](0xDFFF+1,Char.MaxValue))
- )
- )
-
- /** Arbitrary instance of Byte */
- implicit lazy val arbByte: Arbitrary[Byte] = Arbitrary(
- Gen.chooseNum(Byte.MinValue, Byte.MaxValue)
- )
-
- /** Arbitrary instance of Short */
- implicit lazy val arbShort: Arbitrary[Short] = Arbitrary(
- Gen.chooseNum(Short.MinValue, Short.MaxValue)
- )
-
- /** Absolutely, totally, 100% arbitrarily chosen Unit. */
- implicit lazy val arbUnit: Arbitrary[Unit] = Arbitrary(const(()))
-
- /**** Arbitrary instances of other common types ****/
-
- /** Arbitrary instance of String */
- implicit lazy val arbString: Arbitrary[String] =
- Arbitrary(arbitrary[List[Char]] map (_.mkString))
-
- /** Arbitrary instance of Date */
- implicit lazy val arbDate: Arbitrary[Date] = Arbitrary(for {
- l <- arbitrary[Long]
- d = new Date
- } yield new Date(d.getTime + l))
-
- /** Arbitrary instance of Throwable */
- implicit lazy val arbThrowable: Arbitrary[Throwable] =
- Arbitrary(oneOf(const(new Exception), const(new Error)))
-
- /** Arbitrary instance of Exception */
- implicit lazy val arbException: Arbitrary[Exception] =
- Arbitrary(const(new Exception))
-
- /** Arbitrary instance of Error */
- implicit lazy val arbError: Arbitrary[Error] =
- Arbitrary(const(new Error))
-
- /** Arbitrary BigInt */
- implicit lazy val arbBigInt: Arbitrary[BigInt] = {
- def chooseBigInt: Gen[BigInt] =
- sized((s: Int) => choose(-s, s)) map (x => BigInt(x))
-
- def chooseReallyBigInt: Gen[BigInt] = for {
- bi <- chooseBigInt
- n <- choose(32,128)
- } yield bi << n
-
- Arbitrary(
- frequency(
- (5, chooseBigInt),
- (10, chooseReallyBigInt),
- (1, BigInt(0)),
- (1, BigInt(1)),
- (1, BigInt(-1)),
- (1, BigInt(Int.MaxValue) + 1),
- (1, BigInt(Int.MinValue) - 1),
- (1, BigInt(Long.MaxValue)),
- (1, BigInt(Long.MinValue)),
- (1, BigInt(Long.MaxValue) + 1),
- (1, BigInt(Long.MinValue) - 1)
- )
- )
- }
-
- /** Arbitrary BigDecimal */
- implicit lazy val arbBigDecimal: Arbitrary[BigDecimal] = {
- import java.math.MathContext._
- val mcGen = oneOf(UNLIMITED, DECIMAL32, DECIMAL64, DECIMAL128)
- val bdGen = for {
- x <- arbBigInt.arbitrary
- mc <- mcGen
- limit <- const(if(mc == UNLIMITED) 0 else math.max(x.abs.toString.length - mc.getPrecision, 0))
- scale <- Gen.chooseNum(Int.MinValue + limit , Int.MaxValue)
- } yield {
- try {
- BigDecimal(x, scale, mc)
- } catch {
- case ae: java.lang.ArithmeticException => BigDecimal(x, scale, UNLIMITED) // Handle the case where scale/precision conflict
- }
- }
- Arbitrary(bdGen)
- }
-
- /** Arbitrary java.lang.Number */
- implicit lazy val arbNumber: Arbitrary[Number] = {
- val gen = Gen.oneOf(
- arbitrary[Byte], arbitrary[Short], arbitrary[Int], arbitrary[Long],
- arbitrary[Float], arbitrary[Double]
- )
- Arbitrary(gen map (_.asInstanceOf[Number]))
- // XXX TODO - restore BigInt and BigDecimal
- // Arbitrary(oneOf(arbBigInt.arbitrary :: (arbs map (_.arbitrary) map toNumber) : _*))
- }
-
- /** Generates an arbitrary property */
- implicit lazy val arbProp: Arbitrary[Prop] = {
- import Prop._
- val undecidedOrPassed = forAll { b: Boolean =>
- b ==> true
- }
- Arbitrary(frequency(
- (4, falsified),
- (4, passed),
- (3, proved),
- (3, undecidedOrPassed),
- (2, undecided),
- (1, exception(null))
- ))
- }
-
- /** Arbitrary instance of test parameters */
- implicit lazy val arbTestParameters: Arbitrary[Test.Parameters] =
- Arbitrary(for {
- _minSuccTests <- choose(10,200)
- _maxDiscardRatio <- choose(0.2f,10f)
- _minSize <- choose(0,500)
- sizeDiff <- choose(0,500)
- _maxSize <- choose(_minSize, _minSize + sizeDiff)
- _workers <- choose(1,4)
- } yield new Test.Parameters.Default {
- override val minSuccessfulTests = _minSuccTests
- override val maxDiscardRatio = _maxDiscardRatio
- override val minSize = _minSize
- override val maxSize = _maxSize
- override val workers = _workers
- })
-
- /** Arbitrary instance of gen params */
- implicit lazy val arbGenParams: Arbitrary[Gen.Parameters] =
- Arbitrary(for {
- sz <- arbitrary[Int] suchThat (_ >= 0)
- } yield (new Gen.Parameters.Default {
- override val size = sz
- }))
-
-
- // Higher-order types //
-
- /** Arbitrary instance of [[org.scalacheck.Gen]] */
- implicit def arbGen[T](implicit a: Arbitrary[T]): Arbitrary[Gen[T]] =
- Arbitrary(frequency(
- (5, arbitrary[T] map (const(_))),
- (1, Gen.fail)
- ))
-
- /** Arbitrary instance of the Option type */
- implicit def arbOption[T](implicit a: Arbitrary[T]): Arbitrary[Option[T]] =
- Arbitrary(sized(n =>
- // When n is larger, make it less likely that we generate None,
- // but still do it some of the time. When n is zero, we always
- // generate None, since it's the smallest value.
- frequency(
- (n, resize(n / 2, arbitrary[T]).map(Some(_))),
- (1, const(None)))))
-
- /** Arbitrary instance of the Either type */
- implicit def arbEither[T, U](implicit at: Arbitrary[T], au: Arbitrary[U]): Arbitrary[Either[T, U]] =
- Arbitrary(oneOf(arbitrary[T].map(Left(_)), arbitrary[U].map(Right(_))))
-
- /** Arbitrary instance of any [[org.scalacheck.util.Buildable]] container
- * (such as lists, arrays, streams, etc). The maximum size of the container
- * depends on the size generation parameter. */
- implicit def arbContainer[C[_],T](implicit
- a: Arbitrary[T], b: Buildable[T,C], t: C[T] => Traversable[T]
- ): Arbitrary[C[T]] = Arbitrary(containerOf[C,T](arbitrary[T]))
-
- /** Arbitrary instance of any [[org.scalacheck.util.Buildable2]] container
- * (such as maps, etc). The maximum size of the container depends on the size
- * generation parameter. */
- implicit def arbContainer2[C[_,_],T,U](implicit
- a: Arbitrary[(T,U)], b: Buildable2[T,U,C], t: C[T,U] => Traversable[(T,U)]
- ): Arbitrary[C[T,U]] = Arbitrary(containerOf[C,T,U](arbitrary[(T,U)]))
-
- // Functions //
-
- /** Arbitrary instance of Function1 */
- implicit def arbFunction1[T1,R](implicit a: Arbitrary[R]
- ): Arbitrary[T1 => R] = Arbitrary(
- for(r <- arbitrary[R]) yield (t1: T1) => r
- )
-
- /** Arbitrary instance of Function2 */
- implicit def arbFunction2[T1,T2,R](implicit a: Arbitrary[R]
- ): Arbitrary[(T1,T2) => R] = Arbitrary(
- for(r <- arbitrary[R]) yield (t1: T1, t2: T2) => r
- )
-
- /** Arbitrary instance of Function3 */
- implicit def arbFunction3[T1,T2,T3,R](implicit a: Arbitrary[R]
- ): Arbitrary[(T1,T2,T3) => R] = Arbitrary(
- for(r <- arbitrary[R]) yield (t1: T1, t2: T2, t3: T3) => r
- )
-
- /** Arbitrary instance of Function4 */
- implicit def arbFunction4[T1,T2,T3,T4,R](implicit a: Arbitrary[R]
- ): Arbitrary[(T1,T2,T3,T4) => R] = Arbitrary(
- for(r <- arbitrary[R]) yield (t1: T1, t2: T2, t3: T3, t4: T4) => r
- )
-
- /** Arbitrary instance of Function5 */
- implicit def arbFunction5[T1,T2,T3,T4,T5,R](implicit a: Arbitrary[R]
- ): Arbitrary[(T1,T2,T3,T4,T5) => R] = Arbitrary(
- for(r <- arbitrary[R]) yield (t1: T1, t2: T2, t3: T3, t4: T4, t5: T5) => r
- )
-
-
- // Tuples //
-
- /** Arbitrary instance of 2-tuple */
- implicit def arbTuple2[T1,T2](implicit
- a1: Arbitrary[T1], a2: Arbitrary[T2]
- ): Arbitrary[(T1,T2)] =
- Arbitrary(for {
- t1 <- arbitrary[T1]
- t2 <- arbitrary[T2]
- } yield (t1,t2))
-
- /** Arbitrary instance of 3-tuple */
- implicit def arbTuple3[T1,T2,T3](implicit
- a1: Arbitrary[T1], a2: Arbitrary[T2], a3: Arbitrary[T3]
- ): Arbitrary[(T1,T2,T3)] =
- Arbitrary(for {
- t1 <- arbitrary[T1]
- t2 <- arbitrary[T2]
- t3 <- arbitrary[T3]
- } yield (t1,t2,t3))
-
- /** Arbitrary instance of 4-tuple */
- implicit def arbTuple4[T1,T2,T3,T4](implicit
- a1: Arbitrary[T1], a2: Arbitrary[T2], a3: Arbitrary[T3], a4: Arbitrary[T4]
- ): Arbitrary[(T1,T2,T3,T4)] =
- Arbitrary(for {
- t1 <- arbitrary[T1]
- t2 <- arbitrary[T2]
- t3 <- arbitrary[T3]
- t4 <- arbitrary[T4]
- } yield (t1,t2,t3,t4))
-
- /** Arbitrary instance of 5-tuple */
- implicit def arbTuple5[T1,T2,T3,T4,T5](implicit
- a1: Arbitrary[T1], a2: Arbitrary[T2], a3: Arbitrary[T3], a4: Arbitrary[T4],
- a5: Arbitrary[T5]
- ): Arbitrary[(T1,T2,T3,T4,T5)] =
- Arbitrary(for {
- t1 <- arbitrary[T1]
- t2 <- arbitrary[T2]
- t3 <- arbitrary[T3]
- t4 <- arbitrary[T4]
- t5 <- arbitrary[T5]
- } yield (t1,t2,t3,t4,t5))
-
- /** Arbitrary instance of 6-tuple */
- implicit def arbTuple6[T1,T2,T3,T4,T5,T6](implicit
- a1: Arbitrary[T1], a2: Arbitrary[T2], a3: Arbitrary[T3], a4: Arbitrary[T4],
- a5: Arbitrary[T5], a6: Arbitrary[T6]
- ): Arbitrary[(T1,T2,T3,T4,T5,T6)] =
- Arbitrary(for {
- t1 <- arbitrary[T1]
- t2 <- arbitrary[T2]
- t3 <- arbitrary[T3]
- t4 <- arbitrary[T4]
- t5 <- arbitrary[T5]
- t6 <- arbitrary[T6]
- } yield (t1,t2,t3,t4,t5,t6))
-
- /** Arbitrary instance of 7-tuple */
- implicit def arbTuple7[T1,T2,T3,T4,T5,T6,T7](implicit
- a1: Arbitrary[T1], a2: Arbitrary[T2], a3: Arbitrary[T3], a4: Arbitrary[T4],
- a5: Arbitrary[T5], a6: Arbitrary[T6], a7: Arbitrary[T7]
- ): Arbitrary[(T1,T2,T3,T4,T5,T6,T7)] =
- Arbitrary(for {
- t1 <- arbitrary[T1]
- t2 <- arbitrary[T2]
- t3 <- arbitrary[T3]
- t4 <- arbitrary[T4]
- t5 <- arbitrary[T5]
- t6 <- arbitrary[T6]
- t7 <- arbitrary[T7]
- } yield (t1,t2,t3,t4,t5,t6,t7))
-
- /** Arbitrary instance of 8-tuple */
- implicit def arbTuple8[T1,T2,T3,T4,T5,T6,T7,T8](implicit
- a1: Arbitrary[T1], a2: Arbitrary[T2], a3: Arbitrary[T3], a4: Arbitrary[T4],
- a5: Arbitrary[T5], a6: Arbitrary[T6], a7: Arbitrary[T7], a8: Arbitrary[T8]
- ): Arbitrary[(T1,T2,T3,T4,T5,T6,T7,T8)] =
- Arbitrary(for {
- t1 <- arbitrary[T1]
- t2 <- arbitrary[T2]
- t3 <- arbitrary[T3]
- t4 <- arbitrary[T4]
- t5 <- arbitrary[T5]
- t6 <- arbitrary[T6]
- t7 <- arbitrary[T7]
- t8 <- arbitrary[T8]
- } yield (t1,t2,t3,t4,t5,t6,t7,t8))
-
- /** Arbitrary instance of 9-tuple */
- implicit def arbTuple9[T1,T2,T3,T4,T5,T6,T7,T8,T9](implicit
- a1: Arbitrary[T1], a2: Arbitrary[T2], a3: Arbitrary[T3], a4: Arbitrary[T4],
- a5: Arbitrary[T5], a6: Arbitrary[T6], a7: Arbitrary[T7], a8: Arbitrary[T8],
- a9: Arbitrary[T9]
- ): Arbitrary[(T1,T2,T3,T4,T5,T6,T7,T8,T9)] =
- Arbitrary(for {
- t1 <- arbitrary[T1]
- t2 <- arbitrary[T2]
- t3 <- arbitrary[T3]
- t4 <- arbitrary[T4]
- t5 <- arbitrary[T5]
- t6 <- arbitrary[T6]
- t7 <- arbitrary[T7]
- t8 <- arbitrary[T8]
- t9 <- arbitrary[T9]
- } yield (t1,t2,t3,t4,t5,t6,t7,t8,t9))
-
-}
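For context, the implicit instances defined in the file above are normally consumed through Arbitrary.arbitrary and composed with a for-comprehension. A minimal sketch, assuming a hypothetical Point case class that is not part of ScalaCheck or of this patch:

import org.scalacheck.Arbitrary
import org.scalacheck.Arbitrary.arbitrary

// Hypothetical user type, used only to illustrate how instances compose.
case class Point(x: Int, y: Int)

object PointInstances {
  // arbitrary[Int] resolves the implicit Arbitrary[Int]; the for-comprehension
  // runs over Gen, so Arbitrary(...) wraps a Gen[Point].
  implicit val arbPoint: Arbitrary[Point] = Arbitrary(
    for {
      x <- arbitrary[Int]
      y <- arbitrary[Int]
    } yield Point(x, y)
  )
}

With arbPoint in scope, arbitrary[Point] and the derived tuple instances above (for example Arbitrary[(Point, Point)]) resolve automatically.
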
diff --git a/src/partest-extras/scala/org/scalacheck/Commands.scala b/src/partest-extras/scala/org/scalacheck/Commands.scala
deleted file mode 100644
index 5ff3a397e5..0000000000
--- a/src/partest-extras/scala/org/scalacheck/Commands.scala
+++ /dev/null
@@ -1,146 +0,0 @@
-/*-------------------------------------------------------------------------*\
-** ScalaCheck **
-** Copyright (c) 2007-2014 Rickard Nilsson. All rights reserved. **
-** http://www.scalacheck.org **
-** **
-** This software is released under the terms of the Revised BSD License. **
-** There is NO WARRANTY. See the file LICENSE for the full text. **
-\*------------------------------------------------------------------------ */
-
-package org.scalacheck
-
-/** See User Guide for usage examples */
-@deprecated("Will be replaced with a new implementation in 1.12.0", "1.11.4")
-trait Commands extends Prop {
-
-  /** The abstract state data type. This type must be immutable and
-  * must be named State. It is the state type that encodes the
-  * abstract state, which should model all the features we need from
-  * the real state, the system under test. We should leave out all
-  * details that aren't needed for specifying our pre- and
-  * postconditions. */
- type State <: AnyRef
-
- class Binding(private val key: State) {
- def get: Any = bindings.find(_._1 eq key) match {
- case None => sys.error("No value bound")
- case Some(x) => x._2
- }
- }
-
- /** Abstract commands are defined as subtypes of the traits Command or SetCommand.
- * Each command must have a run method and a method that returns the new abstract
- * state, as it should look after the command has been run.
- * A command can also define a precondition that states how the current
- * abstract state must look if the command should be allowed to run.
- * Finally, we can also define a postcondition which verifies that the
-  * system under test is in a correct state after the command execution. */
- trait Command {
-
- /** Used internally. */
- protected[Commands] def run_(s: State) = run(s)
-
- def run(s: State): Any
- def nextState(s: State): State
-
- /** Returns all preconditions merged into a single function */
- def preCondition: (State => Boolean) =
- s => preConditions.toList.forall(_.apply(s))
-
- /** A precondition is a function that
-  * takes the current abstract state as a parameter and returns a boolean
- * that says if the precondition is fulfilled or not. You can add several
- * conditions to the precondition list */
- val preConditions = new collection.mutable.ListBuffer[State => Boolean]
-
- /** Returns all postconditions merged into a single function */
- def postCondition: (State,State,Any) => Prop =
- (s0,s1,r) => Prop.all(postConditions.map(_.apply(s0,s1,r)): _*)
-
- /** A postcondition is a function that
- * takes three parameters, s0, s1 and r. s0 is the abstract state before
- * the command was run, s1 is the abstract state after the command was
- * run, and r is the result from the command's run
- * method. The postcondition function should return a Boolean (or
- * a Prop instance) that says if the condition holds or not. You can add several
- * conditions to the postConditions list. */
- val postConditions = new collection.mutable.ListBuffer[(State,State,Any) => Prop]
- }
-
- /** A command that binds its result for later use */
- trait SetCommand extends Command {
- /** Used internally. */
- protected[Commands] final override def run_(s: State) = {
- val r = run(s)
- bindings += ((s,r))
- r
- }
-
- final def nextState(s: State) = nextState(s, new Binding(s))
- def nextState(s: State, b: Binding): State
- }
-
- private case class Cmds(cs: List[Command], ss: List[State]) {
- override def toString = cs.map(_.toString).mkString(", ")
- }
-
- private val bindings = new scala.collection.mutable.ListBuffer[(State,Any)]
-
- private def initState() = {
- bindings.clear()
- initialState()
- }
-
- private def genCmds: Gen[Cmds] = {
- def sizedCmds(s: State, sz: Int): Gen[Cmds] = {
- if(sz <= 0) Gen.const(Cmds(Nil, Nil)) else for {
- c <- genCommand(s) suchThat (_.preCondition(s))
- Cmds(cs,ss) <- sizedCmds(c.nextState(s), sz-1)
- } yield Cmds(c::cs, s::ss)
- }
-
- Gen.sized(sz => sizedCmds(initialState(), sz))
- }
-
- private def validCmds(s: State, cs: List[Command]): Option[Cmds] =
- cs match {
- case Nil => Some(Cmds(Nil, s::Nil))
- case c::_ if !c.preCondition(s) => None
- case c::cmds => for {
- Cmds(_, ss) <- validCmds(c.nextState(s), cmds)
- } yield Cmds(cs, s::ss)
- }
-
- private def runCommands(cmds: Cmds): Prop = Prop.all {
- cmds.cs.indices.map { i =>
- val (c,s) = (cmds.cs(i), cmds.ss(i))
- c.postCondition(s,c.nextState(s),c.run_(s))
- } : _*
- }
-
- private def commandsProp: Prop = {
- def shrinkCmds(cmds: Cmds) =
- Shrink.shrink(cmds.cs)(Shrink.shrinkContainer).flatMap { cs =>
- validCmds(initialState(), cs).toList
- }
-
- Prop.forAllShrink(genCmds label "COMMANDS", shrinkCmds)(runCommands _)
- }
-
- def apply(p: Gen.Parameters) = commandsProp(p)
-
- /** initialState should reset the system under test to a well defined
- * initial state, and return the abstract version of that state. */
- def initialState(): State
-
- /** The command generator. Given an abstract state, the generator
- * should return a command that is allowed to run in that state. Note that
-  * it is still necessary to define preconditions on the commands if there
-  * are any. The generator just gives a hint of which commands are
-  * suitable for a given state; the preconditions will still be checked before
-  * a command runs. Sometimes you may want to adjust the distribution of
- * your command generator according to the state, or do other calculations
- * based on the state. */
- def genCommand(s: State): Gen[Command]
-
-}
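For reference, a property written against the deprecated Commands trait above tends to look roughly like the following sketch. The Counter class is hypothetical and stands in for a real system under test; all names here are illustrative only.

import org.scalacheck.{Commands, Gen, Prop}

// Hypothetical system under test.
class Counter {
  private var n = 0
  def inc(): Int = { n += 1; n }
  def reset(): Unit = { n = 0 }
}

object CounterSpec extends Commands {
  val counter = new Counter

  // Abstract model of the counter: just the expected value.
  // State must be an AnyRef, so the boxed Integer is used here.
  type State = java.lang.Integer

  def initialState(): State = { counter.reset(); Integer.valueOf(0) }

  case object Increment extends Command {
    def run(s: State): Any = counter.inc()
    def nextState(s: State): State = Integer.valueOf(s + 1)
    // The value reported by the counter must match the model's prediction.
    postConditions += ((s0, s1, r) => Prop(r == s1))
  }

  def genCommand(s: State): Gen[Command] = Gen.const(Increment)
}

Since Commands extends Prop, CounterSpec.check would then run random command sequences against the counter.
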
diff --git a/src/partest-extras/scala/org/scalacheck/Commands2.scala b/src/partest-extras/scala/org/scalacheck/Commands2.scala
deleted file mode 100644
index 67393a7a70..0000000000
--- a/src/partest-extras/scala/org/scalacheck/Commands2.scala
+++ /dev/null
@@ -1,150 +0,0 @@
-/*-------------------------------------------------------------------------*\
-** ScalaCheck **
-** Copyright (c) 2007-2014 Rickard Nilsson. All rights reserved. **
-** http://www.scalacheck.org **
-** **
-** This software is released under the terms of the Revised BSD License. **
-** There is NO WARRANTY. See the file LICENSE for the full text. **
-\*------------------------------------------------------------------------ */
-
-package org.scalacheck
-
-private[scalacheck] trait Commands2 {
-
- /** The abstract state type. Must be immutable.
- * The [[Commands2.State]] type should model the state of the system under test (SUT).
- * It should leave out all details that aren't needed for specifying our
- * pre- and postconditions. */
- type State
-
- /** A type representing one instance of the system under test (SUT).
- * The [[Commands2.System]] type should be a proxy to the actual system under test.
- * It is used in the postconditions to verify that the real system
- * behaves according to specification. It should be possible to have
- * up to [[Commands2.maxSystemInstanceCount]] co-existing instances of the System
- * type, and each System instance should be a proxy to a distinct
- * SUT instance. There should be no dependencies between the System
- * instances, as they might be used in parallel by ScalaCheck.
- * System instances are created by [[Commands2.newSystemInstance]] and destroyed by
- * [[Commands2.destroySystemInstance]]. [[Commands2.newSystemInstance]] and
- * [[Commands2.destroySystemInstance]] might be called at any time by ScalaCheck,
- * as long as [[Commands2.maxSystemInstanceCount]] isn't violated. */
- type System
-
- /** The maximum number of concurrent [[Commands2.System]] instances allowed to exist. */
- def maxSystemInstanceCount: Int
-
- /** Should create a new [[Commands2.System]] instance with an internal state that
- * corresponds to the provided abstract state instance. The provided state
- * is guaranteed to fulfill [[Commands2.initialPreCondition]], and
- * [[Commands2.newSystemInstance]] will never be called if there already
-   * are [[Commands2.maxSystemInstanceCount]] instances of [[Commands2.System]]. */
- def newSystemInstance(state: State): System
-
- /** Should destroy the given SUT, so that a new [[Commands2.System]] instance can be
- * created with [[Commands2.newSystemInstance]]. */
- def destroySystemInstance(system: System): Unit
-
- /** The precondition for the initial state, when no commands yet have
-   * run. This is used by ScalaCheck when command sequences are shrunk
- * and the first state might differ from what is returned from
-   * [[Commands2.genInitialState]]. */
- def initialPreCondition(state: State): Boolean
-
- /** A generator that should produce an initial [[Commands2.State]] instance that is
- * usable by [[Commands2.newSystemInstance]] to create a new system under test.
- * The state returned by this generator is always checked with the
- * [[Commands2.initialPreCondition]] method before it is used. */
- def genInitialState: Gen[State]
-
- /** A generator that, given the current abstract state, should produce
- * a suitable Command instance. */
- def genCommand(state: State): Gen[Command]
-
- /** Abstract commands are defined as subtypes of the trait [[Commands2.Command]].
- * Each command must have a run method and a method
- * that returns the new abstract state, as it is supposed to look after
- * the command has been run. A command can also define a precondition
- * that defines how the current abstract state must look if the command
- * should be allowed to run. Finally, you can also define a postcondition
- * that verifies that the system under test is in a correct state after
- * the command execution. */
- trait Command {
- /** Runs this command in the system under test,
- * represented by the provided [[Commands2.System]] instance. This method
- * can return any value as result. The returned value will be
- * used by the postcondition to decide if the system behaves as
- * expected. */
- def run(state: State, system: System): Any
-
- /** Returns a new abstract [[Commands2.State]] instance that represents the
- * state of the system after this command has run. */
- def nextState(state: State): State
-
- /** The precondition that decides if this command is allowed to run
- * when the system under test is in the specified (abstract) state. */
- def preCondition(state: State): Boolean
-
- /** The postcondition that decides if the system under test behaved
- * correctly when the command ran.
- * @param s0 The abstract state as it looked before this command ran.
- * @param s1 The abstract state as it looked after this command ran.
- * @param system The proxy for the system under test. The postcondition
- * can query the system for its current state, but care must be taken
- * not to mutate the system under test in any way.
- * @param result The result returned from the [[Command.run]] method.
- */
- def postCondition(s0: State, s1: State, system: System, result: Any): Prop
- }
-
-/* WIP
- private case class Cmds(cs: List[Command], ss: List[State]) {
- override def toString = cs.map(_.toString).mkString(", ")
- }
-
- private val bindings = new scala.collection.mutable.ListBuffer[(State,Any)]
-
- private def initState() = {
- bindings.clear()
- initialState()
- }
-
- private def genCmds: Gen[Cmds] = {
- def sizedCmds(s: State, sz: Int): Gen[Cmds] = {
- if(sz <= 0) Gen.const(Cmds(Nil, Nil)) else for {
- c <- genCommand(s) suchThat (_.preCondition(s))
- Cmds(cs,ss) <- sizedCmds(c.nextState(s), sz-1)
- } yield Cmds(c::cs, s::ss)
- }
-
- Gen.sized(sz => sizedCmds(initialState(), sz))
- }
-
- private def validCmds(s: State, cs: List[Command]): Option[Cmds] =
- cs match {
- case Nil => Some(Cmds(Nil, s::Nil))
- case c::_ if !c.preCondition(s) => None
- case c::cmds => for {
- Cmds(_, ss) <- validCmds(c.nextState(s), cmds)
- } yield Cmds(cs, s::ss)
- }
-
- private def runCommands(cmds: Cmds): Prop = Prop.all {
- cmds.cs.indices.map { i =>
- val (c,s) = (cmds.cs(i), cmds.ss(i))
- c.postCondition(s,c.nextState(s),c.run_(s))
- } : _*
- }
-
- private def commandsProp: Prop = {
- def shrinkCmds(cmds: Cmds) =
- Shrink.shrink(cmds.cs)(Shrink.shrinkContainer).flatMap { cs =>
- validCmds(initialState(), cs).toList
- }
-
- Prop.forAllShrink(genCmds label "COMMANDS", shrinkCmds)(runCommands _)
- }
-
- def apply(p: Prop.Params) = commandsProp(p)
-*/
-}
diff --git a/src/partest-extras/scala/org/scalacheck/Gen.scala b/src/partest-extras/scala/org/scalacheck/Gen.scala
deleted file mode 100644
index ba82c9ea95..0000000000
--- a/src/partest-extras/scala/org/scalacheck/Gen.scala
+++ /dev/null
@@ -1,813 +0,0 @@
-/*-------------------------------------------------------------------------*\
-** ScalaCheck **
-** Copyright (c) 2007-2014 Rickard Nilsson. All rights reserved. **
-** http://www.scalacheck.org **
-** **
-** This software is released under the terms of the Revised BSD License. **
-** There is NO WARRANTY. See the file LICENSE for the full text. **
-\*------------------------------------------------------------------------ */
-
-package org.scalacheck
-
-import util.{Buildable, Buildable2}
-import scala.collection.immutable.TreeMap
-
-sealed trait Gen[+T] {
-
- //// Private interface ////
-
- import Gen.{R, r, gen}
-
- /** Just an alias */
- private type P = Gen.Parameters
-
- /** Should be a copy of R.sieve. Used internally in Gen when some generators
-  * with suchThat-clauses are created (when R is not available). This method
-  * actually breaks covariance, but since it will only ever be
- * called with a value of exactly type T, it is OK. */
- protected def sieveCopy(x: Any): Boolean = true
-
- private[scalacheck] def doApply(p: P): R[T]
-
-
- //// Public interface ////
-
- /** A class supporting filtered operations. */
- final class WithFilter(p: T => Boolean) {
- def map[U](f: T => U): Gen[U] = Gen.this.suchThat(p).map(f)
- def flatMap[U](f: T => Gen[U]): Gen[U] = Gen.this.suchThat(p).flatMap(f)
- def withFilter(q: T => Boolean): WithFilter = Gen.this.withFilter(x => p(x) && q(x))
- }
-
- /** Evaluate this generator with the given parameters */
- def apply(p: Gen.Parameters): Option[T] = doApply(p).retrieve
-
- /** Create a new generator by mapping the result of this generator */
- def map[U](f: T => U): Gen[U] = gen { p => doApply(p).map(f) }
-
- /** Create a new generator by flat-mapping the result of this generator */
- def flatMap[U](f: T => Gen[U]): Gen[U] = gen { p =>
- doApply(p).flatMap(t => f(t).doApply(p))
- }
-
- /** Create a new generator that uses this generator to produce a value
- * that fulfills the given condition. If the condition is not fulfilled,
- * the generator fails (returns None). */
- def filter(p: T => Boolean): Gen[T] = suchThat(p)
-
- /** Creates a non-strict filtered version of this generator. */
- def withFilter(p: T => Boolean): WithFilter = new WithFilter(p)
-
- /** Create a new generator that uses this generator to produce a value
- * that fulfills the given condition. If the condition is not fulfilled,
- * the generator fails (returns None). This method is identical to
-   * [[Gen.filter]]. */
- def suchThat(f: T => Boolean): Gen[T] = new Gen[T] {
- def doApply(p: P) = {
- val res = Gen.this.doApply(p)
- res.copy(s = { x:T => res.sieve(x) && f(x) })
- }
- override def sieveCopy(x: Any) =
- try Gen.this.sieveCopy(x) && f(x.asInstanceOf[T])
- catch { case _: java.lang.ClassCastException => false }
- }
-
- /** Create a generator that calls this generator repeatedly until
- * the given condition is fulfilled. The generated value is then
- * returned. Use this combinator with care, since it may result
- * in infinite loops. */
- def retryUntil(p: T => Boolean): Gen[T] = flatMap { t =>
- if (p(t)) Gen.const(t).suchThat(p) else retryUntil(p)
- }
-
- def sample: Option[T] = doApply(Gen.Parameters.default).retrieve
-
- /** Returns a new property that holds if and only if both this
-   * and the given generator generate the same result, or both
- * generators generate no result. */
- def ==[U](g: Gen[U]) = Prop { prms =>
- (doApply(prms).retrieve, g.doApply(prms).retrieve) match {
- case (None,None) => Prop.proved(prms)
- case (Some(r1),Some(r2)) if r1 == r2 => Prop.proved(prms)
- case _ => Prop.falsified(prms)
- }
- }
-
- def !=[U](g: Gen[U]) = Prop.forAll(this)(r => Prop.forAll(g)(_ != r))
-
- def !==[U](g: Gen[U]) = Prop { prms =>
- (doApply(prms).retrieve, g.doApply(prms).retrieve) match {
- case (None,None) => Prop.falsified(prms)
- case (Some(r1),Some(r2)) if r1 == r2 => Prop.falsified(prms)
- case _ => Prop.proved(prms)
- }
- }
-
- /** Put a label on the generator to make test reports clearer */
- def label(l: String) = new Gen[T] {
- def doApply(p: P) = {
- val r = Gen.this.doApply(p)
- r.copy(l = r.labels + l)
- }
- override def sieveCopy(x: Any) = Gen.this.sieveCopy(x)
- }
-
- /** Put a label on the generator to make test reports clearer */
- def :|(l: String) = label(l)
-
- /** Put a label on the generator to make test reports clearer */
- def |:(l: String) = label(l)
-
- /** Put a label on the generator to make test reports clearer */
- def :|(l: Symbol) = label(l.toString.drop(1))
-
- /** Put a label on the generator to make test reports clearer */
- def |:(l: Symbol) = label(l.toString.drop(1))
-
-}
-
-object Gen {
-
- //// Private interface ////
-
- import Arbitrary.arbitrary
-
- /** Just an alias */
- private type P = Parameters
-
- private[scalacheck] trait R[+T] {
- def labels: Set[String] = Set()
- def sieve[U >: T]: U => Boolean = _ => true
- protected def result: Option[T]
-
- def retrieve = result.filter(sieve)
-
- def copy[U >: T](
- l: Set[String] = this.labels,
- s: U => Boolean = this.sieve,
- r: Option[U] = this.result
- ): R[U] = new R[U] {
- override val labels = l
- override def sieve[V >: U] = { x:Any =>
- try s(x.asInstanceOf[U])
- catch { case _: java.lang.ClassCastException => false }
- }
- val result = r
- }
-
- def map[U](f: T => U): R[U] = r(retrieve.map(f)).copy(l = labels)
-
- def flatMap[U](f: T => R[U]): R[U] = retrieve match {
- case None => r(None).copy(l = labels)
- case Some(t) =>
- val r = f(t)
- r.copy(l = labels ++ r.labels)
- }
- }
-
- private[scalacheck] def r[T](r: Option[T]): R[T] = new R[T] {
- val result = r
- }
-
- /** Generator factory method */
- private[scalacheck] def gen[T](f: P => R[T]): Gen[T] = new Gen[T] {
- def doApply(p: P) = f(p)
- }
-
- //// Public interface ////
-
- /** Generator parameters, used by [[org.scalacheck.Gen.apply]] */
- trait Parameters {
-
- /** The size of the generated value. Generator implementations are allowed
- * to freely interpret (or ignore) this value. During test execution, the
- * value of this parameter is controlled by [[Test.Parameters.minSize]] and
- * [[Test.Parameters.maxSize]]. */
- val size: Int
-
- /** Create a copy of this [[Gen.Parameters]] instance with
- * [[Gen.Parameters.size]] set to the specified value. */
- def withSize(size: Int): Parameters = cp(size = size)
-
- /** The random number generator used. */
- val rng: scala.util.Random
-
- /** Create a copy of this [[Gen.Parameters]] instance with
- * [[Gen.Parameters.rng]] set to the specified value. */
- def withRng(rng: scala.util.Random): Parameters = cp(rng = rng)
-
- /** Change the size parameter.
- * @deprecated Use [[Gen.Parameters.withSize]] instead. */
- @deprecated("Use withSize instead.", "1.11.2")
- def resize(newSize: Int): Parameters = withSize(newSize)
-
- // private since we can't guarantee binary compatibility for this one
- private case class cp(
- size: Int = size,
- rng: scala.util.Random = rng
- ) extends Parameters
- }
-
- /** Provides methods for creating [[org.scalacheck.Gen.Parameters]] values */
- object Parameters {
-    /** Default generator parameters trait. This can be overridden if you
- * need to tweak the parameters. */
- trait Default extends Parameters {
- val size: Int = 100
- val rng: scala.util.Random = scala.util.Random
- }
-
- /** Default generator parameters instance. */
- val default: Parameters = new Default {}
- }
-
- /** A wrapper type for range types */
- trait Choose[T] {
- /** Creates a generator that returns a value in the given inclusive range */
- def choose(min: T, max: T): Gen[T]
- }
-
- /** Provides implicit [[org.scalacheck.Gen.Choose]] instances */
- object Choose {
-
- private def chLng(l: Long, h: Long)(p: P): R[Long] = {
- if (h < l) r(None) else {
- val d = h - l + 1
- if (d <= 0) {
- var n = p.rng.nextLong
- while (n < l || n > h) {
- n = p.rng.nextLong
- }
- r(Some(n))
- } else {
- r(Some(l + math.abs(p.rng.nextLong % d)))
- }
- }
- }
-
- private def chDbl(l: Double, h: Double)(p: P): R[Double] = {
- val d = h-l
- if (d < 0 || d > Double.MaxValue) r(None)
- else if (d == 0) r(Some(l))
- else r(Some(p.rng.nextDouble * (h-l) + l))
- }
-
- implicit val chooseLong: Choose[Long] = new Choose[Long] {
- def choose(low: Long, high: Long) =
- gen(chLng(low,high)).suchThat(x => x >= low && x <= high)
- }
- implicit val chooseInt: Choose[Int] = new Choose[Int] {
- def choose(low: Int, high: Int) =
- gen(chLng(low,high)).map(_.toInt).suchThat(x => x >= low && x <= high)
- }
- implicit val chooseByte: Choose[Byte] = new Choose[Byte] {
- def choose(low: Byte, high: Byte) =
- gen(chLng(low,high)).map(_.toByte).suchThat(x => x >= low && x <= high)
- }
- implicit val chooseShort: Choose[Short] = new Choose[Short] {
- def choose(low: Short, high: Short) =
- gen(chLng(low,high)).map(_.toShort).suchThat(x => x >= low && x <= high)
- }
- implicit val chooseChar: Choose[Char] = new Choose[Char] {
- def choose(low: Char, high: Char) =
- gen(chLng(low,high)).map(_.toChar).suchThat(x => x >= low && x <= high)
- }
- implicit val chooseDouble: Choose[Double] = new Choose[Double] {
- def choose(low: Double, high: Double) =
- gen(chDbl(low,high)).suchThat(x => x >= low && x <= high)
- }
- implicit val chooseFloat: Choose[Float] = new Choose[Float] {
- def choose(low: Float, high: Float) =
- gen(chDbl(low,high)).map(_.toFloat).suchThat(x => x >= low && x <= high)
- }
-
- /** Transform a Choose[T] to a Choose[U] where T and U are two isomorphic types
- * whose relationship is described by the provided transformation functions.
- * (exponential functor map) */
- def xmap[T, U](from: T => U, to: U => T)(implicit c: Choose[T]): Choose[U] = new Choose[U] {
- def choose(low: U, high: U) =
- c.choose(to(low), to(high)).map(from)
- }
- }
-
-
- //// Various Generator Combinators ////
-
- /** A generator that always generates the given value */
- @deprecated("Use Gen.const instead", "1.11.0")
- def value[T](x: T): Gen[T] = const(x)
-
- /** A generator that always generates the given value */
- implicit def const[T](x: T): Gen[T] = gen(_ => r(Some(x))).suchThat(_ == x)
-
- /** A generator that never generates a value */
- def fail[T]: Gen[T] = gen(_ => r(None)).suchThat(_ => false)
-
- /** A generator that generates a random value in the given (inclusive)
- * range. If the range is invalid, the generator will not generate
- * any value. */
- def choose[T](min: T, max: T)(implicit c: Choose[T]): Gen[T] =
- c.choose(min, max)
-
- /** Sequences generators. If any of the given generators fails, the
- * resulting generator will also fail. */
- def sequence[C[_],T](gs: Traversable[Gen[T]])(implicit b: Buildable[T,C]): Gen[C[T]] = {
- val g = gen { p =>
- gs.foldLeft(r(Some(collection.immutable.Vector.empty[T]))) {
- case (rs,g) => g.doApply(p).flatMap(r => rs.map(_ :+ r))
- }
- }
- g.map(b.fromIterable)
- }
-
- /** Sequences generators. If any of the given generators fails, the
- * resulting generator will also fail. */
- def sequence[C[_,_],T,U](gs: Traversable[Gen[(T,U)]])(implicit b: Buildable2[T,U,C]): Gen[C[T,U]] = {
- val g = gen { p =>
- gs.foldLeft(r(Some(collection.immutable.Vector.empty[(T,U)]))) {
- case (rs,g) => g.doApply(p).flatMap(r => rs.map(_ :+ r))
- }
- }
- g.map(b.fromIterable)
- }
-
- /** Wraps a generator lazily. The given parameter is only evaluated once,
- * and not until the wrapper generator is evaluated. */
- def lzy[T](g: => Gen[T]): Gen[T] = {
- lazy val h = g
- gen { p => h.doApply(p) }
- }
-
- /** Wraps a generator for later evaluation. The given parameter is
- * evaluated each time the wrapper generator is evaluated. */
- def wrap[T](g: => Gen[T]) = gen { p => g.doApply(p) }
-
- /** Creates a generator that can access its generation parameters */
- def parameterized[T](f: Parameters => Gen[T]) = gen { p => f(p).doApply(p) }
-
- /** Creates a generator that can access its generation size */
- def sized[T](f: Int => Gen[T]) = gen { p => f(p.size).doApply(p) }
-
- /** A generator that returns the current generation size */
- lazy val size: Gen[Int] = sized { sz => sz }
-
- /** Creates a resized version of a generator */
- def resize[T](s: Int, g: Gen[T]) = gen(p => g.doApply(p.withSize(s)))
-
- /** Picks a random value from a list */
- def oneOf[T](xs: Seq[T]): Gen[T] =
- choose(0, xs.size-1).map(xs(_)).suchThat(xs.contains)
-
- /** Picks a random value from a list */
- def oneOf[T](t0: T, t1: T, tn: T*): Gen[T] = oneOf(t0 +: t1 +: tn)
-
- /** Picks a random generator from a list */
- def oneOf[T](g0: Gen[T], g1: Gen[T], gn: Gen[T]*): Gen[T] = {
- val gs = g0 +: g1 +: gn
- choose(0,gs.size-1).flatMap(gs(_)).suchThat(x => gs.exists(_.sieveCopy(x)))
- }
-
- /** Makes a generator result optional. Either `Some(T)` or `None` will be provided. */
- def option[T](g: Gen[T]): Gen[Option[T]] =
- oneOf[Option[T]](g.map(Some.apply), None)
-
- /** Chooses one of the given generators with a weighted random distribution */
- def frequency[T](gs: (Int,Gen[T])*): Gen[T] = {
- gs.filter(_._1 > 0) match {
- case Nil => fail
- case filtered =>
- var tot = 0l
- val tree: TreeMap[Long, Gen[T]] = {
- val builder = TreeMap.newBuilder[Long, Gen[T]]
- filtered.foreach {
- case (f, v) =>
- tot += f
- builder.+=((tot, v))
- }
- builder.result()
- }
- choose(1L, tot).flatMap(r => tree.from(r).head._2).suchThat { x =>
- gs.exists(_._2.sieveCopy(x))
- }
- }
- }
-
- /** Implicit convenience method for using the `frequency` method
- * like this:
- * {{{
- * frequency((1, "foo"), (3, "bar"))
- * }}}
- */
- implicit def freqTuple[T](t: (Int,T)): (Int,Gen[T]) = (t._1, const(t._2))
-
-
- //// List Generators ////
-
- /** Generates a container of any Traversable type for which there exists an
- * implicit [[org.scalacheck.util.Buildable]] instance. The elements in the
- * container will be generated by the given generator. The size of the
-   * generated container is limited by `n`. Depending on what kind of container
-   * is generated, the resulting container may contain fewer elements than
-   * `n`, but not more. If the given generator fails to generate a value, the
- * complete container generator will also fail. */
- def containerOfN[C[_],T](n: Int, g: Gen[T])(implicit
- evb: Buildable[T,C], evt: C[T] => Traversable[T]
- ): Gen[C[T]] =
- sequence[C,T](Traversable.fill(n)(g)) suchThat { c =>
- // TODO: Can we guarantee c.size == n (See issue #89)?
- c.forall(g.sieveCopy)
- }
-
- /** Generates a container of any Traversable type for which there exists an
- * implicit [[org.scalacheck.util.Buildable]] instance. The elements in the
- * container will be generated by the given generator. The size of the
- * container is bounded by the size parameter used when generating values. */
- def containerOf[C[_],T](g: Gen[T])(implicit
- evb: Buildable[T,C], evt: C[T] => Traversable[T]
- ): Gen[C[T]] =
- sized(s => choose(0,s).flatMap(containerOfN[C,T](_,g))) suchThat { c =>
- c.forall(g.sieveCopy)
- }
-
- /** Generates a non-empty container of any Traversable type for which there
- * exists an implicit [[org.scalacheck.util.Buildable]] instance. The
- * elements in the container will be generated by the given generator. The
- * size of the container is bounded by the size parameter used when
- * generating values. */
- def nonEmptyContainerOf[C[_],T](g: Gen[T])(implicit
- evb: Buildable[T,C], evt: C[T] => Traversable[T]
- ): Gen[C[T]] =
- sized(s => choose(1,s).flatMap(containerOfN[C,T](_,g))) suchThat { c =>
- c.size > 0 && c.forall(g.sieveCopy)
- }
-
- /** Generates a non-empty container of any Traversable type for which there
- * exists an implicit [[org.scalacheck.util.Buildable]] instance. The
- * elements in the container will be generated by the given generator. The
- * size of the container is bounded by the size parameter used when
- * generating values. */
- @deprecated("Use Gen.nonEmptyContainerOf instead", "1.11.0")
- def containerOf1[C[_],T](g: Gen[T])(implicit
- evb: Buildable[T,C], evt: C[T] => Traversable[T]
- ): Gen[C[T]] = nonEmptyContainerOf[C,T](g)
-
- /** Generates a container of any Traversable type for which there exists an
-   * implicit [[org.scalacheck.util.Buildable2]] instance. The elements in the
-   * container will be generated by the given generator. The size of the
-   * generated container is limited by `n`. Depending on what kind of container
-   * is generated, the resulting container may contain fewer elements than
-   * `n`, but not more. If the given generator fails to generate a value, the
- * complete container generator will also fail. */
- def containerOfN[C[_,_],T,U](n: Int, g: Gen[(T,U)])(implicit
- evb: Buildable2[T,U,C], evt: C[T,U] => Traversable[(T,U)]
- ): Gen[C[T,U]] =
- sequence[C,T,U](Traversable.fill(n)(g)).suchThat { c =>
- // TODO: Can we guarantee c.size == n (See issue #89)?
- c.forall(g.sieveCopy)
- }
-
- /** Generates a container of any Traversable type for which there exists
- * an implicit <code>Buildable2</code> instance. The elements in the
- * container will be generated by the given generator. The size of the
- * container is bounded by the size parameter used when generating values. */
- def containerOf[C[_,_],T,U](g: Gen[(T,U)])(implicit
- evb: Buildable2[T,U,C], evt: C[T,U] => Traversable[(T,U)]
- ): Gen[C[T,U]] =
- sized(s => choose(0,s).flatMap(containerOfN[C,T,U](_,g))) suchThat { c =>
- c.forall(g.sieveCopy)
- }
-
- /** Generates a non-empty container of any type for which there exists an
- * implicit <code>Buildable2</code> instance. The elements in the container
- * will be generated by the given generator. The size of the container is
- * bounded by the size parameter used when generating values. */
- def nonEmptyContainerOf[C[_,_],T,U](g: Gen[(T,U)])(implicit
- evb: Buildable2[T,U,C], evt: C[T,U] => Traversable[(T,U)]
- ): Gen[C[T,U]] =
- sized(s => choose(1,s).flatMap(containerOfN[C,T,U](_,g))) suchThat { c =>
- c.size > 0 && c.forall(g.sieveCopy)
- }
-
- /** Generates a list of random length. The maximum length depends on the
- * size parameter. This method is equal to calling
- * `containerOf[List,T](g)`. */
- def listOf[T](g: => Gen[T]) = containerOf[List,T](g)
-
- /** Generates a non-empty list of random length. The maximum length depends
- * on the size parameter. This method is equal to calling
- * `nonEmptyContainerOf[List,T](g)`. */
- def nonEmptyListOf[T](g: => Gen[T]) = nonEmptyContainerOf[List,T](g)
-
- /** Generates a non-empty list of random length. The maximum length depends
- * on the size parameter. This method is equal to calling
- * `nonEmptyContainerOf[List,T](g)`. */
- @deprecated("Use Gen.nonEmptyListOf instead", "1.11.0")
- def listOf1[T](g: => Gen[T]) = nonEmptyListOf[T](g)
-
- /** Generates a list of the given length. This method is equal to calling
- * `containerOfN[List,T](n,g)`. */
- def listOfN[T](n: Int, g: Gen[T]) = containerOfN[List,T](n,g)
-
- /** Generates a map of random length. The maximum length depends on the
- * size parameter. This method is equal to calling
- * <code>containerOf[Map,T,U](g)</code>. */
- def mapOf[T,U](g: => Gen[(T,U)]) = containerOf[Map,T,U](g)
-
- /** Generates a non-empty map of random length. The maximum length depends
- * on the size parameter. This method is equal to calling
- * <code>nonEmptyContainerOf[Map,T,U](g)</code>. */
- def nonEmptyMap[T,U](g: => Gen[(T,U)]) = nonEmptyContainerOf[Map,T,U](g)
-
-  /** Generates a map with at most the given number of elements. This method
- * is equal to calling <code>containerOfN[Map,T,U](n,g)</code>. */
- def mapOfN[T,U](n: Int, g: Gen[(T,U)]) = containerOfN[Map,T,U](n,g)
-
- /** A generator that picks a random number of elements from a list */
- def someOf[T](l: Iterable[T]) = choose(0,l.size).flatMap(pick(_,l))
-
-  /** A generator that picks a random number of elements from the given generators */
- def someOf[T](g1: Gen[T], g2: Gen[T], gs: Gen[T]*) =
- choose(0, gs.length+2).flatMap(pick(_, g1, g2, gs: _*))
-
- /** A generator that picks a given number of elements from a list, randomly */
- def pick[T](n: Int, l: Iterable[T]): Gen[Seq[T]] =
- if(n > l.size || n < 0) fail
- else (gen { p =>
- val b = new collection.mutable.ListBuffer[T]
- b ++= l
- while(b.length > n) b.remove(choose(0, b.length-1).doApply(p).retrieve.get)
- r(Some(b))
- }).suchThat(_.forall(x => l.exists(x == _)))
-
-  /** A generator that picks a given number of elements from the given generators, randomly */
- def pick[T](n: Int, g1: Gen[T], g2: Gen[T], gn: Gen[T]*): Gen[Seq[T]] = {
- val gs = g1 +: g2 +: gn
- pick(n, 0 until gs.size).flatMap(idxs =>
- sequence[List,T](idxs.toList.map(gs(_)))
- ).suchThat(_.forall(x => gs.exists(_.sieveCopy(x))))
- }
-
-
- //// Character Generators ////
-
- /** Generates a numerical character */
- def numChar: Gen[Char] = choose(48.toChar, 57.toChar)
-
- /** Generates an upper-case alpha character */
- def alphaUpperChar: Gen[Char] = choose(65.toChar, 90.toChar)
-
- /** Generates a lower-case alpha character */
- def alphaLowerChar: Gen[Char] = choose(97.toChar, 122.toChar)
-
- /** Generates an alpha character */
- def alphaChar = frequency((1,alphaUpperChar), (9,alphaLowerChar))
-
- /** Generates an alphanumerical character */
- def alphaNumChar = frequency((1,numChar), (9,alphaChar))
-
-
- //// String Generators ////
-
- /** Generates a string that starts with a lower-case alpha character,
- * and only contains alphanumerical characters */
- def identifier: Gen[String] = (for {
- c <- alphaLowerChar
- cs <- listOf(alphaNumChar)
- } yield (c::cs).mkString).suchThat(_.forall(c => c.isLetter || c.isDigit))
-
- /** Generates a string of alpha characters */
- def alphaStr: Gen[String] =
- listOf(alphaChar).map(_.mkString).suchThat(_.forall(_.isLetter))
-
- /** Generates a string of digits */
- def numStr: Gen[String] =
- listOf(numChar).map(_.mkString).suchThat(_.forall(_.isDigit))
-
-
- //// Number Generators ////
-
-  /** Generates positive numbers, uniformly distributed, with an
-   * upper bound given by the generation size parameter. */
- def posNum[T](implicit num: Numeric[T], c: Choose[T]): Gen[T] = {
- import num._
- sized(max => c.choose(one, fromInt(max)))
- }
-
-  /** Generates negative numbers, uniformly distributed, with a
-   * lower bound given by the negated generation size parameter. */
- def negNum[T](implicit num: Numeric[T], c: Choose[T]): Gen[T] = {
- import num._
- sized(max => c.choose(-fromInt(max), -one))
- }
-
- /** Generates numbers within the given inclusive range, with
- * extra weight on zero, +/- unity, both extremities, and any special
- * numbers provided. The special numbers must lie within the given range,
- * otherwise they won't be included. */
- def chooseNum[T](minT: T, maxT: T, specials: T*)(
- implicit num: Numeric[T], c: Choose[T]
- ): Gen[T] = {
- import num._
- val basics = List(minT, maxT, zero, one, -one)
- val basicsAndSpecials = for {
- t <- specials ++ basics if t >= minT && t <= maxT
- } yield (1, const(t))
- val allGens = basicsAndSpecials ++ List(
- (basicsAndSpecials.length, c.choose(minT, maxT))
- )
- frequency(allGens: _*)
- }
-
- /** Generates a version 4 (random) UUID. */
- lazy val uuid: Gen[java.util.UUID] = for {
- l1 <- Gen.choose(Long.MinValue, Long.MaxValue)
- l2 <- Gen.choose(Long.MinValue, Long.MaxValue)
- y <- Gen.oneOf('8', '9', 'a', 'b')
- } yield java.util.UUID.fromString(
- new java.util.UUID(l1,l2).toString.updated(14, '4').updated(19, y)
- )
-
- /** Combines the given generators into one generator that produces a
- * tuple of their generated values. */
- def zip[T1,T2](g1: Gen[T1], g2: Gen[T2]): Gen[(T1,T2)] = {
- val g = for {
- t1 <- g1; t2 <- g2
- } yield (t1,t2)
- g.suchThat { case (t1,t2) => g1.sieveCopy(t1) && g2.sieveCopy(t2) }
- }
-
- /** Combines the given generators into one generator that produces a
- * tuple of their generated values. */
- def zip[T1,T2,T3](g1: Gen[T1], g2: Gen[T2], g3: Gen[T3]): Gen[(T1,T2,T3)] = {
- val g0 = zip(g1,g2)
- val g = for {
- (t1,t2) <- g0; t3 <- g3
- } yield (t1,t2,t3)
- g.suchThat { case (t1,t2,t3) => g0.sieveCopy(t1,t2) && g3.sieveCopy(t3) }
- }
-
- /** Combines the given generators into one generator that produces a
- * tuple of their generated values. */
- def zip[T1,T2,T3,T4](g1: Gen[T1], g2: Gen[T2], g3: Gen[T3], g4: Gen[T4]
- ): Gen[(T1,T2,T3,T4)] = {
- val g0 = zip(g1,g2,g3)
- val g = for {
- (t1,t2,t3) <- g0; t4 <- g4
- } yield (t1,t2,t3,t4)
- g.suchThat { case (t1,t2,t3,t4) => g0.sieveCopy(t1,t2,t3) && g4.sieveCopy(t4) }
- }
-
- /** Combines the given generators into one generator that produces a
- * tuple of their generated values. */
- def zip[T1,T2,T3,T4,T5](g1: Gen[T1], g2: Gen[T2], g3: Gen[T3], g4: Gen[T4],
- g5: Gen[T5]
- ): Gen[(T1,T2,T3,T4,T5)] = {
- val g0 = zip(g1,g2,g3,g4)
- val g = for {
- (t1,t2,t3,t4) <- g0; t5 <- g5
- } yield (t1,t2,t3,t4,t5)
- g.suchThat { case (t1,t2,t3,t4,t5) =>
- g0.sieveCopy(t1,t2,t3,t4) && g5.sieveCopy(t5)
- }
- }
-
- /** Combines the given generators into one generator that produces a
- * tuple of their generated values. */
- def zip[T1,T2,T3,T4,T5,T6](g1: Gen[T1], g2: Gen[T2], g3: Gen[T3], g4: Gen[T4],
- g5: Gen[T5], g6: Gen[T6]
- ): Gen[(T1,T2,T3,T4,T5,T6)] = {
- val g0 = zip(g1,g2,g3,g4,g5)
- val g = for {
- (t1,t2,t3,t4,t5) <- g0; t6 <- g6
- } yield (t1,t2,t3,t4,t5,t6)
- g.suchThat { case (t1,t2,t3,t4,t5,t6) =>
- g0.sieveCopy(t1,t2,t3,t4,t5) && g6.sieveCopy(t6)
- }
- }
-
- /** Combines the given generators into one generator that produces a
- * tuple of their generated values. */
- def zip[T1,T2,T3,T4,T5,T6,T7](g1: Gen[T1], g2: Gen[T2], g3: Gen[T3],
- g4: Gen[T4], g5: Gen[T5], g6: Gen[T6], g7: Gen[T7]
- ): Gen[(T1,T2,T3,T4,T5,T6,T7)] = {
- val g0 = zip(g1,g2,g3,g4,g5,g6)
- val g = for {
- (t1,t2,t3,t4,t5,t6) <- g0; t7 <- g7
- } yield (t1,t2,t3,t4,t5,t6,t7)
- g.suchThat { case (t1,t2,t3,t4,t5,t6,t7) =>
- g0.sieveCopy(t1,t2,t3,t4,t5,t6) && g7.sieveCopy(t7)
- }
- }
-
- /** Combines the given generators into one generator that produces a
- * tuple of their generated values. */
- def zip[T1,T2,T3,T4,T5,T6,T7,T8](g1: Gen[T1], g2: Gen[T2], g3: Gen[T3],
- g4: Gen[T4], g5: Gen[T5], g6: Gen[T6], g7: Gen[T7], g8: Gen[T8]
- ): Gen[(T1,T2,T3,T4,T5,T6,T7,T8)] = {
- val g0 = zip(g1,g2,g3,g4,g5,g6,g7)
- val g = for {
- (t1,t2,t3,t4,t5,t6,t7) <- g0; t8 <- g8
- } yield (t1,t2,t3,t4,t5,t6,t7,t8)
- g.suchThat { case (t1,t2,t3,t4,t5,t6,t7,t8) =>
- g0.sieveCopy(t1,t2,t3,t4,t5,t6,t7) && g8.sieveCopy(t8)
- }
- }
-
- /** Combines the given generators into one generator that produces a
- * tuple of their generated values. */
- def zip[T1,T2,T3,T4,T5,T6,T7,T8,T9](g1: Gen[T1], g2: Gen[T2], g3: Gen[T3],
- g4: Gen[T4], g5: Gen[T5], g6: Gen[T6], g7: Gen[T7], g8: Gen[T8], g9: Gen[T9]
- ): Gen[(T1,T2,T3,T4,T5,T6,T7,T8,T9)] = {
- val g0 = zip(g1,g2,g3,g4,g5,g6,g7,g8)
- val g = for {
- (t1,t2,t3,t4,t5,t6,t7,t8) <- g0; t9 <- g9
- } yield (t1,t2,t3,t4,t5,t6,t7,t8,t9)
- g.suchThat { case (t1,t2,t3,t4,t5,t6,t7,t8,t9) =>
- g0.sieveCopy(t1,t2,t3,t4,t5,t6,t7,t8) && g9.sieveCopy(t9)
- }
- }
-
- /** Takes a function and returns a generator that generates arbitrary
- * results of that function by feeding it with arbitrarily generated input
- * parameters. */
- def resultOf[T,R](f: T => R)(implicit a: Arbitrary[T]): Gen[R] =
- arbitrary[T] map f
-
- /** Takes a function and returns a generator that generates arbitrary
- * results of that function by feeding it with arbitrarily generated input
- * parameters. */
- def resultOf[T1,T2,R](f: (T1,T2) => R)(implicit
- a1: Arbitrary[T1], a2: Arbitrary[T2]
- ): Gen[R] = arbitrary[T1] flatMap { t => resultOf(f(t, _:T2)) }
-
- /** Takes a function and returns a generator that generates arbitrary
- * results of that function by feeding it with arbitrarily generated input
- * parameters. */
- def resultOf[T1,T2,T3,R](f: (T1,T2,T3) => R)(implicit
- a1: Arbitrary[T1], a2: Arbitrary[T2], a3: Arbitrary[T3]
- ): Gen[R] = arbitrary[T1] flatMap { t => resultOf(f(t, _:T2, _:T3)) }
-
- /** Takes a function and returns a generator that generates arbitrary
- * results of that function by feeding it with arbitrarily generated input
- * parameters. */
- def resultOf[T1,T2,T3,T4,R](f: (T1,T2,T3,T4) => R)(implicit
- a1: Arbitrary[T1], a2: Arbitrary[T2], a3: Arbitrary[T3], a4: Arbitrary[T4]
- ): Gen[R] = arbitrary[T1] flatMap {
- t => resultOf(f(t, _:T2, _:T3, _:T4))
- }
-
- /** Takes a function and returns a generator that generates arbitrary
- * results of that function by feeding it with arbitrarily generated input
- * parameters. */
- def resultOf[T1,T2,T3,T4,T5,R](f: (T1,T2,T3,T4,T5) => R)(implicit
- a1: Arbitrary[T1], a2: Arbitrary[T2], a3: Arbitrary[T3], a4: Arbitrary[T4],
- a5: Arbitrary[T5]
- ): Gen[R] = arbitrary[T1] flatMap {
- t => resultOf(f(t, _:T2, _:T3, _:T4, _:T5))
- }
-
- /** Takes a function and returns a generator that generates arbitrary
- * results of that function by feeding it with arbitrarily generated input
- * parameters. */
- def resultOf[T1,T2,T3,T4,T5,T6,R](
- f: (T1,T2,T3,T4,T5,T6) => R)(implicit
- a1: Arbitrary[T1], a2: Arbitrary[T2], a3: Arbitrary[T3],
- a4: Arbitrary[T4], a5: Arbitrary[T5], a6: Arbitrary[T6]
- ): Gen[R] = arbitrary[T1] flatMap {
- t => resultOf(f(t, _:T2, _:T3, _:T4, _:T5, _:T6))
- }
-
- /** Takes a function and returns a generator that generates arbitrary
- * results of that function by feeding it with arbitrarily generated input
- * parameters. */
- def resultOf[T1,T2,T3,T4,T5,T6,T7,R](
- f: (T1,T2,T3,T4,T5,T6,T7) => R)(implicit
- a1: Arbitrary[T1], a2: Arbitrary[T2], a3: Arbitrary[T3],
- a4: Arbitrary[T4], a5: Arbitrary[T5], a6: Arbitrary[T6], a7: Arbitrary[T7]
- ): Gen[R] = arbitrary[T1] flatMap {
- t => resultOf(f(t, _:T2, _:T3, _:T4, _:T5, _:T6, _:T7))
- }
-
- /** Takes a function and returns a generator that generates arbitrary
- * results of that function by feeding it with arbitrarily generated input
- * parameters. */
- def resultOf[T1,T2,T3,T4,T5,T6,T7,T8,R](
- f: (T1,T2,T3,T4,T5,T6,T7,T8) => R)(implicit
- a1: Arbitrary[T1], a2: Arbitrary[T2], a3: Arbitrary[T3], a4: Arbitrary[T4],
- a5: Arbitrary[T5], a6: Arbitrary[T6], a7: Arbitrary[T7], a8: Arbitrary[T8]
- ): Gen[R] = arbitrary[T1] flatMap {
- t => resultOf(f(t, _:T2, _:T3, _:T4, _:T5, _:T6, _:T7, _:T8))
- }
-
- /** Takes a function and returns a generator that generates arbitrary
- * results of that function by feeding it with arbitrarily generated input
- * parameters. */
- def resultOf[T1,T2,T3,T4,T5,T6,T7,T8,T9,R](
- f: (T1,T2,T3,T4,T5,T6,T7,T8,T9) => R)(implicit
- a1: Arbitrary[T1], a2: Arbitrary[T2], a3: Arbitrary[T3], a4: Arbitrary[T4],
- a5: Arbitrary[T5], a6: Arbitrary[T6], a7: Arbitrary[T7], a8: Arbitrary[T8],
- a9: Arbitrary[T9]
- ): Gen[R] = arbitrary[T1] flatMap {
- t => resultOf(f(t, _:T2, _:T3, _:T4, _:T5, _:T6, _:T7, _:T8, _:T9))
- }
-}
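As a rough illustration of how the combinators defined above compose (the names suit, hand and offset are made up for this sketch):

import org.scalacheck.Gen

object GenExamples {
  // Weighted choice between constant generators, as with frequency above.
  val suit: Gen[String] = Gen.frequency(
    (9, Gen.const("Hearts")),
    (9, Gen.const("Spades")),
    (1, Gen.const("Joker"))
  )

  // A fixed-length list built from another generator (listOfN / containerOfN).
  val hand: Gen[List[String]] = Gen.listOfN(5, suit)

  // A number in an inclusive range, with extra weight on 0, +/-1 and the bounds.
  val offset: Gen[Int] = Gen.chooseNum(-100, 100)

  // sample evaluates a generator once with the default parameters.
  def demo(): Option[List[String]] = hand.sample
}
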
diff --git a/src/partest-extras/scala/org/scalacheck/Prop.scala b/src/partest-extras/scala/org/scalacheck/Prop.scala
deleted file mode 100644
index 6b607002fd..0000000000
--- a/src/partest-extras/scala/org/scalacheck/Prop.scala
+++ /dev/null
@@ -1,953 +0,0 @@
-/*-------------------------------------------------------------------------*\
-** ScalaCheck **
-** Copyright (c) 2007-2014 Rickard Nilsson. All rights reserved. **
-** http://www.scalacheck.org **
-** **
-** This software is released under the terms of the Revised BSD License. **
-** There is NO WARRANTY. See the file LICENSE for the full text. **
-\*------------------------------------------------------------------------ */
-
-package org.scalacheck
-
-import util.{Pretty, FreqMap, Buildable, ConsoleReporter}
-import scala.annotation.tailrec
-
-trait Prop {
-
- import Prop.{Result, Proof, True, False, Exception, Undecided,
- provedToTrue, secure, mergeRes}
- import Gen.Parameters
-
- def apply(prms: Parameters): Result
-
- def map(f: Result => Result): Prop = Prop(prms => f(this(prms)))
-
- def flatMap(f: Result => Prop): Prop = Prop(prms => f(this(prms))(prms))
-
- // TODO In 1.12.0, make p call-by-name, and remove the calls to secure()
- // in the methods that use combine()
- def combine(p: Prop)(f: (Result, Result) => Result) =
- for(r1 <- this; r2 <- p) yield f(r1,r2)
-
- /** Convenience method that checks this property with the given parameters
- * and reports the result on the console. */
- def check(prms: Test.Parameters): Unit = Test.check(
- if(prms.testCallback.isInstanceOf[ConsoleReporter]) prms
- else prms.withTestCallback(prms.testCallback.chain(ConsoleReporter(1))),
- this
- )
-
- /** Convenience method that checks this property and reports the
- * result on the console. The default test parameters
- * ([[Test.Parameters.default]]) are used for the check. */
- def check: Unit = check(Test.Parameters.default)
-
- /** Convenience method that checks this property and reports the result
- * on the console. The provided argument should be a function that takes
- * the default test parameters ([[Test.Parameters.default]])
-   * as input and outputs a modified [[Test.Parameters]] instance to be
-   * used for the check. Example use:
- *
- * {{{
- * p.check(_.withMinSuccessfulTests(500))
-   *
- * p.check { _.
- * withMinSuccessfulTests(80000).
- * withWorkers(4)
- * }
- * }}}
- */
- def check(paramFun: Test.Parameters => Test.Parameters): Unit = check(
- paramFun(Test.Parameters.default)
- )
-
-  /** Convenience method that checks this property with the specified minimal
-   * number of successful tests and the given testing parameters, and
- * reports the result on the console. If you need to get the results
- * from the test use the `check` methods in [[org.scalacheck.Test]]
- * instead. */
- @deprecated("Use check(prms.withMinSuccessfulTests(n)) instead", "1.11.2")
- def check(minSuccessfulTests: Int, prms: Test.Parameters): Unit = check(
- prms.withMinSuccessfulTests(minSuccessfulTests)
- )
-
-  /** Convenience method that checks this property with the specified minimal
-   * number of successful tests and reports the result on the console.
- * If you need to get the results from the test use
- * the `check` methods in [[org.scalacheck.Test]] instead. */
- @deprecated("Use check(_.withMinSuccessfulTests(n)) instead", "1.11.2")
- def check(minSuccessfulTests: Int): Unit = check(
- _.withMinSuccessfulTests(minSuccessfulTests)
- )
-
- /** The logic for main, separated out to make it easier to
- * avoid System.exit calls. Returns exit code.
- */
- def mainRunner(args: Array[String]): Int = {
- Test.parseParams(args) match {
- case Some(params) =>
- if (Test.check(params, this).passed) 0
- else 1
- case None =>
- println("Incorrect options")
- -1
- }
- }
-
- /** Whether main should call System.exit with an exit code.
- * Defaults to true; override to change. */
- def mainCallsExit = true
-
- /** Convenience method that makes it possible to use this property
- * as an application that checks itself on execution */
- def main(args: Array[String]): Unit = {
- val code = mainRunner(args)
- if (mainCallsExit && code != 0)
- System exit code
- }
-
- /** Returns a new property that holds if and only if both this
- * and the given property hold. If one of the properties doesn't
- * generate a result, the new property will generate false. */
- def &&(p: => Prop) = combine(secure(p))(_ && _)
-
- /** Returns a new property that holds if either this
- * or the given property (or both) hold. */
- def ||(p: => Prop) = combine(secure(p))(_ || _)
-
- /** Returns a new property that holds if and only if both this
- * and the given property hold. If one of the properties doesn't
- * generate a result, the new property will generate the same result
- * as the other property. */
- def ++(p: => Prop): Prop = combine(secure(p))(_ ++ _)
-
- /** Combines two properties through implication */
- def ==>(p: => Prop): Prop = flatMap { r1 =>
- if(r1.proved) p map { r2 => mergeRes(r1,r2,r2.status) }
- else if(!r1.success) Prop(r1.copy(status = Undecided))
- else p map { r2 => provedToTrue(mergeRes(r1,r2,r2.status)) }
- }
-
- /** Returns a new property that holds if and only if both this
-   * and the given property generate a result with the exact
- * same status. Note that this means that if one of the properties is
- * proved, and the other one passed, then the resulting property
- * will fail. */
- def ==(p: => Prop) = this.flatMap { r1 =>
- p.map { r2 =>
- mergeRes(r1, r2, if(r1.status == r2.status) True else False)
- }
- }
-
- override def toString = "Prop"
-
- /** Put a label on the property to make test reports clearer */
- def label(l: String) = map(_.label(l))
-
- /** Put a label on the property to make test reports clearer */
- def :|(l: String) = label(l)
-
- /** Put a label on the property to make test reports clearer */
- def |:(l: String) = label(l)
-
- /** Put a label on the property to make test reports clearer */
- def :|(l: Symbol) = label(l.toString.drop(1))
-
- /** Put a label on the property to make test reports clearer */
- def |:(l: Symbol) = label(l.toString.drop(1))
-
-}
-
-object Prop {
-
- import Gen.{value, fail, frequency, oneOf, Parameters}
- import Arbitrary.{arbitrary}
- import Shrink.{shrink}
-
- // Types
-
- /** A property argument */
- case class Arg[+T](
- label: String,
- arg: T,
- shrinks: Int,
- origArg: T,
- prettyArg: Pretty,
- prettyOrigArg: Pretty
- )
-
- object Result {
- @deprecated("Will be removed in 1.12.0", "1.11.2")
- def apply(st: Status): Result = Result(status = st)
- @deprecated("Will be removed in 1.12.0", "1.11.2")
- def merge(x: Result, y: Result, status: Status) = mergeRes(x,y,status)
- }
-
- private[scalacheck] def mergeRes(x: Result, y: Result, st: Status) = Result(
- status = st,
- args = x.args ++ y.args,
- collected = x.collected ++ y.collected,
- labels = x.labels ++ y.labels
- )
-
- /** The result of evaluating a property */
- case class Result(
- status: Status,
- args: List[Arg[Any]] = Nil,
- collected: Set[Any] = Set.empty,
- labels: Set[String] = Set.empty
- ) {
- def success = status match {
- case True => true
- case Proof => true
- case _ => false
- }
-
- def failure = status match {
- case False => true
- case Exception(_) => true
- case _ => false
- }
-
- def proved = status == Proof
-
- def addArg(a: Arg[Any]) = copy(args = a::args)
-
- def collect(x: Any) = copy(collected = collected+x)
-
- def label(l: String) = copy(labels = labels+l)
-
- def &&(r: Result) = (this.status, r.status) match {
- case (Exception(_),_) => this
- case (_,Exception(_)) => r
-
- case (False,_) => this
- case (_,False) => r
-
- case (Undecided,_) => this
- case (_,Undecided) => r
-
- case (_,Proof) => mergeRes(this, r, this.status)
- case (Proof,_) => mergeRes(this, r, r.status)
-
- case (True,True) => mergeRes(this, r, True)
- }
-
- def ||(r: Result) = (this.status, r.status) match {
- case (Exception(_),_) => this
- case (_,Exception(_)) => r
-
- case (False,False) => mergeRes(this, r, False)
- case (False,_) => r
- case (_,False) => this
-
- case (Proof,_) => this
- case (_,Proof) => r
-
- case (True,_) => this
- case (_,True) => r
-
- case (Undecided,Undecided) => mergeRes(this, r, Undecided)
- }
-
- def ++(r: Result) = (this.status, r.status) match {
- case (Exception(_),_) => this
- case (_,Exception(_)) => r
-
- case (_, Undecided) => this
- case (Undecided, _) => r
-
- case (_, Proof) => this
- case (Proof, _) => r
-
- case (_, True) => this
- case (True, _) => r
-
- case (False, _) => this
- case (_, False) => r
- }
-
- def ==>(r: Result) = (this.status, r.status) match {
- case (Exception(_),_) => this
- case (_,Exception(_)) => r
-
- case (False,_) => mergeRes(this, r, Undecided)
-
- case (Undecided,_) => this
-
- case (Proof,_) => mergeRes(this, r, r.status)
- case (True,_) => mergeRes(this, r, r.status)
- }
- }
-
- sealed trait Status
-
- /** The property was proved */
- case object Proof extends Status
-
- /** The property was true */
- case object True extends Status
-
- /** The property was false */
- case object False extends Status
-
- /** The property could not be falsified or proved */
- case object Undecided extends Status
-
- /** Evaluating the property raised an exception */
- sealed case class Exception(e: Throwable) extends Status {
- override def equals(o: Any) = o match {
- case Exception(_) => true
- case _ => false
- }
- }
-
- /** Create a new property from the given function. */
- def apply(f: Parameters => Result): Prop = new Prop {
- def apply(prms: Parameters) = try f(prms) catch {
- case e: Throwable => Result(status = Exception(e))
- }
- }
-
- /** Create a property that returns the given result */
- def apply(r: Result): Prop = Prop.apply(prms => r)
-
- /** Create a property from a boolean value */
- def apply(b: Boolean): Prop = if(b) proved else falsified
-
-
- // Implicits
-
- /** A collection of property operators on `Any` values.
- * Import [[Prop.AnyOperators]] to make the operators available. */
- class ExtendedAny[T <% Pretty](x: => T) {
- /** See [[Prop.imply]] */
- def imply(f: PartialFunction[T,Prop]) = Prop.imply(x,f)
- /** See [[Prop.iff]] */
- def iff(f: PartialFunction[T,Prop]) = Prop.iff(x,f)
- /** See [[Prop.?=]] */
- def ?=(y: T) = Prop.?=(x, y)
- /** See [[Prop.=?]] */
- def =?(y: T) = Prop.=?(x, y)
- }
-
- /** A collection of property operators on `Boolean` values.
- * Import [[Prop.BooleanOperators]] to make the operators available. */
- class ExtendedBoolean(b: => Boolean) {
- /** See the documentation for [[org.scalacheck.Prop]] */
- def ==>(p: => Prop) = Prop(b) ==> p
- /** See the documentation for [[org.scalacheck.Prop]] */
- def :|(l: String) = Prop(b) :| l
- /** See the documentation for [[org.scalacheck.Prop]] */
- def |:(l: String) = l |: Prop(b)
- /** See the documentation for [[org.scalacheck.Prop]] */
- def :|(l: Symbol) = Prop(b) :| l
- /** See the documentation for [[org.scalacheck.Prop]] */
- def |:(l: Symbol) = l |: Prop(b)
- }
-
- /** Implicit method that makes a number of property operators on values of
- * type `Any` available in the current scope.
- * See [[Prop.ExtendedAny]] for documentation on the operators. */
- implicit def AnyOperators[T <% Pretty](x: => T) = new ExtendedAny[T](x)
-
- /** Implicit method that makes a number of property operators on boolean
- * values available in the current scope. See [[Prop.ExtendedBoolean]] for
- * documentation on the operators. */
- implicit def BooleanOperators(b: => Boolean) = new ExtendedBoolean(b)
-
- /** Implicit conversion of Boolean values to Prop values. */
- implicit def propBoolean(b: Boolean): Prop = Prop(b)
-
-
- // Private support functions
-
- private def provedToTrue(r: Result) = r.status match {
- case Proof => r.copy(status = True)
- case _ => r
- }
-
-
- // Property combinators
-
- /** A property that never is proved or falsified */
- lazy val undecided = Prop(Result(status = Undecided))
-
- /** A property that always is false */
- lazy val falsified = Prop(Result(status = False))
-
- /** A property that always is proved */
- lazy val proved = Prop(Result(status = Proof))
-
- /** A property that always is passed */
- lazy val passed = Prop(Result(status = True))
-
- /** A property that denotes an exception */
- def exception(e: Throwable): Prop = Prop(Result(status = Exception(e)))
-
- /** A property that denotes an exception */
- lazy val exception: Prop = exception(null)
-
- /** Create a property that compares two values. If the values aren't equal,
-  * the property will fail and report that the first value doesn't match the
-  * expected (second) value. */
- def ?=[T](x: T, y: T)(implicit pp: T => Pretty): Prop =
- if(x == y) proved else falsified :| {
- val exp = Pretty.pretty[T](y, Pretty.Params(0))
- val act = Pretty.pretty[T](x, Pretty.Params(0))
- "Expected "+exp+" but got "+act
- }
-
- /** Create a property that compares two values. If the values aren't equal,
-  * the property will fail and report that the second value doesn't match the
-  * expected (first) value. */
- def =?[T](x: T, y: T)(implicit pp: T => Pretty): Prop = ?=(y, x)
-
- /** A property that depends on the generator size */
- def sizedProp(f: Int => Prop): Prop = Prop { prms =>
- // provedToTrue since if the property is proved for
- // one size, it shouldn't be regarded as proved for
- // all sizes.
- provedToTrue(f(prms.size)(prms))
- }
-
- /** Implication with several conditions */
- def imply[T](x: T, f: PartialFunction[T,Prop]): Prop = secure {
- if(f.isDefinedAt(x)) f(x) else undecided
- }
-
- /** Property holds only if the given partial function is defined at
- * `x`, and returns a property that holds */
- def iff[T](x: T, f: PartialFunction[T,Prop]): Prop = secure {
- if(f.isDefinedAt(x)) f(x) else falsified
- }
-
- /** Combines properties into one, which is true if and only if all the
- * properties are true */
- def all(ps: Prop*) = if(ps.isEmpty) proved else Prop(prms =>
- ps.map(p => p(prms)).reduceLeft(_ && _)
- )
-
- /** Combines properties into one, which is true if at least one of the
- * properties is true */
- def atLeastOne(ps: Prop*) = if(ps.isEmpty) falsified else Prop(prms =>
- ps.map(p => p(prms)).reduceLeft(_ || _)
- )
-
- /** A property that holds if at least one of the given generators
- * fails generating a value */
- def someFailing[T](gs: Seq[Gen[T]]) = atLeastOne(gs.map(_ == fail):_*)
-
- /** A property that holds iff none of the given generators
- * fails generating a value */
- def noneFailing[T](gs: Seq[Gen[T]]) = all(gs.map(_ !== fail):_*)
-
- /** Returns true if the given statement throws an exception
- * of the specified type */
- def throws[T <: Throwable](c: Class[T])(x: => Any): Boolean =
- try { x; false } catch { case e if c.isInstance(e) => true }
-
- /** Collect data for presentation in test report */
- def collect[T, P <% Prop](f: T => P): T => Prop = t => Prop { prms =>
- val prop = f(t)
- prop(prms).collect(t)
- }
-
- /** Collect data for presentation in test report */
- def collect[T](t: T)(prop: Prop) = Prop { prms =>
- prop(prms).collect(t)
- }
-
- /** Collect data for presentation in test report */
- def classify(c: => Boolean, ifTrue: Any)(prop: Prop): Prop =
- if(c) collect(ifTrue)(prop) else collect(())(prop)
-
- /** Collect data for presentation in test report */
- def classify(c: => Boolean, ifTrue: Any, ifFalse: Any)(prop: Prop): Prop =
- if(c) collect(ifTrue)(prop) else collect(ifFalse)(prop)
-
- /** Wraps and protects a property */
- def secure[P <% Prop](p: => P): Prop =
- try (p: Prop) catch { case e: Throwable => exception(e) }
-
- /** Existential quantifier for an arbitrary value. */
- def exists[A,P](f: A => P)(implicit
- pv: P => Prop,
- pp: A => Pretty,
- aa: Arbitrary[A]
- ): Prop = exists(aa.arbitrary)(f)
-
- /** Existential quantifier for an explicit generator. */
- def exists[A,P](g: Gen[A])(f: A => P)(implicit
- pv: P => Prop,
- pp: A => Pretty
- ): Prop = Prop { prms =>
- val gr = g.doApply(prms)
- gr.retrieve match {
- case None => undecided(prms)
- case Some(x) =>
- val p = secure(f(x))
- val labels = gr.labels.mkString(",")
- val r = p(prms).addArg(Arg(labels,x,0,x,pp(x),pp(x)))
- r.status match {
- case True => r.copy(status = Proof)
- case False => r.copy(status = Undecided)
- case _ => r
- }
- }
- }
-
- /** Universal quantifier for an explicit generator. Does not shrink failed
- * test cases. */
- def forAllNoShrink[T1,P](
- g1: Gen[T1])(
- f: T1 => P)(implicit
- pv: P => Prop,
- pp1: T1 => Pretty
- ): Prop = Prop { prms =>
- val gr = g1.doApply(prms)
- gr.retrieve match {
- case None => undecided(prms)
- case Some(x) =>
- val p = secure(f(x))
- val labels = gr.labels.mkString(",")
- provedToTrue(p(prms)).addArg(Arg(labels,x,0,x,pp1(x),pp1(x)))
- }
- }
-
- /** Universal quantifier for two explicit generators.
- * Does not shrink failed test cases. */
- def forAllNoShrink[T1,T2,P](
- g1: Gen[T1], g2: Gen[T2])(
- f: (T1,T2) => P)(implicit
- p: P => Prop,
- pp1: T1 => Pretty,
- pp2: T2 => Pretty
- ): Prop = forAllNoShrink(g1)(t => forAllNoShrink(g2)(f(t, _:T2)))
-
- /** Universal quantifier for three explicit generators.
- * Does not shrink failed test cases. */
- def forAllNoShrink[T1,T2,T3,P](
- g1: Gen[T1], g2: Gen[T2], g3: Gen[T3])(
- f: (T1,T2,T3) => P)(implicit
- p: P => Prop,
- pp1: T1 => Pretty,
- pp2: T2 => Pretty,
- pp3: T3 => Pretty
- ): Prop = forAllNoShrink(g1)(t => forAllNoShrink(g2,g3)(f(t, _:T2, _:T3)))
-
- /** Universal quantifier for four explicit generators.
- * Does not shrink failed test cases. */
- def forAllNoShrink[T1,T2,T3,T4,P](
- g1: Gen[T1], g2: Gen[T2], g3: Gen[T3], g4: Gen[T4])(
- f: (T1,T2,T3,T4) => P)(implicit
- p: P => Prop,
- pp1: T1 => Pretty,
- pp2: T2 => Pretty,
- pp3: T3 => Pretty,
- pp4: T4 => Pretty
- ): Prop = forAllNoShrink(g1)(t => forAllNoShrink(g2,g3,g4)(f(t, _:T2, _:T3, _:T4)))
-
- /** Universal quantifier for five explicit generators.
- * Does not shrink failed test cases. */
- def forAllNoShrink[T1,T2,T3,T4,T5,P](
- g1: Gen[T1], g2: Gen[T2], g3: Gen[T3], g4: Gen[T4], g5: Gen[T5])(
- f: (T1,T2,T3,T4,T5) => P)(implicit
- p: P => Prop,
- pp1: T1 => Pretty,
- pp2: T2 => Pretty,
- pp3: T3 => Pretty,
- pp4: T4 => Pretty,
- pp5: T5 => Pretty
- ): Prop = forAllNoShrink(g1)(t => forAllNoShrink(g2,g3,g4,g5)(f(t, _:T2, _:T3, _:T4, _:T5)))
-
- /** Universal quantifier for six explicit generators.
- * Does not shrink failed test cases. */
- def forAllNoShrink[T1,T2,T3,T4,T5,T6,P](
- g1: Gen[T1], g2: Gen[T2], g3: Gen[T3], g4: Gen[T4], g5: Gen[T5], g6: Gen[T6])(
- f: (T1,T2,T3,T4,T5,T6) => P)(implicit
- p: P => Prop,
- pp1: T1 => Pretty,
- pp2: T2 => Pretty,
- pp3: T3 => Pretty,
- pp4: T4 => Pretty,
- pp5: T5 => Pretty,
- pp6: T6 => Pretty
- ): Prop = forAllNoShrink(g1)(t => forAllNoShrink(g2,g3,g4,g5,g6)(f(t, _:T2, _:T3, _:T4, _:T5, _:T6)))
-
- /** Universal quantifier for seven explicit generators.
- * Does not shrink failed test cases. */
- def forAllNoShrink[T1,T2,T3,T4,T5,T6,T7,P](
- g1: Gen[T1], g2: Gen[T2], g3: Gen[T3], g4: Gen[T4], g5: Gen[T5], g6: Gen[T6], g7: Gen[T7])(
- f: (T1,T2,T3,T4,T5,T6,T7) => P)(implicit
- p: P => Prop,
- pp1: T1 => Pretty,
- pp2: T2 => Pretty,
- pp3: T3 => Pretty,
- pp4: T4 => Pretty,
- pp5: T5 => Pretty,
- pp6: T6 => Pretty,
- pp7: T7 => Pretty
- ): Prop = forAllNoShrink(g1)(t => forAllNoShrink(g2,g3,g4,g5,g6,g7)(f(t, _:T2, _:T3, _:T4, _:T5, _:T6, _:T7)))
-
- /** Universal quantifier for eight explicit generators.
- * Does not shrink failed test cases. */
- def forAllNoShrink[T1,T2,T3,T4,T5,T6,T7,T8,P](
- g1: Gen[T1], g2: Gen[T2], g3: Gen[T3], g4: Gen[T4], g5: Gen[T5], g6: Gen[T6], g7: Gen[T7], g8: Gen[T8])(
- f: (T1,T2,T3,T4,T5,T6,T7,T8) => P)(implicit
- p: P => Prop,
- pp1: T1 => Pretty,
- pp2: T2 => Pretty,
- pp3: T3 => Pretty,
- pp4: T4 => Pretty,
- pp5: T5 => Pretty,
- pp6: T6 => Pretty,
- pp7: T7 => Pretty,
- pp8: T8 => Pretty
- ): Prop = forAllNoShrink(g1)(t => forAllNoShrink(g2,g3,g4,g5,g6,g7,g8)(f(t, _:T2, _:T3, _:T4, _:T5, _:T6, _:T7, _:T8)))
-
- /** Converts a function into a universally quantified property */
- def forAllNoShrink[A1,P](
- f: A1 => P)(implicit
- pv: P => Prop,
- a1: Arbitrary[A1], pp1: A1 => Pretty
- ): Prop = forAllNoShrink(arbitrary[A1])(f)
-
- /** Converts a function into a universally quantified property */
- def forAllNoShrink[A1,A2,P](
- f: (A1,A2) => P)(implicit
- pv: P => Prop,
- a1: Arbitrary[A1], pp1: A1 => Pretty,
- a2: Arbitrary[A2], pp2: A2 => Pretty
- ): Prop = forAllNoShrink(arbitrary[A1], arbitrary[A2])(f)
-
- /** Converts a function into a universally quantified property */
- def forAllNoShrink[A1,A2,A3,P](
- f: (A1,A2,A3) => P)(implicit
- pv: P => Prop,
- a1: Arbitrary[A1], pp1: A1 => Pretty,
- a2: Arbitrary[A2], pp2: A2 => Pretty,
- a3: Arbitrary[A3], pp3: A3 => Pretty
- ): Prop = forAllNoShrink(arbitrary[A1], arbitrary[A2], arbitrary[A3])(f)
-
- /** Converts a function into a universally quantified property */
- def forAllNoShrink[A1,A2,A3,A4,P](
- f: (A1,A2,A3,A4) => P)(implicit
- pv: P => Prop,
- a1: Arbitrary[A1], pp1: A1 => Pretty,
- a2: Arbitrary[A2], pp2: A2 => Pretty,
- a3: Arbitrary[A3], pp3: A3 => Pretty,
- a4: Arbitrary[A4], pp4: A4 => Pretty
- ): Prop = forAllNoShrink(arbitrary[A1], arbitrary[A2], arbitrary[A3], arbitrary[A4])(f)
-
- /** Converts a function into a universally quantified property */
- def forAllNoShrink[A1,A2,A3,A4,A5,P](
- f: (A1,A2,A3,A4,A5) => P)(implicit
- pv: P => Prop,
- a1: Arbitrary[A1], pp1: A1 => Pretty,
- a2: Arbitrary[A2], pp2: A2 => Pretty,
- a3: Arbitrary[A3], pp3: A3 => Pretty,
- a4: Arbitrary[A4], pp4: A4 => Pretty,
- a5: Arbitrary[A5], pp5: A5 => Pretty
- ): Prop = forAllNoShrink(arbitrary[A1], arbitrary[A2], arbitrary[A3], arbitrary[A4], arbitrary[A5])(f)
-
- /** Converts a function into a universally quantified property */
- def forAllNoShrink[A1,A2,A3,A4,A5,A6,P](
- f: (A1,A2,A3,A4,A5,A6) => P)(implicit
- pv: P => Prop,
- a1: Arbitrary[A1], pp1: A1 => Pretty,
- a2: Arbitrary[A2], pp2: A2 => Pretty,
- a3: Arbitrary[A3], pp3: A3 => Pretty,
- a4: Arbitrary[A4], pp4: A4 => Pretty,
- a5: Arbitrary[A5], pp5: A5 => Pretty,
- a6: Arbitrary[A6], pp6: A6 => Pretty
- ): Prop = forAllNoShrink(arbitrary[A1], arbitrary[A2], arbitrary[A3], arbitrary[A4], arbitrary[A5], arbitrary[A6])(f)
-
- /** Converts a function into a universally quantified property */
- def forAllNoShrink[A1,A2,A3,A4,A5,A6,A7,P](
- f: (A1,A2,A3,A4,A5,A6,A7) => P)(implicit
- pv: P => Prop,
- a1: Arbitrary[A1], pp1: A1 => Pretty,
- a2: Arbitrary[A2], pp2: A2 => Pretty,
- a3: Arbitrary[A3], pp3: A3 => Pretty,
- a4: Arbitrary[A4], pp4: A4 => Pretty,
- a5: Arbitrary[A5], pp5: A5 => Pretty,
- a6: Arbitrary[A6], pp6: A6 => Pretty,
- a7: Arbitrary[A7], pp7: A7 => Pretty
- ): Prop = {
- forAllNoShrink(arbitrary[A1], arbitrary[A2], arbitrary[A3], arbitrary[A4], arbitrary[A5], arbitrary[A6],
- arbitrary[A7])(f)
- }
-
- /** Converts a function into a universally quantified property */
- def forAllNoShrink[A1,A2,A3,A4,A5,A6,A7,A8,P](
- f: (A1,A2,A3,A4,A5,A6,A7,A8) => P)(implicit
- pv: P => Prop,
- a1: Arbitrary[A1], pp1: A1 => Pretty,
- a2: Arbitrary[A2], pp2: A2 => Pretty,
- a3: Arbitrary[A3], pp3: A3 => Pretty,
- a4: Arbitrary[A4], pp4: A4 => Pretty,
- a5: Arbitrary[A5], pp5: A5 => Pretty,
- a6: Arbitrary[A6], pp6: A6 => Pretty,
- a7: Arbitrary[A7], pp7: A7 => Pretty,
- a8: Arbitrary[A8], pp8: A8 => Pretty
- ): Prop = {
- forAllNoShrink(arbitrary[A1], arbitrary[A2], arbitrary[A3], arbitrary[A4], arbitrary[A5], arbitrary[A6],
- arbitrary[A7], arbitrary[A8])(f)
- }
-
- /** Universal quantifier for an explicit generator. Shrinks failed arguments
- * with the given shrink function */
- def forAllShrink[T, P](g: Gen[T],
- shrink: T => Stream[T])(f: T => P
- )(implicit pv: P => Prop, pp: T => Pretty
- ): Prop = Prop { prms =>
-
- val gr = g.doApply(prms)
- val labels = gr.labels.mkString(",")
-
- def result(x: T) = {
- val p = secure(pv(f(x)))
- provedToTrue(p(prms))
- }
-
- /** Returns the first failed result in Left or success in Right */
- def getFirstFailure(xs: Stream[T]): Either[(T,Result),(T,Result)] = {
- assert(!xs.isEmpty, "Stream cannot be empty")
- val results = xs.map(x => (x, result(x)))
- results.dropWhile(!_._2.failure).headOption match {
- case None => Right(results.head)
- case Some(xr) => Left(xr)
- }
- }
-
- def shrinker(x: T, r: Result, shrinks: Int, orig: T): Result = {
- val xs = shrink(x).filter(gr.sieve)
- val res = r.addArg(Arg(labels,x,shrinks,orig,pp(x),pp(orig)))
- if(xs.isEmpty) res else getFirstFailure(xs) match {
- case Right((x2,r2)) => res
- case Left((x2,r2)) => shrinker(x2, replOrig(r,r2), shrinks+1, orig)
- }
- }
-
- def replOrig(r0: Result, r1: Result) = (r0.args,r1.args) match {
- case (a0::_,a1::as) =>
- r1.copy(
- args = a1.copy(
- origArg = a0.origArg,
- prettyOrigArg = a0.prettyOrigArg
- ) :: as
- )
- case _ => r1
- }
-
- gr.retrieve match {
- case None => undecided(prms)
- case Some(x) =>
- val r = result(x)
- if (!r.failure) r.addArg(Arg(labels,x,0,x,pp(x),pp(x)))
- else shrinker(x,r,0,x)
- }
-
- }
-
- /** Universal quantifier for an explicit generator. Shrinks failed arguments
- * with the default shrink function for the type */
- def forAll[T1,P](
- g1: Gen[T1])(
- f: T1 => P)(implicit
- p: P => Prop,
- s1: Shrink[T1],
- pp1: T1 => Pretty
- ): Prop = forAllShrink[T1,P](g1, shrink[T1])(f)
-
- /** Universal quantifier for two explicit generators. Shrinks failed arguments
- * with the default shrink function for the type */
- def forAll[T1,T2,P](
- g1: Gen[T1], g2: Gen[T2])(
- f: (T1,T2) => P)(implicit
- p: P => Prop,
- s1: Shrink[T1], pp1: T1 => Pretty,
- s2: Shrink[T2], pp2: T2 => Pretty
- ): Prop = forAll(g1)(t => forAll(g2)(f(t, _:T2)))
-
- /** Universal quantifier for three explicit generators. Shrinks failed arguments
- * with the default shrink function for the type */
- def forAll[T1,T2,T3,P](
- g1: Gen[T1], g2: Gen[T2], g3: Gen[T3])(
- f: (T1,T2,T3) => P)(implicit
- p: P => Prop,
- s1: Shrink[T1], pp1: T1 => Pretty,
- s2: Shrink[T2], pp2: T2 => Pretty,
- s3: Shrink[T3], pp3: T3 => Pretty
- ): Prop = forAll(g1)(t => forAll(g2,g3)(f(t, _:T2, _:T3)))
-
- /** Universal quantifier for four explicit generators. Shrinks failed arguments
- * with the default shrink function for the type */
- def forAll[T1,T2,T3,T4,P](
- g1: Gen[T1], g2: Gen[T2], g3: Gen[T3], g4: Gen[T4])(
- f: (T1,T2,T3,T4) => P)(implicit
- p: P => Prop,
- s1: Shrink[T1], pp1: T1 => Pretty,
- s2: Shrink[T2], pp2: T2 => Pretty,
- s3: Shrink[T3], pp3: T3 => Pretty,
- s4: Shrink[T4], pp4: T4 => Pretty
- ): Prop = forAll(g1)(t => forAll(g2,g3,g4)(f(t, _:T2, _:T3, _:T4)))
-
- /** Universal quantifier for five explicit generators. Shrinks failed arguments
- * with the default shrink function for the type */
- def forAll[T1,T2,T3,T4,T5,P](
- g1: Gen[T1], g2: Gen[T2], g3: Gen[T3], g4: Gen[T4], g5: Gen[T5])(
- f: (T1,T2,T3,T4,T5) => P)(implicit
- p: P => Prop,
- s1: Shrink[T1], pp1: T1 => Pretty,
- s2: Shrink[T2], pp2: T2 => Pretty,
- s3: Shrink[T3], pp3: T3 => Pretty,
- s4: Shrink[T4], pp4: T4 => Pretty,
- s5: Shrink[T5], pp5: T5 => Pretty
- ): Prop = forAll(g1)(t => forAll(g2,g3,g4,g5)(f(t, _:T2, _:T3, _:T4, _:T5)))
-
- /** Universal quantifier for six explicit generators. Shrinks failed arguments
- * with the default shrink function for the type */
- def forAll[T1,T2,T3,T4,T5,T6,P](
- g1: Gen[T1], g2: Gen[T2], g3: Gen[T3], g4: Gen[T4], g5: Gen[T5], g6: Gen[T6])(
- f: (T1,T2,T3,T4,T5,T6) => P)(implicit
- p: P => Prop,
- s1: Shrink[T1], pp1: T1 => Pretty,
- s2: Shrink[T2], pp2: T2 => Pretty,
- s3: Shrink[T3], pp3: T3 => Pretty,
- s4: Shrink[T4], pp4: T4 => Pretty,
- s5: Shrink[T5], pp5: T5 => Pretty,
- s6: Shrink[T6], pp6: T6 => Pretty
- ): Prop = forAll(g1)(t => forAll(g2,g3,g4,g5,g6)(f(t, _:T2, _:T3, _:T4, _:T5, _:T6)))
-
- /** Universal quantifier for seven explicit generators. Shrinks failed arguments
- * with the default shrink function for the type */
- def forAll[T1,T2,T3,T4,T5,T6,T7,P](
- g1: Gen[T1], g2: Gen[T2], g3: Gen[T3], g4: Gen[T4], g5: Gen[T5], g6: Gen[T6], g7: Gen[T7])(
- f: (T1,T2,T3,T4,T5,T6,T7) => P)(implicit
- p: P => Prop,
- s1: Shrink[T1], pp1: T1 => Pretty,
- s2: Shrink[T2], pp2: T2 => Pretty,
- s3: Shrink[T3], pp3: T3 => Pretty,
- s4: Shrink[T4], pp4: T4 => Pretty,
- s5: Shrink[T5], pp5: T5 => Pretty,
- s6: Shrink[T6], pp6: T6 => Pretty,
- s7: Shrink[T7], pp7: T7 => Pretty
- ): Prop = forAll(g1)(t => forAll(g2,g3,g4,g5,g6,g7)(f(t, _:T2, _:T3, _:T4, _:T5, _:T6, _:T7)))
-
- /** Universal quantifier for eight explicit generators. Shrinks failed arguments
- * with the default shrink function for the type */
- def forAll[T1,T2,T3,T4,T5,T6,T7,T8,P](
- g1: Gen[T1], g2: Gen[T2], g3: Gen[T3], g4: Gen[T4], g5: Gen[T5], g6: Gen[T6], g7: Gen[T7], g8: Gen[T8])(
- f: (T1,T2,T3,T4,T5,T6,T7,T8) => P)(implicit
- p: P => Prop,
- s1: Shrink[T1], pp1: T1 => Pretty,
- s2: Shrink[T2], pp2: T2 => Pretty,
- s3: Shrink[T3], pp3: T3 => Pretty,
- s4: Shrink[T4], pp4: T4 => Pretty,
- s5: Shrink[T5], pp5: T5 => Pretty,
- s6: Shrink[T6], pp6: T6 => Pretty,
- s7: Shrink[T7], pp7: T7 => Pretty,
- s8: Shrink[T8], pp8: T8 => Pretty
- ): Prop = forAll(g1)(t => forAll(g2,g3,g4,g5,g6,g7,g8)(f(t, _:T2, _:T3, _:T4, _:T5, _:T6, _:T7, _:T8)))
-
- /** Converts a function into a universally quantified property */
- def forAll[A1,P] (
- f: A1 => P)(implicit
- p: P => Prop,
- a1: Arbitrary[A1], s1: Shrink[A1], pp1: A1 => Pretty
- ): Prop = forAllShrink(arbitrary[A1],shrink[A1])(f andThen p)
-
- /** Converts a function into a universally quantified property */
- def forAll[A1,A2,P] (
- f: (A1,A2) => P)(implicit
- p: P => Prop,
- a1: Arbitrary[A1], s1: Shrink[A1], pp1: A1 => Pretty,
- a2: Arbitrary[A2], s2: Shrink[A2], pp2: A2 => Pretty
- ): Prop = forAll((a: A1) => forAll(f(a, _:A2)))
-
- /** Converts a function into a universally quantified property */
- def forAll[A1,A2,A3,P] (
- f: (A1,A2,A3) => P)(implicit
- p: P => Prop,
- a1: Arbitrary[A1], s1: Shrink[A1], pp1: A1 => Pretty,
- a2: Arbitrary[A2], s2: Shrink[A2], pp2: A2 => Pretty,
- a3: Arbitrary[A3], s3: Shrink[A3], pp3: A3 => Pretty
- ): Prop = forAll((a: A1) => forAll(f(a, _:A2, _:A3)))
-
- /** Converts a function into a universally quantified property */
- def forAll[A1,A2,A3,A4,P] (
- f: (A1,A2,A3,A4) => P)(implicit
- p: P => Prop,
- a1: Arbitrary[A1], s1: Shrink[A1], pp1: A1 => Pretty,
- a2: Arbitrary[A2], s2: Shrink[A2], pp2: A2 => Pretty,
- a3: Arbitrary[A3], s3: Shrink[A3], pp3: A3 => Pretty,
- a4: Arbitrary[A4], s4: Shrink[A4], pp4: A4 => Pretty
- ): Prop = forAll((a: A1) => forAll(f(a, _:A2, _:A3, _:A4)))
-
- /** Converts a function into a universally quantified property */
- def forAll[A1,A2,A3,A4,A5,P] (
- f: (A1,A2,A3,A4,A5) => P)(implicit
- p: P => Prop,
- a1: Arbitrary[A1], s1: Shrink[A1], pp1: A1 => Pretty,
- a2: Arbitrary[A2], s2: Shrink[A2], pp2: A2 => Pretty,
- a3: Arbitrary[A3], s3: Shrink[A3], pp3: A3 => Pretty,
- a4: Arbitrary[A4], s4: Shrink[A4], pp4: A4 => Pretty,
- a5: Arbitrary[A5], s5: Shrink[A5], pp5: A5 => Pretty
- ): Prop = forAll((a: A1) => forAll(f(a, _:A2, _:A3, _:A4, _:A5)))
-
- /** Converts a function into a universally quantified property */
- def forAll[A1,A2,A3,A4,A5,A6,P] (
- f: (A1,A2,A3,A4,A5,A6) => P)(implicit
- p: P => Prop,
- a1: Arbitrary[A1], s1: Shrink[A1], pp1: A1 => Pretty,
- a2: Arbitrary[A2], s2: Shrink[A2], pp2: A2 => Pretty,
- a3: Arbitrary[A3], s3: Shrink[A3], pp3: A3 => Pretty,
- a4: Arbitrary[A4], s4: Shrink[A4], pp4: A4 => Pretty,
- a5: Arbitrary[A5], s5: Shrink[A5], pp5: A5 => Pretty,
- a6: Arbitrary[A6], s6: Shrink[A6], pp6: A6 => Pretty
- ): Prop = forAll((a: A1) => forAll(f(a, _:A2, _:A3, _:A4, _:A5, _:A6)))
-
- /** Converts a function into a universally quantified property */
- def forAll[A1,A2,A3,A4,A5,A6,A7,P] (
- f: (A1,A2,A3,A4,A5,A6,A7) => P)(implicit
- p: P => Prop,
- a1: Arbitrary[A1], s1: Shrink[A1], pp1: A1 => Pretty,
- a2: Arbitrary[A2], s2: Shrink[A2], pp2: A2 => Pretty,
- a3: Arbitrary[A3], s3: Shrink[A3], pp3: A3 => Pretty,
- a4: Arbitrary[A4], s4: Shrink[A4], pp4: A4 => Pretty,
- a5: Arbitrary[A5], s5: Shrink[A5], pp5: A5 => Pretty,
- a6: Arbitrary[A6], s6: Shrink[A6], pp6: A6 => Pretty,
- a7: Arbitrary[A7], s7: Shrink[A7], pp7: A7 => Pretty
- ): Prop = forAll((a: A1) => forAll(f(a, _:A2, _:A3, _:A4, _:A5, _:A6, _:A7)))
-
- /** Converts a function into a universally quantified property */
- def forAll[A1,A2,A3,A4,A5,A6,A7,A8,P] (
- f: (A1,A2,A3,A4,A5,A6,A7,A8) => P)(implicit
- p: P => Prop,
- a1: Arbitrary[A1], s1: Shrink[A1], pp1: A1 => Pretty,
- a2: Arbitrary[A2], s2: Shrink[A2], pp2: A2 => Pretty,
- a3: Arbitrary[A3], s3: Shrink[A3], pp3: A3 => Pretty,
- a4: Arbitrary[A4], s4: Shrink[A4], pp4: A4 => Pretty,
- a5: Arbitrary[A5], s5: Shrink[A5], pp5: A5 => Pretty,
- a6: Arbitrary[A6], s6: Shrink[A6], pp6: A6 => Pretty,
- a7: Arbitrary[A7], s7: Shrink[A7], pp7: A7 => Pretty,
- a8: Arbitrary[A8], s8: Shrink[A8], pp8: A8 => Pretty
- ): Prop = forAll((a: A1) => forAll(f(a, _:A2, _:A3, _:A4, _:A5, _:A6, _:A7, _:A8)))
-
- /** Ensures that the property expression passed in completes within the given
-  * amount of time. */
- def within(maximumMs: Long)(wrappedProp: => Prop): Prop = new Prop {
- @tailrec private def attempt(prms: Parameters, endTime: Long): Result = {
- val result = wrappedProp.apply(prms)
- if (System.currentTimeMillis > endTime) {
- (if(result.failure) result else Result(status = False)).label("Timeout")
- } else {
- if (result.success) result
- else attempt(prms, endTime)
- }
- }
- def apply(prms: Parameters) = attempt(prms, System.currentTimeMillis + maximumMs)
- }
-}
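Usage sketch (illustrative, not taken from the removed sources): a minimal example of the Prop combinators above, assuming ScalaCheck 1.11.x on the classpath. The ConcatProps object and its property bodies are invented for illustration.

{{{
import org.scalacheck.Prop
import org.scalacheck.Prop.{forAll, BooleanOperators}

object ConcatProps {
  // Universally quantified property with an implication (==>) and a label (:|).
  val concatLength: Prop = forAll { (xs: List[Int], ys: List[Int]) =>
    (xs.nonEmpty && ys.nonEmpty) ==> {
      ((xs ++ ys).length == xs.length + ys.length) :| "length is additive"
    }
  }

  // Prop.all holds if and only if every combined property holds.
  val combined: Prop = Prop.all(concatLength, forAll { (n: Int) => n + 0 == n })
}
}}}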
diff --git a/src/partest-extras/scala/org/scalacheck/Properties.scala b/src/partest-extras/scala/org/scalacheck/Properties.scala
deleted file mode 100644
index abaac61c7f..0000000000
--- a/src/partest-extras/scala/org/scalacheck/Properties.scala
+++ /dev/null
@@ -1,82 +0,0 @@
-/*-------------------------------------------------------------------------*\
-** ScalaCheck **
-** Copyright (c) 2007-2014 Rickard Nilsson. All rights reserved. **
-** http://www.scalacheck.org **
-** **
-** This software is released under the terms of the Revised BSD License. **
-** There is NO WARRANTY. See the file LICENSE for the full text. **
-\*------------------------------------------------------------------------ */
-
-package org.scalacheck
-
-import util.ConsoleReporter
-
-/** Represents a collection of properties, with convenient methods
- * for checking all properties at once. This class is itself a property, which
- * holds if and only if all of the contained properties hold.
- * <p>Properties are added in the following way:</p>
- *
- * {{{
- * object MyProps extends Properties("MyProps") {
- * property("myProp1") = forAll { (n:Int, m:Int) =>
- * n+m == m+n
- * }
- * }
- * }}}
- */
-class Properties(val name: String) extends Prop {
-
- private val props = new scala.collection.mutable.ListBuffer[(String,Prop)]
-
- /** Returns one property which holds if and only if all of the
- * properties in this property collection hold */
- private def oneProperty: Prop = Prop.all((properties map (_._2)):_*)
-
- /** Returns all properties of this collection in a list of name/property
- * pairs. */
- def properties: Seq[(String,Prop)] = props
-
- def apply(p: Gen.Parameters) = oneProperty(p)
-
- /** Convenience method that checks the properties with the given parameters
- * and reports the result on the console. If you need to get the results
- * from the test use the `check` methods in [[org.scalacheck.Test]]
- * instead. */
- override def check(prms: Test.Parameters): Unit = Test.checkProperties(
- prms.withTestCallback(ConsoleReporter(1) chain prms.testCallback), this
- )
-
- /** Convenience method that checks the properties and reports the
- * result on the console. If you need to get the results from the test use
- * the `check` methods in [[org.scalacheck.Test]] instead. */
- override def check: Unit = check(Test.Parameters.default)
-
- /** The logic for main, separated out to make it easier to
- * avoid System.exit calls. Returns exit code.
- */
- override def mainRunner(args: Array[String]): Int = {
- Test.parseParams(args) match {
- case Some(params) =>
- val res = Test.checkProperties(params, this)
- val failed = res.filter(!_._2.passed).size
- failed
- case None =>
- println("Incorrect options")
- -1
- }
- }
-
- /** Adds all properties from another property collection to this one. */
- def include(ps: Properties) = for((n,p) <- ps.properties) property(n) = p
-
- /** Used for specifying properties. Usage:
- * {{{
- * property("myProp") = ...
- * }}}
- */
- class PropertySpecifier() {
- def update(propName: String, p: Prop) = props += ((name+"."+propName, p))
- }
-
- lazy val property = new PropertySpecifier()
-}
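Usage sketch (illustrative): assembling and composing a Properties collection with the API above. StringSpec and AllSpecs are invented names.

{{{
import org.scalacheck.Properties
import org.scalacheck.Prop.forAll

object StringSpec extends Properties("String") {
  property("concat length") = forAll { (a: String, b: String) =>
    (a + b).length == a.length + b.length
  }
  property("startsWith") = forAll { (a: String, b: String) =>
    (a + b).startsWith(a)
  }
}

// Suites compose: AllSpecs holds iff every included property holds.
object AllSpecs extends Properties("All") {
  include(StringSpec)
}
}}}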
diff --git a/src/partest-extras/scala/org/scalacheck/ScalaCheckFramework.scala b/src/partest-extras/scala/org/scalacheck/ScalaCheckFramework.scala
deleted file mode 100644
index 754b67764d..0000000000
--- a/src/partest-extras/scala/org/scalacheck/ScalaCheckFramework.scala
+++ /dev/null
@@ -1,93 +0,0 @@
-/*-------------------------------------------------------------------------*\
-** ScalaCheck **
-** Copyright (c) 2007-2014 Rickard Nilsson. All rights reserved. **
-** http://www.scalacheck.org **
-** **
-** This software is released under the terms of the Revised BSD License. **
-** There is NO WARRANTY. See the file LICENSE for the full text. **
-\*------------------------------------------------------------------------ */
-
-package org.scalacheck
-
-import util.Pretty
-
-import org.scalatools.testing._
-
-class ScalaCheckFramework extends Framework {
-
- private def mkFP(mod: Boolean, cname: String) =
- new SubclassFingerprint {
- val superClassName = cname
- val isModule = mod
- }
-
- val name = "ScalaCheck"
-
- val tests = Array[Fingerprint](
- mkFP(true, "org.scalacheck.Properties"),
- mkFP(false, "org.scalacheck.Prop"),
- mkFP(false, "org.scalacheck.Properties"),
- mkFP(true, "org.scalacheck.Prop")
- )
-
- def testRunner(loader: ClassLoader, loggers: Array[Logger]) = new Runner2 {
-
- private def asEvent(nr: (String, Test.Result)) = nr match {
- case (n: String, r: Test.Result) => new Event {
- val testName = n
- val description = n
- val result = r.status match {
- case Test.Passed => Result.Success
- case _:Test.Proved => Result.Success
- case _:Test.Failed => Result.Failure
- case Test.Exhausted => Result.Skipped
- case _:Test.PropException | _:Test.GenException => Result.Error
- }
- val error = r.status match {
- case Test.PropException(_, e, _) => e
- case _:Test.Failed => new Exception(Pretty.pretty(r,Pretty.Params(0)))
- case _ => null
- }
- }
- }
-
- def run(testClassName: String, fingerprint: Fingerprint, handler: EventHandler, args: Array[String]) {
-
- val testCallback = new Test.TestCallback {
- override def onPropEval(n: String, w: Int, s: Int, d: Int) = {}
-
- override def onTestResult(n: String, r: Test.Result) = {
- for (l <- loggers) {
- import Pretty._
- val verbosityOpts = Set("-verbosity", "-v")
- val verbosity = args.grouped(2).filter(twos => verbosityOpts(twos.head)).toSeq.headOption.map(_.last).map(_.toInt).getOrElse(0)
- l.info(
- (if (r.passed) "+ " else "! ") + n + ": " + pretty(r, Params(verbosity))
- )
- }
- handler.handle(asEvent((n,r)))
- }
- }
-
- val prms = Test.parseParams(args) match {
- case Some(params) =>
- params.withTestCallback(testCallback).withCustomClassLoader(Some(loader))
- // TODO: Maybe handle this a bit better than throwing exception?
- case None => throw new Exception()
- }
-
- fingerprint match {
- case fp: SubclassFingerprint =>
- val obj =
- if(fp.isModule) Class.forName(testClassName + "$", true, loader).getField("MODULE$").get(null)
- else Class.forName(testClassName, true, loader).newInstance
- if(obj.isInstanceOf[Properties])
- Test.checkProperties(prms, obj.asInstanceOf[Properties])
- else
- handler.handle(asEvent((testClassName, Test.check(prms, obj.asInstanceOf[Prop]))))
- }
- }
-
- }
-
-}
diff --git a/src/partest-extras/scala/org/scalacheck/Shrink.scala b/src/partest-extras/scala/org/scalacheck/Shrink.scala
deleted file mode 100644
index 8ec28f4c4b..0000000000
--- a/src/partest-extras/scala/org/scalacheck/Shrink.scala
+++ /dev/null
@@ -1,215 +0,0 @@
-/*-------------------------------------------------------------------------*\
-** ScalaCheck **
-** Copyright (c) 2007-2014 Rickard Nilsson. All rights reserved. **
-** http://www.scalacheck.org **
-** **
-** This software is released under the terms of the Revised BSD License. **
-** There is NO WARRANTY. See the file LICENSE for the full text. **
-\*------------------------------------------------------------------------ */
-
-package org.scalacheck
-
-import util.{Buildable,Buildable2}
-import scala.collection.{ JavaConversions => jcl }
-
-sealed abstract class Shrink[T] {
- def shrink(x: T): Stream[T]
-}
-
-object Shrink {
-
- import Stream.{cons, empty}
- import scala.collection._
- import java.util.ArrayList
-
- /** Interleaves two streams */
- private def interleave[T](xs: Stream[T], ys: Stream[T]): Stream[T] =
- if(xs.isEmpty) ys
- else if(ys.isEmpty) xs
- else cons(xs.head, cons(ys.head, interleave(xs.tail, ys.tail)))
-
- /** Shrink instance factory */
- def apply[T](s: T => Stream[T]): Shrink[T] = new Shrink[T] {
- override def shrink(x: T) = s(x)
- }
-
- /** Shrink a value */
- def shrink[T](x: T)(implicit s: Shrink[T]): Stream[T] = s.shrink(x)
-
- /** Default shrink instance */
- implicit def shrinkAny[T]: Shrink[T] = Shrink(x => empty)
-
- /** Shrink instance of container */
- implicit def shrinkContainer[C[_],T](implicit v: C[T] => Traversable[T], s: Shrink[T],
- b: Buildable[T,C]
- ): Shrink[C[T]] = Shrink { xs: C[T] =>
- val ys = v(xs)
- val zs = ys.toStream
- removeChunks(ys.size,zs).append(shrinkOne(zs)).map(b.fromIterable)
- }
-
- /** Shrink instance of container2 */
- implicit def shrinkContainer2[C[_,_],T,U](implicit v: C[T,U] => Traversable[(T,U)], s: Shrink[(T,U)],
- b: Buildable2[T,U,C]
- ): Shrink[C[T,U]] = Shrink { xs: C[T,U] =>
- val ys = v(xs)
- val zs = ys.toStream
- removeChunks(ys.size,zs).append(shrinkOne(zs)).map(b.fromIterable)
- }
-
- private def removeChunks[T](n: Int, xs: Stream[T]): Stream[Stream[T]] =
- if (xs.isEmpty) empty
- else if (xs.tail.isEmpty) cons(empty, empty)
- else {
- val n1 = n / 2
- val n2 = n - n1
- lazy val xs1 = xs.take(n1)
- lazy val xs2 = xs.drop(n1)
- lazy val xs3 =
- for (ys1 <- removeChunks(n1, xs1) if !ys1.isEmpty) yield ys1 append xs2
- lazy val xs4 =
- for (ys2 <- removeChunks(n2, xs2) if !ys2.isEmpty) yield xs1 append ys2
-
- cons(xs1, cons(xs2, interleave(xs3, xs4)))
- }
-
- private def shrinkOne[T : Shrink](zs: Stream[T]): Stream[Stream[T]] =
- if (zs.isEmpty) empty
- else {
- val x = zs.head
- val xs = zs.tail
- shrink(x).map(cons(_,xs)).append(shrinkOne(xs).map(cons(x,_)))
- }
-
- /** Shrink instance of integer */
- implicit lazy val shrinkInt: Shrink[Int] = Shrink { n =>
-
- def halfs(n: Int): Stream[Int] =
- if(n == 0) empty else cons(n, halfs(n/2))
-
- if(n == 0) empty else {
- val ns = halfs(n/2).map(n - _)
- cons(0, interleave(ns, ns.map(-1 * _)))
- }
- }
-
- /** Shrink instance of String */
- implicit lazy val shrinkString: Shrink[String] = Shrink { s =>
- shrinkContainer[List,Char].shrink(s.toList).map(_.mkString)
- }
-
- /** Shrink instance of Option */
- implicit def shrinkOption[T : Shrink]: Shrink[Option[T]] = Shrink {
- case None => empty
- case Some(x) => cons(None, for(y <- shrink(x)) yield Some(y))
- }
-
- /** Shrink instance of 2-tuple */
- implicit def shrinkTuple2[
- T1:Shrink, T2:Shrink
- ]: Shrink[(T1,T2)] =
- Shrink { case (t1,t2) =>
- shrink(t1).map((_,t2)) append
- shrink(t2).map((t1,_))
- }
-
- /** Shrink instance of 3-tuple */
- implicit def shrinkTuple3[
- T1:Shrink, T2:Shrink, T3:Shrink
- ]: Shrink[(T1,T2,T3)] =
- Shrink { case (t1,t2,t3) =>
- shrink(t1).map((_, t2, t3)) append
- shrink(t2).map((t1, _, t3)) append
- shrink(t3).map((t1, t2, _))
- }
-
- /** Shrink instance of 4-tuple */
- implicit def shrinkTuple4[
- T1:Shrink, T2:Shrink, T3:Shrink, T4:Shrink
- ]: Shrink[(T1,T2,T3,T4)] =
- Shrink { case (t1,t2,t3,t4) =>
- shrink(t1).map((_, t2, t3, t4)) append
- shrink(t2).map((t1, _, t3, t4)) append
- shrink(t3).map((t1, t2, _, t4)) append
- shrink(t4).map((t1, t2, t3, _))
- }
-
- /** Shrink instance of 5-tuple */
- implicit def shrinkTuple5[
- T1:Shrink, T2:Shrink, T3:Shrink, T4:Shrink, T5:Shrink
- ]: Shrink[(T1,T2,T3,T4,T5)] =
- Shrink { case (t1,t2,t3,t4,t5) =>
- shrink(t1).map((_, t2, t3, t4, t5)) append
- shrink(t2).map((t1, _, t3, t4, t5)) append
- shrink(t3).map((t1, t2, _, t4, t5)) append
- shrink(t4).map((t1, t2, t3, _, t5)) append
- shrink(t5).map((t1, t2, t3, t4, _))
- }
-
- /** Shrink instance of 6-tuple */
- implicit def shrinkTuple6[
- T1:Shrink, T2:Shrink, T3:Shrink, T4:Shrink, T5:Shrink, T6:Shrink
- ]: Shrink[(T1,T2,T3,T4,T5,T6)] =
- Shrink { case (t1,t2,t3,t4,t5,t6) =>
- shrink(t1).map((_, t2, t3, t4, t5, t6)) append
- shrink(t2).map((t1, _, t3, t4, t5, t6)) append
- shrink(t3).map((t1, t2, _, t4, t5, t6)) append
- shrink(t4).map((t1, t2, t3, _, t5, t6)) append
- shrink(t5).map((t1, t2, t3, t4, _, t6)) append
- shrink(t6).map((t1, t2, t3, t4, t5, _))
- }
-
- /** Shrink instance of 7-tuple */
- implicit def shrinkTuple7[
- T1:Shrink, T2:Shrink, T3:Shrink, T4:Shrink, T5:Shrink, T6:Shrink, T7:Shrink
- ]: Shrink[(T1,T2,T3,T4,T5,T6,T7)] =
- Shrink { case (t1,t2,t3,t4,t5,t6,t7) =>
- shrink(t1).map((_, t2, t3, t4, t5, t6, t7)) append
- shrink(t2).map((t1, _, t3, t4, t5, t6, t7)) append
- shrink(t3).map((t1, t2, _, t4, t5, t6, t7)) append
- shrink(t4).map((t1, t2, t3, _, t5, t6, t7)) append
- shrink(t5).map((t1, t2, t3, t4, _, t6, t7)) append
- shrink(t6).map((t1, t2, t3, t4, t5, _, t7)) append
- shrink(t7).map((t1, t2, t3, t4, t5, t6, _))
- }
-
- /** Shrink instance of 8-tuple */
- implicit def shrinkTuple8[
- T1:Shrink, T2:Shrink, T3:Shrink, T4:Shrink, T5:Shrink, T6:Shrink,
- T7:Shrink, T8:Shrink
- ]: Shrink[(T1,T2,T3,T4,T5,T6,T7,T8)] =
- Shrink { case (t1,t2,t3,t4,t5,t6,t7,t8) =>
- shrink(t1).map((_, t2, t3, t4, t5, t6, t7, t8)) append
- shrink(t2).map((t1, _, t3, t4, t5, t6, t7, t8)) append
- shrink(t3).map((t1, t2, _, t4, t5, t6, t7, t8)) append
- shrink(t4).map((t1, t2, t3, _, t5, t6, t7, t8)) append
- shrink(t5).map((t1, t2, t3, t4, _, t6, t7, t8)) append
- shrink(t6).map((t1, t2, t3, t4, t5, _, t7, t8)) append
- shrink(t7).map((t1, t2, t3, t4, t5, t6, _, t8)) append
- shrink(t8).map((t1, t2, t3, t4, t5, t6, t7, _))
- }
-
- /** Shrink instance of 9-tuple */
- implicit def shrinkTuple9[
- T1:Shrink, T2:Shrink, T3:Shrink, T4:Shrink, T5:Shrink, T6:Shrink,
- T7:Shrink, T8:Shrink, T9:Shrink
- ]: Shrink[(T1,T2,T3,T4,T5,T6,T7,T8,T9)] =
- Shrink { case (t1,t2,t3,t4,t5,t6,t7,t8,t9) =>
- shrink(t1).map((_, t2, t3, t4, t5, t6, t7, t8, t9)) append
- shrink(t2).map((t1, _, t3, t4, t5, t6, t7, t8, t9)) append
- shrink(t3).map((t1, t2, _, t4, t5, t6, t7, t8, t9)) append
- shrink(t4).map((t1, t2, t3, _, t5, t6, t7, t8, t9)) append
- shrink(t5).map((t1, t2, t3, t4, _, t6, t7, t8, t9)) append
- shrink(t6).map((t1, t2, t3, t4, t5, _, t7, t8, t9)) append
- shrink(t7).map((t1, t2, t3, t4, t5, t6, _, t8, t9)) append
- shrink(t8).map((t1, t2, t3, t4, t5, t6, t7, _, t9)) append
- shrink(t9).map((t1, t2, t3, t4, t5, t6, t7, t8, _))
- }
-
- /** Transform a Shrink[T] to a Shrink[U] where T and U are two isomorphic types
- * whose relationship is described by the provided transformation functions.
- * (exponential functor map) */
- def xmap[T, U](from: T => U, to: U => T)(implicit st: Shrink[T]): Shrink[U] = Shrink[U] { u: U ⇒
- st.shrink(to(u)).map(from)
- }
-}
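Usage sketch (illustrative): deriving a Shrink instance for a wrapper type with Shrink.xmap and invoking shrinking directly. The Meters type is hypothetical.

{{{
import org.scalacheck.Shrink
import org.scalacheck.Shrink.shrink

object MetersShrinking {
  // Hypothetical wrapper type; shrinking delegates to the built-in Shrink[Int].
  case class Meters(value: Int)

  implicit val shrinkMeters: Shrink[Meters] =
    Shrink.xmap[Int, Meters](Meters.apply, _.value)

  // A stream of progressively simpler candidates, e.g. Meters(0), Meters(50), ...
  val candidates: Stream[Meters] = shrink(Meters(100))
}
}}}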
diff --git a/src/partest-extras/scala/org/scalacheck/Test.scala b/src/partest-extras/scala/org/scalacheck/Test.scala
deleted file mode 100644
index 9a9c62b93f..0000000000
--- a/src/partest-extras/scala/org/scalacheck/Test.scala
+++ /dev/null
@@ -1,372 +0,0 @@
-/*-------------------------------------------------------------------------*\
-** ScalaCheck **
-** Copyright (c) 2007-2014 Rickard Nilsson. All rights reserved. **
-** http://www.scalacheck.org **
-** **
-** This software is released under the terms of the Revised BSD License. **
-** There is NO WARRANTY. See the file LICENSE for the full text. **
-\*------------------------------------------------------------------------ */
-
-package org.scalacheck
-
-import Prop.Arg
-
-object Test {
-
- import util.{FreqMap, ConsoleReporter}
-
- /** Test parameters used by the check methods. Default
- * parameters are defined by [[Test.Parameters.Default]]. */
- trait Parameters {
- /** The minimum number of tests that must succeed for ScalaCheck to
- * consider a property passed. */
- val minSuccessfulTests: Int
-
- /** Create a copy of this [[Test.Parameters]] instance with
- * [[Test.Parameters.minSuccessfulTests]] set to the specified value. */
- def withMinSuccessfulTests(minSuccessfulTests: Int): Parameters = cp(
- minSuccessfulTests = minSuccessfulTests
- )
-
- /** The starting size given as parameter to the generators. */
- val minSize: Int
-
- /** Create a copy of this [[Test.Parameters]] instance with
- * [[Test.Parameters.minSize]] set to the specified value. */
- def withMinSize(minSize: Int): Parameters = cp(
- minSize = minSize
- )
-
- /** The maximum size given as parameter to the generators. */
- val maxSize: Int
-
- /** Create a copy of this [[Test.Parameters]] instance with
- * [[Test.Parameters.maxSize]] set to the specified value. */
- def withMaxSize(maxSize: Int): Parameters = cp(
- maxSize = maxSize
- )
-
- /** The random number generator used. */
- val rng: scala.util.Random
-
- /** Create a copy of this [[Test.Parameters]] instance with
- * [[Test.Parameters.rng]] set to the specified value. */
- def withRng(rng: scala.util.Random): Parameters = cp(
- rng = rng
- )
-
- /** The number of tests to run in parallel. */
- val workers: Int
-
- /** Create a copy of this [[Test.Parameters]] instance with
- * [[Test.Parameters.workers]] set to the specified value. */
- def withWorkers(workers: Int): Parameters = cp(
- workers = workers
- )
-
- /** A callback that ScalaCheck calls each time a test is executed. */
- val testCallback: TestCallback
-
- /** Create a copy of this [[Test.Parameters]] instance with
- * [[Test.Parameters.testCallback]] set to the specified value. */
- def withTestCallback(testCallback: TestCallback): Parameters = cp(
- testCallback = testCallback
- )
-
- /** The maximum ratio between discarded and passed tests allowed before
- * ScalaCheck gives up and discards the property. At least
-     * `minSuccessfulTests` will always be run, though. */
- val maxDiscardRatio: Float
-
- /** Create a copy of this [[Test.Parameters]] instance with
- * [[Test.Parameters.maxDiscardRatio]] set to the specified value. */
- def withMaxDiscardRatio(maxDiscardRatio: Float): Parameters = cp(
- maxDiscardRatio = maxDiscardRatio
- )
-
- /** A custom class loader that should be used during test execution. */
- val customClassLoader: Option[ClassLoader]
-
- /** Create a copy of this [[Test.Parameters]] instance with
- * [[Test.Parameters.customClassLoader]] set to the specified value. */
- def withCustomClassLoader(customClassLoader: Option[ClassLoader]
- ): Parameters = cp(
- customClassLoader = customClassLoader
- )
-
- // private since we can't guarantee binary compatibility for this one
- private case class cp(
- minSuccessfulTests: Int = minSuccessfulTests,
- minSize: Int = minSize,
- maxSize: Int = maxSize,
- rng: scala.util.Random = rng,
- workers: Int = workers,
- testCallback: TestCallback = testCallback,
- maxDiscardRatio: Float = maxDiscardRatio,
- customClassLoader: Option[ClassLoader] = customClassLoader
- ) extends Parameters
- }
-
- /** Test parameters used by the check methods. Default
- * parameters are defined by [[Test.Parameters.Default]]. */
- object Parameters {
-    /** Default test parameters trait. This can be overridden if you need to
- * tweak the parameters:
- *
- * {{{
- * val myParams = new Parameters.Default {
-     *   override val minSuccessfulTests = 600
- * override val maxDiscardRatio = 8
- * }
- * }}}
- *
- * You can also use the withXXX-methods in
- * [[org.scalacheck.Test.Parameters]] to achieve
- * the same thing:
- *
- * {{{
- * val myParams = Parameters.default
- * .withMinSuccessfulTests(600)
- * .withMaxDiscardRatio(8)
- * }}} */
- trait Default extends Parameters {
- val minSuccessfulTests: Int = 100
- val minSize: Int = 0
- val maxSize: Int = Gen.Parameters.default.size
- val rng: scala.util.Random = Gen.Parameters.default.rng
- val workers: Int = 1
- val testCallback: TestCallback = new TestCallback {}
- val maxDiscardRatio: Float = 5
- val customClassLoader: Option[ClassLoader] = None
- }
-
- /** Default test parameters instance. */
- val default: Parameters = new Default {}
-
- /** Verbose console reporter test parameters instance. */
- val defaultVerbose: Parameters = new Default {
- override val testCallback = ConsoleReporter(2)
- }
- }
-
- /** Test statistics */
- case class Result(
- status: Status,
- succeeded: Int,
- discarded: Int,
- freqMap: FreqMap[Set[Any]],
- time: Long = 0
- ) {
- def passed = status match {
- case Passed => true
- case Proved(_) => true
- case _ => false
- }
- }
-
- /** Test status */
- sealed trait Status
-
- /** ScalaCheck found enough cases for which the property holds, so the
- * property is considered correct. (It is not proved correct, though). */
- case object Passed extends Status
-
- /** ScalaCheck managed to prove the property correct */
- sealed case class Proved(args: List[Arg[Any]]) extends Status
-
- /** The property was proved wrong with the given concrete arguments. */
- sealed case class Failed(args: List[Arg[Any]], labels: Set[String]) extends Status
-
-  /** The property test was exhausted; it wasn't possible to generate enough
- * concrete arguments satisfying the preconditions to get enough passing
- * property evaluations. */
- case object Exhausted extends Status
-
- /** An exception was raised when trying to evaluate the property with the
- * given concrete arguments. If an exception was raised before or during
- * argument generation, the argument list will be empty. */
- sealed case class PropException(args: List[Arg[Any]], e: Throwable,
- labels: Set[String]) extends Status
-
- /** An exception was raised when trying to generate concrete arguments
- * for evaluating the property.
- * @deprecated Not used. The type PropException is used for all exceptions.
- */
- @deprecated("Not used. The type PropException is used for all exceptions.", "1.11.2")
- sealed case class GenException(e: Throwable) extends Status
-
- trait TestCallback { self =>
- /** Called each time a property is evaluated */
- def onPropEval(name: String, threadIdx: Int, succeeded: Int,
- discarded: Int): Unit = ()
-
- /** Called whenever a property has finished testing */
- def onTestResult(name: String, result: Result): Unit = ()
-
- def chain(testCallback: TestCallback) = new TestCallback {
- override def onPropEval(name: String, threadIdx: Int,
- succeeded: Int, discarded: Int
- ): Unit = {
- self.onPropEval(name,threadIdx,succeeded,discarded)
- testCallback.onPropEval(name,threadIdx,succeeded,discarded)
- }
-
- override def onTestResult(name: String, result: Result): Unit = {
- self.onTestResult(name,result)
- testCallback.onTestResult(name,result)
- }
- }
- }
-
- private def assertParams(prms: Parameters) = {
- import prms._
- if(
- minSuccessfulTests <= 0 ||
- maxDiscardRatio <= 0 ||
- minSize < 0 ||
- maxSize < minSize ||
- workers <= 0
- ) throw new IllegalArgumentException("Invalid test parameters")
- }
-
- private def secure[T](x: => T): Either[T,Throwable] =
- try { Left(x) } catch { case e: Throwable => Right(e) }
-
- def parseParams(args: Array[String]): Option[Parameters] = {
- var params = Parameters.default
- args.grouped(2).filter(_.size > 1).map(a => (a(0), a(1))).foreach {
- case ("-workers" | "-w", n) => params = params.withWorkers(n.toInt)
- case ("-minSize" | "-n", n) => params = params.withMinSize(n.toInt)
- case ("-maxSize" | "-x", n) => params = params.withMaxSize(n.toInt)
- case ("-verbosity" | "-v", n) => params = params.withTestCallback(ConsoleReporter(n.toInt))
- case ("-maxDiscardRatio" | "-r", n) => params = params.withMaxDiscardRatio(n.toFloat)
- case ("-minSuccessfulTests" | "-s", n) => params = params.withMinSuccessfulTests(n.toInt)
- case _ =>
- }
- Some(params)
- }
-
- /** Tests a property with parameters that are calculated by applying
- * the provided function to [[Test.Parameters.default]].
- * Example use:
- *
- * {{{
- * Test.check(p) { _.
- * withMinSuccessfulTests(80000).
- * withWorkers(4)
- * }
- * }}}
- */
- def check(p: Prop)(f: Parameters => Parameters): Result =
- check(f(Parameters.default), p)
-
- /** Tests a property with the given testing parameters, and returns
- * the test results. */
- def check(params: Parameters, p: Prop): Result = {
- import params._
- import concurrent._
-
- assertParams(params)
- if(workers > 1) {
- assert(!p.isInstanceOf[Commands], "Commands cannot be checked multi-threaded")
- }
-
- val iterations = math.ceil(minSuccessfulTests / (workers: Double))
- val sizeStep = (maxSize-minSize) / (iterations*workers)
- var stop = false
- val genPrms = new Gen.Parameters.Default { override val rng = params.rng }
- val tp = java.util.concurrent.Executors.newFixedThreadPool(workers)
- implicit val ec = ExecutionContext.fromExecutor(tp)
-
- def workerFun(workerIdx: Int): Result = {
- var n = 0 // passed tests
- var d = 0 // discarded tests
- var res: Result = null
- var fm = FreqMap.empty[Set[Any]]
- while(!stop && res == null && n < iterations) {
- val size = (minSize: Double) + (sizeStep * (workerIdx + (workers*(n+d))))
- val propRes = p(genPrms.withSize(size.round.toInt))
- fm = if(propRes.collected.isEmpty) fm else fm + propRes.collected
- propRes.status match {
- case Prop.Undecided =>
- d += 1
- testCallback.onPropEval("", workerIdx, n, d)
- // The below condition is kind of hacky. We have to have
- // some margin, otherwise workers might stop testing too
- // early because they have been exhausted, but the overall
- // test has not.
- if (n+d > minSuccessfulTests && 1+workers*maxDiscardRatio*n < d)
- res = Result(Exhausted, n, d, fm)
- case Prop.True =>
- n += 1
- testCallback.onPropEval("", workerIdx, n, d)
- case Prop.Proof =>
- n += 1
- res = Result(Proved(propRes.args), n, d, fm)
- stop = true
- case Prop.False =>
- res = Result(Failed(propRes.args,propRes.labels), n, d, fm)
- stop = true
- case Prop.Exception(e) =>
- res = Result(PropException(propRes.args,e,propRes.labels), n, d, fm)
- stop = true
- }
- }
- if (res == null) {
- if (maxDiscardRatio*n > d) Result(Passed, n, d, fm)
- else Result(Exhausted, n, d, fm)
- } else res
- }
-
- def mergeResults(r1: Result, r2: Result): Result = {
- val Result(st1, s1, d1, fm1, _) = r1
- val Result(st2, s2, d2, fm2, _) = r2
- if (st1 != Passed && st1 != Exhausted)
- Result(st1, s1+s2, d1+d2, fm1++fm2, 0)
- else if (st2 != Passed && st2 != Exhausted)
- Result(st2, s1+s2, d1+d2, fm1++fm2, 0)
- else {
- if (s1+s2 >= minSuccessfulTests && maxDiscardRatio*(s1+s2) >= (d1+d2))
- Result(Passed, s1+s2, d1+d2, fm1++fm2, 0)
- else
- Result(Exhausted, s1+s2, d1+d2, fm1++fm2, 0)
- }
- }
-
- try {
- val start = System.currentTimeMillis
- val r =
- if(workers < 2) workerFun(0)
- else {
- val fs = List.range(0,workers) map (idx => Future {
- params.customClassLoader.map(
- Thread.currentThread.setContextClassLoader(_)
- )
- blocking { workerFun(idx) }
- })
- val zeroRes = Result(Passed,0,0,FreqMap.empty[Set[Any]],0)
- val res = Future.fold(fs)(zeroRes)(mergeResults)
- Await.result(res, concurrent.duration.Duration.Inf)
- }
- val timedRes = r.copy(time = System.currentTimeMillis-start)
- params.testCallback.onTestResult("", timedRes)
- timedRes
- } finally {
- stop = true
- tp.shutdown()
- }
- }
-
- /** Check a set of properties. */
- def checkProperties(prms: Parameters, ps: Properties): Seq[(String,Result)] =
- ps.properties.map { case (name,p) =>
- val testCallback = new TestCallback {
- override def onPropEval(n: String, t: Int, s: Int, d: Int) =
- prms.testCallback.onPropEval(name,t,s,d)
- override def onTestResult(n: String, r: Result) =
- prms.testCallback.onTestResult(name,r)
- }
- val res = check(prms.withTestCallback(testCallback), p)
- (name,res)
- }
-}
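Usage sketch (illustrative): running a property programmatically with tweaked parameters and reading the result fields. The property itself is trivial and only serves the example.

{{{
import org.scalacheck.{Prop, Test}
import org.scalacheck.Prop.forAll

object RunCheck extends App {
  val prop: Prop = forAll { (n: Int) => math.abs(n % 2) <= 1 }

  // Require 500 passing evaluations, spread over two workers.
  val result: Test.Result = Test.check(prop) {
    _.withMinSuccessfulTests(500).withWorkers(2)
  }

  println(s"passed=${result.passed}, succeeded=${result.succeeded}, " +
    s"discarded=${result.discarded}, time=${result.time}ms")
}
}}}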
diff --git a/src/partest-extras/scala/org/scalacheck/util/Buildable.scala b/src/partest-extras/scala/org/scalacheck/util/Buildable.scala
deleted file mode 100644
index 6a275b05c2..0000000000
--- a/src/partest-extras/scala/org/scalacheck/util/Buildable.scala
+++ /dev/null
@@ -1,77 +0,0 @@
-/*-------------------------------------------------------------------------*\
-** ScalaCheck **
-** Copyright (c) 2007-2014 Rickard Nilsson. All rights reserved. **
-** http://www.scalacheck.org **
-** **
-** This software is released under the terms of the Revised BSD License. **
-** There is NO WARRANTY. See the file LICENSE for the full text. **
-\*------------------------------------------------------------------------ */
-
-package org.scalacheck.util
-
-import collection._
-
-trait Buildable[T,C[_]] {
- def builder: mutable.Builder[T,C[T]]
- def fromIterable(it: Traversable[T]): C[T] = {
- val b = builder
- b ++= it
- b.result()
- }
-}
-
-trait Buildable2[T,U,C[_,_]] {
- def builder: mutable.Builder[(T,U),C[T,U]]
- def fromIterable(it: Traversable[(T,U)]): C[T,U] = {
- val b = builder
- b ++= it
- b.result()
- }
-}
-
-object Buildable {
- import generic.CanBuildFrom
-
- implicit def buildableCanBuildFrom[T, C[_]](implicit c: CanBuildFrom[C[_], T, C[T]]) =
- new Buildable[T, C] {
- def builder = c.apply
- }
-
- import java.util.ArrayList
- implicit def buildableArrayList[T] = new Buildable[T,ArrayList] {
- def builder = new mutable.Builder[T,ArrayList[T]] {
- val al = new ArrayList[T]
- def +=(x: T) = {
- al.add(x)
- this
- }
- def clear() = al.clear()
- def result() = al
- }
- }
-
-}
-
-object Buildable2 {
-
- implicit def buildableMutableMap[T,U] = new Buildable2[T,U,mutable.Map] {
- def builder = mutable.Map.newBuilder
- }
-
- implicit def buildableImmutableMap[T,U] = new Buildable2[T,U,immutable.Map] {
- def builder = immutable.Map.newBuilder
- }
-
- implicit def buildableMap[T,U] = new Buildable2[T,U,Map] {
- def builder = Map.newBuilder
- }
-
- implicit def buildableImmutableSortedMap[T: Ordering, U] = new Buildable2[T,U,immutable.SortedMap] {
- def builder = immutable.SortedMap.newBuilder
- }
-
- implicit def buildableSortedMap[T: Ordering, U] = new Buildable2[T,U,SortedMap] {
- def builder = SortedMap.newBuilder
- }
-
-}
diff --git a/src/partest-extras/scala/org/scalacheck/util/CmdLineParser.scala b/src/partest-extras/scala/org/scalacheck/util/CmdLineParser.scala
deleted file mode 100644
index 45b6ac6948..0000000000
--- a/src/partest-extras/scala/org/scalacheck/util/CmdLineParser.scala
+++ /dev/null
@@ -1,41 +0,0 @@
-/*-------------------------------------------------------------------------*\
-** ScalaCheck **
-** Copyright (c) 2007-2014 Rickard Nilsson. All rights reserved. **
-** http://www.scalacheck.org **
-** **
-** This software is released under the terms of the Revised BSD License. **
-** There is NO WARRANTY. See the file LICENSE for the full text. **
-\*------------------------------------------------------------------------ */
-
-package org.scalacheck.util
-
-import scala.collection.Set
-import org.scalacheck.Test
-
-private[scalacheck] trait CmdLineParser {
-
- type Elem = String
-
- trait Opt[+T] {
- val default: T
- val names: Set[String]
- val help: String
- }
- trait Flag extends Opt[Unit]
- trait IntOpt extends Opt[Int]
- trait FloatOpt extends Opt[Float]
- trait StrOpt extends Opt[String]
-
- class OptMap {
- private val opts = new collection.mutable.HashMap[Opt[_], Any]
- def apply(flag: Flag): Boolean = opts.contains(flag)
- def apply[T](opt: Opt[T]): T = opts.get(opt) match {
- case None => opt.default
- case Some(v) => v.asInstanceOf[T]
- }
- def update[T](opt: Opt[T], optVal: T) = opts.update(opt, optVal)
- }
-
- val opts: Set[Opt[_]]
-
-}
diff --git a/src/partest-extras/scala/org/scalacheck/util/ConsoleReporter.scala b/src/partest-extras/scala/org/scalacheck/util/ConsoleReporter.scala
deleted file mode 100644
index 89858dfb64..0000000000
--- a/src/partest-extras/scala/org/scalacheck/util/ConsoleReporter.scala
+++ /dev/null
@@ -1,44 +0,0 @@
-/*-------------------------------------------------------------------------*\
-** ScalaCheck **
-** Copyright (c) 2007-2014 Rickard Nilsson. All rights reserved. **
-** http://www.scalacheck.org **
-** **
-** This software is released under the terms of the Revised BSD License. **
-** There is NO WARRANTY. See the file LICENSE for the full text. **
-\*------------------------------------------------------------------------ */
-
-package org.scalacheck.util
-
-import Pretty.{Params, pretty, format}
-import org.scalacheck.{Prop, Properties, Test}
-
-/** A [[org.scalacheck.Test.TestCallback]] implementation that prints
- * test results directly to the console. This is the callback used
- * by ScalaCheck's command line test runner, and when you run [[org.scalacheck.Prop!.check:Unit*]]
- */
-class ConsoleReporter(val verbosity: Int) extends Test.TestCallback {
-
- private val prettyPrms = Params(verbosity)
-
- override def onTestResult(name: String, res: Test.Result) = {
- if(verbosity > 0) {
- if(name == "") {
- val s = (if(res.passed) "+ " else "! ") + pretty(res, prettyPrms)
- printf("\r%s\n", format(s, "", "", 75))
- } else {
- val s = (if(res.passed) "+ " else "! ") + name + ": " +
- pretty(res, prettyPrms)
- printf("\r%s\n", format(s, "", "", 75))
- }
- }
- }
-
-}
-
-object ConsoleReporter {
-
- /** Factory method, creates a ConsoleReporter with the
-   * given verbosity */
- def apply(verbosity: Int = 0) = new ConsoleReporter(verbosity)
-
-}
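Usage sketch (illustrative): routing a programmatic test run through a verbose console reporter; the checked property is a throwaway example.

{{{
import org.scalacheck.{Prop, Test}
import org.scalacheck.util.ConsoleReporter

object VerboseRun extends App {
  // Verbosity 2 matches what Test.Parameters.defaultVerbose wires in.
  val params = Test.Parameters.default
    .withTestCallback(ConsoleReporter(verbosity = 2))

  Test.check(params, Prop.forAll { (s: String) => s.reverse.reverse == s })
}
}}}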
diff --git a/src/partest-extras/scala/org/scalacheck/util/FreqMap.scala b/src/partest-extras/scala/org/scalacheck/util/FreqMap.scala
deleted file mode 100644
index 2a9f36f1e5..0000000000
--- a/src/partest-extras/scala/org/scalacheck/util/FreqMap.scala
+++ /dev/null
@@ -1,65 +0,0 @@
-/*-------------------------------------------------------------------------*\
-** ScalaCheck **
-** Copyright (c) 2007-2014 Rickard Nilsson. All rights reserved. **
-** http://www.scalacheck.org **
-** **
-** This software is released under the terms of the Revised BSD License. **
-** There is NO WARRANTY. See the file LICENSE for the full text. **
-\*------------------------------------------------------------------------ */
-
-package org.scalacheck.util
-
-trait FreqMap[T] {
- protected val underlying: scala.collection.immutable.Map[T,Int]
- val total: Int
-
- def +(t: T) = new FreqMap[T] {
- private val n = FreqMap.this.underlying.get(t) match {
- case None => 1
- case Some(n) => n+1
- }
- val underlying = FreqMap.this.underlying + (t -> n)
- val total = FreqMap.this.total + 1
- }
-
- def -(t: T) = new FreqMap[T] {
- val underlying = FreqMap.this.underlying.get(t) match {
- case None => FreqMap.this.underlying
- case Some(n) => FreqMap.this.underlying + (t -> (n-1))
- }
- val total = FreqMap.this.total + 1
- }
-
- def ++(fm: FreqMap[T]) = new FreqMap[T] {
- private val keys = FreqMap.this.underlying.keySet ++ fm.underlying.keySet
- private val mappings = keys.toStream.map { x =>
- (x, fm.getCount(x).getOrElse(0) + FreqMap.this.getCount(x).getOrElse(0))
- }
- val underlying = scala.collection.immutable.Map(mappings: _*)
- val total = FreqMap.this.total + fm.total
- }
-
- def --(fm: FreqMap[T]) = new FreqMap[T] {
- val underlying = FreqMap.this.underlying transform {
- case (x,n) => n - fm.getCount(x).getOrElse(0)
- }
- lazy val total = (0 /: underlying.valuesIterator) (_ + _)
- }
-
- def getCount(t: T) = underlying.get(t)
-
- def getCounts: List[(T,Int)] = underlying.toList.sortBy(-_._2)
-
- def getRatio(t: T) = for(c <- getCount(t)) yield (c: Float)/total
-
- def getRatios = for((t,c) <- getCounts) yield (t, (c: Float)/total)
-
- override def toString = underlying.toString
-}
-
-object FreqMap {
- def empty[T] = new FreqMap[T] {
- val underlying = scala.collection.immutable.Map.empty[T,Int]
- val total = 0
- }
-}
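The FreqMap dropped here is a small immutable frequency counter with ratio helpers, used when reporting collected test data. A usage sketch, under the assumption that a pre-removal ScalaCheck artifact still providing org.scalacheck.util.FreqMap is on the classpath:

// Usage sketch for the FreqMap API shown in the deleted file above.
import org.scalacheck.util.FreqMap

object FreqMapDemo {
  def main(args: Array[String]): Unit = {
    val fm = List("a", "b", "a", "c", "a").foldLeft(FreqMap.empty[String])(_ + _)
    println(fm.getCount("a"))   // Some(3)
    println(fm.getCounts)       // List((a,3), (b,1), (c,1)), sorted by descending count
    println(fm.getRatio("a"))   // Some(0.6)
  }
}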
diff --git a/src/partest-extras/scala/org/scalacheck/util/Pretty.scala b/src/partest-extras/scala/org/scalacheck/util/Pretty.scala
deleted file mode 100644
index 13a1b44b51..0000000000
--- a/src/partest-extras/scala/org/scalacheck/util/Pretty.scala
+++ /dev/null
@@ -1,129 +0,0 @@
-/*-------------------------------------------------------------------------*\
-** ScalaCheck **
-** Copyright (c) 2007-2014 Rickard Nilsson. All rights reserved. **
-** http://www.scalacheck.org **
-** **
-** This software is released under the terms of the Revised BSD License. **
-** There is NO WARRANTY. See the file LICENSE for the full text. **
-\*------------------------------------------------------------------------ */
-
-package org.scalacheck.util
-
-import org.scalacheck.Prop.Arg
-import org.scalacheck.Test
-
-import math.round
-
-
-sealed trait Pretty {
- def apply(prms: Pretty.Params): String
-
- def map(f: String => String) = Pretty(prms => f(Pretty.this(prms)))
-
- def flatMap(f: String => Pretty) = Pretty(prms => f(Pretty.this(prms))(prms))
-}
-
-object Pretty {
-
- case class Params(verbosity: Int)
-
- val defaultParams = Params(0)
-
- def apply(f: Params => String) = new Pretty { def apply(p: Params) = f(p) }
-
- def pretty[T <% Pretty](t: T, prms: Params): String = t(prms)
-
- def pretty[T <% Pretty](t: T): String = t(defaultParams)
-
- implicit def strBreak(s1: String) = new {
- def /(s2: String) = if(s2 == "") s1 else s1+"\n"+s2
- }
-
- def pad(s: String, c: Char, length: Int) =
- if(s.length >= length) s
- else s + List.fill(length-s.length)(c).mkString
-
- def break(s: String, lead: String, length: Int): String =
- if(s.length <= length) s
- else s.substring(0, length) / break(lead+s.substring(length), lead, length)
-
- def format(s: String, lead: String, trail: String, width: Int) =
- s.lines.map(l => break(lead+l+trail, " ", width)).mkString("\n")
-
- implicit def prettyAny(t: Any) = Pretty { p => t.toString }
-
- implicit def prettyString(t: String) = Pretty { p => "\""++t++"\"" }
-
- implicit def prettyList(l: List[Any]) = Pretty { p =>
- l.map("\""+_+"\"").mkString("List(", ", ", ")")
- }
-
- implicit def prettyThrowable(e: Throwable) = Pretty { prms =>
- val strs = e.getStackTrace.map { st =>
- import st._
- getClassName+"."+getMethodName + "("+getFileName+":"+getLineNumber+")"
- }
-
- val strs2 =
- if(prms.verbosity <= 0) Array[String]()
- else if(prms.verbosity <= 1) strs.take(5)
- else strs
-
- e.getClass.getName + ": " + e.getMessage / strs2.mkString("\n")
- }
-
- def prettyArgs(args: Seq[Arg[Any]]): Pretty = Pretty { prms =>
- if(args.isEmpty) "" else {
- for((a,i) <- args.zipWithIndex) yield {
- val l = "> "+(if(a.label == "") "ARG_"+i else a.label)
- val s =
- if(a.shrinks == 0) ""
- else "\n"+l+"_ORIGINAL: "+a.prettyOrigArg(prms)
- l+": "+a.prettyArg(prms)+""+s
- }
- }.mkString("\n")
- }
-
- implicit def prettyFreqMap(fm: FreqMap[Set[Any]]) = Pretty { prms =>
- if(fm.total == 0) ""
- else {
- "> Collected test data: " / {
- for {
- (xs,r) <- fm.getRatios
- ys = xs - (())
- if !ys.isEmpty
- } yield round(r*100)+"% " + ys.mkString(", ")
- }.mkString("\n")
- }
- }
-
- implicit def prettyTestRes(res: Test.Result) = Pretty { prms =>
- def labels(ls: collection.immutable.Set[String]) =
- if(ls.isEmpty) ""
- else "> Labels of failing property: " / ls.mkString("\n")
- val s = res.status match {
- case Test.Proved(args) => "OK, proved property."/prettyArgs(args)(prms)
- case Test.Passed => "OK, passed "+res.succeeded+" tests."
- case Test.Failed(args, l) =>
- "Falsified after "+res.succeeded+" passed tests."/labels(l)/prettyArgs(args)(prms)
- case Test.Exhausted =>
- "Gave up after only "+res.succeeded+" passed tests. " +
- res.discarded+" tests were discarded."
- case Test.PropException(args,e,l) =>
- "Exception raised on property evaluation."/labels(l)/prettyArgs(args)(prms)/
- "> Exception: "+pretty(e,prms)
- case Test.GenException(e) =>
- "Exception raised on argument generation."/
- "> Exception: "+pretty(e,prms)
- }
- val t = if(prms.verbosity <= 1) "" else "Elapsed time: "+prettyTime(res.time)
- s/t/pretty(res.freqMap,prms)
- }
-
- def prettyTime(millis: Long): String = {
- val min = millis/(60*1000)
- val sec = (millis-(60*1000*min)) / 1000d
- if(min <= 0) "%.3f sec ".format(sec)
- else "%d min %.3f sec ".format(min, sec)
- }
-}
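Pretty, also deleted, handles the textual rendering of args, exceptions and test results. A small sketch of its string helpers, again assuming an older ScalaCheck jar on the classpath:

// Usage sketch for the Pretty helpers shown in the deleted file above.
import org.scalacheck.util.Pretty

object PrettyDemo {
  def main(args: Array[String]): Unit = {
    // The implicit prettyString conversion quotes plain strings.
    println(Pretty.pretty("hello"))                              // "hello"
    // format prepends `lead`/`trail` to each line and wraps at the given width.
    println(Pretty.format("a" * 100, lead = "", trail = "", width = 40))
    println(Pretty.prettyTime(61500))                            // e.g. 1 min 1.500 sec
  }
}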
diff --git a/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala b/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala
index 78f9721713..0ef52213e5 100644
--- a/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala
+++ b/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala
@@ -100,7 +100,7 @@ trait BaseTypeSeqs {
def copy(head: Type, offset: Int): BaseTypeSeq = {
val arr = new Array[Type](elems.length + offset)
- scala.compat.Platform.arraycopy(elems, 0, arr, offset, elems.length)
+ java.lang.System.arraycopy(elems, 0, arr, offset, elems.length)
arr(0) = head
newBaseTypeSeq(parents, arr)
}
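This change (repeated in Names.scala further down) replaces the scala.compat.Platform.arraycopy forwarder with a direct call to java.lang.System.arraycopy; the forwarder only delegates, so the copy semantics should be identical. A tiny illustration:

// java.lang.System.arraycopy as now called directly, without the scala.compat.Platform forwarder.
object ArraycopyDemo {
  def main(args: Array[String]): Unit = {
    val src = Array(1, 2, 3, 4)
    val dst = new Array[Int](6)
    java.lang.System.arraycopy(src, 0, dst, 2, src.length)   // copy src into dst starting at index 2
    println(dst.mkString(","))                               // 0,0,1,2,3,4
  }
}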
diff --git a/src/reflect/scala/reflect/internal/Mirrors.scala b/src/reflect/scala/reflect/internal/Mirrors.scala
index 3d1c160d52..6b1063ccd9 100644
--- a/src/reflect/scala/reflect/internal/Mirrors.scala
+++ b/src/reflect/scala/reflect/internal/Mirrors.scala
@@ -91,7 +91,6 @@ trait Mirrors extends api.Mirrors {
private def ensureClassSymbol(fullname: String, sym: Symbol): ClassSymbol = {
var result = sym
- while (result.isAliasType) result = result.info.typeSymbol
result match {
case x: ClassSymbol => x
case _ => MissingRequirementError.notFound("class " + fullname)
@@ -212,27 +211,6 @@ trait Mirrors extends api.Mirrors {
try body
catch { case _: MissingRequirementError => NoSymbol }
- /** getModule2/getClass2 aren't needed at present but may be again,
- * so for now they're mothballed.
- */
- // def getModule2(name1: Name, name2: Name) = {
- // try getModuleOrClass(name1.toTermName)
- // catch { case ex1: FatalError =>
- // try getModuleOrClass(name2.toTermName)
- // catch { case ex2: FatalError => throw ex1 }
- // }
- // }
- // def getClass2(name1: Name, name2: Name) = {
- // try {
- // val result = getModuleOrClass(name1.toTypeName)
- // if (result.isAliasType) getClass(name2) else result
- // }
- // catch { case ex1: FatalError =>
- // try getModuleOrClass(name2.toTypeName)
- // catch { case ex2: FatalError => throw ex1 }
- // }
- // }
-
def init() {
if (initialized) return
// Still fiddling with whether it's cleaner to do some of this setup here
diff --git a/src/reflect/scala/reflect/internal/Names.scala b/src/reflect/scala/reflect/internal/Names.scala
index 97f51149ba..9d39ef8b42 100644
--- a/src/reflect/scala/reflect/internal/Names.scala
+++ b/src/reflect/scala/reflect/internal/Names.scala
@@ -68,7 +68,7 @@ trait Names extends api.Names {
while (i < len) {
if (nc + i == chrs.length) {
val newchrs = new Array[Char](chrs.length * 2)
- scala.compat.Platform.arraycopy(chrs, 0, newchrs, 0, chrs.length)
+ java.lang.System.arraycopy(chrs, 0, newchrs, 0, chrs.length)
chrs = newchrs
}
chrs(nc + i) = cs(offset + i)
@@ -220,7 +220,7 @@ trait Names extends api.Names {
/** Copy bytes of this name to buffer cs, starting at position `offset`. */
final def copyChars(cs: Array[Char], offset: Int) =
- scala.compat.Platform.arraycopy(chrs, index, cs, offset, len)
+ java.lang.System.arraycopy(chrs, index, cs, offset, len)
/** @return the ascii representation of this name */
final def toChars: Array[Char] = { // used by ide
diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala
index f870ecfc15..8d77e334db 100644
--- a/src/reflect/scala/reflect/internal/Symbols.scala
+++ b/src/reflect/scala/reflect/internal/Symbols.scala
@@ -302,9 +302,9 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
def newClassConstructor(pos: Position): MethodSymbol =
newConstructor(pos) setInfo MethodType(Nil, this.tpe)
- def newLinkedModule(clazz: Symbol, newFlags: Long = 0L): ModuleSymbol = {
- val m = newModuleSymbol(clazz.name.toTermName, clazz.pos, MODULE | newFlags)
- connectModuleToClass(m, clazz.asInstanceOf[ClassSymbol])
+ def newLinkedModule(moduleClass: Symbol, newFlags: Long = 0L): ModuleSymbol = {
+ val m = newModuleSymbol(moduleClass.name.toTermName, moduleClass.pos, MODULE | newFlags)
+ connectModuleToClass(m, moduleClass.asInstanceOf[ClassSymbol])
}
final def newModule(name: TermName, pos: Position = NoPosition, newFlags0: Long = 0L): ModuleSymbol = {
val newFlags = newFlags0 | MODULE
@@ -1063,7 +1063,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
// parent LowPriorityImplicits. See comment in c5441dc for more elaboration.
// Since the fix for SI-7335 Predef parents must be defined in Predef.scala, and we should not
// get here anymore.
- devWarning(s"calling Symbol#exists with sourcefile based symbol loader may give incorrect results.");
+ devWarning(s"calling Symbol#exists with sourcefile based symbol loader may give incorrect results.")
}
rawInfo load this
@@ -2223,7 +2223,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
* to the class. As presently implemented this potentially returns class for
* any symbol except NoSymbol.
*/
- def companionClass: Symbol = flatOwnerInfo.decl(name.toTypeName).suchThat(_ isCoDefinedWith this)
+ def companionClass: Symbol = flatOwnerInfo.decl(name.toTypeName).suchThat(d => d.isClass && d.isCoDefinedWith(this))
/** For a class: the module or case class factory with the same name in the same package.
* For all others: NoSymbol
@@ -2860,8 +2860,6 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
override def associatedFile_=(f: AbstractFile) { moduleClass.associatedFile = f }
override def moduleClass = referenced
- override def companionClass =
- flatOwnerInfo.decl(name.toTypeName).suchThat(sym => sym.isClass && (sym isCoDefinedWith this))
override def owner = {
if (Statistics.hotEnabled) Statistics.incCounter(ownerCount)
diff --git a/src/reflect/scala/reflect/internal/pickling/UnPickler.scala b/src/reflect/scala/reflect/internal/pickling/UnPickler.scala
index c6cb0d0223..6dea184826 100644
--- a/src/reflect/scala/reflect/internal/pickling/UnPickler.scala
+++ b/src/reflect/scala/reflect/internal/pickling/UnPickler.scala
@@ -17,6 +17,7 @@ import PickleFormat._
import scala.collection.mutable
import scala.collection.mutable.ListBuffer
import scala.annotation.switch
+import scala.util.control.NonFatal
/** @author Martin Odersky
* @version 1.0
@@ -29,25 +30,22 @@ abstract class UnPickler {
* from an array of bytes.
* @param bytes bytearray from which we unpickle
* @param offset offset from which unpickling starts
- * @param classRoot the top-level class which is unpickled, or NoSymbol if inapplicable
- * @param moduleRoot the top-level module which is unpickled, or NoSymbol if inapplicable
+ * @param classRoot the top-level class which is unpickled
+ * @param moduleRoot the top-level module which is unpickled
* @param filename filename associated with bytearray, only used for error messages
*/
- def unpickle(bytes: Array[Byte], offset: Int, classRoot: Symbol, moduleRoot: Symbol, filename: String) {
+ def unpickle(bytes: Array[Byte], offset: Int, classRoot: ClassSymbol, moduleRoot: ModuleSymbol, filename: String) {
try {
+ assert(classRoot != NoSymbol && moduleRoot != NoSymbol, s"The Unpickler expects a class and module symbol: $classRoot - $moduleRoot")
new Scan(bytes, offset, classRoot, moduleRoot, filename).run()
} catch {
- case ex: IOException =>
- throw ex
- case ex: MissingRequirementError =>
- throw ex
- case ex: Throwable =>
+ case NonFatal(ex) =>
/*if (settings.debug.value)*/ ex.printStackTrace()
throw new RuntimeException("error reading Scala signature of "+filename+": "+ex.getMessage())
}
}
- class Scan(_bytes: Array[Byte], offset: Int, classRoot: Symbol, moduleRoot: Symbol, filename: String) extends PickleBuffer(_bytes, offset, -1) {
+ class Scan(_bytes: Array[Byte], offset: Int, classRoot: ClassSymbol, moduleRoot: ModuleSymbol, filename: String) extends PickleBuffer(_bytes, offset, -1) {
//println("unpickle " + classRoot + " and " + moduleRoot)//debug
protected def debug = settings.debug.value
@@ -293,10 +291,11 @@ abstract class UnPickler {
case Right(sym) => sym -> readNat()
}
- def isModuleFlag = (flags & MODULE) != 0L
- def isClassRoot = (name == classRoot.name) && (owner == classRoot.owner)
- def isModuleRoot = (name == moduleRoot.name) && (owner == moduleRoot.owner)
- def pflags = flags & PickledFlags
+ def isModuleFlag = (flags & MODULE) != 0L
+ def isClassRoot = (name == classRoot.name) && (owner == classRoot.owner)
+ def isModuleRoot = (name == moduleRoot.name) && (owner == moduleRoot.owner)
+ def isModuleClassRoot = (name == moduleRoot.name.toTypeName) && (owner == moduleRoot.owner)
+ def pflags = flags & PickledFlags
def finishSym(sym: Symbol): Symbol = {
/**
@@ -341,22 +340,22 @@ abstract class UnPickler {
finishSym(tag match {
case TYPEsym | ALIASsym =>
owner.newNonClassSymbol(name.toTypeName, NoPosition, pflags)
+
case CLASSsym =>
- val sym = (
- if (isClassRoot) {
- if (isModuleFlag) moduleRoot.moduleClass setFlag pflags
- else classRoot setFlag pflags
- }
+ val sym = {
+ if (isModuleFlag && isModuleClassRoot) moduleRoot.moduleClass setFlag pflags
+ else if (!isModuleFlag && isClassRoot) classRoot setFlag pflags
else owner.newClassSymbol(name.toTypeName, NoPosition, pflags)
- )
+ }
if (!atEnd)
sym.typeOfThis = newLazyTypeRef(readNat())
-
sym
+
case MODULEsym =>
- val clazz = at(inforef, () => readType()).typeSymbol // after NMT_TRANSITION, we can leave off the () => ... ()
+ val moduleClass = at(inforef, () => readType()).typeSymbol // after NMT_TRANSITION, we can leave off the () => ... ()
if (isModuleRoot) moduleRoot setFlag pflags
- else owner.newLinkedModule(clazz, pflags)
+ else owner.newLinkedModule(moduleClass, pflags)
+
case VALsym =>
if (isModuleRoot) { abort(s"VALsym at module root: owner = $owner, name = $name") }
else owner.newTermSymbol(name.toTermName, NoPosition, pflags)
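The rewritten catch clause leans on scala.util.control.NonFatal, which matches ordinary exceptions but lets VirtualMachineError, ThreadDeath, InterruptedException, LinkageError and ControlThrowable propagate. A small sketch of the wrap-and-rethrow pattern; `reading` and the demo wiring are illustrative, not the unpickler itself:

import scala.util.control.NonFatal

object NonFatalDemo {
  // Wrap any non-fatal failure of `body` in a RuntimeException carrying context,
  // mirroring the error-handling style of unpickle above.
  def reading[T](what: String)(body: => T): T =
    try body
    catch {
      case NonFatal(ex) =>
        throw new RuntimeException(s"error reading $what: ${ex.getMessage}", ex)
    }

  def main(args: Array[String]): Unit =
    try reading("Foo.class")(sys.error("bad signature"))
    catch { case ex: RuntimeException => println(ex.getMessage) }   // error reading Foo.class: bad signature
}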
diff --git a/src/reflect/scala/reflect/runtime/JavaMirrors.scala b/src/reflect/scala/reflect/runtime/JavaMirrors.scala
index 9b0d66f41c..95440ebc00 100644
--- a/src/reflect/scala/reflect/runtime/JavaMirrors.scala
+++ b/src/reflect/scala/reflect/runtime/JavaMirrors.scala
@@ -578,7 +578,7 @@ private[scala] trait JavaMirrors extends internal.SymbolTable with api.JavaUnive
* @param jclazz The Java class which contains the unpickled information in a
* ScalaSignature or ScalaLongSignature annotation.
*/
- def unpickleClass(clazz: Symbol, module: Symbol, jclazz: jClass[_]): Unit = {
+ def unpickleClass(clazz: ClassSymbol, module: ModuleSymbol, jclazz: jClass[_]): Unit = {
def markAbsent(tpe: Type) = setAllInfos(clazz, module, tpe)
def handleError(ex: Exception) = {
markAbsent(ErrorType)
diff --git a/src/reflect/scala/reflect/runtime/SymbolLoaders.scala b/src/reflect/scala/reflect/runtime/SymbolLoaders.scala
index 768a3d5ce5..3f2864ee7b 100644
--- a/src/reflect/scala/reflect/runtime/SymbolLoaders.scala
+++ b/src/reflect/scala/reflect/runtime/SymbolLoaders.scala
@@ -14,7 +14,7 @@ private[reflect] trait SymbolLoaders { self: SymbolTable =>
* by unpickling information from the corresponding Java class. If no Java class
* is found, a package is created instead.
*/
- class TopClassCompleter(clazz: Symbol, module: Symbol) extends SymLoader with FlagAssigningCompleter {
+ class TopClassCompleter(clazz: ClassSymbol, module: ModuleSymbol) extends SymLoader with FlagAssigningCompleter {
markFlagsCompleted(clazz, module)(mask = ~TopLevelPickledFlags)
override def complete(sym: Symbol) = {
debugInfo("completing "+sym+"/"+clazz.fullName)
@@ -36,7 +36,7 @@ private[reflect] trait SymbolLoaders { self: SymbolTable =>
* @param name The simple name of the newly created class
* @param completer The completer to be used to set the info of the class and the module
*/
- protected def initAndEnterClassAndModule(owner: Symbol, name: TypeName, completer: (Symbol, Symbol) => LazyType) = {
+ protected def initAndEnterClassAndModule(owner: Symbol, name: TypeName, completer: (ClassSymbol, ModuleSymbol) => LazyType) = {
assert(!(name.toString endsWith "[]"), name)
val clazz = owner.newClass(name)
val module = owner.newModule(name.toTermName)
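Narrowing the completer parameter from (Symbol, Symbol) => LazyType to (ClassSymbol, ModuleSymbol) => LazyType stays source compatible for callers that pass a broader function, since function types are contravariant in their parameters. A self-contained illustration with hypothetical stand-in classes rather than the compiler's real symbol types:

object ContravarianceDemo {
  // Hypothetical stand-ins for the real compiler types.
  class Symbol; class ClassSymbol extends Symbol; class ModuleSymbol extends Symbol
  class LazyType { override def toString = "LazyType" }

  // A completer written against the old, broader signature...
  val broadCompleter: (Symbol, Symbol) => LazyType = (_, _) => new LazyType

  // ...still satisfies the new, narrower parameter type.
  def initAndEnter(completer: (ClassSymbol, ModuleSymbol) => LazyType): LazyType =
    completer(new ClassSymbol, new ModuleSymbol)

  def main(args: Array[String]): Unit =
    println(initAndEnter(broadCompleter))   // LazyType
}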
diff --git a/src/scaladoc/scala/tools/nsc/doc/ScaladocAnalyzer.scala b/src/scaladoc/scala/tools/nsc/doc/ScaladocAnalyzer.scala
index d8ec7b18fd..4e99434051 100644
--- a/src/scaladoc/scala/tools/nsc/doc/ScaladocAnalyzer.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/ScaladocAnalyzer.scala
@@ -101,12 +101,26 @@ trait ScaladocAnalyzer extends Analyzer {
abstract class ScaladocSyntaxAnalyzer[G <: Global](val global: G) extends SyntaxAnalyzer {
import global._
- class ScaladocUnitScanner(unit0: CompilationUnit, patches0: List[BracePatch]) extends UnitScanner(unit0, patches0) {
-
- private var docBuffer: StringBuilder = null // buffer for comments (non-null while scanning)
- private var inDocComment = false // if buffer contains double-star doc comment
- private var lastDoc: DocComment = null // last comment if it was double-star doc
+ trait ScaladocScanner extends DocScanner {
+ // When `docBuffer == null`, we're not in a doc comment.
+ private var docBuffer: StringBuilder = null
+
+ override protected def beginDocComment(prefix: String): Unit =
+ if (docBuffer == null) docBuffer = new StringBuilder(prefix)
+
+ protected def ch: Char
+ override protected def processCommentChar(): Unit =
+ if (docBuffer != null) docBuffer append ch
+
+ protected def docPosition: Position
+ override protected def finishDocComment(): Unit =
+ if (docBuffer != null) {
+ registerDocComment(docBuffer.toString, docPosition)
+ docBuffer = null
+ }
+ }
+ class ScaladocUnitScanner(unit0: CompilationUnit, patches0: List[BracePatch]) extends UnitScanner(unit0, patches0) with ScaladocScanner {
private object unmooredParser extends { // minimalist comment parser
val global: Global = ScaladocSyntaxAnalyzer.this.global
}
@@ -148,40 +162,7 @@ abstract class ScaladocSyntaxAnalyzer[G <: Global](val global: G) extends Syntax
reporter.warning(doc.pos, "discarding unmoored doc comment")
}
- override def flushDoc(): DocComment = (try lastDoc finally lastDoc = null)
-
- override protected def putCommentChar() {
- if (inDocComment)
- docBuffer append ch
-
- nextChar()
- }
- override def skipDocComment(): Unit = {
- inDocComment = true
- docBuffer = new StringBuilder("/**")
- super.skipDocComment()
- }
- override def skipBlockComment(): Unit = {
- inDocComment = false // ??? this means docBuffer won't receive contents of this comment???
- docBuffer = new StringBuilder("/*")
- super.skipBlockComment()
- }
- override def skipComment(): Boolean = {
- // emit a block comment; if it's double-star, make Doc at this pos
- def foundStarComment(start: Int, end: Int) = try {
- val str = docBuffer.toString
- val pos = Position.range(unit.source, start, start, end)
- if (inDocComment) {
- signalParsedDocComment(str, pos)
- lastDoc = DocComment(str, pos)
- }
- true
- } finally {
- docBuffer = null
- inDocComment = false
- }
- super.skipComment() && ((docBuffer eq null) || foundStarComment(offset, charOffset - 2))
- }
+ protected def docPosition: Position = Position.range(unit.source, offset, offset, charOffset - 2)
}
class ScaladocUnitParser(unit: CompilationUnit, patches: List[BracePatch]) extends UnitParser(unit, patches) {
override def newScanner() = new ScaladocUnitScanner(unit, patches)
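Both scanners now funnel comment handling through the same hooks: beginDocComment allocates the buffer, processCommentChar appends the current character, and finishDocComment hands the text plus docPosition to registerDocComment and clears the buffer. A standalone sketch of that buffering protocol; SimpleDocCollector and its driver are hypothetical, not the compiler trait:

// Standalone sketch of the begin/process/finish buffering protocol used by ScaladocScanner above.
class SimpleDocCollector {
  private var docBuffer: StringBuilder = null   // null means: not inside a doc comment
  var collected: List[String] = Nil

  def beginDocComment(prefix: String): Unit =
    if (docBuffer == null) docBuffer = new StringBuilder(prefix)

  def processCommentChar(ch: Char): Unit =
    if (docBuffer != null) docBuffer append ch

  def finishDocComment(): Unit =
    if (docBuffer != null) {
      collected ::= docBuffer.toString          // stands in for registerDocComment(str, docPosition)
      docBuffer = null
    }
}

object SimpleDocCollectorDemo {
  def main(args: Array[String]): Unit = {
    val collector = new SimpleDocCollector
    collector.beginDocComment("/**")
    " doc */".foreach(collector.processCommentChar)
    collector.finishDocComment()
    println(collector.collected)   // List(/** doc */)
  }
}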
@@ -214,52 +195,17 @@ abstract class ScaladocSyntaxAnalyzer[G <: Global](val global: G) extends Syntax
}
}
- class ScaladocJavaUnitScanner(unit: CompilationUnit) extends JavaUnitScanner(unit) {
-
- private var docBuffer: StringBuilder = _
- private var inDocComment = false
+ class ScaladocJavaUnitScanner(unit: CompilationUnit) extends JavaUnitScanner(unit) with ScaladocScanner {
private var docStart: Int = 0
- private var lastDoc: DocComment = null
-
- override def init() = {
- docBuffer = new StringBuilder
- super.init()
- }
-
- // get last doc comment
- def flushDoc(): DocComment = try lastDoc finally lastDoc = null
-
- override protected def putCommentChar(): Unit = {
- if (inDocComment) docBuffer append in.ch
- in.next
- }
- override protected def skipBlockComment(isDoc: Boolean): Unit = {
- // condition is true when comment is entered the first time,
- // i.e. immediately after "/*" and when current character is "*"
- if (!inDocComment && isDoc) {
- docBuffer append "/*"
- docStart = currentPos.start
- inDocComment = true
- }
- super.skipBlockComment(isDoc)
+ override protected def beginDocComment(prefix: String): Unit = {
+ super.beginDocComment(prefix)
+ docStart = currentPos.start
}
- override protected def skipComment(): Boolean = {
- val skipped = super.skipComment()
- if (skipped && inDocComment) {
- val raw = docBuffer.toString
- val position = Position.range(unit.source, docStart, docStart, in.cpos)
- lastDoc = DocComment(raw, position)
- signalParsedDocComment(raw, position)
- docBuffer.setLength(0) // clear buffer
- inDocComment = false
- true
- } else {
- skipped
- }
- }
+ protected def ch = in.ch
+ override protected def docPosition = Position.range(unit.source, docStart, docStart, in.cpos)
}
class ScaladocJavaUnitParser(unit: CompilationUnit) extends {