Diffstat (limited to 'src')
-rw-r--r--  src/compiler/scala/reflect/reify/Errors.scala | 5
-rw-r--r--  src/compiler/scala/reflect/reify/Reifier.scala | 14
-rw-r--r--  src/compiler/scala/reflect/reify/utils/NodePrinters.scala | 39
-rw-r--r--  src/compiler/scala/tools/nsc/Global.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/ast/NodePrinters.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/ast/Printers.scala (renamed from src/compiler/scala/tools/nsc/ast/TreePrinters.scala) | 6
-rw-r--r--  src/compiler/scala/tools/nsc/javac/JavaParsers.scala | 3
-rw-r--r--  src/compiler/scala/tools/nsc/matching/MatchSupport.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/settings/ScalaSettings.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala | 12
-rw-r--r--  src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala | 19
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Analyzer.scala | 7
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala | 16
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Contexts.scala | 17
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Duplicators.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Implicits.scala | 234
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Infer.scala | 3
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Macros.scala | 15
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Modes.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala | 39
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/RefChecks.scala | 11
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Typers.scala | 156
-rw-r--r--  src/compiler/scala/tools/nsc/util/Statistics.scala | 200
-rw-r--r--  src/compiler/scala/tools/nsc/util/StatisticsInfo.scala | 38
-rw-r--r--  src/compiler/scala/tools/reflect/ToolBoxFactory.scala | 5
-rw-r--r--  src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala | 19
-rw-r--r--  src/continuations/plugin/scala/tools/selectivecps/CPSUtils.scala | 40
-rw-r--r--  src/continuations/plugin/scala/tools/selectivecps/SelectiveANFTransform.scala | 15
-rw-r--r--  src/library/scala/Predef.scala | 52
-rw-r--r--  src/library/scala/collection/GenTraversableOnce.scala | 19
-rw-r--r--  src/library/scala/collection/Iterator.scala | 3
-rw-r--r--  src/library/scala/collection/MapLike.scala | 28
-rw-r--r--  src/library/scala/collection/TraversableLike.scala | 7
-rw-r--r--  src/library/scala/collection/TraversableOnce.scala | 17
-rw-r--r--  src/library/scala/collection/concurrent/TrieMap.scala | 6
-rw-r--r--  src/library/scala/collection/immutable/Vector.scala | 2
-rw-r--r--  src/library/scala/collection/mutable/ArrayOps.scala | 2
-rw-r--r--  src/library/scala/collection/mutable/LinkedHashMap.scala | 21
-rw-r--r--  src/library/scala/collection/parallel/ParIterableLike.scala | 6
-rw-r--r--  src/library/scala/collection/parallel/immutable/ParVector.scala | 2
-rw-r--r--  src/library/scala/concurrent/impl/Future.scala | 106
-rw-r--r--  src/library/scala/concurrent/impl/Promise.scala | 25
-rw-r--r--  src/library/scala/reflect/base/Base.scala | 2
-rw-r--r--  src/library/scala/reflect/base/Trees.scala | 4
-rw-r--r--  src/library/scala/util/control/Breaks.scala | 2
-rw-r--r--  src/reflect/scala/reflect/api/Printers.scala | 94
-rw-r--r--  src/reflect/scala/reflect/api/Symbols.scala | 36
-rw-r--r--  src/reflect/scala/reflect/api/TreePrinters.scala | 87
-rw-r--r--  src/reflect/scala/reflect/api/Universe.scala | 2
-rw-r--r--  src/reflect/scala/reflect/internal/BaseTypeSeqs.scala | 12
-rw-r--r--  src/reflect/scala/reflect/internal/Definitions.scala | 2
-rw-r--r--  src/reflect/scala/reflect/internal/Printers.scala (renamed from src/reflect/scala/reflect/internal/TreePrinters.scala) | 176
-rw-r--r--  src/reflect/scala/reflect/internal/SymbolTable.scala | 4
-rw-r--r--  src/reflect/scala/reflect/internal/Symbols.scala | 18
-rw-r--r--  src/reflect/scala/reflect/internal/Trees.scala | 12
-rw-r--r--  src/reflect/scala/reflect/internal/Types.scala | 86
-rw-r--r--  src/reflect/scala/reflect/internal/util/StatBase.scala | 97
-rw-r--r--  src/reflect/scala/reflect/internal/util/Statistics.scala | 261
-rw-r--r--  src/reflect/scala/reflect/makro/Universe.scala | 19
-rw-r--r--  src/reflect/scala/reflect/runtime/JavaUniverse.scala | 6
-rw-r--r--  src/scalacheck/org/scalacheck/Arbitrary.scala | 36
-rw-r--r--  src/scalacheck/org/scalacheck/Arg.scala | 2
-rw-r--r--  src/scalacheck/org/scalacheck/Commands.scala | 23
-rw-r--r--  src/scalacheck/org/scalacheck/ConsoleReporter.scala | 27
-rw-r--r--  src/scalacheck/org/scalacheck/Gen.scala | 50
-rw-r--r--  src/scalacheck/org/scalacheck/Pretty.scala | 4
-rw-r--r--  src/scalacheck/org/scalacheck/Prop.scala | 36
-rw-r--r--  src/scalacheck/org/scalacheck/Properties.scala | 2
-rw-r--r--  src/scalacheck/org/scalacheck/Shrink.scala | 2
-rw-r--r--  src/scalacheck/org/scalacheck/Test.scala | 191
-rw-r--r--  src/scalacheck/org/scalacheck/util/Buildable.scala | 4
-rw-r--r--  src/scalacheck/org/scalacheck/util/CmdLineParser.scala | 9
-rw-r--r--  src/scalacheck/org/scalacheck/util/FreqMap.scala | 2
-rw-r--r--  src/scalacheck/org/scalacheck/util/StdRand.scala | 2
75 files changed, 1441 insertions(+), 1098 deletions(-)
diff --git a/src/compiler/scala/reflect/reify/Errors.scala b/src/compiler/scala/reflect/reify/Errors.scala
index 714795503b..1b72b3075b 100644
--- a/src/compiler/scala/reflect/reify/Errors.scala
+++ b/src/compiler/scala/reflect/reify/Errors.scala
@@ -17,11 +17,6 @@ trait Errors {
// expected errors: these can happen if the user casually writes whatever.reify(...)
// hence we don't crash here, but nicely report a typechecking error and bail out asap
- def CannotReifyReifeeThatHasTypeLocalToReifee(tree: Tree) = {
- val msg = "implementation restriction: cannot reify block of type %s that involves a type declared inside the block being reified. consider casting the return value to a suitable type".format(tree.tpe)
- throw new ReificationError(tree.pos, msg)
- }
-
def CannotReifyType(tpe: Type) = {
val msg = "implementation restriction: cannot reify type %s (%s)".format(tpe, tpe.kind)
throw new ReificationError(defaultErrorPosition, msg)
diff --git a/src/compiler/scala/reflect/reify/Reifier.scala b/src/compiler/scala/reflect/reify/Reifier.scala
index 00f25f0d8b..8fba7274be 100644
--- a/src/compiler/scala/reflect/reify/Reifier.scala
+++ b/src/compiler/scala/reflect/reify/Reifier.scala
@@ -68,20 +68,6 @@ abstract class Reifier extends States
val pipeline = mkReificationPipeline
val rtree = pipeline(tree)
- // consider the following code snippet
- //
- // val x = reify { class C; new C }
- //
- // inferred type for x will be C
- // but C ceases to exist after reification so this type is clearly incorrect
- // however, reify is "just" a library function, so it cannot affect type inference
- //
- // hence we crash here even though the reification itself goes well
- // fortunately, all that it takes to fix the error is to cast "new C" to Object
- // so I'm not very much worried about introducing this restriction
- if (tree.tpe exists (sub => sub.typeSymbol.isLocalToReifee))
- CannotReifyReifeeThatHasTypeLocalToReifee(tree)
-
val tpe = typer.packedType(tree, NoSymbol)
val ReifiedType(_, _, tpeSymtab, _, rtpe, tpeReificationIsConcrete) = `package`.reifyType(global)(typer, universe, mirror, tpe, concrete = false)
state.reificationIsConcrete &= tpeReificationIsConcrete
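
Note on the two removals above: the restriction they implemented rejected any reifee whose type mentions a class declared inside the reified block itself. A minimal sketch of that shape, assuming scala.reflect.runtime.universe as the entry point; the cast to Object is the workaround the old error message suggested and keeps the block's type free of the local class:

    import scala.reflect.runtime.universe._

    object ReifyLocalClass {
      // The block declares class C locally; without the cast its type would
      // mention C, which is exactly what the removed check used to reject.
      val expr: Expr[Any] = reify { class C; (new C): Object }
    }
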
diff --git a/src/compiler/scala/reflect/reify/utils/NodePrinters.scala b/src/compiler/scala/reflect/reify/utils/NodePrinters.scala
index ce0ab2196a..7214da597e 100644
--- a/src/compiler/scala/reflect/reify/utils/NodePrinters.scala
+++ b/src/compiler/scala/reflect/reify/utils/NodePrinters.scala
@@ -15,41 +15,6 @@ trait NodePrinters {
import Flag._
object reifiedNodeToString extends (Tree => String) {
- // [Eugene++ to Martin] can we do better?
- // didn't want to invent anything myself in order not to interfere with your line of thought
- def bitsToFlags(bits: String): String = {
- val flags = bits.toLong
- if (flags == NoFlags) nme.NoFlags.toString
- else {
- val s_flags = new collection.mutable.ListBuffer[String]
- if (flags containsAll TRAIT) s_flags += "TRAIT"
- if (flags containsAll MODULE) s_flags += "MODULE"
- if (flags containsAll MUTABLE) s_flags += "MUTABLE"
- if (flags containsAll PACKAGE) s_flags += "PACKAGE"
- if (flags containsAll METHOD) s_flags += "METHOD"
- if (flags containsAll DEFERRED) s_flags += "DEFERRED"
- if (flags containsAll ABSTRACT) s_flags += "ABSTRACT"
- if (flags containsAll FINAL) s_flags += "FINAL"
- if (flags containsAll SEALED) s_flags += "SEALED"
- if (flags containsAll IMPLICIT) s_flags += "IMPLICIT"
- if (flags containsAll LAZY) s_flags += "LAZY"
- if (flags containsAll OVERRIDE) s_flags += "OVERRIDE"
- if (flags containsAll PRIVATE) s_flags += "PRIVATE"
- if (flags containsAll PROTECTED) s_flags += "PROTECTED"
- if (flags containsAll CASE) s_flags += "CASE"
- if (flags containsAll ABSOVERRIDE) s_flags += "ABSOVERRIDE"
- if (flags containsAll BYNAMEPARAM) s_flags += "BYNAMEPARAM"
- if (flags containsAll PARAM) s_flags += "PARAM"
- if (flags containsAll PARAMACCESSOR) s_flags += "PARAMACCESSOR"
- if (flags containsAll CASEACCESSOR) s_flags += "CASEACCESSOR"
- if (flags containsAll COVARIANT) s_flags += "COVARIANT"
- if (flags containsAll CONTRAVARIANT) s_flags += "CONTRAVARIANT"
- if (flags containsAll DEFAULTPARAM) s_flags += "DEFAULTPARAM"
- if (flags containsAll INTERFACE) s_flags += "INTERFACE"
- s_flags mkString " | "
- }
- }
-
def apply(tree: Tree): String = {
var mirrorIsUsed = false
var flagsAreUsed = false
@@ -70,7 +35,7 @@ trait NodePrinters {
s = s.replace("immutable.this.Nil", "List()")
s = """build\.flagsFromBits\((\d+)[lL]\)""".r.replaceAllIn(s, m => {
flagsAreUsed = true
- bitsToFlags(m.group(1))
+ show(m.group(1).toLong)
})
s = s.replace("Modifiers(0L, newTypeName(\"\"), List())", "Modifiers()")
s = """Modifiers\((\d+)[lL], newTypeName\("(.*?)"\), List\((.*?)\)\)""".r.replaceAllIn(s, m => {
@@ -87,7 +52,7 @@ trait NodePrinters {
val bits = m.group(1)
if (buf.nonEmpty || bits != "0L") {
flagsAreUsed = true
- buf.append(bitsToFlags(bits))
+ buf.append(show(bits.toLong))
}
val replacement = "Modifiers(" + buf.reverse.mkString(", ") + ")"
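
For context, the removed bitsToFlags turned a raw Long into a pipe-separated list of flag names by testing one mask per flag; the replacement delegates that to the printer's show(flags). A self-contained sketch of the same decoding idea, using hypothetical mask values rather than the compiler's Flag constants:

    object FlagDecoding {
      // Hypothetical bit masks standing in for the real flag constants.
      val masks: Seq[(Long, String)] =
        Seq(1L -> "TRAIT", 2L -> "MODULE", 4L -> "MUTABLE")

      def decode(bits: Long): String = {
        val names = masks collect { case (mask, name) if (bits & mask) == mask => name }
        if (names.isEmpty) "NoFlags" else names mkString " | "
      }
    }

For example, decode(5L) yields "TRAIT | MUTABLE", mirroring the "A | B | C" strings the reified-tree printer emits for Modifiers.
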
diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala
index a701e41153..35bf2dd288 100644
--- a/src/compiler/scala/tools/nsc/Global.scala
+++ b/src/compiler/scala/tools/nsc/Global.scala
@@ -12,7 +12,7 @@ import scala.tools.util.PathResolver
import scala.collection.{ mutable, immutable }
import io.{ SourceReader, AbstractFile, Path }
import reporters.{ Reporter, ConsoleReporter }
-import util.{ Exceptional, ClassPath, MergedClassPath, Statistics, StatisticsInfo, ScalaClassLoader, returning }
+import util.{ Exceptional, ClassPath, MergedClassPath, StatisticsInfo, ScalaClassLoader, returning }
import scala.reflect.internal.util.{ NoPosition, SourceFile, NoSourceFile, BatchSourceFile, ScriptSourceFile }
import scala.reflect.internal.pickling.{ PickleBuffer, PickleFormat }
import settings.{ AestheticSettings }
@@ -39,7 +39,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
with Plugins
with PhaseAssembly
with Trees
- with TreePrinters
+ with Printers
with DocComments
with Positions { self =>
diff --git a/src/compiler/scala/tools/nsc/ast/NodePrinters.scala b/src/compiler/scala/tools/nsc/ast/NodePrinters.scala
index ba1f3b2e3c..4afd3545b9 100644
--- a/src/compiler/scala/tools/nsc/ast/NodePrinters.scala
+++ b/src/compiler/scala/tools/nsc/ast/NodePrinters.scala
@@ -146,7 +146,7 @@ abstract class NodePrinters {
}
def printModifiers(tree: MemberDef) {
// [Eugene++] there's most likely a bug here (?)
- // see `TreePrinters.printAnnotations` for more information
+ // see `Printers.printAnnotations` for more information
val annots0 = tree.symbol.annotations match {
case Nil => tree.mods.annotations
case xs => xs map annotationInfoToString
diff --git a/src/compiler/scala/tools/nsc/ast/TreePrinters.scala b/src/compiler/scala/tools/nsc/ast/Printers.scala
index 3371353f25..94d0c4f45e 100644
--- a/src/compiler/scala/tools/nsc/ast/TreePrinters.scala
+++ b/src/compiler/scala/tools/nsc/ast/Printers.scala
@@ -10,7 +10,7 @@ import java.io.{ OutputStream, PrintWriter, StringWriter, Writer }
import symtab.Flags._
import symtab.SymbolTable
-trait TreePrinters extends reflect.internal.TreePrinters { this: Global =>
+trait Printers extends reflect.internal.Printers { this: Global =>
import treeInfo.{ IsTrue, IsFalse }
@@ -276,8 +276,8 @@ trait TreePrinters extends reflect.internal.TreePrinters { this: Global =>
}
}
- def asString(t: Tree): String = show(t, newStandardTreePrinter)
- def asCompactString(t: Tree): String = show(t, newCompactTreePrinter)
+ def asString(t: Tree): String = render(t, newStandardTreePrinter, settings.printtypes.value, settings.uniqid.value, settings.Yshowsymkinds.value)
+ def asCompactString(t: Tree): String = render(t, newCompactTreePrinter, settings.printtypes.value, settings.uniqid.value, settings.Yshowsymkinds.value)
def newStandardTreePrinter(writer: PrintWriter): TreePrinter = new TreePrinter(writer)
def newStandardTreePrinter(stream: OutputStream): TreePrinter = newStandardTreePrinter(new PrintWriter(stream))
diff --git a/src/compiler/scala/tools/nsc/javac/JavaParsers.scala b/src/compiler/scala/tools/nsc/javac/JavaParsers.scala
index e2203e07b3..3797d32d8b 100644
--- a/src/compiler/scala/tools/nsc/javac/JavaParsers.scala
+++ b/src/compiler/scala/tools/nsc/javac/JavaParsers.scala
@@ -911,7 +911,8 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
buf ++= importDecl()
while (in.token != EOF && in.token != RBRACE) {
while (in.token == SEMI) in.nextToken
- buf ++= typeDecl(modifiers(false))
+ if (in.token != EOF)
+ buf ++= typeDecl(modifiers(false))
}
accept(EOF)
atPos(pos) {
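
The EOF guard matters because the inner loop that skips stray semicolons can consume the rest of the input, after which the unconditional typeDecl call reported a spurious error on an empty declaration. A minimal, self-contained sketch of the fixed loop shape, with a toy token model rather than the compiler's scanner:

    object SkipTrailingSemis {
      sealed trait Token
      case object EOF extends Token
      case object SEMI extends Token
      final case class Decl(name: String) extends Token

      // Collect declaration names, tolerating stray semicolons before EOF.
      def declNames(tokens: Iterator[Token]): List[String] = {
        def next(): Token = if (tokens.hasNext) tokens.next() else EOF
        val names = List.newBuilder[String]
        var tok = next()
        while (tok != EOF) {
          while (tok == SEMI) tok = next()          // skip separators
          tok match {
            case Decl(n) => names += n; tok = next()
            case _       => ()                      // at EOF: do not force a declaration
          }
        }
        names.result()
      }
    }
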
diff --git a/src/compiler/scala/tools/nsc/matching/MatchSupport.scala b/src/compiler/scala/tools/nsc/matching/MatchSupport.scala
index 72e6f32af1..16761144d7 100644
--- a/src/compiler/scala/tools/nsc/matching/MatchSupport.scala
+++ b/src/compiler/scala/tools/nsc/matching/MatchSupport.scala
@@ -7,7 +7,7 @@ package scala.tools.nsc
package matching
import transform.ExplicitOuter
-import ast.{ TreePrinters, Trees }
+import ast.{ Printers, Trees }
import java.io.{ StringWriter, PrintWriter }
import annotation.elidable
import language.postfixOps
diff --git a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
index 04acba06e8..9b223a13ba 100644
--- a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
+++ b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
@@ -160,7 +160,7 @@ trait ScalaSettings extends AbsScalaSettings
val Ygenjavap = StringSetting ("-Ygen-javap", "dir", "Generate a parallel output directory of .javap files.", "")
val Ydumpclasses = StringSetting ("-Ydump-classes", "dir", "Dump the generated bytecode to .class files (useful for reflective compilation that utilizes in-memory classloaders).", "")
val Ynosqueeze = BooleanSetting ("-Yno-squeeze", "Disable creation of compact code in matching.")
- val Ystatistics = BooleanSetting ("-Ystatistics", "Print compiler statistics.") andThen (util.Statistics.enabled = _)
+ val Ystatistics = BooleanSetting ("-Ystatistics", "Print compiler statistics.") andThen (scala.reflect.internal.util.Statistics.enabled = _)
val stopAfter = PhasesSetting ("-Ystop-after", "Stop after") withAbbreviation ("-stop") // backward compat
val stopBefore = PhasesSetting ("-Ystop-before", "Stop before")
val refinementMethodDispatch
diff --git a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala
index 15edac16d5..0c988ceae4 100644
--- a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala
+++ b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala
@@ -12,7 +12,7 @@ import scala.tools.nsc.util.{ ClassPath }
import classfile.ClassfileParser
import reflect.internal.Flags._
import reflect.internal.MissingRequirementError
-import util.Statistics._
+import reflect.internal.util.Statistics
import scala.tools.nsc.io.{ AbstractFile, MsilFile }
/** This class ...
@@ -23,6 +23,7 @@ import scala.tools.nsc.io.{ AbstractFile, MsilFile }
abstract class SymbolLoaders {
val global: Global
import global._
+ import SymbolLoadersStats._
protected def enterIfNew(owner: Symbol, member: Symbol, completer: SymbolLoader): Symbol = {
assert(owner.info.decls.lookup(member.name) == NoSymbol, owner.fullName + "." + member.name)
@@ -236,7 +237,7 @@ abstract class SymbolLoaders {
protected def description = "class file "+ classfile.toString
protected def doComplete(root: Symbol) {
- val start = startTimer(classReadNanos)
+ val start = Statistics.startTimer(classReadNanos)
classfileParser.parse(classfile, root)
if (root.associatedFile eq null) {
root match {
@@ -248,7 +249,7 @@ abstract class SymbolLoaders {
debuglog("Not setting associatedFile to %s because %s is a %s".format(classfile, root.name, root.shortSymbolClass))
}
}
- stopTimer(classReadNanos, start)
+ Statistics.stopTimer(classReadNanos, start)
}
override def sourcefile: Option[AbstractFile] = classfileParser.srcfile
}
@@ -284,3 +285,8 @@ abstract class SymbolLoaders {
var parentsLevel = 0
var pendingLoadActions: List[() => Unit] = Nil
}
+
+object SymbolLoadersStats {
+ import reflect.internal.TypesStats.typerNanos
+ val classReadNanos = Statistics.newSubTimer ("time classfilereading", typerNanos)
+}
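
SymbolLoadersStats is the first of several such objects in this diff (Analyzer, Macros, Implicits, PatternMatching and Typers get analogous ones): timers and counters now live in standalone stats objects under the reflect.internal.util.Statistics facade instead of compiler-side util.Statistics. A minimal sketch of the pattern, using only the calls visible in this diff and hypothetical names; the timers record nothing unless Statistics.enabled is set (e.g. via -Ystatistics above):

    import scala.reflect.internal.util.Statistics

    object MyPhaseStats {
      // A top-level timer plus a nested sub-timer, mirroring the
      // typerNanos / classReadNanos layout used above.
      val phaseNanos   = Statistics.newTimer("time spent in my phase", "myphase")
      val subtaskNanos = Statistics.newSubTimer("  of which in subtask", phaseNanos)
    }

    object TimedWork {
      import MyPhaseStats._
      def run[T](body: => T): T = {
        val start = Statistics.startTimer(subtaskNanos)
        try body
        finally Statistics.stopTimer(subtaskNanos, start)
      }
    }
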
diff --git a/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala b/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala
index 1c97eaad8b..5f66cadbc9 100644
--- a/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala
+++ b/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala
@@ -107,7 +107,7 @@ abstract class ExtensionMethods extends Transform with TypingTransformers {
def extensionMethInfo(extensionMeth: Symbol, origInfo: Type, clazz: Symbol): Type = {
var newTypeParams = cloneSymbolsAtOwner(clazz.typeParams, extensionMeth)
- val thisParamType = appliedType(clazz.typeConstructor, newTypeParams map (_.tpe))
+ val thisParamType = appliedType(clazz.typeConstructor, newTypeParams map (_.tpeHK))
val thisParam = extensionMeth.newValueParameter(nme.SELF, extensionMeth.pos) setInfo thisParamType
def transform(clonedType: Type): Type = clonedType match {
case MethodType(params, restpe) =>
@@ -159,7 +159,7 @@ abstract class ExtensionMethods extends Transform with TypingTransformers {
.changeOwner((origMeth, extensionMeth))
extensionDefs(companion) += atPos(tree.pos) { DefDef(extensionMeth, extensionBody) }
val extensionCallPrefix = Apply(
- gen.mkTypeApply(gen.mkAttributedRef(companion), extensionMeth, origTpeParams map (_.tpe)),
+ gen.mkTypeApply(gen.mkAttributedRef(companion), extensionMeth, origTpeParams map (_.tpeHK)),
List(This(currentOwner)))
val extensionCall = atOwner(origMeth) {
localTyper.typedPos(rhs.pos) {
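
The switch from _.tpe to _.tpeHK matters when a cloned type parameter is itself higher-kinded: tpeHK yields the unapplied type constructor, while tpe would apply it to dummy arguments and produce a malformed this-parameter type for the synthetic extension method. A hedged illustration (not taken from this diff) of a value class whose extension methods exercise that case:

    // A value class with a higher-kinded type parameter F[_]; its method is
    // rewritten into a static extension method whose SELF parameter has type
    // Box[F], built from the unapplied constructor F (hence tpeHK).
    class Box[F[_]](val value: F[Int]) extends AnyVal {
      def describe: String = value.toString
    }
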
diff --git a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
index 4b488a6437..c4c769d7cf 100644
--- a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
+++ b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
@@ -1448,20 +1448,29 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
case ddef @ DefDef(_, _, _, vparamss, _, _) if info.isDefinedAt(symbol) =>
// log("--> method: " + ddef + " in " + ddef.symbol.owner + ", " + info(symbol))
+ def reportTypeError(body: =>Tree) =
+ try body
+ catch {
+ case te: TypeError =>
+ reporter.error(te.pos, te.toString)
+ ddef
+ }
if (symbol.isConstructor) {
val t = atOwner(symbol)(forwardCtorCall(tree.pos, gen.mkSuperSelect, vparamss, symbol.owner))
if (symbol.isPrimaryConstructor)
localTyper.typedPos(symbol.pos)(deriveDefDef(tree)(_ => Block(List(t), Literal(Constant()))))
- else // duplicate the original constructor
- duplicateBody(ddef, info(symbol).target)
+ else // duplicate the original constructor
+ reportTypeError(duplicateBody(ddef, info(symbol).target))
}
else info(symbol) match {
case Implementation(target) =>
assert(body.isDefinedAt(target), "sym: " + symbol.fullName + " target: " + target.fullName)
// we have an rhs, specialize it
- val tree1 = duplicateBody(ddef, target)
+ val tree1 = reportTypeError {
+ duplicateBody(ddef, target)
+ }
debuglog("implementation: " + tree1)
deriveDefDef(tree1)(transform)
@@ -1472,7 +1481,9 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
}
else {
// we have an rhs, specialize it
- val tree1 = duplicateBody(ddef, target)
+ val tree1 = reportTypeError {
+ duplicateBody(ddef, target)
+ }
debuglog("implementation: " + tree1)
deriveDefDef(tree1)(transform)
}
diff --git a/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala b/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala
index a77df71312..2077ab0997 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala
@@ -6,7 +6,7 @@
package scala.tools.nsc
package typechecker
-import util.Statistics._
+import reflect.internal.util.Statistics
/** The main attribution phase.
*/
@@ -72,6 +72,7 @@ trait Analyzer extends AnyRef
}
object typerFactory extends SubComponent {
+ import reflect.internal.TypesStats.typerNanos
val global: Analyzer.this.global.type = Analyzer.this.global
val phaseName = "typer"
val runsAfter = List[String]()
@@ -84,13 +85,13 @@ trait Analyzer extends AnyRef
// compiler run). This is good enough for the resident compiler, which was the most affected.
undoLog.clear()
override def run() {
- val start = startTimer(typerNanos)
+ val start = Statistics.startTimer(typerNanos)
global.echoPhaseSummary(this)
currentRun.units foreach applyPhase
undoLog.clear()
// need to clear it after as well or 10K+ accumulated entries are
// uncollectable the rest of the way.
- stopTimer(typerNanos, start)
+ Statistics.stopTimer(typerNanos, start)
}
def apply(unit: CompilationUnit) {
try {
diff --git a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
index bcf2ba6b71..60cc9e5fb8 100644
--- a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
@@ -90,7 +90,7 @@ trait ContextErrors {
import infer.setError
object TyperErrorGen {
- implicit val context0: Context = infer.getContext
+ implicit val contextTyperErrorGen: Context = infer.getContext
def UnstableTreeError(tree: Tree) = {
def addendum = {
@@ -222,7 +222,13 @@ trait ContextErrors {
NormalTypeError(tree, "super constructor cannot be passed a self reference unless parameter is declared by-name")
def SuperConstrArgsThisReferenceError(tree: Tree) =
- NormalTypeError(tree, "super constructor arguments cannot reference unconstructed `this`")
+ ConstrArgsThisReferenceError("super", tree)
+
+ def SelfConstrArgsThisReferenceError(tree: Tree) =
+ ConstrArgsThisReferenceError("self", tree)
+
+ private def ConstrArgsThisReferenceError(prefix: String, tree: Tree) =
+ NormalTypeError(tree, s"$prefix constructor arguments cannot reference unconstructed `this`")
def TooManyArgumentListsForConstructor(tree: Tree) = {
issueNormalTypeError(tree, "too many argument lists for constructor invocation")
@@ -642,7 +648,7 @@ trait ContextErrors {
object InferErrorGen {
- implicit val context0 = getContext
+ implicit val contextInferErrorGen = getContext
object PolyAlternativeErrorKind extends Enumeration {
type ErrorType = Value
@@ -828,7 +834,7 @@ trait ContextErrors {
object NamerErrorGen {
- implicit val context0 = context
+ implicit val contextNamerErrorGen = context
object SymValidateErrors extends Enumeration {
val ImplicitConstr, ImplicitNotTermOrClass, ImplicitAtToplevel,
@@ -863,7 +869,7 @@ trait ContextErrors {
case CyclicReference(sym, info: TypeCompleter) =>
issueNormalTypeError(tree, typer.cyclicReferenceMessage(sym, info.tree) getOrElse ex.getMessage())
case _ =>
- context0.issue(TypeErrorWithUnderlyingTree(tree, ex))
+ contextNamerErrorGen.issue(TypeErrorWithUnderlyingTree(tree, ex))
}
}
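
SelfConstrArgsThisReferenceError is the self-constructor analogue of the existing super-constructor check (it is wired up in the checkSelfConstructorArgs hunk in Typers.scala near the end of this diff): arguments passed to this(...) may not refer to the object still under construction. A deliberately non-compiling illustration with hypothetical classes:

    class Node(val parent: Node) {
      // Rejected: the argument references `this` before the object exists,
      // producing "self constructor arguments cannot reference unconstructed `this`".
      def this() = this(this)
    }
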
diff --git a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
index ac3c94c47a..bcf529ecd2 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
@@ -100,6 +100,12 @@ trait Contexts { self: Analyzer =>
var outer: Context = _ // The next outer context
var enclClass: Context = _ // The next outer context whose tree is a
// template or package definition
+ @inline final def savingEnclClass[A](c: Context)(a: => A): A = {
+ val saved = enclClass
+ enclClass = c
+ try a finally enclClass = saved
+ }
+
var enclMethod: Context = _ // The next outer context whose tree is a method
var variance: Int = _ // Variance relative to enclosing class
private var _undetparams: List[Symbol] = List() // Undetermined type parameters,
@@ -638,11 +644,12 @@ trait Contexts { self: Analyzer =>
if (owner != nextOuter.owner && owner.isClass && !owner.isPackageClass && !inSelfSuperCall) {
if (!owner.isInitialized) return nextOuter.implicitss
// debuglog("collect member implicits " + owner + ", implicit members = " + owner.thisType.implicitMembers)//DEBUG
- val savedEnclClass = enclClass
- this.enclClass = this
- val res = collectImplicits(owner.thisType.implicitMembers, owner.thisType)
- this.enclClass = savedEnclClass
- res
+ savingEnclClass(this) {
+ // !!! In the body of `class C(implicit a: A) { }`, `implicitss` returns `List(List(a), List(a), List(<predef..)))`
+      // it is handled correctly by implicit search, which considers the second `a` to be shadowed, but should be
+ // remedied nonetheless.
+ collectImplicits(owner.thisType.implicitMembers, owner.thisType)
+ }
} else if (scope != nextOuter.scope && !owner.isPackageClass) {
debuglog("collect local implicits " + scope.toList)//DEBUG
collectImplicits(scope.toList, NoPrefix)
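
savingEnclClass packages the usual save/mutate/restore idiom behind try/finally, so enclClass is reset even if collectImplicits throws. The same idiom in isolation, over a hypothetical mutable field:

    object SaveRestore {
      private var current: String = "outer"   // hypothetical mutable state

      @inline final def savingCurrent[A](tmp: String)(body: => A): A = {
        val saved = current
        current = tmp
        try body finally current = saved      // restored on normal and exceptional exit
      }

      def demo(): (String, String) = {
        val inside = savingCurrent("inner")(current)
        (inside, current)                     // ("inner", "outer")
      }
    }
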
diff --git a/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala b/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala
index b7a6ea677e..6386273c9d 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala
@@ -360,7 +360,7 @@ abstract class Duplicators extends Analyzer {
tree
case _ =>
- debuglog("Duplicators default case: " + tree.summaryString)
+ // log("Duplicators default case: " + tree.summaryString + " -> " + tree)
if (tree.hasSymbol && tree.symbol != NoSymbol && (tree.symbol.owner == definitions.AnyClass)) {
tree.symbol = NoSymbol // maybe we can find a more specific member in a subclass of Any (see AnyVal members, like ==)
}
diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
index aa63f3ec31..f7e00109ae 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
@@ -16,7 +16,7 @@ import scala.collection.{ mutable, immutable }
import mutable.{ LinkedHashMap, ListBuffer }
import scala.util.matching.Regex
import symtab.Flags._
-import util.Statistics._
+import scala.reflect.internal.util.Statistics
import language.implicitConversions
/** This trait provides methods to find various kinds of implicits.
@@ -29,6 +29,7 @@ trait Implicits {
import global._
import definitions._
+ import ImplicitsStats._
import typeDebug.{ ptTree, ptBlock, ptLine }
import global.typer.{ printTyping, deindentTyping, indentTyping, printInference }
@@ -71,10 +72,10 @@ trait Implicits {
)
indentTyping()
- val rawTypeStart = startCounter(rawTypeImpl)
- val findMemberStart = startCounter(findMemberImpl)
- val subtypeStart = startCounter(subtypeImpl)
- val start = startTimer(implicitNanos)
+ val rawTypeStart = Statistics.startCounter(rawTypeImpl)
+ val findMemberStart = Statistics.startCounter(findMemberImpl)
+ val subtypeStart = Statistics.startCounter(subtypeImpl)
+ val start = Statistics.startTimer(implicitNanos)
if (printInfers && !tree.isEmpty && !context.undetparams.isEmpty)
printTyping("typing implicit: %s %s".format(tree, context.undetparamsString))
val implicitSearchContext = context.makeImplicit(reportAmbiguous)
@@ -86,10 +87,10 @@ trait Implicits {
printInference("[infer implicit] inferred " + result)
context.undetparams = context.undetparams filterNot result.subst.from.contains
- stopTimer(implicitNanos, start)
- stopCounter(rawTypeImpl, rawTypeStart)
- stopCounter(findMemberImpl, findMemberStart)
- stopCounter(subtypeImpl, subtypeStart)
+ Statistics.stopTimer(implicitNanos, start)
+ Statistics.stopCounter(rawTypeImpl, rawTypeStart)
+ Statistics.stopCounter(findMemberImpl, findMemberStart)
+ Statistics.stopCounter(subtypeImpl, subtypeStart)
deindentTyping()
printTyping("Implicit search yielded: "+ result)
result
@@ -307,12 +308,12 @@ trait Implicits {
/** Is implicit info `info1` better than implicit info `info2`?
*/
def improves(info1: ImplicitInfo, info2: ImplicitInfo) = {
- incCounter(improvesCount)
+ Statistics.incCounter(improvesCount)
(info2 == NoImplicitInfo) ||
(info1 != NoImplicitInfo) && {
if (info1.sym.isStatic && info2.sym.isStatic) {
improvesCache get (info1, info2) match {
- case Some(b) => incCounter(improvesCachedCount); b
+ case Some(b) => Statistics.incCounter(improvesCachedCount); b
case None =>
val result = isStrictlyMoreSpecific(info1.tpe, info2.tpe, info1.sym, info2.sym)
improvesCache((info1, info2)) = result
@@ -376,7 +377,7 @@ trait Implicits {
overlaps(dtor1, dted1) && (dtor1 =:= dted1 || complexity(dtor1) > complexity(dted1))
}
- incCounter(implicitSearchCount)
+ Statistics.incCounter(implicitSearchCount)
/** The type parameters to instantiate */
val undetParams = if (isView) List() else context.outer.undetparams
@@ -390,9 +391,10 @@ trait Implicits {
* Detect infinite search trees for implicits.
*
* @param info The given implicit info describing the implicit definition
+ * @param isLocal Is the implicit in the local scope of the call site?
* @pre `info.tpe` does not contain an error
*/
- private def typedImplicit(info: ImplicitInfo, ptChecked: Boolean): SearchResult = {
+ private def typedImplicit(info: ImplicitInfo, ptChecked: Boolean, isLocal: Boolean): SearchResult = {
(context.openImplicits find { case (tp, tree1) => tree1.symbol == tree.symbol && dominates(pt, tp)}) match {
case Some(pending) =>
//println("Pending implicit "+pending+" dominates "+pt+"/"+undetParams) //@MDEBUG
@@ -401,7 +403,7 @@ trait Implicits {
try {
context.openImplicits = (pt, tree) :: context.openImplicits
// println(" "*context.openImplicits.length+"typed implicit "+info+" for "+pt) //@MDEBUG
- typedImplicit0(info, ptChecked)
+ typedImplicit0(info, ptChecked, isLocal)
} catch {
case ex: DivergentImplicit =>
//println("DivergentImplicit for pt:"+ pt +", open implicits:"+context.openImplicits) //@MDEBUG
@@ -427,7 +429,7 @@ trait Implicits {
* This method is performance critical: 5-8% of typechecking time.
*/
private def matchesPt(tp: Type, pt: Type, undet: List[Symbol]): Boolean = {
- val start = startTimer(matchesPtNanos)
+ val start = Statistics.startTimer(matchesPtNanos)
val result = normSubType(tp, pt) || isView && {
pt match {
case TypeRef(_, Function1.Sym, args) =>
@@ -436,7 +438,7 @@ trait Implicits {
false
}
}
- stopTimer(matchesPtNanos, start)
+ Statistics.stopTimer(matchesPtNanos, start)
result
}
private def matchesPt(info: ImplicitInfo): Boolean = (
@@ -534,8 +536,8 @@ trait Implicits {
case _ => false
}
- private def typedImplicit0(info: ImplicitInfo, ptChecked: Boolean): SearchResult = {
- incCounter(plausiblyCompatibleImplicits)
+ private def typedImplicit0(info: ImplicitInfo, ptChecked: Boolean, isLocal: Boolean): SearchResult = {
+ Statistics.incCounter(plausiblyCompatibleImplicits)
printTyping (
ptBlock("typedImplicit0",
"info.name" -> info.name,
@@ -549,17 +551,24 @@ trait Implicits {
)
if (ptChecked || matchesPt(info))
- typedImplicit1(info)
+ typedImplicit1(info, isLocal)
else
SearchFailure
}
- private def typedImplicit1(info: ImplicitInfo): SearchResult = {
- incCounter(matchingImplicits)
+ private def typedImplicit1(info: ImplicitInfo, isLocal: Boolean): SearchResult = {
+ Statistics.incCounter(matchingImplicits)
val itree = atPos(pos.focus) {
- if (info.pre == NoPrefix) Ident(info.name)
- else {
+ // workaround for deficient context provided by ModelFactoryImplicitSupport#makeImplicitConstraints
+ val isScalaDoc = context.tree == EmptyTree
+
+ if (isLocal && !isScalaDoc) {
+ // SI-4270 SI-5376 Always use an unattributed Ident for implicits in the local scope,
+ // rather than an attributed Select, to detect shadowing.
+ Ident(info.name)
+ } else {
+ assert(info.pre != NoPrefix, info)
// SI-2405 Not info.name, which might be an aliased import
val implicitMemberName = info.sym.name
Select(gen.mkAttributedQualifier(info.pre), implicitMemberName)
@@ -586,7 +595,7 @@ trait Implicits {
if (context.hasErrors)
return fail("typed implicit %s has errors".format(info.sym.fullLocationString))
- incCounter(typedImplicits)
+ Statistics.incCounter(typedImplicits)
printTyping("typed implicit %s:%s, pt=%s".format(itree1, itree1.tpe, wildPt))
val itree2 = if (isView) (itree1: @unchecked) match { case Apply(fun, _) => fun }
@@ -607,8 +616,8 @@ trait Implicits {
if (context.hasErrors)
fail("hasMatchingSymbol reported threw error(s)")
- else if (!hasMatchingSymbol(itree1))
- fail("candidate implicit %s is shadowed by other implicit %s".format(
+ else if (isLocal && !hasMatchingSymbol(itree1))
+ fail("candidate implicit %s is shadowed by %s".format(
info.sym.fullLocationString, itree1.symbol.fullLocationString))
else {
val tvars = undetParams map freshVar
@@ -669,7 +678,7 @@ trait Implicits {
fail("typing TypeApply reported errors for the implicit tree")
else {
val result = new SearchResult(itree2, subst)
- incCounter(foundImplicits)
+ Statistics.incCounter(foundImplicits)
printInference("[success] found %s for pt %s".format(result, ptInstantiated))
result
}
@@ -683,17 +692,6 @@ trait Implicits {
}
}
- // #3453: in addition to the implicit symbols that may shadow the implicit with
- // name `name`, this method tests whether there's a non-implicit symbol with name
- // `name` in scope. Inspired by logic in typedIdent.
- private def nonImplicitSynonymInScope(name: Name) = {
- // the implicit ones are handled by the `shadowed` set above
- context.scope.lookupEntry(name) match {
- case x: ScopeEntry => reallyExists(x.sym) && !x.sym.isImplicit
- case _ => false
- }
- }
-
/** Should implicit definition symbol `sym` be considered for applicability testing?
* This is the case if one of the following holds:
* - the symbol's type is initialized
@@ -737,19 +735,38 @@ trait Implicits {
/** Prune ImplicitInfos down to either all the eligible ones or the best one.
*
* @param iss list of list of infos
- * @param shadowed set in which to record names that are shadowed by implicit infos
- * If it is null, no shadowing.
+ * @param isLocal if true, `iss` represents in-scope implicits, which must respect the normal rules of
+ * shadowing. The head of the list `iss` must represent implicits from the closest
+ * enclosing scope, and so on.
*/
- class ImplicitComputation(iss: Infoss, shadowed: util.HashSet[Name]) {
+ class ImplicitComputation(iss: Infoss, isLocal: Boolean) {
+ abstract class Shadower {
+ def addInfos(infos: Infos)
+ def isShadowed(name: Name): Boolean
+ }
+ private val shadower: Shadower = {
+      /** Used to exclude implicits from outer scopes that are shadowed by same-named implicits */
+ final class LocalShadower extends Shadower {
+ val shadowed = util.HashSet[Name](512)
+ def addInfos(infos: Infos) {
+ shadowed addEntries infos.map(_.name)
+ }
+ def isShadowed(name: Name) = shadowed(name)
+ }
+ /** Used for the implicits of expected type, when no shadowing checks are needed. */
+ object NoShadower extends Shadower {
+ def addInfos(infos: Infos) {}
+ def isShadowed(name: Name) = false
+ }
+ if (isLocal) new LocalShadower else NoShadower
+ }
+
private var best: SearchResult = SearchFailure
- private def isShadowed(name: Name) = (
- (shadowed != null)
- && (shadowed(name) || nonImplicitSynonymInScope(name))
- )
+
private def isIneligible(info: ImplicitInfo) = (
info.isCyclicOrErroneous
|| isView && isPredefMemberNamed(info.sym, nme.conforms)
- || isShadowed(info.name)
+ || shadower.isShadowed(info.name)
|| (!context.macrosEnabled && info.sym.isTermMacro)
)
@@ -788,9 +805,7 @@ trait Implicits {
val eligible = {
val matches = iss flatMap { is =>
val result = is filter (info => checkValid(info.sym) && survives(info))
- if (shadowed ne null)
- shadowed addEntries (is map (_.name))
-
+ shadower addInfos is
result
}
@@ -812,7 +827,7 @@ trait Implicits {
case Nil => acc
case i :: is =>
def tryImplicitInfo(i: ImplicitInfo) =
- try typedImplicit(i, true)
+ try typedImplicit(i, ptChecked = true, isLocal)
catch divergenceHandler
tryImplicitInfo(i) match {
@@ -842,7 +857,7 @@ trait Implicits {
/** Returns all eligible ImplicitInfos and their SearchResults in a map.
*/
- def findAll() = mapFrom(eligible)(typedImplicit(_, false))
+ def findAll() = mapFrom(eligible)(typedImplicit(_, ptChecked = false, isLocal))
/** Returns the SearchResult of the best match.
*/
@@ -890,11 +905,11 @@ trait Implicits {
* @return map from infos to search results
*/
def applicableInfos(iss: Infoss, isLocal: Boolean): Map[ImplicitInfo, SearchResult] = {
- val start = startCounter(subtypeAppInfos)
- val computation = new ImplicitComputation(iss, if (isLocal) util.HashSet[Name](512) else null) { }
+ val start = Statistics.startCounter(subtypeAppInfos)
+ val computation = new ImplicitComputation(iss, isLocal) { }
val applicable = computation.findAll()
- stopCounter(subtypeAppInfos, start)
+ Statistics.stopCounter(subtypeAppInfos, start)
applicable
}
@@ -909,7 +924,7 @@ trait Implicits {
*/
def searchImplicit(implicitInfoss: Infoss, isLocal: Boolean): SearchResult =
if (implicitInfoss.forall(_.isEmpty)) SearchFailure
- else new ImplicitComputation(implicitInfoss, if (isLocal) util.HashSet[Name](128) else null) findBest()
+ else new ImplicitComputation(implicitInfoss, isLocal) findBest()
/** Produce an implicit info map, i.e. a map from the class symbols C of all parts of this type to
* the implicit infos in the companion objects of these class symbols C.
@@ -1109,26 +1124,28 @@ trait Implicits {
* These are all implicits found in companion objects of classes C
* such that some part of `tp` has C as one of its superclasses.
*/
- private def implicitsOfExpectedType: Infoss = implicitsCache get pt match {
- case Some(implicitInfoss) =>
- incCounter(implicitCacheHits)
- implicitInfoss
- case None =>
- incCounter(implicitCacheMisses)
- val start = startTimer(subtypeETNanos)
-// val implicitInfoss = companionImplicits(pt)
- val implicitInfoss1 = companionImplicitMap(pt).valuesIterator.toList
-// val is1 = implicitInfoss.flatten.toSet
-// val is2 = implicitInfoss1.flatten.toSet
-// for (i <- is1)
-// if (!(is2 contains i)) println("!!! implicit infos of "+pt+" differ, new does not contain "+i+",\nold: "+implicitInfoss+",\nnew: "+implicitInfoss1)
-// for (i <- is2)
-// if (!(is1 contains i)) println("!!! implicit infos of "+pt+" differ, old does not contain "+i+",\nold: "+implicitInfoss+",\nnew: "+implicitInfoss1)
- stopTimer(subtypeETNanos, start)
- implicitsCache(pt) = implicitInfoss1
- if (implicitsCache.size >= sizeLimit)
- implicitsCache -= implicitsCache.keysIterator.next
- implicitInfoss1
+ private def implicitsOfExpectedType: Infoss = {
+ Statistics.incCounter(implicitCacheHits)
+ implicitsCache get pt match {
+ case Some(implicitInfoss) =>
+ Statistics.incCounter(implicitCacheHits)
+ implicitInfoss
+ case None =>
+ val start = Statistics.startTimer(subtypeETNanos)
+ // val implicitInfoss = companionImplicits(pt)
+ val implicitInfoss1 = companionImplicitMap(pt).valuesIterator.toList
+ // val is1 = implicitInfoss.flatten.toSet
+ // val is2 = implicitInfoss1.flatten.toSet
+ // for (i <- is1)
+ // if (!(is2 contains i)) println("!!! implicit infos of "+pt+" differ, new does not contain "+i+",\nold: "+implicitInfoss+",\nnew: "+implicitInfoss1)
+ // for (i <- is2)
+ // if (!(is1 contains i)) println("!!! implicit infos of "+pt+" differ, old does not contain "+i+",\nold: "+implicitInfoss+",\nnew: "+implicitInfoss1)
+ Statistics.stopTimer(subtypeETNanos, start)
+ implicitsCache(pt) = implicitInfoss1
+ if (implicitsCache.size >= sizeLimit)
+ implicitsCache -= implicitsCache.keysIterator.next
+ implicitInfoss1
+ }
}
private def TagSymbols = TagMaterializers.keySet
@@ -1319,7 +1336,7 @@ trait Implicits {
if (full) {
val cm = typed(Ident(ReflectRuntimeCurrentMirror))
gen.mkMethodCall(ReflectRuntimeUniverse, nme.typeTagToManifest, List(tp), List(cm, tagInScope))
- } else gen.mkMethodCall(ReflectRuntimeUniverse, nme.classTagToClassManifest, List(tp), List(tagInScope))
+ } else gen.mkMethodCall(ReflectBasis, nme.classTagToClassManifest, List(tp), List(tagInScope))
wrapResult(interop)
}
}
@@ -1354,30 +1371,30 @@ trait Implicits {
* If all fails return SearchFailure
*/
def bestImplicit: SearchResult = {
- val failstart = startTimer(inscopeFailNanos)
- val succstart = startTimer(inscopeSucceedNanos)
+ val failstart = Statistics.startTimer(inscopeFailNanos)
+ val succstart = Statistics.startTimer(inscopeSucceedNanos)
var result = searchImplicit(context.implicitss, true)
if (result == SearchFailure) {
- stopTimer(inscopeFailNanos, failstart)
+ Statistics.stopTimer(inscopeFailNanos, failstart)
} else {
- stopTimer(inscopeSucceedNanos, succstart)
- incCounter(inscopeImplicitHits)
+ Statistics.stopTimer(inscopeSucceedNanos, succstart)
+ Statistics.incCounter(inscopeImplicitHits)
}
if (result == SearchFailure) {
val previousErrs = context.flushAndReturnBuffer()
- val failstart = startTimer(oftypeFailNanos)
- val succstart = startTimer(oftypeSucceedNanos)
+ val failstart = Statistics.startTimer(oftypeFailNanos)
+ val succstart = Statistics.startTimer(oftypeSucceedNanos)
result = implicitTagOrOfExpectedType(pt)
if (result == SearchFailure) {
context.updateBuffer(previousErrs)
- stopTimer(oftypeFailNanos, failstart)
+ Statistics.stopTimer(oftypeFailNanos, failstart)
} else {
- stopTimer(oftypeSucceedNanos, succstart)
- incCounter(oftypeImplicitHits)
+ Statistics.stopTimer(oftypeSucceedNanos, succstart)
+ Statistics.incCounter(oftypeImplicitHits)
}
}
@@ -1397,20 +1414,23 @@ trait Implicits {
def allImplicitsPoly(tvars: List[TypeVar]): List[(SearchResult, List[TypeConstraint])] = {
def resetTVars() = tvars foreach { _.constr = new TypeConstraint }
- def eligibleInfos(iss: Infoss, isLocal: Boolean) = new ImplicitComputation(iss, if (isLocal) util.HashSet[Name](512) else null).eligible
- val allEligibleInfos = (eligibleInfos(context.implicitss, true) ++ eligibleInfos(implicitsOfExpectedType, false)).toList
-
- allEligibleInfos flatMap { ii =>
+ def eligibleInfos(iss: Infoss, isLocal: Boolean) = {
+ val eligible = new ImplicitComputation(iss, isLocal).eligible
+ eligible.toList.flatMap {
+ (ii: ImplicitInfo) =>
// each ImplicitInfo contributes a distinct set of constraints (generated indirectly by typedImplicit)
// thus, start each type var off with a fresh for every typedImplicit
resetTVars()
// any previous errors should not affect us now
context.flushBuffer()
- val res = typedImplicit(ii, false)
+
+ val res = typedImplicit(ii, ptChecked = false, isLocal)
if (res.tree ne EmptyTree) List((res, tvars map (_.constr)))
else Nil
}
}
+ eligibleInfos(context.implicitss, isLocal = true) ++ eligibleInfos(implicitsOfExpectedType, isLocal = false)
+ }
}
object ImplicitNotFoundMsg {
@@ -1455,5 +1475,37 @@ trait Implicits {
}
}
}
+
+object ImplicitsStats {
+
+ import reflect.internal.TypesStats._
+
+ val rawTypeImpl = Statistics.newSubCounter (" of which in implicits", rawTypeCount)
+ val subtypeImpl = Statistics.newSubCounter(" of which in implicit", subtypeCount)
+ val findMemberImpl = Statistics.newSubCounter(" of which in implicit", findMemberCount)
+ val subtypeAppInfos = Statistics.newSubCounter(" of which in app impl", subtypeCount)
+ val subtypeImprovCount = Statistics.newSubCounter(" of which in improves", subtypeCount)
+ val implicitSearchCount = Statistics.newCounter ("#implicit searches", "typer")
+ val triedImplicits = Statistics.newSubCounter(" #tried", implicitSearchCount)
+ val plausiblyCompatibleImplicits
+ = Statistics.newSubCounter(" #plausibly compatible", implicitSearchCount)
+ val matchingImplicits = Statistics.newSubCounter(" #matching", implicitSearchCount)
+ val typedImplicits = Statistics.newSubCounter(" #typed", implicitSearchCount)
+ val foundImplicits = Statistics.newSubCounter(" #found", implicitSearchCount)
+ val improvesCount = Statistics.newSubCounter("implicit improves tests", implicitSearchCount)
+ val improvesCachedCount = Statistics.newSubCounter("#implicit improves cached ", implicitSearchCount)
+ val inscopeImplicitHits = Statistics.newSubCounter("#implicit inscope hits", implicitSearchCount)
+ val oftypeImplicitHits = Statistics.newSubCounter("#implicit oftype hits ", implicitSearchCount)
+ val implicitNanos = Statistics.newSubTimer ("time spent in implicits", typerNanos)
+ val inscopeSucceedNanos = Statistics.newSubTimer (" successful in scope", typerNanos)
+ val inscopeFailNanos = Statistics.newSubTimer (" failed in scope", typerNanos)
+ val oftypeSucceedNanos = Statistics.newSubTimer (" successful of type", typerNanos)
+ val oftypeFailNanos = Statistics.newSubTimer (" failed of type", typerNanos)
+ val subtypeETNanos = Statistics.newSubTimer (" assembling parts", typerNanos)
+ val matchesPtNanos = Statistics.newSubTimer (" matchesPT", typerNanos)
+ val implicitCacheAccs = Statistics.newCounter ("implicit cache accesses", "typer")
+ val implicitCacheHits = Statistics.newSubCounter("implicit cache hits", implicitCacheAccs)
+}
+
class DivergentImplicit extends Exception
object DivergentImplicit extends DivergentImplicit
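
The isLocal/Shadower machinery restores the rule (SI-4270, SI-5376) that an in-scope implicit is eligible only if its simple name is not shadowed by a nearer declaration; implicits of the expected type use NoShadower and are exempt. A minimal illustration of the shadowing the unattributed Ident is meant to detect, with illustrative names:

    object ShadowingDemo {
      class Marker(val label: String)

      implicit val m: Marker = new Marker("outer")

      def lookup: String = {
        implicit val m: Marker = new Marker("inner") // shadows the enclosing `m`
        implicitly[Marker].label                     // resolves to "inner"
      }
    }
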
diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala
index 9e371dd2dd..688dcd91ac 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala
@@ -1276,7 +1276,8 @@ trait Infer {
} else {
for (arg <- args) {
if (sym == ArrayClass) check(arg, bound)
- else if (arg.typeArgs.nonEmpty) () // avoid spurious warnings with higher-kinded types
+ else if (arg.typeArgs.nonEmpty) () // avoid spurious warnings with higher-kinded types
+ else if (sym == NonLocalReturnControlClass) () // no way to suppress unchecked warnings on try/catch
else arg match {
case TypeRef(_, sym, _) if isLocalBinding(sym) =>
;
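
NonLocalReturnControl is the carrier exception the compiler synthesizes for a return that crosses a closure boundary; the generated catch cannot check its type argument at runtime, so warning about it would flag code the user never wrote. A small example that produces such a synthesized try/catch:

    object NonLocalReturn {
      // The `return` inside the function literal is compiled into a throw of
      // scala.runtime.NonLocalReturnControl, caught again at the method boundary.
      def firstPositive(xs: List[Int]): Option[Int] = {
        xs.foreach(x => if (x > 0) return Some(x))
        None
      }
    }
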
diff --git a/src/compiler/scala/tools/nsc/typechecker/Macros.scala b/src/compiler/scala/tools/nsc/typechecker/Macros.scala
index ec14476d1a..d157666e47 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Macros.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Macros.scala
@@ -7,7 +7,7 @@ import scala.tools.nsc.util.ClassPath._
import scala.reflect.runtime.ReflectionUtils
import scala.collection.mutable.ListBuffer
import scala.compat.Platform.EOL
-import util.Statistics._
+import reflect.internal.util.Statistics
import scala.reflect.makro.util._
import java.lang.{Class => jClass}
import java.lang.reflect.{Array => jArray, Method => jMethod}
@@ -42,6 +42,7 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces {
import global._
import definitions._
+ import MacrosStats._
def globalSettings = global.settings
val globalMacroCache = collection.mutable.Map[Any, Any]()
@@ -945,8 +946,8 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces {
this.fail(typer, tree, err.errPos, "failed to %s: %s".format(what, err.errMsg))
return expandee
}
- val start = startTimer(macroExpandNanos)
- incCounter(macroExpandCount)
+ val start = Statistics.startTimer(macroExpandNanos)
+ Statistics.incCounter(macroExpandCount)
try {
macroExpand1(typer, expandee) match {
case Success(expanded0) =>
@@ -993,7 +994,7 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces {
result
}
} finally {
- stopTimer(macroExpandNanos, start)
+ Statistics.stopTimer(macroExpandNanos, start)
}
}
@@ -1292,3 +1293,9 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces {
})
}.transform(expandee)
}
+
+object MacrosStats {
+ import reflect.internal.TypesStats.typerNanos
+ val macroExpandCount = Statistics.newCounter ("#macro expansions", "typer")
+ val macroExpandNanos = Statistics.newSubTimer("time spent in macroExpand", typerNanos)
+}
diff --git a/src/compiler/scala/tools/nsc/typechecker/Modes.scala b/src/compiler/scala/tools/nsc/typechecker/Modes.scala
index 3eff5ef024..bde3ad98c9 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Modes.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Modes.scala
@@ -86,6 +86,10 @@ trait Modes {
*/
final val TYPEPATmode = 0x10000
+ /** RETmode is set when we are typing a return expression.
+ */
+ final val RETmode = 0x20000
+
final private val StickyModes = EXPRmode | PATTERNmode | TYPEmode | ALTmode
final def onlyStickyModes(mode: Int) =
diff --git a/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala b/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala
index 4e8f416b16..9b8ddffb49 100644
--- a/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala
@@ -14,7 +14,7 @@ import scala.tools.nsc.transform.TypingTransformers
import scala.tools.nsc.transform.Transform
import scala.collection.mutable.HashSet
import scala.collection.mutable.HashMap
-import scala.tools.nsc.util.Statistics
+import reflect.internal.util.Statistics
/** Translate pattern matching.
*
@@ -38,6 +38,7 @@ import scala.tools.nsc.util.Statistics
*/
trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL { // self: Analyzer =>
import Statistics._
+ import PatternMatchingStats._
val global: Global // need to repeat here because otherwise last mixin defines global as
// SymbolTable. If we had DOT this would not be an issue
@@ -183,7 +184,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
case _ => tp
}
- val start = startTimer(patmatNanos)
+ val start = Statistics.startTimer(patmatNanos)
val selectorTp = repeatedToSeq(elimAnonymousClass(selector.tpe.widen.withoutAnnotations))
@@ -210,7 +211,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
// pt = Any* occurs when compiling test/files/pos/annotDepMethType.scala with -Xexperimental
val combined = combineCases(selector, selectorSym, cases map translateCase(selectorSym, pt), pt, matchOwner, matchFailGenOverride)
- stopTimer(patmatNanos, start)
+ Statistics.stopTimer(patmatNanos, start)
combined
}
@@ -1694,7 +1695,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
// TODO: for V1 representing x1 and V2 standing for x1.head, encode that
// V1 = Nil implies -(V2 = Ci) for all Ci in V2's domain (i.e., it is unassignable)
def removeVarEq(props: List[Prop], considerNull: Boolean = false): (Prop, List[Prop]) = {
- val start = startTimer(patmatAnaVarEq)
+ val start = Statistics.startTimer(patmatAnaVarEq)
val vars = new collection.mutable.HashSet[Var]
@@ -1766,7 +1767,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
// patmatDebug("eqAxioms:\n"+ cnfString(eqFreePropToSolvable(eqAxioms)))
// patmatDebug("pure:\n"+ cnfString(eqFreePropToSolvable(pure)))
- stopTimer(patmatAnaVarEq, start)
+ Statistics.stopTimer(patmatAnaVarEq, start)
(eqAxioms, pure)
}
@@ -1865,10 +1866,10 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
}
}
- val start = startTimer(patmatCNF)
+ val start = Statistics.startTimer(patmatCNF)
val res = conjunctiveNormalForm(negationNormalForm(p))
- stopTimer(patmatCNF, start)
- patmatCNFSizes(res.size) += 1
+ Statistics.stopTimer(patmatCNF, start)
+ patmatCNFSizes(res.size).value += 1
// patmatDebug("cnf for\n"+ p +"\nis:\n"+cnfString(res))
res
@@ -1945,7 +1946,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
// patmatDebug("dpll\n"+ cnfString(f))
- val start = startTimer(patmatAnaDPLL)
+ val start = Statistics.startTimer(patmatAnaDPLL)
val satisfiableWithModel: Model =
if (f isEmpty) EmptyModel
@@ -1983,7 +1984,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
}
}
- stopTimer(patmatAnaDPLL, start)
+ Statistics.stopTimer(patmatAnaDPLL, start)
satisfiableWithModel
}
@@ -2291,7 +2292,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
def makeCondPessimistic(tm: TreeMaker)(recurse: TreeMaker => Cond): Cond = makeCond(tm)(recurse)
}
- val start = startTimer(patmatAnaReach)
+ val start = Statistics.startTimer(patmatAnaReach)
// use the same approximator so we share variables,
// but need different conditions depending on whether we're conservatively looking for failure or success
@@ -2340,7 +2341,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
}
}
- stopTimer(patmatAnaReach, start)
+ Statistics.stopTimer(patmatAnaReach, start)
if (reachable) None else Some(caseIndex)
} catch {
@@ -2428,7 +2429,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
// - back off (to avoid crying exhaustive too often) when:
// - there are guards -->
// - there are extractor calls (that we can't secretly/soundly) rewrite
- val start = startTimer(patmatAnaExhaust)
+ val start = Statistics.startTimer(patmatAnaExhaust)
var backoff = false
object exhaustivityApproximation extends TreeMakersToConds(prevBinder) {
def makeCondExhaustivity(tm: TreeMaker)(recurse: TreeMaker => Cond): Cond = tm match {
@@ -2503,7 +2504,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
val pruned = CounterExample.prune(counterExamples).map(_.toString).sorted
- stopTimer(patmatAnaExhaust, start)
+ Statistics.stopTimer(patmatAnaExhaust, start)
pruned
} catch {
case e : CNFBudgetExceeded =>
@@ -3186,3 +3187,13 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
}
}
}
+
+object PatternMatchingStats {
+ val patmatNanos = Statistics.newTimer ("time spent in patmat", "patmat")
+ val patmatAnaDPLL = Statistics.newSubTimer (" of which DPLL", patmatNanos)
+ val patmatCNF = Statistics.newSubTimer (" of which in CNF conversion", patmatNanos)
+ val patmatCNFSizes = Statistics.newQuantMap[Int, Statistics.Counter](" CNF size counts", "patmat")(Statistics.newCounter(""))
+ val patmatAnaVarEq = Statistics.newSubTimer (" of which variable equality", patmatNanos)
+ val patmatAnaExhaust = Statistics.newSubTimer (" of which in exhaustivity", patmatNanos)
+ val patmatAnaReach = Statistics.newSubTimer (" of which in unreachability", patmatNanos)
+}
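
patmatCNFSizes uses the new Statistics.newQuantMap: a map from a key (here, the size of a CNF formula) to an on-demand quantity, incremented via .value += 1 as in the hunk above. The same pattern for a hypothetical size histogram:

    import scala.reflect.internal.util.Statistics

    object SizeHistogram {
      // One counter per observed size, created lazily under a hypothetical "demo" group.
      val sizes = Statistics.newQuantMap[Int, Statistics.Counter]("input size counts", "demo")(Statistics.newCounter(""))

      def record(n: Int): Unit = sizes(n).value += 1
    }
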
diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
index 26cf246ed7..119bb0852c 100644
--- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
@@ -766,7 +766,16 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R
for (member <- clazz.info.decls)
if (member.isAnyOverride && !(clazz.thisType.baseClasses exists (hasMatchingSym(_, member)))) {
// for (bc <- clazz.info.baseClasses.tail) Console.println("" + bc + " has " + bc.info.decl(member.name) + ":" + bc.info.decl(member.name).tpe);//DEBUG
- unit.error(member.pos, member.toString() + " overrides nothing");
+
+ val nonMatching: List[Symbol] = clazz.info.member(member.name).alternatives.filterNot(_.owner == clazz).filterNot(_.isFinal)
+ def issueError(suffix: String) = unit.error(member.pos, member.toString() + " overrides nothing" + suffix);
+ nonMatching match {
+ case Nil =>
+ issueError("")
+ case ms =>
+ val superSigs = ms.map(m => m.defStringSeenAs(clazz.tpe memberType m)).mkString("\n")
+ issueError(s".\nNote: the super classes of ${member.owner} contain the following, non final members named ${member.name}:\n${superSigs}")
+ }
member resetFlag (OVERRIDE | ABSOVERRIDE) // Any Override
}
}
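
The richer diagnostic lists non-final members of the superclasses that share the overriding member's name but not its signature. A deliberately non-compiling sketch (hypothetical classes) of code that now gets the extended note:

    class Base {
      def describe(x: Int): String = x.toString
    }

    class Derived extends Base {
      // Rejected by refchecks: this matches no member of Base, so the error is
      // "overrides nothing", now followed by a note listing Base.describe(x: Int).
      override def describe(x: String): String = x
    }
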
diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
index 2bdae4164a..b12ca4f0b4 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
@@ -16,8 +16,7 @@ import scala.collection.mutable
import scala.reflect.internal.util.BatchSourceFile
import mutable.ListBuffer
import symtab.Flags._
-import util.Statistics
-import util.Statistics._
+import reflect.internal.util.Statistics
// Suggestion check whether we can do without priming scopes with symbols of outer scopes,
// like the IDE does.
@@ -31,6 +30,7 @@ trait Typers extends Modes with Adaptations with Tags {
import global._
import definitions._
+ import TypersStats._
import patmat.DefaultOverrideMatchAttachment
final def forArgMode(fun: Tree, mode: Int) =
@@ -707,10 +707,16 @@ trait Typers extends Modes with Adaptations with Tags {
def silent[T](op: Typer => T,
reportAmbiguousErrors: Boolean = context.ambiguousErrors,
newtree: Tree = context.tree): SilentResult[T] = {
- val rawTypeStart = startCounter(rawTypeFailed)
- val findMemberStart = startCounter(findMemberFailed)
- val subtypeStart = startCounter(subtypeFailed)
- val failedSilentStart = startTimer(failedSilentNanos)
+ val rawTypeStart = Statistics.startCounter(rawTypeFailed)
+ val findMemberStart = Statistics.startCounter(findMemberFailed)
+ val subtypeStart = Statistics.startCounter(subtypeFailed)
+ val failedSilentStart = Statistics.startTimer(failedSilentNanos)
+ def stopStats() = {
+ Statistics.stopCounter(rawTypeFailed, rawTypeStart)
+ Statistics.stopCounter(findMemberFailed, findMemberStart)
+ Statistics.stopCounter(subtypeFailed, subtypeStart)
+ Statistics.stopTimer(failedSilentNanos, failedSilentStart)
+ }
try {
if (context.reportErrors ||
reportAmbiguousErrors != context.ambiguousErrors ||
@@ -724,8 +730,10 @@ trait Typers extends Modes with Adaptations with Tags {
context.undetparams = context1.undetparams
context.savedTypeBounds = context1.savedTypeBounds
context.namedApplyBlockInfo = context1.namedApplyBlockInfo
- if (context1.hasErrors) SilentTypeError(context1.errBuffer.head)
- else SilentResultValue(result)
+ if (context1.hasErrors) {
+ stopStats()
+ SilentTypeError(context1.errBuffer.head)
+ } else SilentResultValue(result)
} else {
assert(context.bufferErrors || isPastTyper, "silent mode is not available past typer")
withSavedContext(context){
@@ -739,10 +747,7 @@ trait Typers extends Modes with Adaptations with Tags {
case ex: TypeError =>
// fallback in case TypeError is still thrown
// @H this happens for example in cps annotation checker
- stopCounter(rawTypeFailed, rawTypeStart)
- stopCounter(findMemberFailed, findMemberStart)
- stopCounter(subtypeFailed, subtypeStart)
- stopTimer(failedSilentNanos, failedSilentStart)
+ stopStats()
SilentTypeError(TypeErrorWrapper(ex))
}
}
@@ -1841,16 +1846,13 @@ trait Typers extends Modes with Adaptations with Tags {
val pending = ListBuffer[AbsTypeError]()
// an object cannot be allowed to pass a reference to itself to a superconstructor
// because of initialization issues; bug #473
- for (arg <- superArgs ; tree <- arg) {
- val sym = tree.symbol
- if (sym != null && (sym.info.baseClasses contains clazz)) {
- if (sym.isModule)
- pending += SuperConstrReferenceError(tree)
- tree match {
- case This(qual) =>
- pending += SuperConstrArgsThisReferenceError(tree)
- case _ => ()
- }
+ foreachSubTreeBoundTo(superArgs, clazz) { tree =>
+ if (tree.symbol.isModule)
+ pending += SuperConstrReferenceError(tree)
+ tree match {
+ case This(qual) =>
+ pending += SuperConstrArgsThisReferenceError(tree)
+ case _ => ()
}
}
@@ -1884,7 +1886,39 @@ trait Typers extends Modes with Adaptations with Tags {
pending.foreach(ErrorUtils.issueTypeError)
}
- /** Check if a structurally defined method violates implementation restrictions.
+ // Check for SI-4842.
+ private def checkSelfConstructorArgs(ddef: DefDef, clazz: Symbol) {
+ val pending = ListBuffer[AbsTypeError]()
+ ddef.rhs match {
+ case Block(stats, expr) =>
+ val selfConstructorCall = stats.headOption.getOrElse(expr)
+ foreachSubTreeBoundTo(List(selfConstructorCall), clazz) {
+ case tree @ This(qual) =>
+ pending += SelfConstrArgsThisReferenceError(tree)
+ case _ => ()
+ }
+ case _ =>
+ }
+ pending.foreach(ErrorUtils.issueTypeError)
+ }
+
+ /**
+ * Run the provided function for each sub tree of `trees` that
+ * is bound to a symbol with `clazz` as a base class.
+ *
+ * @param f This function can assume that `tree.symbol` is non-null
+ */
+ private def foreachSubTreeBoundTo[A](trees: List[Tree], clazz: Symbol)(f: Tree => Unit): Unit =
+ for {
+ tree <- trees
+ subTree <- tree
+ } {
+ val sym = subTree.symbol
+ if (sym != null && sym.info.baseClasses.contains(clazz))
+ f(subTree)
+ }
+
+ /** Check if a structurally defined method violates implementation restrictions.
* A method cannot be called if it is a non-private member of a refinement type
* and if its parameter's types are any of:
* - the self-type of the refinement
@@ -2002,11 +2036,14 @@ trait Typers extends Modes with Adaptations with Tags {
transformedOrTyped(ddef.rhs, EXPRmode, tpt1.tpe)
}
- if (meth.isPrimaryConstructor && meth.isClassConstructor && !isPastTyper && !reporter.hasErrors && !meth.owner.isSubClass(AnyValClass)) {
- // At this point in AnyVal there is no supercall, which will blow up
- // in computeParamAliases; there's nothing to be computed for Anyval anyway.
+ if (meth.isClassConstructor && !isPastTyper && !reporter.hasErrors && !meth.owner.isSubClass(AnyValClass)) {
+ // At this point in AnyVal there is no supercall, which will blow up
+ // in computeParamAliases; there's nothing to be computed for AnyVal anyway.
+ if (meth.isPrimaryConstructor)
computeParamAliases(meth.owner, vparamss1, rhs1)
- }
+ else
+ checkSelfConstructorArgs(ddef, meth.owner)
+ }
if (tpt1.tpe.typeSymbol != NothingClass && !context.returnsSeen && rhs1.tpe.typeSymbol != NothingClass)
rhs1 = checkDead(rhs1)
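
A hypothetical shape of the SI-4842 pattern the new check rejects: an auxiliary constructor whose self-constructor call leaks the unconstructed `this` (class and parameter names invented):

    class Registry(owner: AnyRef) {
      def this(name: String) = {
        this(this) // now reported via SelfConstrArgsThisReferenceError: `this` escapes before construction
        println(name)
      }
    }
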
@@ -3481,9 +3518,9 @@ trait Typers extends Modes with Adaptations with Tags {
def isCapturedExistential(sym: Symbol) =
(sym hasAllFlags (EXISTENTIAL | CAPTURED)) && {
- val start = startTimer(isReferencedNanos)
+ val start = Statistics.startTimer(isReferencedNanos)
try !isReferencedFrom(context, sym)
- finally stopTimer(isReferencedNanos, start)
+ finally Statistics.stopTimer(isReferencedNanos, start)
}
def packCaptured(tpe: Type): Type = {
@@ -3987,7 +4024,8 @@ trait Typers extends Modes with Adaptations with Tags {
ReturnWithoutTypeError(tree, enclMethod.owner)
} else {
context.enclMethod.returnsSeen = true
- val expr1: Tree = typed(expr, EXPRmode | BYVALmode, restpt.tpe)
+ val expr1: Tree = typed(expr, EXPRmode | BYVALmode | RETmode, restpt.tpe)
+
// Warn about returning a value if no value can be returned.
if (restpt.tpe.typeSymbol == UnitClass) {
// The typing in expr1 says expr is Unit (it has already been coerced if
@@ -4109,10 +4147,10 @@ trait Typers extends Modes with Adaptations with Tags {
* insert an implicit conversion.
*/
def tryTypedApply(fun: Tree, args: List[Tree]): Tree = {
- val start = startTimer(failedApplyNanos)
+ val start = Statistics.startTimer(failedApplyNanos)
def onError(typeError: AbsTypeError): Tree = {
- stopTimer(failedApplyNanos, start)
+ Statistics.stopTimer(failedApplyNanos, start)
// If the problem is with raw types, convert to existentials and try again.
// See #4712 for a case where this situation arises,
@@ -4175,8 +4213,8 @@ trait Typers extends Modes with Adaptations with Tags {
typed1(tree, mode & ~PATTERNmode | EXPRmode, pt)
} else {
val funpt = if (isPatternMode) pt else WildcardType
- val appStart = startTimer(failedApplyNanos)
- val opeqStart = startTimer(failedOpEqNanos)
+ val appStart = Statistics.startTimer(failedApplyNanos)
+ val opeqStart = Statistics.startTimer(failedOpEqNanos)
def onError(reportError: => Tree): Tree = {
fun match {
@@ -4184,14 +4222,14 @@ trait Typers extends Modes with Adaptations with Tags {
if !isPatternMode && nme.isOpAssignmentName(newTermName(name.decode)) =>
val qual1 = typedQualifier(qual)
if (treeInfo.isVariableOrGetter(qual1)) {
- stopTimer(failedOpEqNanos, opeqStart)
+ Statistics.stopTimer(failedOpEqNanos, opeqStart)
convertToAssignment(fun, qual1, name, args)
} else {
- stopTimer(failedApplyNanos, appStart)
+ Statistics.stopTimer(failedApplyNanos, appStart)
reportError
}
case _ =>
- stopTimer(failedApplyNanos, appStart)
+ Statistics.stopTimer(failedApplyNanos, appStart)
reportError
}
}
@@ -4200,7 +4238,7 @@ trait Typers extends Modes with Adaptations with Tags {
if ((mode & EXPRmode) != 0) tree else context.tree) match {
case SilentResultValue(fun1) =>
val fun2 = if (stableApplication) stabilizeFun(fun1, mode, pt) else fun1
- incCounter(typedApplyCount)
+ Statistics.incCounter(typedApplyCount)
def isImplicitMethod(tpe: Type) = tpe match {
case mt: MethodType => mt.isImplicit
case _ => false
@@ -4976,7 +5014,7 @@ trait Typers extends Modes with Adaptations with Tags {
typedSelect(qual1, nme.CONSTRUCTOR)
case Select(qual, name) =>
- incCounter(typedSelectCount)
+ Statistics.incCounter(typedSelectCount)
var qual1 = checkDead(typedQualifier(qual, mode))
if (name.isTypeName) qual1 = checkStable(qual1)
@@ -5008,7 +5046,7 @@ trait Typers extends Modes with Adaptations with Tags {
else tree1
case Ident(name) =>
- incCounter(typedIdentCount)
+ Statistics.incCounter(typedIdentCount)
if ((name == nme.WILDCARD && (mode & (PATTERNmode | FUNmode)) == PATTERNmode) ||
(name == tpnme.WILDCARD && (mode & TYPEmode) != 0))
tree setType makeFullyDefined(pt)
@@ -5083,15 +5121,9 @@ trait Typers extends Modes with Adaptations with Tags {
indentTyping()
var alreadyTyped = false
+ val startByType = Statistics.pushTimerClass(byTypeNanos, tree.getClass)
+ Statistics.incCounter(visitsByType, tree.getClass)
try {
- if (Statistics.enabled) {
- val t = currentTime()
- if (pendingTreeTypes.nonEmpty) {
- microsByType(pendingTreeTypes.head) += ((t - typerTime) / 1000).toInt
- }
- typerTime = t
- pendingTreeTypes = tree.getClass :: pendingTreeTypes
- }
if (context.retyping &&
(tree.tpe ne null) && (tree.tpe.isErroneous || !(tree.tpe <:< pt))) {
tree.tpe = null
@@ -5145,14 +5177,7 @@ trait Typers extends Modes with Adaptations with Tags {
}
finally {
deindentTyping()
-
- if (Statistics.enabled) {
- val t = currentTime()
- microsByType(pendingTreeTypes.head) += ((t - typerTime) / 1000).toInt
- visitsByType(pendingTreeTypes.head) += 1
- typerTime = t
- pendingTreeTypes = pendingTreeTypes.tail
- }
+ Statistics.popTimerClass(byTypeNanos, startByType)
}
}
@@ -5328,3 +5353,22 @@ trait Typers extends Modes with Adaptations with Tags {
}
}
+object TypersStats {
+ import reflect.internal.TypesStats._
+ import reflect.internal.BaseTypeSeqsStats._
+ val typedIdentCount = Statistics.newCounter("#typechecked identifiers")
+ val typedSelectCount = Statistics.newCounter("#typechecked selections")
+ val typedApplyCount = Statistics.newCounter("#typechecked applications")
+ val rawTypeFailed = Statistics.newSubCounter (" of which in failed", rawTypeCount)
+ val subtypeFailed = Statistics.newSubCounter(" of which in failed", subtypeCount)
+ val findMemberFailed = Statistics.newSubCounter(" of which in failed", findMemberCount)
+ val compoundBaseTypeSeqCount = Statistics.newSubCounter(" of which for compound types", baseTypeSeqCount)
+ val typerefBaseTypeSeqCount = Statistics.newSubCounter(" of which for typerefs", baseTypeSeqCount)
+ val singletonBaseTypeSeqCount = Statistics.newSubCounter(" of which for singletons", baseTypeSeqCount)
+ val failedSilentNanos = Statistics.newSubTimer ("time spent in failed", typerNanos)
+ val failedApplyNanos = Statistics.newSubTimer (" failed apply", typerNanos)
+ val failedOpEqNanos = Statistics.newSubTimer (" failed op=", typerNanos)
+ val isReferencedNanos = Statistics.newSubTimer ("time spent ref scanning", typerNanos)
+ val visitsByType = Statistics.newByClass ("#visits by tree node", "typer")(Statistics.newCounter(""))
+ val byTypeNanos = Statistics.newByClassTimerStack("time spent by tree node", typerNanos)
+}
diff --git a/src/compiler/scala/tools/nsc/util/Statistics.scala b/src/compiler/scala/tools/nsc/util/Statistics.scala
deleted file mode 100644
index 087111a7ba..0000000000
--- a/src/compiler/scala/tools/nsc/util/Statistics.scala
+++ /dev/null
@@ -1,200 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
- * @author Martin Odersky
- */
-
-package scala.tools.nsc
-package util
-
-class Statistics extends scala.reflect.internal.util.Statistics {
-
- var nodeByType = new ClassCounts
-
- var microsByType = new ClassCounts
- var visitsByType = new ClassCounts
- var pendingTreeTypes: List[Class[_]] = List()
- var typerTime: Long = 0L
-
- val typedApplyCount = new Counter
- val typedIdentCount = new Counter
- val typedSelectCount = new Counter
- val typerNanos = new Timer
- val classReadNanos = new Timer
-
- val failedApplyNanos = new Timer
- val failedOpEqNanos = new Timer
- val failedSilentNanos = new Timer
-
- val implicitSearchCount = new Counter
- val implicitNanos = new Timer
- val oftypeImplicitHits = new Counter
- val inscopeImplicitHits = new Counter
-
- val triedImplicits = new Counter
- val plausiblyCompatibleImplicits = new Counter
- val matchingImplicits = new Counter
- val typedImplicits = new Counter
- val foundImplicits = new Counter
-
- val inscopeSucceedNanos = new Timer
- val inscopeFailNanos = new Timer
- val oftypeSucceedNanos = new Timer
- val oftypeFailNanos = new Timer
- val implicitCacheHits = new Counter
- val implicitCacheMisses = new Counter
- val improvesCount = new Counter
- val improvesCachedCount = new Counter
- val subtypeAppInfos = new SubCounter(subtypeCount)
- val subtypeImprovCount = new SubCounter(subtypeCount)
- val subtypeETNanos = new Timer
- val matchesPtNanos = new Timer
- val isReferencedNanos = new Timer
- val ctr1 = new Counter
- val ctr2 = new Counter
- val ctr3 = new Counter
- val ctr4 = new Counter
- val counter1: SubCounter = new SubCounter(subtypeCount)
- val counter2: SubCounter = new SubCounter(subtypeCount)
- val timer1: Timer = new Timer
- val timer2: Timer = new Timer
-
- val macroExpandCount = new Counter
- val macroExpandNanos = new Timer
-
- val patmatNanos = new Timer
- val patmatAnaDPLL = new Timer
- val patmatAnaVarEq = new Timer
- val patmatCNF = new Timer
- val patmatAnaExhaust = new Timer
- val patmatAnaReach = new Timer
- val patmatCNFSizes = new collection.mutable.HashMap[Int, Int] withDefaultValue 0
-}
-
-object Statistics extends Statistics
-
-abstract class StatisticsInfo {
-
- import Statistics._
-
- val global: Global
- import global._
-
- var phasesShown = List("parser", "typer", "patmat", "erasure", "cleanup")
-
- def countNodes(tree: Tree, counts: ClassCounts) {
- for (t <- tree) counts(t.getClass) += 1
- }
-
- def showRelative(base: Long)(value: Long) =
- value+showPercent(value, base)
-
- def showRelTyper(timer: Timer) =
- timer+showPercent(timer.nanos, typerNanos.nanos)
-
- def showRelPatmat(timer: Timer) =
- timer+showPercent(timer.nanos, patmatNanos.nanos)
-
- def showCounts[T](counts: scala.collection.mutable.Map[T, Int]) =
- counts.toSeq.sortWith(_._2 > _._2).map {
- case (cls: Class[_], cnt) =>
- cls.toString.substring(cls.toString.lastIndexOf("$") + 1)+": "+cnt
- case (o, cnt) =>
- o.toString +": "+cnt
- }
-
- def print(phase: Phase) = if (phasesShown contains phase.name) {
- inform("*** Cumulative statistics at phase " + phase)
- inform("#created tree nodes : " + nodeCount)
- inform("#created tree nodes by type: "+showCounts(nodeByType))
- if (phase.name != "parser") {
- val counts = new ClassCounts
- for (u <- currentRun.units; t <- u.body) counts(t.getClass) += 1
- inform("#retained nodes : " + counts.values.sum)
- inform("#retained nodes by type : " + showCounts(counts))
- inform("#typechecked identifiers : " + typedIdentCount)
- inform("#typechecked selections : " + typedSelectCount)
- inform("#typechecked applications: " + typedApplyCount)
- inform("#raw type creations : " + rawTypeCount)
- inform(" of which in failed : " + rawTypeFailed)
- inform(" of which in implicits : " + rawTypeImpl)
- inform("#unique types : " + uniqueTypeCount)
- inform("#symbols : " + symbolCount)
- inform(" of which type symbols : " + typeSymbolCount)
- inform(" of which class symbols : " + classSymbolCount)
- inform("#base type seqs : " + baseTypeSeqCount)
- inform("avg base type seq length : " + baseTypeSeqLenTotal.value.toFloat / baseTypeSeqCount.value)
- inform("#singleton base type seqs: " + singletonBaseTypeSeqCount)
- inform("#compound base type seqs : " + compoundBaseTypeSeqCount)
- inform("#typeref base type seqs : " + typerefBaseTypeSeqCount)
- inform("#findMember ops : " + findMemberCount)
- inform(" of which in failed : " + findMemberFailed)
- inform(" of which in implicits : " + findMemberImpl)
- inform("#notfound member : " + noMemberCount)
- inform("#multiple member : " + multMemberCount)
- inform("#asSeenFrom ops : " + asSeenFromCount)
- inform("#subtype : " + subtypeCount)
- inform(" of which in failed : " + subtypeFailed)
- inform(" of which in implicits : " + subtypeImpl)
- inform(" of which in app impl : " + subtypeAppInfos)
- inform(" of which in improv : " + subtypeImprovCount)
- inform("#sametype : " + sametypeCount)
- inform("#toplevel lub : " + lubCount)
- inform("#all lub : " + nestedLubCount)
- inform("ms type-flow-analysis: " + analysis.timer.millis)
-
- if (phase.name == "typer") {
- inform("time spent typechecking : " + showRelTyper(typerNanos))
- inform("time classfilereading : " + showRelTyper(classReadNanos))
- inform("time spent in implicits : " + showRelTyper(implicitNanos))
- inform(" successful in scope : " + showRelTyper(inscopeSucceedNanos))
- inform(" failed in scope : " + showRelTyper(inscopeFailNanos))
- inform(" successful of type : " + showRelTyper(oftypeSucceedNanos))
- inform(" failed of type : " + showRelTyper(oftypeFailNanos))
- inform(" assembling parts : " + showRelTyper(subtypeETNanos))
- inform(" matchesPT : " + showRelTyper(matchesPtNanos))
- inform("implicit cache hits : " + showRelative(implicitCacheHits.value + implicitCacheMisses.value)(implicitCacheHits.value))
- inform("time spent in failed : " + showRelTyper(failedSilentNanos))
- inform(" failed apply : " + showRelTyper(failedApplyNanos))
- inform(" failed op= : " + showRelTyper(failedOpEqNanos))
- inform("time spent ref scanning : " + showRelTyper(isReferencedNanos))
- inform("time spent in lubs : " + showRelTyper(lubNanos))
- inform("micros by tree node : " + showCounts(microsByType))
- inform("#visits by tree node : " + showCounts(visitsByType))
- val average = new ClassCounts
- for (c <- microsByType.keysIterator) average(c) = microsByType(c)/visitsByType(c)
- inform("avg micros by tree node : " + showCounts(average))
- inform("time spent in <:< : " + showRelTyper(subtypeNanos))
- inform("time spent in findmember : " + showRelTyper(findMemberNanos))
- inform("time spent in asSeenFrom : " + showRelTyper(asSeenFromNanos))
- inform("#implicit searches : " + implicitSearchCount)
- inform("#tried, plausible, matching, typed, found implicits: "+triedImplicits+", "+plausiblyCompatibleImplicits+", "+matchingImplicits+", "+typedImplicits+", "+foundImplicits)
- inform("#implicit improves tests : " + improvesCount)
- inform("#implicit improves cached : " + improvesCachedCount)
- inform("#implicit inscope hits : " + inscopeImplicitHits)
- inform("#implicit oftype hits : " + oftypeImplicitHits)
- inform("#macro expansions : " + macroExpandCount)
- inform("#time spent in macroExpand : " + showRelTyper(macroExpandNanos))
- }
-
- if (ctr1 != null) inform("#ctr1 : " + ctr1)
- if (ctr2 != null) inform("#ctr2 : " + ctr2)
- if (ctr3 != null) inform("#ctr3 : " + ctr3)
- if (ctr4 != null) inform("#ctr4 : " + ctr4)
- if (counter1 != null) inform("#counter1 : " + counter1)
- if (counter2 != null) inform("#counter2 : " + counter2)
- if (timer1 != null) inform("#timer1 : " + timer1)
- if (timer2 != null) inform("#timer2 : " + timer2)
- //for (t <- uniques.iterator) println("unique: "+t)
-
- if (phase.name == "patmat") {
- inform("time spent in patmat : " + patmatNanos )
- inform(" of which DPLL : " + showRelPatmat(patmatAnaDPLL ))
- inform("of which in CNF conversion : " + showRelPatmat(patmatCNF ))
- inform(" CNF size counts : " + showCounts(patmatCNFSizes ))
- inform("of which variable equality : " + showRelPatmat(patmatAnaVarEq ))
- inform(" of which in exhaustivity : " + showRelPatmat(patmatAnaExhaust))
- inform("of which in unreachability : " + showRelPatmat(patmatAnaReach ))
- }
- }
- }
-}
diff --git a/src/compiler/scala/tools/nsc/util/StatisticsInfo.scala b/src/compiler/scala/tools/nsc/util/StatisticsInfo.scala
new file mode 100644
index 0000000000..f6a1ae1414
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/util/StatisticsInfo.scala
@@ -0,0 +1,38 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2011 LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+package scala.tools.nsc
+package util
+
+import reflect.internal.util.Statistics
+
+abstract class StatisticsInfo {
+
+ val global: Global
+ import global._
+ import reflect.internal.TreesStats.nodeByType
+
+ val phasesShown = List("parser", "typer", "patmat", "erasure", "cleanup")
+
+ val retainedCount = Statistics.newCounter("#retained tree nodes")
+ val retainedByType = Statistics.newByClass("#retained tree nodes by type")(Statistics.newCounter(""))
+
+ def print(phase: Phase) = if (phasesShown contains phase.name) {
+ inform("*** Cumulative statistics at phase " + phase)
+ retainedCount.value = 0
+ for (c <- retainedByType.keys)
+ retainedByType(c).value = 0
+ for (u <- currentRun.units; t <- u.body) {
+ retainedCount.value += 1
+ retainedByType(t.getClass).value += 1
+ }
+
+ val quants =
+ if (phase.name == "parser") Seq(treeNodeCount, nodeByType, retainedCount, retainedByType)
+ else Statistics.allQuantities
+
+ for (q <- quants if q.showAt(phase.name)) inform(q.line)
+ }
+} \ No newline at end of file
diff --git a/src/compiler/scala/tools/reflect/ToolBoxFactory.scala b/src/compiler/scala/tools/reflect/ToolBoxFactory.scala
index 7cf515425d..278f4e3ff7 100644
--- a/src/compiler/scala/tools/reflect/ToolBoxFactory.scala
+++ b/src/compiler/scala/tools/reflect/ToolBoxFactory.scala
@@ -126,7 +126,10 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
wrapper(currentTyper.silent(_.typed(expr, analyzer.EXPRmode, pt)) match {
case analyzer.SilentResultValue(result) =>
trace("success: ")(showAttributed(result, true, true, settings.Yshowsymkinds.value))
- var Block(dummies, unwrapped) = result
+ var (dummies, unwrapped) = result match {
+ case Block(dummies, unwrapped) => (dummies, unwrapped)
+ case unwrapped => (Nil, unwrapped)
+ }
var invertedIndex = freeTerms map (_.swap)
// todo. also fixup singleton types
unwrapped = new Transformer {
diff --git a/src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala b/src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala
index 862b19d0a4..464ffc6fab 100644
--- a/src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala
+++ b/src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala
@@ -4,6 +4,7 @@ package scala.tools.selectivecps
import scala.tools.nsc.Global
import scala.tools.nsc.typechecker.Modes
+import scala.tools.nsc.MissingRequirementError
abstract class CPSAnnotationChecker extends CPSUtils with Modes {
val global: Global
@@ -170,6 +171,9 @@ abstract class CPSAnnotationChecker extends CPSUtils with Modes {
vprintln("yes we can!! (byval)")
return true
}
+ } else if ((mode & global.analyzer.RETmode) != 0) {
+ vprintln("yes we can!! (return)")
+ return true
}
}
false
@@ -183,6 +187,7 @@ abstract class CPSAnnotationChecker extends CPSUtils with Modes {
val patMode = (mode & global.analyzer.PATTERNmode) != 0
val exprMode = (mode & global.analyzer.EXPRmode) != 0
val byValMode = (mode & global.analyzer.BYVALmode) != 0
+ val retMode = (mode & global.analyzer.RETmode) != 0
val annotsTree = cpsParamAnnotation(tree.tpe)
val annotsExpected = cpsParamAnnotation(pt)
@@ -209,6 +214,12 @@ abstract class CPSAnnotationChecker extends CPSUtils with Modes {
val res = tree modifyType addMinusMarker
vprintln("adapted annotations (by val) of " + tree + " to " + res.tpe)
res
+ } else if (retMode && !hasPlusMarker(tree.tpe) && annotsTree.isEmpty && annotsExpected.nonEmpty) {
+ // add a marker annotation that will make tree.tpe behave as pt, subtyping wise
+ // tree will look like having no annotation
+ val res = tree modifyType (_ withAnnotations List(newPlusMarker()))
+ vprintln("adapted annotations (return) of " + tree + " to " + res.tpe)
+ res
} else tree
}
@@ -356,8 +367,9 @@ abstract class CPSAnnotationChecker extends CPSUtils with Modes {
* for a tree. All this should do is add annotations. */
override def addAnnotations(tree: Tree, tpe: Type): Type = {
+ import scala.util.control._
if (!cpsEnabled) {
- if (hasCpsParamTypes(tpe))
+ if (Exception.failAsValue(classOf[MissingRequirementError])(false)(hasCpsParamTypes(tpe)))
global.reporter.error(tree.pos, "this code must be compiled with the Scala continuations plugin enabled")
return tpe
}
@@ -464,6 +476,11 @@ abstract class CPSAnnotationChecker extends CPSUtils with Modes {
}
tpe
+ case ret @ Return(expr) =>
+ if (hasPlusMarker(expr.tpe))
+ ret setType expr.tpe
+ ret.tpe
+
case _ =>
tpe
}
diff --git a/src/continuations/plugin/scala/tools/selectivecps/CPSUtils.scala b/src/continuations/plugin/scala/tools/selectivecps/CPSUtils.scala
index 3a1dc87a6a..765cde5a81 100644
--- a/src/continuations/plugin/scala/tools/selectivecps/CPSUtils.scala
+++ b/src/continuations/plugin/scala/tools/selectivecps/CPSUtils.scala
@@ -3,6 +3,7 @@
package scala.tools.selectivecps
import scala.tools.nsc.Global
+import scala.collection.mutable.ListBuffer
trait CPSUtils {
val global: Global
@@ -135,4 +136,43 @@ trait CPSUtils {
case _ => None
}
}
+
+ def isTailReturn(retExpr: Tree, body: Tree): Boolean = {
+ val removed = ListBuffer[Tree]()
+ removeTailReturn(body, removed)
+ removed contains retExpr
+ }
+
+ def removeTailReturn(tree: Tree, removed: ListBuffer[Tree]): Tree = tree match {
+ case Block(stms, r @ Return(expr)) =>
+ removed += r
+ treeCopy.Block(tree, stms, expr)
+
+ case Block(stms, expr) =>
+ treeCopy.Block(tree, stms, removeTailReturn(expr, removed))
+
+ case If(cond, r1 @ Return(thenExpr), r2 @ Return(elseExpr)) =>
+ removed ++= Seq(r1, r2)
+ treeCopy.If(tree, cond, removeTailReturn(thenExpr, removed), removeTailReturn(elseExpr, removed))
+
+ case If(cond, thenExpr, elseExpr) =>
+ treeCopy.If(tree, cond, removeTailReturn(thenExpr, removed), removeTailReturn(elseExpr, removed))
+
+ case Try(block, catches, finalizer) =>
+ treeCopy.Try(tree,
+ removeTailReturn(block, removed),
+ (catches map (t => removeTailReturn(t, removed))).asInstanceOf[List[CaseDef]],
+ removeTailReturn(finalizer, removed))
+
+ case CaseDef(pat, guard, r @ Return(expr)) =>
+ removed += r
+ treeCopy.CaseDef(tree, pat, guard, expr)
+
+ case CaseDef(pat, guard, body) =>
+ treeCopy.CaseDef(tree, pat, guard, removeTailReturn(body, removed))
+
+ case _ =>
+ tree
+ }
+
}
diff --git a/src/continuations/plugin/scala/tools/selectivecps/SelectiveANFTransform.scala b/src/continuations/plugin/scala/tools/selectivecps/SelectiveANFTransform.scala
index 017c8d24fd..fe465aad0d 100644
--- a/src/continuations/plugin/scala/tools/selectivecps/SelectiveANFTransform.scala
+++ b/src/continuations/plugin/scala/tools/selectivecps/SelectiveANFTransform.scala
@@ -9,6 +9,8 @@ import scala.tools.nsc.plugins._
import scala.tools.nsc.ast._
+import scala.collection.mutable.ListBuffer
+
/**
* In methods marked @cps, explicitly name results of calls to other @cps methods
*/
@@ -46,10 +48,20 @@ abstract class SelectiveANFTransform extends PluginComponent with Transform with
// this would cause infinite recursion. But we could remove the
// ValDef case here.
- case dd @ DefDef(mods, name, tparams, vparamss, tpt, rhs) =>
+ case dd @ DefDef(mods, name, tparams, vparamss, tpt, rhs0) =>
debuglog("transforming " + dd.symbol)
atOwner(dd.symbol) {
+ val tailReturns = ListBuffer[Tree]()
+ val rhs = removeTailReturn(rhs0, tailReturns)
+ // throw an error if there is a Return tree which is not in tail position
+ rhs0 foreach {
+ case r @ Return(_) =>
+ if (!tailReturns.contains(r))
+ unit.error(r.pos, "return expressions in CPS code must be in tail position")
+ case _ => /* do nothing */
+ }
+
val rhs1 = transExpr(rhs, None, getExternalAnswerTypeAnn(tpt.tpe))
debuglog("result "+rhs1)
@@ -153,7 +165,6 @@ abstract class SelectiveANFTransform extends PluginComponent with Transform with
}
}
-
def transExpr(tree: Tree, cpsA: CPSInfo, cpsR: CPSInfo): Tree = {
transTailValue(tree, cpsA, cpsR) match {
case (Nil, b) => b
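
With RETmode threaded through the typer and removeTailReturn applied in the ANF transform, `return` in CPS code is accepted only in tail position. A sketch, assuming the standard scala.util.continuations API and the continuations plugin enabled:

    import scala.util.continuations._

    object ReturnInCps {
      // accepted: the return is the final expression of the method body
      def tailReturn(x: Int): Int @cps[Unit] = {
        val y = shift { (k: Int => Unit) => k(x) }
        return y + 1
      }

      // rejected with "return expressions in CPS code must be in tail position":
      // def earlyReturn(x: Int): Int @cps[Unit] = {
      //   if (x < 0) return 0
      //   shift { (k: Int => Unit) => k(x) } + 1
      // }
    }
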
diff --git a/src/library/scala/Predef.scala b/src/library/scala/Predef.scala
index 65e8549ae1..99bd7f0736 100644
--- a/src/library/scala/Predef.scala
+++ b/src/library/scala/Predef.scala
@@ -121,16 +121,16 @@ object Predef extends LowPriorityImplicits {
// Deprecated
- @deprecated("Use sys.error(message) instead", "2.9.0")
+ @deprecated("Use `sys.error(message)` instead", "2.9.0")
def error(message: String): Nothing = sys.error(message)
- @deprecated("Use sys.exit() instead", "2.9.0")
+ @deprecated("Use `sys.exit()` instead", "2.9.0")
def exit(): Nothing = sys.exit()
- @deprecated("Use sys.exit(status) instead", "2.9.0")
+ @deprecated("Use `sys.exit(status)` instead", "2.9.0")
def exit(status: Int): Nothing = sys.exit(status)
- @deprecated("Use formatString.format(args: _*) or arg.formatted(formatString) instead", "2.9.0")
+ @deprecated("Use `formatString.format(args: _*)` or `arg.formatted(formatString)` instead", "2.9.0")
def format(text: String, xs: Any*) = augmentString(text).format(xs: _*)
// errors and asserts -------------------------------------------------
@@ -219,7 +219,7 @@ object Predef extends LowPriorityImplicits {
final class Ensuring[A](val __resultOfEnsuring: A) extends AnyVal {
// `__resultOfEnsuring` must be a public val to allow inlining.
// See comments in ArrowAssoc for more.
- @deprecated("Use __resultOfEnsuring instead", "2.10.0")
+ @deprecated("Use `__resultOfEnsuring` instead", "2.10.0")
def x = __resultOfEnsuring
def ensuring(cond: Boolean): A = { assert(cond); __resultOfEnsuring }
@@ -255,7 +255,7 @@ object Predef extends LowPriorityImplicits {
// being confused why they get an ambiguous implicit conversion
// error. (`foo.x` used to produce this error since both
// any2Ensuring and any2ArrowAssoc pimped an `x` onto everything)
- @deprecated("Use __leftOfArrow instead", "2.10.0")
+ @deprecated("Use `__leftOfArrow` instead", "2.10.0")
def x = __leftOfArrow
@inline def -> [B](y: B): Tuple2[A, B] = Tuple2(__leftOfArrow, y)
@@ -320,30 +320,30 @@ object Predef extends LowPriorityImplicits {
// Primitive Widenings --------------------------------------------------------------
- @deprecated("Use a method in an AnyVal's companion object", "2.10.0") def byte2short(x: Byte): Short = x.toShort
- @deprecated("Use a method in an AnyVal's companion object", "2.10.0") def byte2int(x: Byte): Int = x.toInt
- @deprecated("Use a method in an AnyVal's companion object", "2.10.0") def byte2long(x: Byte): Long = x.toLong
- @deprecated("Use a method in an AnyVal's companion object", "2.10.0") def byte2float(x: Byte): Float = x.toFloat
- @deprecated("Use a method in an AnyVal's companion object", "2.10.0") def byte2double(x: Byte): Double = x.toDouble
+ @deprecated("Use `.toShort` for explicit conversion and `Byte.byte2short` for implicit conversion", "2.10.0") def byte2short(x: Byte): Short = x.toShort
+ @deprecated("Use `.toInt` for explicit conversion and `Byte.byte2int` for implicit conversion", "2.10.0") def byte2int(x: Byte): Int = x.toInt
+ @deprecated("Use `.toLong` for explicit conversion and `Byte.byte2long for implicit conversion", "2.10.0") def byte2long(x: Byte): Long = x.toLong
+ @deprecated("Use `.toFloat` for explicit conversion and `Byte.byte2float` for implicit conversion", "2.10.0") def byte2float(x: Byte): Float = x.toFloat
+ @deprecated("Use `.toDouble` for explicit conversion and `Byte.byte2double` for implicit conversion", "2.10.0") def byte2double(x: Byte): Double = x.toDouble
- @deprecated("Use a method in an AnyVal's companion object", "2.10.0") def short2int(x: Short): Int = x.toInt
- @deprecated("Use a method in an AnyVal's companion object", "2.10.0") def short2long(x: Short): Long = x.toLong
- @deprecated("Use a method in an AnyVal's companion object", "2.10.0") def short2float(x: Short): Float = x.toFloat
- @deprecated("Use a method in an AnyVal's companion object", "2.10.0") def short2double(x: Short): Double = x.toDouble
+ @deprecated("Use `.toInt` for explicit conversion and `Short.short2int` for implicit conversion", "2.10.0") def short2int(x: Short): Int = x.toInt
+ @deprecated("Use `.toLong` for explicit conversion and `Short.short2long` for implicit conversion", "2.10.0") def short2long(x: Short): Long = x.toLong
+ @deprecated("Use `.toFloat` for explicit conversion and `Short.short2float` for implicit conversion", "2.10.0") def short2float(x: Short): Float = x.toFloat
+ @deprecated("Use `.toDouble` for explicit conversion and `Short.short2double` for implicit conversion", "2.10.0") def short2double(x: Short): Double = x.toDouble
- @deprecated("Use a method in an AnyVal's companion object", "2.10.0") def char2int(x: Char): Int = x.toInt
- @deprecated("Use a method in an AnyVal's companion object", "2.10.0") def char2long(x: Char): Long = x.toLong
- @deprecated("Use a method in an AnyVal's companion object", "2.10.0") def char2float(x: Char): Float = x.toFloat
- @deprecated("Use a method in an AnyVal's companion object", "2.10.0") def char2double(x: Char): Double = x.toDouble
+ @deprecated("Use `.toInt` for explicit conversion and `Char.char2int` for implicit conversion", "2.10.0") def char2int(x: Char): Int = x.toInt
+ @deprecated("Use `.toLong` for explicit conversion and `Char.char2long` for implicit conversion", "2.10.0") def char2long(x: Char): Long = x.toLong
+ @deprecated("Use `.toFloat` for explicit conversion and `Char.char2float` for implicit conversion", "2.10.0") def char2float(x: Char): Float = x.toFloat
+ @deprecated("Use `.toDouble` for explicit conversion and `Char.char2double` for implicit conversion", "2.10.0") def char2double(x: Char): Double = x.toDouble
- @deprecated("Use a method in an AnyVal's companion object", "2.10.0") def int2long(x: Int): Long = x.toLong
- @deprecated("Use a method in an AnyVal's companion object", "2.10.0") def int2float(x: Int): Float = x.toFloat
- @deprecated("Use a method in an AnyVal's companion object", "2.10.0") def int2double(x: Int): Double = x.toDouble
+ @deprecated("Use `.toLong` for explicit conversion and `Int.int2long` for implicit conversion", "2.10.0") def int2long(x: Int): Long = x.toLong
+ @deprecated("Use `.toFloat` for explicit conversion and `Int.int2float` for implicit conversion", "2.10.0") def int2float(x: Int): Float = x.toFloat
+ @deprecated("Use `.toDouble` for explicit conversion and `Int.int2double` for implicit conversion", "2.10.0") def int2double(x: Int): Double = x.toDouble
- @deprecated("Use a method in an AnyVal's companion object", "2.10.0") def long2float(x: Long): Float = x.toFloat
- @deprecated("Use a method in an AnyVal's companion object", "2.10.0") def long2double(x: Long): Double = x.toDouble
+ @deprecated("Use `.toFloat` for explicit conversion and `Long.long2float` for implicit conversion", "2.10.0") def long2float(x: Long): Float = x.toFloat
+ @deprecated("Use `.toDouble` for explicit conversion and `Long.long2double` for implicit conversion", "2.10.0") def long2double(x: Long): Double = x.toDouble
- @deprecated("Use a method in an AnyVal's companion object", "2.10.0") def float2double(x: Float): Double = x.toDouble
+ @deprecated("Use `.toDouble` for explicit conversion and `Float.float2double` for implicit conversion", "2.10.0") def float2double(x: Float): Double = x.toDouble
// "Autoboxing" and "Autounboxing" ---------------------------------------------------
@@ -385,7 +385,7 @@ object Predef extends LowPriorityImplicits {
implicit def any2stringadd(x: Any) = new runtime.StringAdd(x)
implicit def unaugmentString(x: StringOps): String = x.repr
- @deprecated("Use StringCanBuildFrom", "2.10.0")
+ @deprecated("Use `StringCanBuildFrom`", "2.10.0")
def stringCanBuildFrom: CanBuildFrom[String, Char, String] = StringCanBuildFrom
implicit val StringCanBuildFrom: CanBuildFrom[String, Char, String] = new CanBuildFrom[String, Char, String] {
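
The rewritten deprecation messages point at the conversion method on the value and the implicit view on the numeric companion. A small sketch of the suggested migration (illustrative values only):

    object WideningMigration {
      val b: Byte = 1
      val s1: Short = b.toShort           // explicit conversion
      val s2: Short = Byte.byte2short(b)  // the companion's implicit view, referenced explicitly
    }
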
diff --git a/src/library/scala/collection/GenTraversableOnce.scala b/src/library/scala/collection/GenTraversableOnce.scala
index eadacd9209..e475865391 100644
--- a/src/library/scala/collection/GenTraversableOnce.scala
+++ b/src/library/scala/collection/GenTraversableOnce.scala
@@ -9,6 +9,8 @@
package scala.collection
import scala.reflect.ClassTag
+import scala.collection.generic.CanBuildFrom
+import scala.annotation.unchecked.{ uncheckedVariance => uV }
/** A template trait for all traversable-once objects which may be
* traversed in parallel.
@@ -552,4 +554,21 @@ trait GenTraversableOnce[+A] extends Any {
* containing all key/value pairs of type `(T, U)` of this $coll.
*/
def toMap[K, V](implicit ev: A <:< (K, V)): GenMap[K, V]
+
+ /** Converts this $coll to a Vector.
+ * $willNotTerminateInf
+ * @return a vector containing all elements of this $coll.
+ */
+ def toVector: Vector[A]
+
+ /** Converts this $coll into another by copying all elements.
+ * @tparam Col The collection type to build.
+ * @return a new collection containing all elements of this $coll.
+ *
+ * @usecase def convertTo[Col[_]]: Col[A]
+ * @inheritdoc
+ * $willNotTerminateInf
+ * @return a new collection containing all elements of this $coll.
+ */
+ def convertTo[Col[_]](implicit cbf: CanBuildFrom[Nothing, A, Col[A @uV]]): Col[A @uV]
}
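
A brief usage sketch of the conversions introduced here; `toVector` builds a Vector directly, while `convertTo` builds any target collection through its CanBuildFrom:

    import scala.collection.immutable

    object ConvertToExample {
      val xs = List(1, 2, 3)
      val v: Vector[Int] = xs.toVector
      val s: Set[Int] = xs.convertTo[Set]
      val is: immutable.IndexedSeq[Int] = xs.convertTo[immutable.IndexedSeq]
    }
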
diff --git a/src/library/scala/collection/Iterator.scala b/src/library/scala/collection/Iterator.scala
index b2bbc8d888..5f369de3b7 100644
--- a/src/library/scala/collection/Iterator.scala
+++ b/src/library/scala/collection/Iterator.scala
@@ -11,6 +11,8 @@ package scala.collection
import mutable.ArrayBuffer
import annotation.migration
import immutable.Stream
+import scala.collection.generic.CanBuildFrom
+import scala.annotation.unchecked.{ uncheckedVariance => uV }
/** The `Iterator` object provides various functions for creating specialized iterators.
*
@@ -1138,6 +1140,7 @@ trait Iterator[+A] extends TraversableOnce[A] {
def toStream: Stream[A] =
if (self.hasNext) Stream.cons(self.next, self.toStream)
else Stream.empty[A]
+
/** Converts this iterator to a string.
*
diff --git a/src/library/scala/collection/MapLike.scala b/src/library/scala/collection/MapLike.scala
index b9b8f62574..75f9ff93db 100644
--- a/src/library/scala/collection/MapLike.scala
+++ b/src/library/scala/collection/MapLike.scala
@@ -227,30 +227,34 @@ self =>
def default(key: A): B =
throw new NoSuchElementException("key not found: " + key)
- /** Filters this map by retaining only keys satisfying a predicate.
- * @param p the predicate used to test keys
- * @return an immutable map consisting only of those key value pairs of this map where the key satisfies
- * the predicate `p`. The resulting map wraps the original map without copying any elements.
- */
- def filterKeys(p: A => Boolean): Map[A, B] = new AbstractMap[A, B] with DefaultMap[A, B] {
+ protected class FilteredKeys(p: A => Boolean) extends AbstractMap[A, B] with DefaultMap[A, B] {
override def foreach[C](f: ((A, B)) => C): Unit = for (kv <- self) if (p(kv._1)) f(kv)
def iterator = self.iterator.filter(kv => p(kv._1))
override def contains(key: A) = self.contains(key) && p(key)
def get(key: A) = if (!p(key)) None else self.get(key)
}
-
- /** Transforms this map by applying a function to every retrieved value.
- * @param f the function used to transform values of this map.
- * @return a map view which maps every key of this map
- * to `f(this(key))`. The resulting map wraps the original map without copying any elements.
+
+ /** Filters this map by retaining only keys satisfying a predicate.
+ * @param p the predicate used to test keys
+ * @return an immutable map consisting only of those key value pairs of this map where the key satisfies
+ * the predicate `p`. The resulting map wraps the original map without copying any elements.
*/
- def mapValues[C](f: B => C): Map[A, C] = new AbstractMap[A, C] with DefaultMap[A, C] {
+ def filterKeys(p: A => Boolean): Map[A, B] = new FilteredKeys(p)
+
+ protected class MappedValues[C](f: B => C) extends AbstractMap[A, C] with DefaultMap[A, C] {
override def foreach[D](g: ((A, C)) => D): Unit = for ((k, v) <- self) g((k, f(v)))
def iterator = for ((k, v) <- self.iterator) yield (k, f(v))
override def size = self.size
override def contains(key: A) = self.contains(key)
def get(key: A) = self.get(key).map(f)
}
+
+ /** Transforms this map by applying a function to every retrieved value.
+ * @param f the function used to transform values of this map.
+ * @return a map view which maps every key of this map
+ * to `f(this(key))`. The resulting map wraps the original map without copying any elements.
+ */
+ def mapValues[C](f: B => C): Map[A, C] = new MappedValues(f)
// The following 5 operations (updated, two times +, two times ++) should really be
// generic, returning This[B]. We need better covariance support to express that though.
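
Factoring the two views into protected classes keeps their wrap-without-copying semantics while letting subclasses (see the LinkedHashMap change further below) refine them. A short usage sketch:

    object MapViewExample {
      val m = Map(1 -> "a", 2 -> "bb", 3 -> "ccc")
      // both results wrap `m` without copying; entries are filtered or recomputed on access
      val bigKeys    = m.filterKeys(_ > 1)    // Map(2 -> "bb", 3 -> "ccc")
      val valueSizes = m.mapValues(_.length)  // Map(1 -> 1, 2 -> 2, 3 -> 3)
    }
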
diff --git a/src/library/scala/collection/TraversableLike.scala b/src/library/scala/collection/TraversableLike.scala
index 3716a318d9..e5861f5760 100644
--- a/src/library/scala/collection/TraversableLike.scala
+++ b/src/library/scala/collection/TraversableLike.scala
@@ -616,6 +616,13 @@ trait TraversableLike[+A, +Repr] extends Any
def toTraversable: Traversable[A] = thisCollection
def toIterator: Iterator[A] = toStream.iterator
def toStream: Stream[A] = toBuffer.toStream
+ // Override to provide size hint.
+ override def convertTo[Col[_]](implicit cbf: CanBuildFrom[Nothing, A, Col[A @uV]]): Col[A @uV] = {
+ val b = cbf()
+ b.sizeHint(this)
+ b ++= thisCollection
+ b.result
+ }
/** Converts this $coll to a string.
*
diff --git a/src/library/scala/collection/TraversableOnce.scala b/src/library/scala/collection/TraversableOnce.scala
index 386ce2d95a..8dc6184d88 100644
--- a/src/library/scala/collection/TraversableOnce.scala
+++ b/src/library/scala/collection/TraversableOnce.scala
@@ -9,6 +9,7 @@
package scala.collection
import mutable.{ Buffer, Builder, ListBuffer, ArrayBuffer }
+import generic.CanBuildFrom
import annotation.unchecked.{ uncheckedVariance => uV }
import language.{implicitConversions, higherKinds}
import reflect.ClassTag
@@ -239,17 +240,25 @@ trait TraversableOnce[+A] extends Any with GenTraversableOnce[A] {
def toTraversable: Traversable[A]
- def toList: List[A] = (new ListBuffer[A] ++= seq).toList
+ def toList: List[A] = convertTo[List]
def toIterable: Iterable[A] = toStream
def toSeq: Seq[A] = toStream
- def toIndexedSeq: immutable.IndexedSeq[A] = immutable.IndexedSeq() ++ seq
+ def toIndexedSeq: immutable.IndexedSeq[A] = convertTo[immutable.IndexedSeq]
- def toBuffer[B >: A]: mutable.Buffer[B] = new ArrayBuffer[B] ++= seq
+ def toBuffer[B >: A]: mutable.Buffer[B] = convertTo[ArrayBuffer].asInstanceOf[mutable.Buffer[B]]
- def toSet[B >: A]: immutable.Set[B] = immutable.Set() ++ seq
+ def toSet[B >: A]: immutable.Set[B] = convertTo[immutable.Set].asInstanceOf[immutable.Set[B]]
+
+ def toVector: Vector[A] = convertTo[Vector]
+
+ def convertTo[Col[_]](implicit cbf: CanBuildFrom[Nothing, A, Col[A @uV]]): Col[A @uV] = {
+ val b = cbf()
+ b ++= seq
+ b.result
+ }
def toMap[T, U](implicit ev: A <:< (T, U)): immutable.Map[T, U] = {
val b = immutable.Map.newBuilder[T, U]
diff --git a/src/library/scala/collection/concurrent/TrieMap.scala b/src/library/scala/collection/concurrent/TrieMap.scala
index 08e9125bd8..2d8217551a 100644
--- a/src/library/scala/collection/concurrent/TrieMap.scala
+++ b/src/library/scala/collection/concurrent/TrieMap.scala
@@ -473,7 +473,11 @@ extends CNodeBase[K, V] {
private def computeSize(ct: TrieMap[K, V]): Int = {
var i = 0
var sz = 0
- val offset = math.abs(util.Random.nextInt()) % array.length
+ val offset =
+ if (array.length > 0)
+ //util.Random.nextInt(array.length) /* <-- benchmarks show that this causes observable contention */
+ scala.concurrent.forkjoin.ThreadLocalRandom.current.nextInt(0, array.length)
+ else 0
while (i < array.length) {
val pos = (i + offset) % array.length
array(pos) match {
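
For reference, the contention-free offset selection in isolation; a sketch using the forkjoin ThreadLocalRandom bundled with the library, guarded against empty arrays as above:

    object RandomOffset {
      // per-thread generator, so there is no shared state to contend on (unlike util.Random)
      def startingOffset(length: Int): Int =
        if (length > 0) scala.concurrent.forkjoin.ThreadLocalRandom.current.nextInt(0, length)
        else 0
    }
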
diff --git a/src/library/scala/collection/immutable/Vector.scala b/src/library/scala/collection/immutable/Vector.scala
index 1395a8f52d..d100bf93df 100644
--- a/src/library/scala/collection/immutable/Vector.scala
+++ b/src/library/scala/collection/immutable/Vector.scala
@@ -77,6 +77,8 @@ override def companion: GenericCompanion[Vector] = Vector
override def par = new ParVector(this)
+ override def toVector: Vector[A] = this
+
override def lengthCompare(len: Int): Int = length - len
private[collection] final def initIterator[B >: A](s: VectorIterator[B]) {
diff --git a/src/library/scala/collection/mutable/ArrayOps.scala b/src/library/scala/collection/mutable/ArrayOps.scala
index 01636eb54e..7a595f211d 100644
--- a/src/library/scala/collection/mutable/ArrayOps.scala
+++ b/src/library/scala/collection/mutable/ArrayOps.scala
@@ -64,7 +64,7 @@ abstract class ArrayOps[T] extends ArrayLike[T, Array[T]] with CustomParalleliza
* @param asTrav A function that converts elements of this array to rows - arrays of type `U`.
* @return An array obtained by concatenating rows of this array.
*/
- def flatten[U, To](implicit asTrav: T => collection.Traversable[U], m: ClassTag[U]): Array[U] = {
+ def flatten[U](implicit asTrav: T => collection.Traversable[U], m: ClassTag[U]): Array[U] = {
val b = Array.newBuilder[U]
b.sizeHint(map{case is: collection.IndexedSeq[_] => is.size case _ => 0}.sum)
for (xs <- this)
diff --git a/src/library/scala/collection/mutable/LinkedHashMap.scala b/src/library/scala/collection/mutable/LinkedHashMap.scala
index 4150cf9eba..5643e070f8 100644
--- a/src/library/scala/collection/mutable/LinkedHashMap.scala
+++ b/src/library/scala/collection/mutable/LinkedHashMap.scala
@@ -49,7 +49,8 @@ class LinkedHashMap[A, B] extends AbstractMap[A, B]
with Map[A, B]
with MapLike[A, B, LinkedHashMap[A, B]]
with HashTable[A, LinkedEntry[A, B]]
- with Serializable {
+ with Serializable
+{
override def empty = LinkedHashMap.empty[A, B]
override def size = tableSize
@@ -107,7 +108,25 @@ class LinkedHashMap[A, B] extends AbstractMap[A, B]
if (hasNext) { val res = (cur.key, cur.value); cur = cur.later; res }
else Iterator.empty.next
}
+
+ protected class FilteredKeys(p: A => Boolean) extends super.FilteredKeys(p) {
+ override def empty = LinkedHashMap.empty
+ }
+
+ override def filterKeys(p: A => Boolean): scala.collection.Map[A, B] = new FilteredKeys(p)
+ protected class MappedValues[C](f: B => C) extends super.MappedValues[C](f) {
+ override def empty = LinkedHashMap.empty
+ }
+
+ override def mapValues[C](f: B => C): scala.collection.Map[A, C] = new MappedValues(f)
+
+ protected class DefaultKeySet extends super.DefaultKeySet {
+ override def empty = LinkedHashSet.empty
+ }
+
+ override def keySet: scala.collection.Set[A] = new DefaultKeySet
+
override def keysIterator: Iterator[A] = new AbstractIterator[A] {
private var cur = firstEntry
def hasNext = cur ne null
diff --git a/src/library/scala/collection/parallel/ParIterableLike.scala b/src/library/scala/collection/parallel/ParIterableLike.scala
index a447f1b5e4..a7ec833193 100644
--- a/src/library/scala/collection/parallel/ParIterableLike.scala
+++ b/src/library/scala/collection/parallel/ParIterableLike.scala
@@ -851,6 +851,12 @@ self: ParIterableLike[T, Repr, Sequential] =>
override def toMap[K, V](implicit ev: T <:< (K, V)): immutable.ParMap[K, V] = toParMap[K, V, immutable.ParMap[K, V]](() => immutable.ParMap.newCombiner[K, V])
+ // TODO(@alex22): make these better
+ override def toVector: Vector[T] = seq.toVector
+
+ override def convertTo[Col[_]](implicit cbf: CanBuildFrom[Nothing, T, Col[T @uncheckedVariance]]): Col[T @uncheckedVariance] = seq.convertTo[Col]
+
+
/* tasks */
protected trait StrictSplitterCheckTask[R, Tp] extends Task[R, Tp] {
diff --git a/src/library/scala/collection/parallel/immutable/ParVector.scala b/src/library/scala/collection/parallel/immutable/ParVector.scala
index 1ece663a1d..e4099f1809 100644
--- a/src/library/scala/collection/parallel/immutable/ParVector.scala
+++ b/src/library/scala/collection/parallel/immutable/ParVector.scala
@@ -62,6 +62,8 @@ extends ParSeq[T]
override def seq: Vector[T] = vector
+ override def toVector: Vector[T] = vector
+
class ParVectorIterator(_start: Int, _end: Int) extends VectorIterator[T](_start, _end) with SeqSplitter[T] {
def remaining: Int = remainingElementCount
def dup: SeqSplitter[T] = (new ParVector(remainingVector)).splitter
diff --git a/src/library/scala/concurrent/impl/Future.scala b/src/library/scala/concurrent/impl/Future.scala
index 47534e398b..6a3487adde 100644
--- a/src/library/scala/concurrent/impl/Future.scala
+++ b/src/library/scala/concurrent/impl/Future.scala
@@ -44,13 +44,13 @@ private[concurrent] object Future {
}
def boxedType(c: Class[_]): Class[_] = if (c.isPrimitive) toBoxed(c) else c
-
- def apply[T](body: =>T)(implicit executor: ExecutionContext): Future[T] = {
+
+ private[impl] class PromiseCompletingTask[T](override val executor: ExecutionContext, body: => T)
+ extends Task {
val promise = new Promise.DefaultPromise[T]()
- //TODO: use `dispatchFuture`?
- executor.execute(new Runnable {
- def run = promise complete {
+ protected override def task() = {
+ promise complete {
try Right(body) catch {
case NonFatal(e) =>
// Commenting out reporting for now, since it produces too much output in the tests
@@ -58,9 +58,14 @@ private[concurrent] object Future {
Left(e)
}
}
- })
-
- promise.future
+ }
+ }
+
+ def apply[T](body: =>T)(implicit executor: ExecutionContext): Future[T] = {
+ val task = new PromiseCompletingTask(executor, body)
+ task.dispatch()
+
+ task.promise.future
}
private[impl] val throwableId: Throwable => Throwable = identity _
@@ -70,38 +75,77 @@ private[concurrent] object Future {
// so that it can be stolen from
// OR: a push to the local task queue should be so cheap that this is
// not even needed, but stealing is still possible
- private val _taskStack = new ThreadLocal[Stack[() => Unit]]()
+
+ private[impl] case class TaskStack(stack: Stack[Task], executor: ExecutionContext)
+
+ private val _taskStack = new ThreadLocal[TaskStack]()
+
+ private[impl] trait Task extends Runnable {
+ def executor: ExecutionContext
+
+ // run the original callback (no dispatch)
+ protected def task(): Unit
+
+ // we implement Runnable to avoid creating
+ // an extra object. run() runs ourselves with
+ // a TaskStack pushed, and then runs any
+ // other tasks that show up in the stack.
+ final override def run() = {
+ try {
+ val taskStack = TaskStack(Stack[Task](this), executor)
+ _taskStack set taskStack
+ while (taskStack.stack.nonEmpty) {
+ val next = taskStack.stack.pop()
+ require(next.executor eq executor)
+ try next.task() catch { case NonFatal(e) => executor reportFailure e }
+ }
+ } finally {
+ _taskStack.remove()
+ }
+ }
+
+ // send the task to the running executor.execute() via
+ // _taskStack, or start a new executor.execute()
+ def dispatch(force: Boolean = false): Unit =
+ _taskStack.get match {
+ case stack if (stack ne null) && (executor eq stack.executor) && !force => stack.stack push this
+ case _ => executor.execute(this)
+ }
+ }
+
+ private[impl] class ReleaseTask(override val executor: ExecutionContext, val elems: List[Task])
+ extends Task {
+ protected override def task() = {
+ _taskStack.get.stack.elems = elems
+ }
+ }
private[impl] def releaseStack(executor: ExecutionContext): Unit =
_taskStack.get match {
- case stack if (stack ne null) && stack.nonEmpty =>
- val tasks = stack.elems
- stack.clear()
+ case stack if (stack ne null) && stack.stack.nonEmpty =>
+ val tasks = stack.stack.elems
+ stack.stack.clear()
_taskStack.remove()
- dispatchFuture(executor, () => _taskStack.get.elems = tasks, true)
+ val release = new ReleaseTask(executor, tasks)
+ release.dispatch(force=true)
case null =>
// do nothing - there is no local batching stack anymore
case _ =>
_taskStack.remove()
}
- private[impl] def dispatchFuture(executor: ExecutionContext, task: () => Unit, force: Boolean = false): Unit =
- _taskStack.get match {
- case stack if (stack ne null) && !force => stack push task // FIXME we can't mix tasks aimed for different ExecutionContexts see: https://github.com/akka/akka/blob/v2.0.1/akka-actor/src/main/scala/akka/dispatch/Future.scala#L373
- case _ => executor.execute(new Runnable {
- def run() {
- try {
- val taskStack = Stack[() => Unit](task)
- _taskStack set taskStack
- while (taskStack.nonEmpty) {
- val next = taskStack.pop()
- try next() catch { case NonFatal(e) => executor reportFailure e }
- }
- } finally {
- _taskStack.remove()
- }
- }
- })
+ private[impl] class OnCompleteTask[T](override val executor: ExecutionContext, val onComplete: (Either[Throwable, T]) => Any)
+ extends Task {
+ private var value: Either[Throwable, T] = null
+
+ protected override def task() = {
+ require(value ne null) // dispatch(value) must be called before dispatch()
+ onComplete(value)
}
-
+
+ def dispatch(value: Either[Throwable, T]): Unit = {
+ this.value = value
+ dispatch()
+ }
+ }
}
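
The rework replaces the thread-local stack of () => Unit thunks with Task objects that know their ExecutionContext, so callbacks scheduled from inside a running task can be batched on the current thread instead of re-entering execute(). A much-simplified sketch of that batching idea, independent of the real scala.concurrent internals (error handling and the same-executor check are elided):

    import scala.collection.mutable.Stack
    import scala.concurrent.ExecutionContext

    object BatchingSketch {
      private val local = new ThreadLocal[Stack[Runnable]]()

      def dispatch(executor: ExecutionContext, task: Runnable): Unit =
        local.get match {
          case stack if stack ne null =>
            stack push task                        // already batching on this thread: just enqueue
          case _ =>
            executor.execute(new Runnable {        // otherwise start a new batch on the executor
              def run(): Unit = {
                val stack = Stack[Runnable](task)
                local set stack
                try while (stack.nonEmpty) stack.pop().run()
                finally local.remove()
              }
            })
        }
    }
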
diff --git a/src/library/scala/concurrent/impl/Promise.scala b/src/library/scala/concurrent/impl/Promise.scala
index 1d573ef818..c5060a2368 100644
--- a/src/library/scala/concurrent/impl/Promise.scala
+++ b/src/library/scala/concurrent/impl/Promise.scala
@@ -94,10 +94,10 @@ object Promise {
val resolved = resolveEither(value)
(try {
@tailrec
- def tryComplete(v: Either[Throwable, T]): List[Either[Throwable, T] => Unit] = {
+ def tryComplete(v: Either[Throwable, T]): List[Future.OnCompleteTask[T]] = {
getState match {
case raw: List[_] =>
- val cur = raw.asInstanceOf[List[Either[Throwable, T] => Unit]]
+ val cur = raw.asInstanceOf[List[Future.OnCompleteTask[T]]]
if (updateState(cur, v)) cur else tryComplete(v)
case _ => null
}
@@ -108,32 +108,21 @@ object Promise {
}) match {
case null => false
case cs if cs.isEmpty => true
- // this assumes that f(resolved) will go via dispatchFuture
- // and notifyCompleted (see onComplete below)
- case cs => cs.foreach(f => f(resolved)); true
+ case cs => cs.foreach(c => c.dispatch(resolved)); true
}
}
def onComplete[U](func: Either[Throwable, T] => U)(implicit executor: ExecutionContext): Unit = {
- val bound: Either[Throwable, T] => Unit = (either: Either[Throwable, T]) =>
- Future.dispatchFuture(executor, () => notifyCompleted(func, either))
+ val bound = new Future.OnCompleteTask[T](executor, func)
@tailrec //Tries to add the callback, if already completed, it dispatches the callback to be executed
def dispatchOrAddCallback(): Unit =
getState match {
- case r: Either[_, _] => bound(r.asInstanceOf[Either[Throwable, T]])
+ case r: Either[_, _] => bound.dispatch(r.asInstanceOf[Either[Throwable, T]])
case listeners: List[_] => if (updateState(listeners, bound :: listeners)) () else dispatchOrAddCallback()
}
dispatchOrAddCallback()
}
-
- private final def notifyCompleted(func: Either[Throwable, T] => Any, result: Either[Throwable, T])(implicit executor: ExecutionContext) {
- try {
- func(result)
- } catch {
- case NonFatal(e) => executor reportFailure e
- }
- }
}
/** An already completed Future is given its result at creation.
@@ -149,8 +138,8 @@ object Promise {
def tryComplete(value: Either[Throwable, T]): Boolean = false
def onComplete[U](func: Either[Throwable, T] => U)(implicit executor: ExecutionContext): Unit = {
- val completedAs = value.get // Avoid closing over "this"
- Future.dispatchFuture(executor, () => func(completedAs))
+ val completedAs = value.get
+ (new Future.OnCompleteTask(executor, func)).dispatch(completedAs)
}
def ready(atMost: Duration)(implicit permit: CanAwait): this.type = this
diff --git a/src/library/scala/reflect/base/Base.scala b/src/library/scala/reflect/base/Base.scala
index 461eaa2e9e..490a9e8c03 100644
--- a/src/library/scala/reflect/base/Base.scala
+++ b/src/library/scala/reflect/base/Base.scala
@@ -451,7 +451,7 @@ class Base extends Universe { self =>
}
}
- def show(tree: Tree) = s"<tree ${tree.getClass}>"
+ def treeToString(tree: Tree) = s"<tree ${tree.getClass}>"
trait TermTree extends Tree
diff --git a/src/library/scala/reflect/base/Trees.scala b/src/library/scala/reflect/base/Trees.scala
index 298d229570..2814450ae3 100644
--- a/src/library/scala/reflect/base/Trees.scala
+++ b/src/library/scala/reflect/base/Trees.scala
@@ -28,11 +28,11 @@ trait Trees { self: Universe =>
def isType: Boolean
/** Obtains string representation of a tree */
- override def toString: String = show(this)
+ override def toString: String = treeToString(this)
}
/** Obtains string representation of a tree */
- def show(tree: Tree): String
+ protected def treeToString(tree: Tree): String
/** Tree is the basis for scala's abstract syntax. The nodes are
* implemented as case classes, and the parameters which initialize
diff --git a/src/library/scala/util/control/Breaks.scala b/src/library/scala/util/control/Breaks.scala
index bb84fcb5fe..d7f5a57f50 100644
--- a/src/library/scala/util/control/Breaks.scala
+++ b/src/library/scala/util/control/Breaks.scala
@@ -73,7 +73,7 @@ class Breaks {
*
* @note This might be different than the statically closest enclosing block!
*/
- def break() { throw breakException }
+ def break(): Nothing = { throw breakException }
}
/** An object that can be used for the break control abstraction.
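Declaring `break(): Nothing` matters for type inference: since `Nothing` conforms to every type, `break()` can now sit in expression position without widening the surrounding type. An illustrative use:

    import scala.util.control.Breaks._

    def sumUntilNegative(xs: List[Int]): Int = {
      var sum = 0
      breakable {
        for (x <- xs)
          sum += (if (x >= 0) x else break())  // if-expression types as Int, not Any
      }
      sum
    }

    sumUntilNegative(List(1, 2, -1, 5))  // == 3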
diff --git a/src/reflect/scala/reflect/api/Printers.scala b/src/reflect/scala/reflect/api/Printers.scala
new file mode 100644
index 0000000000..7f4ff8a7fb
--- /dev/null
+++ b/src/reflect/scala/reflect/api/Printers.scala
@@ -0,0 +1,94 @@
+package scala.reflect
+package api
+
+import java.io.{ PrintWriter, StringWriter }
+
+trait Printers { self: Universe =>
+
+ trait TreePrinter {
+ def print(args: Any*)
+ protected var printTypes = false
+ protected var printIds = false
+ protected var printKinds = false
+ def withTypes: this.type = { printTypes = true; this }
+ def withoutTypes: this.type = { printTypes = false; this }
+ def withIds: this.type = { printIds = true; this }
+ def withoutIds: this.type = { printIds = false; this }
+ def withKinds: this.type = { printKinds = true; this }
+ def withoutKinds: this.type = { printKinds = false; this }
+ }
+
+ case class BooleanFlag(val value: Option[Boolean])
+ object BooleanFlag {
+ import language.implicitConversions
+ implicit def booleanToBooleanFlag(value: Boolean): BooleanFlag = BooleanFlag(Some(value))
+ implicit def optionToBooleanFlag(value: Option[Boolean]): BooleanFlag = BooleanFlag(value)
+ }
+
+ protected def render(what: Any, mkPrinter: PrintWriter => TreePrinter, printTypes: BooleanFlag = None, printIds: BooleanFlag = None, printKinds: BooleanFlag = None): String = {
+ val buffer = new StringWriter()
+ val writer = new PrintWriter(buffer)
+ var printer = mkPrinter(writer)
+ printTypes.value.map(printTypes => if (printTypes) printer.withTypes else printer.withoutTypes)
+ printIds.value.map(printIds => if (printIds) printer.withIds else printer.withoutIds)
+ printKinds.value.map(printKinds => if (printKinds) printer.withKinds else printer.withoutKinds)
+ printer.print(what)
+ writer.flush()
+ buffer.toString
+ }
+
+ /** By default trees are printed with `show` */
+ override protected def treeToString(tree: Tree) = show(tree)
+
+ /** Renders a prettified representation of a tree.
+ * Typically it looks very close to the Scala code it represents.
+ * This function is used in Tree.toString.
+ */
+ def show(tree: Tree, printTypes: BooleanFlag = None, printIds: BooleanFlag = None, printKinds: BooleanFlag = None): String =
+ render(tree, newTreePrinter(_), printTypes, printIds, printKinds)
+
+ /** Hook to define what `show(tree)` means.
+ */
+ def newTreePrinter(out: PrintWriter): TreePrinter
+
+ /** Renders internal structure of a tree.
+ */
+ def showRaw(tree: Tree, printTypes: BooleanFlag = None, printIds: BooleanFlag = None, printKinds: BooleanFlag = None): String =
+ render(tree, newRawTreePrinter(_), printTypes, printIds, printKinds)
+
+ /** Hook to define what `showRaw(tree)` means.
+ */
+ def newRawTreePrinter(out: PrintWriter): TreePrinter
+
+ /** Renders a prettified representation of a symbol.
+ */
+ def show(sym: Symbol): String = sym.toString
+
+ /** Renders internal structure of a symbol.
+ */
+ def showRaw(sym: Symbol): String = render(sym, newRawTreePrinter(_))
+
+ /** Renders a prettified representation of a type.
+ */
+ def show(tpe: Type): String = tpe.toString
+
+ /** Renders internal structure of a type.
+ */
+ def showRaw(tpe: Type): String = render(tpe, newRawTreePrinter(_))
+
+ /** Renders a prettified representation of a name.
+ */
+ def show(name: Name): String
+
+ /** Renders internal structure of a name.
+ */
+ def showRaw(name: Name): String = name.toString
+
+ /** Renders a prettified representation of a flag set.
+ */
+ def show(flags: FlagSet): String
+
+ /** Renders internal structure of a flag set.
+ */
+ def showRaw(flags: FlagSet): String = flags.toString
+}
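A hedged usage sketch of the API defined in this new file, assuming the 2.10-style runtime universe entry point (scala.reflect.runtime.universe); the exact rendering of the output is not reproduced here:

    import scala.reflect.runtime.universe._

    val tree = reify { List(1, 2, 3) map (_ + 1) }.tree

    println(show(tree))     // prettified form, close to the original Scala code
    println(showRaw(tree))  // raw Apply/Select/Ident structure

    // BooleanFlag and its implicit conversions let plain Booleans select the options:
    println(showRaw(tree, printIds = true, printKinds = true))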
diff --git a/src/reflect/scala/reflect/api/Symbols.scala b/src/reflect/scala/reflect/api/Symbols.scala
index 1d266dc778..eb9921a31a 100644
--- a/src/reflect/scala/reflect/api/Symbols.scala
+++ b/src/reflect/scala/reflect/api/Symbols.scala
@@ -116,11 +116,28 @@ trait Symbols extends base.Symbols { self: Universe =>
*/
def isValue: Boolean
+ /** Does this symbol denote a stable value? */
+ def isStable: Boolean
+
/** Does this symbol represent a mutable value?
* If yes, `isTerm` and `isValue` are also guaranteed to be true.
*/
def isVariable: Boolean
+ /** Does this symbol represent a getter or a setter?
+ */
+ def isAccessor: Boolean
+
+ /** Does this symbol represent a getter of a field?
+ * If yes, `isTerm` and `isMethod` are also guaranteed to be true.
+ */
+ def isGetter: Boolean
+
+ /** Does this symbol represent a setter of a field?
+ * If yes, `isTerm` and `isMethod` are also guaranteed to be true.
+ */
+ def isSetter: Boolean
+
/** Does this symbol represent the definition of a package?
* If yes, `isTerm` is also guaranteed to be true.
*/
@@ -177,6 +194,25 @@ trait Symbols extends base.Symbols { self: Universe =>
*/
def isErroneous : Boolean
+ /** Can this symbol be loaded by a reflective mirror?
+ *
+ * Scalac relies on the `ScalaSignature' annotation to retain symbols across compilation runs.
+ * Such annotations (also called "pickles") are applied on top-level classes and include information
+ * about all symbols reachable from the annotee. However, local symbols (e.g. classes or definitions local to a block)
+ * are typically unreachable and information about them gets lost.
+ *
+ * This method is useful for macro writers who wish to save certain ASTs to be used at runtime.
+ * With `isLocatable' it's possible to check whether a tree can be retained as is or whether it needs special treatment.
+ */
+ def isLocatable: Boolean
+
+ /** Is this symbol static (i.e. with no outer instance)?
+ * Q: When exactly is a sym marked as STATIC?
+ * A: If it's a member of a toplevel object, or of an object contained in a toplevel object, or any number of levels deep.
+ * http://groups.google.com/group/scala-internals/browse_thread/thread/d385bcd60b08faf6
+ */
+ def isStatic: Boolean
+
/** The type signature of this symbol seen as a member of given type `site`.
*/
def typeSignatureIn(site: Type): Type
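A sketch of how the new predicates are meant to be queried; the exact placement of these methods shifted during 2.10 development, so treat the calls below as illustrative rather than a fixed API:

    import scala.reflect.runtime.universe._

    class Cell { var value: Int = 0 }

    // the compiler generates a getter/setter pair for `value`
    val accessors = typeOf[Cell].members filter (_.isAccessor)
    accessors foreach { sym =>
      println(s"${sym.name}: getter=${sym.isGetter} setter=${sym.isSetter} stable=${sym.isStable}")
    }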
diff --git a/src/reflect/scala/reflect/api/TreePrinters.scala b/src/reflect/scala/reflect/api/TreePrinters.scala
deleted file mode 100644
index 08a08e7b90..0000000000
--- a/src/reflect/scala/reflect/api/TreePrinters.scala
+++ /dev/null
@@ -1,87 +0,0 @@
-package scala.reflect
-package api
-
-import java.io.{ PrintWriter, StringWriter }
-
-trait TreePrinters { self: Universe =>
-
- trait TreePrinter {
- def print(args: Any*)
- protected var typesPrinted = false
- protected var uniqueIds = false
- def withTypesPrinted: this.type = { typesPrinted = true; this }
- def withUniqueIds: this.type = { uniqueIds = true; this }
- }
-
- def show(tree: Tree): String = show(tree, newTreePrinter)
-
- def show(tree: Tree, mkPrinter: PrintWriter => TreePrinter): String = {
- val buffer = new StringWriter()
- val writer = new PrintWriter(buffer)
- val printer = mkPrinter(writer)
- printer.print(tree)
- writer.flush()
- buffer.toString
- }
-
- def showRaw(tree: Tree): String = show(tree, new RawTreePrinter(_))
-
- /** Hook to define what `show(tree)` means.
- */
- def newTreePrinter(out: PrintWriter): TreePrinter
-
- // emits more or less verbatim representation of the provided tree
- // [Eugene] todo. needs to be refined
- // http://groups.google.com/group/scala-user/browse_thread/thread/de5a5be2e083cf8e
- class RawTreePrinter(out: PrintWriter) extends TreePrinter {
- def print(args: Any*): Unit = args foreach {
- case EmptyTree =>
- print("EmptyTree")
- case tree @ TypeTree() =>
- print("TypeTree()")
- if (tree.tpe != null)
- print(".setType(", tree.tpe, ")")
- else if (tree.original != null)
- print(".setOriginal(", tree.original, ")")
- case Literal(Constant(s: String)) =>
- print("Literal(Constant(\"" + s + "\"))")
- case tree: Tree =>
- print(tree.productPrefix+"(")
- val it = tree.productIterator
- while (it.hasNext) {
- it.next() match {
- case name: Name if uniqueIds && tree.hasSymbol && tree.symbol != NoSymbol =>
- print(tree.symbol.name, "#", tree.symbol.id)
- case arg =>
- print(arg)
- }
- print(if (it.hasNext) ", " else "")
- }
- print(")")
- if (typesPrinted)
- print(".setType(", tree.tpe, ")")
- case list: List[_] =>
- print("List(")
- val it = list.iterator
- while (it.hasNext) {
- print(it.next())
- print(if (it.hasNext) ", " else "")
- }
- print(")")
- case mods: Modifiers =>
- val parts = collection.mutable.ListBuffer[String]()
- parts += mods.flagString
- if (mods.privateWithin.toString.nonEmpty)
- parts += "newTypeName(\"" + mods.privateWithin.toString + "\")"
- if (mods.annotations.nonEmpty)
- parts += mods.annotations map showRaw mkString ("List(", ", ", ")")
- print(parts mkString ("Modifiers(", ", ", ")"))
- case name: Name =>
- if (name.isTermName) print("newTermName(\"") else print("newTypeName(\"")
- print(name.toString)
- print("\")")
- case arg =>
- out.print(arg)
- }
- }
-}
diff --git a/src/reflect/scala/reflect/api/Universe.scala b/src/reflect/scala/reflect/api/Universe.scala
index 002cd2e673..85d8adc44f 100644
--- a/src/reflect/scala/reflect/api/Universe.scala
+++ b/src/reflect/scala/reflect/api/Universe.scala
@@ -9,7 +9,7 @@ abstract class Universe extends base.Universe
with FlagSets
with Names
with Trees
- with TreePrinters
+ with Printers
with Constants
with Positions
with Mirrors
diff --git a/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala b/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala
index 5f78671012..fa758edf05 100644
--- a/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala
+++ b/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala
@@ -8,7 +8,7 @@ package internal
// todo implement in terms of BitSet
import scala.collection.{ mutable, immutable }
import math.max
-import util.Statistics._
+import util.Statistics
/** A base type sequence (BaseTypeSeq) is an ordered sequence spanning all the base types
* of a type. It characterized by the following two laws:
@@ -28,6 +28,7 @@ import util.Statistics._
trait BaseTypeSeqs {
this: SymbolTable =>
import definitions._
+ import BaseTypeSeqsStats._
protected def newBaseTypeSeq(parents: List[Type], elems: Array[Type]) =
new BaseTypeSeq(parents, elems)
@@ -38,8 +39,8 @@ trait BaseTypeSeqs {
*/
class BaseTypeSeq protected[BaseTypeSeqs] (private[BaseTypeSeqs] val parents: List[Type], private[BaseTypeSeqs] val elems: Array[Type]) {
self =>
- incCounter(baseTypeSeqCount)
- incCounter(baseTypeSeqLenTotal, elems.length)
+ Statistics.incCounter(baseTypeSeqCount)
+ Statistics.incCounter(baseTypeSeqLenTotal, elems.length)
/** The number of types in the sequence */
def length: Int = elems.length
@@ -231,3 +232,8 @@ trait BaseTypeSeqs {
val CyclicInheritance = new Throwable
}
+
+object BaseTypeSeqsStats {
+ val baseTypeSeqCount = Statistics.newCounter("#base type seqs")
+ val baseTypeSeqLenTotal = Statistics.newRelCounter("avg base type seq length", baseTypeSeqCount)
+}
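The new BaseTypeSeqsStats object pairs a plain counter with a relative counter so that the second prints as a ratio of the first — here, an average sequence length. A small self-contained illustration of that pairing (counter names are illustrative):

    import scala.reflect.internal.util.Statistics

    val seqCount = Statistics.newCounter("#base type seqs")
    val lenTotal = Statistics.newRelCounter("avg base type seq length", seqCount)

    Statistics.enabled = true
    Statistics.incCounter(seqCount); Statistics.incCounter(lenTotal, 3)
    Statistics.incCounter(seqCount); Statistics.incCounter(lenTotal, 5)

    println(lenTotal)  // "4.0": total length divided by the number of sequences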
diff --git a/src/reflect/scala/reflect/internal/Definitions.scala b/src/reflect/scala/reflect/internal/Definitions.scala
index a9d9b06621..320cd3ddae 100644
--- a/src/reflect/scala/reflect/internal/Definitions.scala
+++ b/src/reflect/scala/reflect/internal/Definitions.scala
@@ -179,7 +179,7 @@ trait Definitions extends api.StandardDefinitions {
val EmptyPackage: ModuleSymbol = rootMirror.EmptyPackage
@deprecated("Moved to rootMirror.EmptyPackageClass", "2.10.0")
- val EmptyPackageClass: ClassSymbol = rootMirror.RootClass
+ val EmptyPackageClass: ClassSymbol = rootMirror.EmptyPackageClass
// It becomes tricky to create dedicated objects for other symbols because
// of initialization order issues.
diff --git a/src/reflect/scala/reflect/internal/TreePrinters.scala b/src/reflect/scala/reflect/internal/Printers.scala
index 6d035c8b9d..82a8c42f7c 100644
--- a/src/reflect/scala/reflect/internal/TreePrinters.scala
+++ b/src/reflect/scala/reflect/internal/Printers.scala
@@ -11,7 +11,7 @@ package internal
import java.io.{ OutputStream, PrintWriter, StringWriter, Writer }
import Flags._
-trait TreePrinters extends api.TreePrinters { self: SymbolTable =>
+trait Printers extends api.Printers { self: SymbolTable =>
//nsc import treeInfo.{ IsTrue, IsFalse }
@@ -62,8 +62,9 @@ trait TreePrinters extends api.TreePrinters { self: SymbolTable =>
protected val indentStep = 2
protected var indentString = " " // 40
- typesPrinted = settings.printtypes.value
- uniqueIds = settings.uniqid.value
+ printTypes = settings.printtypes.value
+ printIds = settings.uniqid.value
+ printKinds = settings.Yshowsymkinds.value
protected def doPrintPositions = settings.Xprintpos.value
def indent() = indentMargin += indentStep
@@ -320,7 +321,7 @@ trait TreePrinters extends api.TreePrinters { self: SymbolTable =>
case Function(vparams, body) =>
print("("); printValueParams(vparams); print(" => ", body, ")")
- if (uniqueIds && tree.symbol != null) print("#"+tree.symbol.id)
+ if (printIds && tree.symbol != null) print("#"+tree.symbol.id)
case Assign(lhs, rhs) =>
print(lhs, " = ", rhs)
@@ -429,7 +430,7 @@ trait TreePrinters extends api.TreePrinters { self: SymbolTable =>
printColumn(whereClauses, " forSome { ", ";", "}")
// SelectFromArray is no longer visible in reflect.internal.
-// eliminated until we figure out what we will do with both TreePrinters and
+// eliminated until we figure out what we will do with both Printers and
// SelectFromArray.
// case SelectFromArray(qualifier, name, _) =>
// print(qualifier); print(".<arr>"); print(symName(tree, name))
@@ -437,7 +438,7 @@ trait TreePrinters extends api.TreePrinters { self: SymbolTable =>
case tree =>
xprintTree(this, tree)
}
- if (typesPrinted && tree.isTerm && !tree.isEmpty) {
+ if (printTypes && tree.isTerm && !tree.isEmpty) {
print("{", if (tree.tpe eq null) "<null>" else tree.tpe.toString, "}")
}
}
@@ -475,4 +476,167 @@ trait TreePrinters extends api.TreePrinters { self: SymbolTable =>
def close = { /* do nothing */ }
def flush = { /* do nothing */ }
}
+
+ // provides footnotes for types
+ private var typeCounter = 0
+ private val typeMap = collection.mutable.WeakHashMap[Type, Int]()
+
+ def newRawTreePrinter(writer: PrintWriter): RawTreePrinter = new RawTreePrinter(writer)
+ def newRawTreePrinter(stream: OutputStream): RawTreePrinter = newRawTreePrinter(new PrintWriter(stream))
+ def newRawTreePrinter(): RawTreePrinter = newRawTreePrinter(new PrintWriter(ConsoleWriter))
+
+ // emits more or less verbatim representation of the provided tree
+ class RawTreePrinter(out: PrintWriter) extends super.TreePrinter {
+ private var depth = 0
+ private var footnotes = collection.mutable.Map[Int, Type]()
+ private var printingFootnotes = false
+ private var printTypesInFootnotes = true
+
+ def print(args: Any*): Unit = {
+ if (depth == 0 && args.length == 1 && args(0) != null && args(0).isInstanceOf[Type])
+ printTypesInFootnotes = false
+
+ depth += 1
+ args foreach {
+ case EmptyTree =>
+ print("EmptyTree")
+ case emptyValDef: AnyRef if emptyValDef eq self.emptyValDef =>
+ print("emptyValDef")
+ case Literal(Constant(value)) =>
+ def print(s: String) = this.print("Literal(Constant(" + s + "))")
+ value match {
+ case s: String => print("\"" + s + "\"")
+ case null => print(null)
+ case _ => print(value.toString)
+ }
+ case tree: Tree =>
+ val hasSymbol = tree.hasSymbol && tree.symbol != NoSymbol
+ val isError = hasSymbol && tree.symbol.name.toString == nme.ERROR.toString
+ printProduct(
+ tree,
+ preamble = _ => {
+ print(tree.productPrefix)
+ if (printTypes && tree.tpe != null) print(tree.tpe)
+ },
+ body = {
+ case name: Name =>
+ if (isError) {
+ if (isError) print("<")
+ print(name)
+ if (isError) print(": error>")
+ } else if (hasSymbol) {
+ tree match {
+ case _: Ident | _: Select | _: SelectFromTypeTree => print(tree.symbol)
+ case _ => print(tree.symbol.name)
+ }
+ } else {
+ print(name)
+ }
+ case arg =>
+ print(arg)
+ },
+ postamble = {
+ case tree @ TypeTree() if tree.original != null => print(".setOriginal(", tree.original, ")")
+ case _ => // do nothing
+ })
+ case sym: Symbol =>
+ if (sym.isStatic && (sym.isClass || sym.isModule)) print(sym.fullName)
+ else print(sym.name)
+ if (printIds) print("#", sym.id)
+ if (printKinds) print("#", sym.abbreviatedKindString)
+ case NoType =>
+ print("NoType")
+ case NoPrefix =>
+ print("NoPrefix")
+ case tpe: Type if printTypesInFootnotes && !printingFootnotes =>
+ val index = typeMap.getOrElseUpdate(tpe, { typeCounter += 1; typeCounter })
+ footnotes(index) = tpe
+ print("[", index, "]")
+ case mods: Modifiers =>
+ print("Modifiers(")
+ if (mods.flags != NoFlags || mods.privateWithin != tpnme.EMPTY || mods.annotations.nonEmpty) print(show(mods.flags))
+ if (mods.privateWithin != tpnme.EMPTY || mods.annotations.nonEmpty) { print(", "); print(mods.privateWithin) }
+ if (mods.annotations.nonEmpty) { print(", "); print(mods.annotations); }
+ print(")")
+ case name: Name =>
+ print(show(name))
+ case list: List[_] =>
+ print("List")
+ printIterable(list)
+ case product: Product =>
+ printProduct(product)
+ case arg =>
+ out.print(arg)
+ }
+ depth -= 1
+ if (depth == 0 && footnotes.nonEmpty && !printingFootnotes) {
+ printingFootnotes = true
+ out.println()
+ val typeIndices = footnotes.keys.toList.sorted
+ typeIndices.zipWithIndex foreach {
+ case (typeIndex, i) =>
+ print("[" + typeIndex + "] ")
+ print(footnotes(typeIndex))
+ if (i < typeIndices.length - 1) out.println()
+ }
+ }
+ }
+
+ def printProduct(
+ p: Product,
+ preamble: Product => Unit = p => print(p.productPrefix),
+ body: Any => Unit = print(_),
+ postamble: Product => Unit = p => print("")): Unit =
+ {
+ preamble(p)
+ printIterable(p.productIterator.toList, body = body)
+ postamble(p)
+ }
+
+ def printIterable(
+ iterable: List[_],
+ preamble: => Unit = print(""),
+ body: Any => Unit = print(_),
+ postamble: => Unit = print("")): Unit =
+ {
+ preamble
+ print("(")
+ val it = iterable.iterator
+ while (it.hasNext) {
+ body(it.next)
+ print(if (it.hasNext) ", " else "")
+ }
+ print(")")
+ postamble
+ }
+ }
+
+ def show(name: Name): String = name match {
+ // base.StandardNames
+ case tpnme.EMPTY => "tpnme.EMPTY"
+ case tpnme.ROOT => "tpnme.ROOT"
+ case tpnme.EMPTY_PACKAGE_NAME => "tpnme.EMPTY_PACKAGE_NAME"
+ case tpnme.WILDCARD => "tpnme.WILDCARD"
+ case nme.CONSTRUCTOR => "nme.CONSTRUCTOR"
+ case nme.NO_NAME => "nme.NO_NAME"
+ // api.StandardNames
+ case tpnme.ERROR => "tpnme.ERROR"
+ case nme.ERROR => "nme.ERROR"
+ case nme.EMPTY => "nme.EMPTY"
+ case tpnme.PACKAGE => "tpnme.PACKAGE"
+ case nme.PACKAGE => "nme.PACKAGE"
+ case _ =>
+ val prefix = if (name.isTermName) "newTermName(\"" else "newTypeName(\""
+ prefix + name.toString + "\")"
+ }
+
+ def show(flags: FlagSet): String = {
+ if (flags == NoFlags) nme.NoFlags.toString
+ else {
+ val s_flags = new collection.mutable.ListBuffer[String]
+ for (i <- 0 to 63 if (flags containsAll (1L << i)))
+ s_flags += flagToString(1L << i).replace("<", "").replace(">", "").toUpperCase
+ s_flags mkString " | "
+ }
+ }
}
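For reference, a toy, standalone version of the bit-by-bit decomposition that `show(flags)` performs above; `flagToString` and the real flag bit assignments are compiler internals, so a lookup table with made-up bit positions stands in for them:

    def showFlags(flags: Long, names: Map[Long, String]): String =
      if (flags == 0L) "NoFlags"
      else (0 to 63).collect {
        case i if (flags & (1L << i)) != 0L => names.getOrElse(1L << i, "<unknown>").toUpperCase
      } mkString " | "

    // made-up bit positions, purely for illustration:
    showFlags((1L << 5) | (1L << 11), Map((1L << 5) -> "final", (1L << 11) -> "case"))
    // -> "FINAL | CASE"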
diff --git a/src/reflect/scala/reflect/internal/SymbolTable.scala b/src/reflect/scala/reflect/internal/SymbolTable.scala
index cadd76b1ba..18adab7c68 100644
--- a/src/reflect/scala/reflect/internal/SymbolTable.scala
+++ b/src/reflect/scala/reflect/internal/SymbolTable.scala
@@ -28,7 +28,7 @@ abstract class SymbolTable extends makro.Universe
with AnnotationInfos
with AnnotationCheckers
with Trees
- with TreePrinters
+ with Printers
with Positions
with TypeDebugging
with Importers
@@ -40,7 +40,7 @@ abstract class SymbolTable extends makro.Universe
{
val gen = new TreeGen { val global: SymbolTable.this.type = SymbolTable.this }
- val treeBuild = gen
+ lazy val treeBuild = gen
def log(msg: => AnyRef): Unit
def abort(msg: String): Nothing = throw new FatalError(supplementErrorMessage(msg))
diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala
index 288eb63332..4b0ceeb86b 100644
--- a/src/reflect/scala/reflect/internal/Symbols.scala
+++ b/src/reflect/scala/reflect/internal/Symbols.scala
@@ -8,18 +8,17 @@ package internal
import scala.collection.{ mutable, immutable }
import scala.collection.mutable.ListBuffer
-import util.Statistics._
+import util.Statistics
import Flags._
trait Symbols extends api.Symbols { self: SymbolTable =>
import definitions._
+ import SymbolsStats._
protected var ids = 0
val emptySymbolArray = new Array[Symbol](0)
- def symbolCount = ids // statistics
-
protected def nextId() = { ids += 1; ids }
/** Used for deciding in the IDE whether we can interrupt the compiler */
@@ -666,7 +665,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
def isClassLocalToConstructor = false
final def isDerivedValueClass =
- isClass && !hasFlag(PACKAGE | TRAIT) &&
+ isClass && !hasFlag(PACKAGE | TRAIT) &&
info.firstParent.typeSymbol == AnyValClass && !isPrimitiveValueClass
final def isMethodWithExtension =
@@ -2713,7 +2712,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
}
}
- incCounter(typeSymbolCount)
+ Statistics.incCounter(typeSymbolCount)
}
implicit val TypeSymbolTag = ClassTag[TypeSymbol](classOf[TypeSymbol])
@@ -2929,7 +2928,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
override def children = childSet
override def addChild(sym: Symbol) { childSet = childSet + sym }
- incCounter(classSymbolCount)
+ Statistics.incCounter(classSymbolCount)
}
implicit val ClassSymbolTag = ClassTag[ClassSymbol](classOf[ClassSymbol])
@@ -3188,4 +3187,11 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
def toList: List[TypeHistory] = this :: ( if (prev eq null) Nil else prev.toList )
}
+
+ Statistics.newView("#symbols")(ids)
+}
+
+object SymbolsStats {
+ val typeSymbolCount = Statistics.newCounter("#type symbols")
+ val classSymbolCount = Statistics.newCounter("#class symbols")
}
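`Statistics.newView` registers a quantity that simply re-evaluates a by-name expression when printed, which is why the "#symbols" view above can track the mutable `ids` field without explicit updates. A minimal illustration (names are made up):

    import scala.reflect.internal.util.Statistics

    var created = 0
    val symbolsView = Statistics.newView("#symbols created")(created)

    created += 42
    println(symbolsView)  // "42": the by-name argument is evaluated at print time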
diff --git a/src/reflect/scala/reflect/internal/Trees.scala b/src/reflect/scala/reflect/internal/Trees.scala
index 75bb0e6d49..dd13dd4c4c 100644
--- a/src/reflect/scala/reflect/internal/Trees.scala
+++ b/src/reflect/scala/reflect/internal/Trees.scala
@@ -9,6 +9,7 @@ package internal
import Flags._
import base.Attachments
import collection.mutable.{ListBuffer, LinkedHashSet}
+import util.Statistics
trait Trees extends api.Trees { self: SymbolTable =>
@@ -18,6 +19,8 @@ trait Trees extends api.Trees { self: SymbolTable =>
val id = nodeCount // TODO: add to attachment?
nodeCount += 1
+ Statistics.incCounter(TreesStats.nodeByType, getClass)
+
@inline final def pos: Position = rawatt.pos
def pos_=(pos: Position): Unit = rawatt = (rawatt withPos pos)
def setPos(newpos: Position): this.type = { pos = newpos; this }
@@ -809,7 +812,7 @@ trait Trees extends api.Trees { self: SymbolTable =>
}
}
- // Belongs in TreeInfo but then I can't reach it from TreePrinters.
+ // Belongs in TreeInfo but then I can't reach it from Printers.
def isReferenceToScalaMember(t: Tree, Id: Name) = t match {
case Ident(Id) => true
case Select(Ident(nme.scala_), Id) => true
@@ -1592,4 +1595,11 @@ trait Trees extends api.Trees { self: SymbolTable =>
implicit val TypeBoundsTreeTag = ClassTag[TypeBoundsTree](classOf[TypeBoundsTree])
implicit val ExistentialTypeTreeTag = ClassTag[ExistentialTypeTree](classOf[ExistentialTypeTree])
implicit val TypeTreeTag = ClassTag[TypeTree](classOf[TypeTree])
+
+ val treeNodeCount = Statistics.newView("#created tree nodes")(nodeCount)
+}
+
+object TreesStats {
+ // statistics
+ val nodeByType = Statistics.newByClass("#created tree nodes by type")(Statistics.newCounter(""))
}
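`TreesStats.nodeByType` relies on the QuantMap machinery from the Statistics rewrite further down: a map of counters keyed by `Class[_]`, filled in on demand. A small sketch of the same pattern outside the compiler (the keys here are arbitrary library classes):

    import scala.reflect.internal.util.Statistics

    val byType = Statistics.newByClass("#created nodes by type")(Statistics.newCounter(""))

    Statistics.enabled = true  // incCounter is a no-op otherwise
    Statistics.incCounter(byType, classOf[StringBuilder])
    Statistics.incCounter(byType, classOf[StringBuilder])
    Statistics.incCounter(byType, classOf[String])

    println(byType)  // classes listed with their counts, highest first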
diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala
index 8a4394bf1d..d4b895bcb4 100644
--- a/src/reflect/scala/reflect/internal/Types.scala
+++ b/src/reflect/scala/reflect/internal/Types.scala
@@ -13,8 +13,7 @@ import mutable.ListBuffer
import Flags._
import scala.util.control.ControlThrowable
import scala.annotation.tailrec
-import util.Statistics._
-import language.postfixOps
+import util.Statistics
/* A standard type pattern match:
case ErrorType =>
@@ -73,9 +72,7 @@ import language.postfixOps
trait Types extends api.Types { self: SymbolTable =>
import definitions._
-
- //statistics
- def uniqueTypeCount = if (uniques == null) 0 else uniques.size
+ import TypesStats._
private var explainSwitch = false
private final val emptySymbolSet = immutable.Set.empty[Symbol]
@@ -681,8 +678,8 @@ trait Types extends api.Types { self: SymbolTable =>
if (isTrivial || phase.erasedTypes && pre.typeSymbol != ArrayClass) this
else {
// scala.tools.nsc.util.trace.when(pre.isInstanceOf[ExistentialType])("X "+this+".asSeenfrom("+pre+","+clazz+" = ") {
- incCounter(asSeenFromCount)
- val start = startTimer(asSeenFromNanos)
+ Statistics.incCounter(asSeenFromCount)
+ val start = Statistics.startTimer(asSeenFromNanos)
val m = new AsSeenFromMap(pre.normalize, clazz)
val tp = m apply this
val tp1 = existentialAbstraction(m.capturedParams, tp)
@@ -690,7 +687,7 @@ trait Types extends api.Types { self: SymbolTable =>
if (m.capturedSkolems.isEmpty) tp1
else deriveType(m.capturedSkolems, _.cloneSymbol setFlag CAPTURED)(tp1)
- stopTimer(asSeenFromNanos, start)
+ Statistics.stopTimer(asSeenFromNanos, start)
result
}
}
@@ -828,26 +825,26 @@ trait Types extends api.Types { self: SymbolTable =>
}
def stat_<:<(that: Type): Boolean = {
- incCounter(subtypeCount)
- val start = startTimer(subtypeNanos)
+ Statistics.incCounter(subtypeCount)
+ val start = Statistics.startTimer(subtypeNanos)
val result =
(this eq that) ||
(if (explainSwitch) explain("<:", isSubType, this, that)
else isSubType(this, that, AnyDepth))
- stopTimer(subtypeNanos, start)
+ Statistics.stopTimer(subtypeNanos, start)
result
}
/** Is this type a weak subtype of that type? True also for numeric types, i.e. Int weak_<:< Long.
*/
def weak_<:<(that: Type): Boolean = {
- incCounter(subtypeCount)
- val start = startTimer(subtypeNanos)
+ Statistics.incCounter(subtypeCount)
+ val start = Statistics.startTimer(subtypeNanos)
val result =
((this eq that) ||
(if (explainSwitch) explain("weak_<:", isWeakSubType, this, that)
else isWeakSubType(this, that)))
- stopTimer(subtypeNanos, start)
+ Statistics.stopTimer(subtypeNanos, start)
result
}
@@ -1020,8 +1017,8 @@ trait Types extends api.Types { self: SymbolTable =>
// See (t0851) for a situation where this happens.
val suspension: List[TypeVar] = if (this.isGround) null else suspendTypeVarsInType(this)
- incCounter(findMemberCount)
- val start = startTimer(findMemberNanos)
+ Statistics.incCounter(findMemberCount)
+ val start = Statistics.startTimer(findMemberNanos)
//Console.println("find member " + name.decode + " in " + this + ":" + this.baseClasses)//DEBUG
var members: Scope = null
@@ -1048,7 +1045,7 @@ trait Types extends api.Types { self: SymbolTable =>
!sym.isPrivateLocal ||
(bcs0.head.hasTransOwner(bcs.head)))) {
if (name.isTypeName || stableOnly && sym.isStable) {
- stopTimer(findMemberNanos, start)
+ Statistics.stopTimer(findMemberNanos, start)
if (suspension ne null) suspension foreach (_.suspended = false)
return sym
} else if (member == NoSymbol) {
@@ -1094,13 +1091,13 @@ trait Types extends api.Types { self: SymbolTable =>
} // while (!bcs.isEmpty)
excluded = excludedFlags
} // while (continue)
- stopTimer(findMemberNanos, start)
+ Statistics.stopTimer(findMemberNanos, start)
if (suspension ne null) suspension foreach (_.suspended = false)
if (members eq null) {
- if (member == NoSymbol) incCounter(noMemberCount)
+ if (member == NoSymbol) Statistics.incCounter(noMemberCount)
member
} else {
- incCounter(multMemberCount)
+ Statistics.incCounter(multMemberCount)
baseClasses.head.newOverloaded(this, members.toList)
}
}
@@ -1185,7 +1182,7 @@ trait Types extends api.Types { self: SymbolTable =>
override def isVolatile = underlying.isVolatile
override def widen: Type = underlying.widen
override def baseTypeSeq: BaseTypeSeq = {
- incCounter(singletonBaseTypeSeqCount)
+ Statistics.incCounter(singletonBaseTypeSeqCount)
underlying.baseTypeSeq prepend this
}
override def isHigherKinded = false // singleton type classifies objects, thus must be kind *
@@ -1536,7 +1533,7 @@ trait Types extends api.Types { self: SymbolTable =>
val bts = copyRefinedType(tpe.asInstanceOf[RefinedType], tpe.parents map varToParam, varToParam mapOver tpe.decls).baseTypeSeq
tpe.baseTypeSeqCache = bts lateMap paramToVar
} else {
- incCounter(compoundBaseTypeSeqCount)
+ Statistics.incCounter(compoundBaseTypeSeqCount)
tpe.baseTypeSeqCache = undetBaseTypeSeq
tpe.baseTypeSeqCache = if (tpe.typeSymbol.isRefinementClass)
tpe.memo(compoundBaseTypeSeq(tpe))(_.baseTypeSeq updateHead tpe.typeSymbol.tpe)
@@ -2392,7 +2389,7 @@ trait Types extends api.Types { self: SymbolTable =>
if (period != currentPeriod) {
tpe.baseTypeSeqPeriod = currentPeriod
if (!isValidForBaseClasses(period)) {
- incCounter(typerefBaseTypeSeqCount)
+ Statistics.incCounter(typerefBaseTypeSeqCount)
tpe.baseTypeSeqCache = undetBaseTypeSeq
tpe.baseTypeSeqCache = tpe.baseTypeSeqImpl
}
@@ -3702,7 +3699,7 @@ trait Types extends api.Types { self: SymbolTable =>
private var uniqueRunId = NoRunId
protected def unique[T <: Type](tp: T): T = {
- incCounter(rawTypeCount)
+ Statistics.incCounter(rawTypeCount)
if (uniqueRunId != currentRunId) {
uniques = util.HashSet[Type]("uniques", initialUniquesCapacity)
uniqueRunId = currentRunId
@@ -5104,7 +5101,7 @@ trait Types extends api.Types { self: SymbolTable =>
/** Do `tp1` and `tp2` denote equivalent types? */
def isSameType(tp1: Type, tp2: Type): Boolean = try {
- incCounter(sametypeCount)
+ Statistics.incCounter(sametypeCount)
subsametypeRecursions += 1
undoLog undoUnless {
isSameType1(tp1, tp2)
@@ -6308,14 +6305,14 @@ trait Types extends api.Types { self: SymbolTable =>
case List() => NothingClass.tpe
case List(t) => t
case _ =>
- incCounter(lubCount)
- val start = startTimer(lubNanos)
+ Statistics.incCounter(lubCount)
+ val start = Statistics.startTimer(lubNanos)
try {
lub(ts, lubDepth(ts))
} finally {
lubResults.clear()
glbResults.clear()
- stopTimer(lubNanos, start)
+ Statistics.stopTimer(lubNanos, start)
}
}
@@ -6431,7 +6428,7 @@ trait Types extends api.Types { self: SymbolTable =>
indent = indent + " "
assert(indent.length <= 100)
}
- incCounter(nestedLubCount)
+ Statistics.incCounter(nestedLubCount)
val res = lub0(ts)
if (printLubs) {
indent = indent stripSuffix " "
@@ -6456,14 +6453,14 @@ trait Types extends api.Types { self: SymbolTable =>
case List() => AnyClass.tpe
case List(t) => t
case ts0 =>
- incCounter(lubCount)
- val start = startTimer(lubNanos)
+ Statistics.incCounter(lubCount)
+ val start = Statistics.startTimer(lubNanos)
try {
glbNorm(ts0, lubDepth(ts0))
} finally {
lubResults.clear()
glbResults.clear()
- stopTimer(lubNanos, start)
+ Statistics.stopTimer(lubNanos, start)
}
}
@@ -6578,7 +6575,7 @@ trait Types extends api.Types { self: SymbolTable =>
}
// if (settings.debug.value) { println(indent + "glb of " + ts + " at depth "+depth); indent = indent + " " } //DEBUG
- incCounter(nestedLubCount)
+ Statistics.incCounter(nestedLubCount)
val res = glb0(ts)
// if (settings.debug.value) { indent = indent.substring(0, indent.length() - 2); log(indent + "glb of " + ts + " is " + res) }//DEBUG
@@ -6871,4 +6868,27 @@ trait Types extends api.Types { self: SymbolTable =>
implicit val TypeBoundsTag = ClassTag[TypeBounds](classOf[TypeBounds])
implicit val TypeRefTag = ClassTag[TypeRef](classOf[TypeRef])
implicit val TypeTagg = ClassTag[Type](classOf[Type])
+
+ Statistics.newView("#unique types") { if (uniques == null) 0 else uniques.size }
+}
+
+object TypesStats {
+ import BaseTypeSeqsStats._
+ val rawTypeCount = Statistics.newCounter ("#raw type creations")
+ val asSeenFromCount = Statistics.newCounter ("#asSeenFrom ops")
+ val subtypeCount = Statistics.newCounter ("#subtype ops")
+ val sametypeCount = Statistics.newCounter ("#sametype ops")
+ val lubCount = Statistics.newCounter ("#toplevel lubs/glbs")
+ val nestedLubCount = Statistics.newCounter ("#all lubs/glbs")
+ val findMemberCount = Statistics.newCounter ("#findMember ops")
+ val noMemberCount = Statistics.newSubCounter(" of which not found", findMemberCount)
+ val multMemberCount = Statistics.newSubCounter(" of which multiple overloaded", findMemberCount)
+ val typerNanos = Statistics.newTimer ("time spent typechecking", "typer")
+ val lubNanos = Statistics.newSubTimer ("time spent in lubs", typerNanos)
+ val subtypeNanos = Statistics.newSubTimer ("time spent in <:<", typerNanos)
+ val findMemberNanos = Statistics.newSubTimer ("time spent in findmember", typerNanos)
+ val asSeenFromNanos = Statistics.newSubTimer ("time spent in asSeenFrom", typerNanos)
+ val compoundBaseTypeSeqCount = Statistics.newSubCounter(" of which for compound types", baseTypeSeqCount)
+ val typerefBaseTypeSeqCount = Statistics.newSubCounter(" of which for typerefs", baseTypeSeqCount)
+ val singletonBaseTypeSeqCount = Statistics.newSubCounter(" of which for singletons", baseTypeSeqCount)
}
diff --git a/src/reflect/scala/reflect/internal/util/StatBase.scala b/src/reflect/scala/reflect/internal/util/StatBase.scala
deleted file mode 100644
index b033ff98bc..0000000000
--- a/src/reflect/scala/reflect/internal/util/StatBase.scala
+++ /dev/null
@@ -1,97 +0,0 @@
-package scala.reflect.internal.util
-
-class StatBase {
-
- private var _enabled = false
-
- def enabled = _enabled
- def enabled_=(cond: Boolean) = {
- if (cond && !_enabled) {
- val test = new Timer()
- val start = System.nanoTime()
- var total = 0L
- for (i <- 1 to 10000) {
- val time = System.nanoTime()
- total += System.nanoTime() - time
- }
- val total2 = System.nanoTime() - start
- println("Enabling statistics, measuring overhead = "+
- total/10000.0+"ns to "+total2/10000.0+"ns per timer")
- _enabled = true
- }
- }
-
- def currentTime() =
- if (_enabled) System.nanoTime() else 0L
-
- def showPercent(x: Double, base: Double) =
- if (base == 0) "" else " ("+"%2.1f".format(x / base * 100)+"%)"
-
- def incCounter(c: Counter) {
- if (_enabled) c.value += 1
- }
-
- def incCounter(c: Counter, delta: Int) {
- if (_enabled) c.value += delta
- }
-
- def startCounter(sc: SubCounter): IntPair =
- if (_enabled) sc.start() else null
-
- def stopCounter(sc: SubCounter, start: IntPair) {
- if (_enabled) sc.stop(start)
- }
-
- def startTimer(tm: Timer): LongPair =
- if (_enabled) tm.start() else null
-
- def stopTimer(tm: Timer, start: LongPair) {
- if (_enabled) tm.stop(start)
- }
-
- case class IntPair(x: Int, y: Int)
- case class LongPair(x: Long, y: Long)
-
- class Counter {
- var value: Int = 0
- override def toString = value.toString
- }
-
- class SubCounter(c: Counter) {
- var value: Int = 0
- def start(): IntPair =
- if (_enabled) IntPair(value, c.value) else null
- def stop(prev: IntPair) {
- if (_enabled) {
- val IntPair(value0, cvalue0) = prev
- value = value0 + c.value - cvalue0
- }
- }
- override def toString =
- value+showPercent(value, c.value)
- }
-
- class Timer {
- var nanos: Long = 0
- var timings = 0
- def start(): LongPair =
- if (_enabled) {
- timings += 1
- LongPair(nanos, System.nanoTime())
- } else null
- def stop(prev: LongPair) {
- if (_enabled) {
- val LongPair(nanos0, start) = prev
- nanos = nanos0 + System.nanoTime() - start
- timings += 1
- }
- }
- override def toString = (timings/2)+" spans, "+nanos.toString+"ns"
- }
-
- import Predef.Class
-
- class ClassCounts extends scala.collection.mutable.HashMap[Class[_], Int] {
- override def default(key: Class[_]) = 0
- }
-} \ No newline at end of file
diff --git a/src/reflect/scala/reflect/internal/util/Statistics.scala b/src/reflect/scala/reflect/internal/util/Statistics.scala
index de0830aa3a..57c9e98174 100644
--- a/src/reflect/scala/reflect/internal/util/Statistics.scala
+++ b/src/reflect/scala/reflect/internal/util/Statistics.scala
@@ -1,37 +1,232 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
- * @author Martin Odersky
- */
package scala.reflect.internal.util
-class Statistics extends StatBase {
- val singletonBaseTypeSeqCount = new Counter
- val compoundBaseTypeSeqCount = new Counter
- val typerefBaseTypeSeqCount = new Counter
- val findMemberCount = new Counter
- val noMemberCount = new Counter
- val multMemberCount = new Counter
- val findMemberNanos = new Timer
- val asSeenFromCount = new Counter
- val asSeenFromNanos = new Timer
- val subtypeCount = new Counter
- val subtypeNanos = new Timer
- val sametypeCount = new Counter
- val rawTypeCount = new Counter
- val rawTypeFailed = new SubCounter(rawTypeCount)
- val findMemberFailed = new SubCounter(findMemberCount)
- val subtypeFailed = new SubCounter(subtypeCount)
- val rawTypeImpl = new SubCounter(rawTypeCount)
- val findMemberImpl = new SubCounter(findMemberCount)
- val subtypeImpl = new SubCounter(subtypeCount)
- val baseTypeSeqCount = new Counter
- val baseTypeSeqLenTotal = new Counter
- val typeSymbolCount = new Counter
- val classSymbolCount = new Counter
- val lubCount = new Counter
- val nestedLubCount = new Counter
- val lubNanos = new Timer
-}
+import collection.mutable
+
+object Statistics {
+
+ /** If enabled, increment counter by one */
+ @inline final def incCounter(c: Counter) {
+ if (_enabled && c != null) c.value += 1
+ }
+
+ /** If enabled, increment counter by given delta */
+ @inline final def incCounter(c: Counter, delta: Int) {
+ if (_enabled && c != null) c.value += delta
+ }
+
+ /** If enabled, increment counter in map `ctrs` at index `key` by one */
+ @inline final def incCounter[K](ctrs: QuantMap[K, Counter], key: K) =
+ if (_enabled && ctrs != null) ctrs(key).value += 1
+
+ /** If enabled, start subcounter. While active it will track all increments of
+ * its base counter.
+ */
+ @inline final def startCounter(sc: SubCounter): (Int, Int) =
+ if (_enabled && sc != null) sc.start() else null
+
+ /** If enabled, stop subcounter from tracking its base counter. */
+ @inline final def stopCounter(sc: SubCounter, start: (Int, Int)) {
+ if (_enabled && sc != null) sc.stop(start)
+ }
+
+ /** If enabled, start timer */
+ @inline final def startTimer(tm: Timer): (Long, Long) =
+ if (_enabled && tm != null) tm.start() else null
+
+ /** If enabled, stop timer */
+ @inline final def stopTimer(tm: Timer, start: (Long, Long)) {
+ if (_enabled && tm != null) tm.stop(start)
+ }
+
+ /** If enabled, push and start a new timer in timer stack */
+ @inline final def pushTimerClass(timers: ByClassTimerStack, cls: Class[_]): (Long, Long) =
+ if (_enabled && timers != null) timers.push(cls) else null
+
+ /** If enabled, stop and pop timer from timer stack */
+ @inline final def popTimerClass(timers: ByClassTimerStack, prev: (Long, Long)) {
+ if (_enabled && timers != null) timers.pop(prev)
+ }
+
+ /** Create a new counter that shows as `prefix` and is active in given phases */
+ def newCounter(prefix: String, phases: String*) = new Counter(prefix, phases)
+
+ /** Create a new relative counter that shows as `prefix` and is active
+ * in the same phases as its base counter. Relative counters print as the ratio of
+ * their own value to the value of their base counter (e.g. an average when the base is a count).
+ */
+ def newRelCounter(prefix: String, ctr: Counter): Counter = new RelCounter(prefix, ctr)
+
+ /** Create a new subcounter that shows as `prefix` and is active
+ * in the same phases as its base counter. Subcounters can track
+ * increments of their base counters and print as percentages
+ * of their base counters.
+ */
+ def newSubCounter(prefix: String, ctr: Counter): SubCounter = new SubCounter(prefix, ctr)
+
+ /** Create a new timer that shows as `prefix` and is active in given phases */
+ def newTimer(prefix: String, phases: String*): Timer = new Timer(prefix, phases)
+
+ /** Create a new subtimer that shows as `prefix` and is active
+ * in the same phases as its base timer. Subtimers print as
+ * percentages of the time recorded by their base timers.
+ */
+ def newSubTimer(prefix: String, timer: Timer): Timer = new SubTimer(prefix, timer)
+
+ /** Create a new view that shows as `prefix` and is active in given phases.
+ * The view always reflects the current value of `quant` as a quantity.
+ */
+ def newView(prefix: String, phases: String*)(quant: => Any): View = new View(prefix, phases, quant)
-object Statistics extends Statistics
+ /** Create a new quantity map that shows as `prefix` and is active in given phases.
+ */
+ def newQuantMap[K, V <% Ordered[V]](prefix: String, phases: String*)(initValue: => V): QuantMap[K, V] = new QuantMap(prefix, phases, initValue)
+ /** Same as newQuantMap, where the key type is fixed to be Class[_] */
+ def newByClass[V <% Ordered[V]](prefix: String, phases: String*)(initValue: => V): QuantMap[Class[_], V] = new QuantMap(prefix, phases, initValue)
+
+ /** Create a new timer stack map, indexed by Class[_]. */
+ def newByClassTimerStack(prefix: String, underlying: Timer) = new ByClassTimerStack(prefix, underlying)
+
+ def allQuantities: Iterable[Quantity] =
+ for ((q, _) <- qs if !q.isInstanceOf[SubQuantity];
+ r <- q :: q.children.toList if r.prefix.nonEmpty) yield r
+
+ private def showPercent(x: Double, base: Double) =
+ if (base == 0) "" else f" (${x / base * 100}%2.1f%%)"
+
+ trait Quantity {
+ qs += (this -> ())
+ val prefix: String
+ val phases: Seq[String]
+ def showAt(phase: String) = phases.isEmpty || (phases contains phase)
+ def line = f"$prefix%-30s: ${this}"
+ val children = new mutable.ListBuffer[Quantity]
+ }
+
+ trait SubQuantity extends Quantity {
+ protected def underlying: Quantity
+ underlying.children += this
+ }
+
+ class Counter(val prefix: String, val phases: Seq[String]) extends Quantity with Ordered[Counter] {
+ var value: Int = 0
+ def compare(that: Counter): Int =
+ if (this.value < that.value) -1
+ else if (this.value > that.value) 1
+ else 0
+ override def toString = value.toString
+ }
+
+ class View(val prefix: String, val phases: Seq[String], quant: => Any) extends Quantity {
+ override def toString = quant.toString
+ }
+
+ private class RelCounter(prefix: String, val underlying: Counter) extends Counter(prefix, underlying.phases) with SubQuantity {
+ override def toString =
+ if (value == 0) "0"
+ else {
+ assert(underlying.value != 0, prefix+"/"+underlying.line)
+ f"${value.toFloat / underlying.value}%2.1f"
+ }
+ }
+
+ class SubCounter(prefix: String, override val underlying: Counter) extends Counter(prefix, underlying.phases) with SubQuantity {
+ def start() = (value, underlying.value)
+ def stop(prev: (Int, Int)) {
+ val (value0, uvalue0) = prev
+ value = value0 + underlying.value - uvalue0
+ }
+ override def toString =
+ value + showPercent(value, underlying.value)
+ }
+
+ class Timer(val prefix: String, val phases: Seq[String]) extends Quantity with Ordered[Timer] {
+ var nanos: Long = 0
+ var timings = 0
+ def compare(that: Timer): Int =
+ if (this.nanos < that.nanos) -1
+ else if (this.nanos > that.nanos) 1
+ else 0
+ def start() = {
+ (nanos, System.nanoTime())
+ }
+ def stop(prev: (Long, Long)) {
+ val (nanos0, start) = prev
+ nanos = nanos0 + System.nanoTime() - start
+ timings += 1
+ }
+ override def toString = s"$timings spans, ${nanos/1000000}ms"
+ }
+
+ private class SubTimer(prefix: String, override val underlying: Timer) extends Timer(prefix, underlying.phases) with SubQuantity {
+ override def toString: String = super.toString + showPercent(nanos, underlying.nanos)
+ }
+
+ /** A mutable map quantity where missing elements are automatically inserted
+ * on access by executing `initValue`.
+ */
+ class QuantMap[K, V <% Ordered[V]](val prefix: String, val phases: Seq[String], initValue: => V)
+ extends scala.collection.mutable.HashMap[K, V] with Quantity {
+ override def default(key: K) = {
+ val elem = initValue
+ this(key) = elem
+ elem
+ }
+ override def toString =
+ this.toSeq.sortWith(_._2 > _._2).map {
+ case (cls: Class[_], elem) =>
+ s"${cls.toString.substring(cls.toString.lastIndexOf("$") + 1)}: $elem"
+ case (key, elem) =>
+ s"$key: $elem"
+ }.mkString(", ")
+ }
+
+ /** A mutable map quantity that maps class keys to subtimer values, relative to
+ * some `underlying` timer. In addition, class timers can be pushed and popped.
+ * Pushing the timer for a class means stopping the currently active timer.
+ */
+ class ByClassTimerStack(prefix: String, val underlying: Timer)
+ extends QuantMap[Class[_], Timer](prefix, underlying.phases, new SubTimer("", underlying)) with SubQuantity {
+ private var elems: List[(Timer, Long)] = Nil
+ def push(cls: Class[_]): (Long, Long) = {
+ val topTimer = this(cls)
+ elems = (topTimer, 0L) :: elems
+ topTimer.start()
+ }
+ def pop(prev: (Long, Long)) = {
+ val (nanos0, start) = prev
+ val duration = System.nanoTime() - start
+ val (topTimer, nestedNanos) :: rest = elems
+ topTimer.nanos = nanos0 + duration - nestedNanos
+ topTimer.timings += 1
+ elems = rest match {
+ case (outerTimer, outerNested) :: elems1 =>
+ (outerTimer, outerNested + duration) :: elems1
+ case Nil =>
+ Nil
+ }
+ }
+ }
+
+ private var _enabled = false
+ private val qs = new mutable.WeakHashMap[Quantity, Unit]
+
+ def enabled = _enabled
+ def enabled_=(cond: Boolean) = {
+ if (cond && !_enabled) {
+ val test = new Timer("", Nil)
+ val start = System.nanoTime()
+ var total = 0L
+ for (i <- 1 to 10000) {
+ val time = System.nanoTime()
+ total += System.nanoTime() - time
+ }
+ val total2 = System.nanoTime() - start
+ println("Enabling statistics, measuring overhead = "+
+ total/10000.0+"ns to "+total2/10000.0+"ns per timer")
+ _enabled = true
+ }
+ }
+}
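Taken together, the rewritten object above is used in two steps: quantities are created eagerly in per-module stats objects, and hot paths guard every update behind the inlined `if (_enabled ...)` checks. A hedged end-to-end sketch with illustrative names:

    import scala.reflect.internal.util.Statistics

    object MyPhaseStats {
      val parseCount = Statistics.newCounter("#parsed sources")
      val parseNanos = Statistics.newTimer("time spent parsing", "parser")
      val failNanos  = Statistics.newSubTimer("  of which failed", parseNanos)
    }

    import MyPhaseStats._

    Statistics.enabled = true  // also measures and prints the per-timer overhead
    Statistics.incCounter(parseCount)

    val start = Statistics.startTimer(parseNanos)
    try {
      // ... work being measured ...
    } finally Statistics.stopTimer(parseNanos, start)

    // report everything that should show in the "parser" phase
    for (q <- Statistics.allQuantities if q.showAt("parser")) println(q.line)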
diff --git a/src/reflect/scala/reflect/makro/Universe.scala b/src/reflect/scala/reflect/makro/Universe.scala
index ffc4042a0a..98046be555 100644
--- a/src/reflect/scala/reflect/makro/Universe.scala
+++ b/src/reflect/scala/reflect/makro/Universe.scala
@@ -15,25 +15,6 @@ abstract class Universe extends scala.reflect.api.Universe {
// [Eugene++ to Martin] should we also add mutability methods here (similarly to what's done below for trees)?
// I'm talking about `setAnnotations` and friends
-
- /** Can this symbol be loaded by a reflective mirror?
- *
- * Scalac relies on `ScalaSignature' annotation to retain symbols across compilation runs.
- * Such annotations (also called "pickles") are applied on top-level classes and include information
- * about all symbols reachable from the annotee. However, local symbols (e.g. classes or definitions local to a block)
- * are typically unreachable and information about them gets lost.
- *
- * This method is useful for macro writers who wish to save certain ASTs to be used at runtime.
- * With `isLocatable' it's possible to check whether a tree can be retained as is, or it needs special treatment.
- */
- def isLocatable: Boolean
-
- /** Is this symbol static (i.e. with no outer instance)?
- * Q: When exactly is a sym marked as STATIC?
- * A: If it's a member of a toplevel object, or of an object contained in a toplevel object, or any number of levels deep.
- * http://groups.google.com/group/scala-internals/browse_thread/thread/d385bcd60b08faf6
- */
- def isStatic: Boolean
}
// Tree extensions ---------------------------------------------------------------
diff --git a/src/reflect/scala/reflect/runtime/JavaUniverse.scala b/src/reflect/scala/reflect/runtime/JavaUniverse.scala
index d4a83b960d..629df76178 100644
--- a/src/reflect/scala/reflect/runtime/JavaUniverse.scala
+++ b/src/reflect/scala/reflect/runtime/JavaUniverse.scala
@@ -12,12 +12,6 @@ class JavaUniverse extends internal.SymbolTable with ReflectSetup with runtime.S
def picklerPhase = SomePhase
- type TreeGen = internal.TreeGen
-
- override type Position = scala.reflect.internal.util.Position
-
- override val gen = new TreeGen { val global: self.type = self }
-
lazy val settings = new Settings
def forInteractive = false
def forScaladoc = false
diff --git a/src/scalacheck/org/scalacheck/Arbitrary.scala b/src/scalacheck/org/scalacheck/Arbitrary.scala
index 28e116b479..8c43cdaafe 100644
--- a/src/scalacheck/org/scalacheck/Arbitrary.scala
+++ b/src/scalacheck/org/scalacheck/Arbitrary.scala
@@ -5,7 +5,7 @@
** **
** This software is released under the terms of the Revised BSD License. **
** There is NO WARRANTY. See the file LICENSE for the full text. **
-\*-------------------------------------------------------------------------*/
+\*------------------------------------------------------------------------ */
package org.scalacheck
@@ -178,9 +178,10 @@ object Arbitrary {
import java.math.MathContext._
val mcGen = oneOf(UNLIMITED, DECIMAL32, DECIMAL64, DECIMAL128)
val bdGen = for {
- mc <- mcGen
- scale <- arbInt.arbitrary
x <- arbBigInt.arbitrary
+ mc <- mcGen
+ limit <- value(if(mc == UNLIMITED) 0 else math.max(x.abs.toString.length - mc.getPrecision, 0))
+ scale <- Gen.chooseNum(Int.MinValue + limit , Int.MaxValue)
} yield BigDecimal(x, scale, mc)
Arbitrary(bdGen)
}
@@ -197,24 +198,37 @@ object Arbitrary {
}
/** Generates an arbitrary property */
- implicit lazy val arbProp: Arbitrary[Prop] =
+ implicit lazy val arbProp: Arbitrary[Prop] = {
+ import Prop._
+ val undecidedOrPassed = forAll { b: Boolean =>
+ b ==> true
+ }
Arbitrary(frequency(
- (5, Prop.proved),
- (4, Prop.falsified),
- (2, Prop.undecided),
- (1, Prop.exception(null))
+ (4, falsified),
+ (4, passed),
+ (3, proved),
+ (3, undecidedOrPassed),
+ (2, undecided),
+ (1, exception(null))
))
+ }
/** Arbitrary instance of test params */
implicit lazy val arbTestParams: Arbitrary[Test.Params] =
Arbitrary(for {
- minSuccTests <- choose(10,150)
- maxDiscTests <- choose(100,500)
+ minSuccTests <- choose(10,200)
+ maxDiscardRatio <- choose(0.2f,10f)
minSize <- choose(0,500)
sizeDiff <- choose(0,500)
maxSize <- choose(minSize, minSize + sizeDiff)
ws <- choose(1,4)
- } yield Test.Params(minSuccTests,maxDiscTests,minSize,maxSize,workers = ws))
+ } yield Test.Params(
+ minSuccessfulTests = minSuccTests,
+ maxDiscardRatio = maxDiscardRatio,
+ minSize = minSize,
+ maxSize = maxSize,
+ workers = ws
+ ))
/** Arbitrary instance of gen params */
implicit lazy val arbGenParams: Arbitrary[Gen.Params] =
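The parameter changes above (a per-property `maxDiscardRatio` replacing an absolute discard count) line up with how tests are driven in this ScalaCheck version; a hedged usage sketch, assuming the `Test.check(params, prop)` entry point of that era:

    import org.scalacheck.{Prop, Test}

    val prop = Prop.forAll { n: Int => n + 0 == n }

    val params = Test.Params(
      minSuccessfulTests = 200,
      maxDiscardRatio    = 5f,
      workers            = 2
    )

    println(Test.check(params, prop).passed)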
diff --git a/src/scalacheck/org/scalacheck/Arg.scala b/src/scalacheck/org/scalacheck/Arg.scala
index 908bce2a81..8959211f09 100644
--- a/src/scalacheck/org/scalacheck/Arg.scala
+++ b/src/scalacheck/org/scalacheck/Arg.scala
@@ -5,7 +5,7 @@
** **
** This software is released under the terms of the Revised BSD License. **
** There is NO WARRANTY. See the file LICENSE for the full text. **
-\*-------------------------------------------------------------------------*/
+\*------------------------------------------------------------------------ */
package org.scalacheck
diff --git a/src/scalacheck/org/scalacheck/Commands.scala b/src/scalacheck/org/scalacheck/Commands.scala
index 112dda28a7..5ad82c513d 100644
--- a/src/scalacheck/org/scalacheck/Commands.scala
+++ b/src/scalacheck/org/scalacheck/Commands.scala
@@ -5,7 +5,7 @@
** **
** This software is released under the terms of the Revised BSD License. **
** There is NO WARRANTY. See the file LICENSE for the full text. **
-\*-------------------------------------------------------------------------*/
+\*------------------------------------------------------------------------ */
package org.scalacheck
@@ -46,13 +46,6 @@ trait Commands extends Prop {
def run(s: State): Any
def nextState(s: State): State
- /** @deprecated Use <code>preConditions += ...</code> instead. */
- @deprecated("Use 'preConditions += ...' instead.", "1.6")
- def preCondition_=(f: State => Boolean) = {
- preConditions.clear
- preConditions += f
- }
-
/** Returns all preconditions merged into a single function */
def preCondition: (State => Boolean) = s => preConditions.toList.forall(_.apply(s))
@@ -62,20 +55,6 @@ trait Commands extends Prop {
* conditions to the precondition list */
val preConditions = new collection.mutable.ListBuffer[State => Boolean]
- /** @deprecated Use <code>postConditions += ...</code> instead. */
- @deprecated("Use 'postConditions += ...' instead.", "1.6")
- def postCondition_=(f: (State,Any) => Prop) = {
- postConditions.clear
- postConditions += ((s0,s1,r) => f(s0,r))
- }
-
- /** @deprecated Use <code>postConditions += ...</code> instead. */
- @deprecated("Use 'postConditions += ...' instead.", "1.6")
- def postCondition_=(f: (State,State,Any) => Prop) = {
- postConditions.clear
- postConditions += f
- }
-
/** Returns all postconditions merged into a single function */
def postCondition: (State,State,Any) => Prop = (s0,s1,r) => all(postConditions.map(_.apply(s0,s1,r)): _*)
diff --git a/src/scalacheck/org/scalacheck/ConsoleReporter.scala b/src/scalacheck/org/scalacheck/ConsoleReporter.scala
index c3af6c83a3..93f1dc222e 100644
--- a/src/scalacheck/org/scalacheck/ConsoleReporter.scala
+++ b/src/scalacheck/org/scalacheck/ConsoleReporter.scala
@@ -5,7 +5,7 @@
** **
** This software is released under the terms of the Revised BSD License. **
** There is NO WARRANTY. See the file LICENSE for the full text. **
-\*-------------------------------------------------------------------------*/
+\*------------------------------------------------------------------------ */
package org.scalacheck
@@ -37,31 +37,6 @@ object ConsoleReporter {
* the given verbosity */
def apply(verbosity: Int = 0) = new ConsoleReporter(verbosity)
- @deprecated("(v1.8)", "1.8")
- def propReport(s: Int, d: Int) = {
- if(d == 0) printf("\rPassed %s tests\r", s)
- else printf("\rPassed %s tests; %s discarded\r", s, d)
- Console.flush
- }
-
- @deprecated("(v1.8)", "1.8")
- def propReport(pName: String, s: Int, d: Int) = {
- if(d == 0) printf("\r %s: Passed %s tests\r", pName, s)
- else printf("\r %s: Passed %s tests; %s discarded\r", pName, s, d)
- Console.flush
- }
-
- @deprecated("(v1.8)", "1.8")
- def testReport(res: Test.Result) = {
- print(List.fill(78)(' ').mkString)
- val s = (if(res.passed) "+ " else "! ") + pretty(res, Params(0))
- printf("\r%s\n", format(s, "", "", 75))
- res
- }
-
- @deprecated("(v1.8)", "1.8")
- def testStatsEx(res: Test.Result): Unit = testStatsEx("", res)
-
def testStatsEx(msg: String, res: Test.Result) = {
lazy val m = if(msg.length == 0) "" else msg + ": "
res.status match {
diff --git a/src/scalacheck/org/scalacheck/Gen.scala b/src/scalacheck/org/scalacheck/Gen.scala
index a253b040cd..64bb61c2d3 100644
--- a/src/scalacheck/org/scalacheck/Gen.scala
+++ b/src/scalacheck/org/scalacheck/Gen.scala
@@ -5,7 +5,7 @@
** **
** This software is released under the terms of the Revised BSD License. **
** There is NO WARRANTY. See the file LICENSE for the full text. **
-\*-------------------------------------------------------------------------*/
+\*------------------------------------------------------------------------ */
package org.scalacheck
@@ -59,6 +59,12 @@ object Choose {
}
}
+case class FiniteGenRes[+T](
+ r: T
+)
+
+sealed trait FiniteGen[+T] extends Gen[FiniteGenRes[T]]
+
/** Class that represents a generator. */
sealed trait Gen[+T] {
@@ -150,13 +156,6 @@ sealed trait Gen[+T] {
/** Returns a new property that holds if and only if both this
* and the given generator generates the same result, or both
- * generators generate no result.
- * @deprecated Use <code>==</code> instead */
- @deprecated("Use == instead", "1.7")
- def ===[U](g: Gen[U]): Prop = this == g
-
- /** Returns a new property that holds if and only if both this
- * and the given generator generates the same result, or both
* generators generate no result. */
def ==[U](g: Gen[U]) = Prop(prms =>
(this(prms.genPrms), g(prms.genPrms)) match {
@@ -221,11 +220,6 @@ object Gen {
}
}
- /* Default generator parameters
- * @deprecated Use <code>Gen.Params()</code> instead */
- @deprecated("Use Gen.Params() instead", "1.8")
- val defaultParams = Params()
-
/* Generator factory method */
def apply[T](g: Gen.Params => Option[T]) = new Gen[T] {
def apply(p: Gen.Params) = g(p)
@@ -310,20 +304,6 @@ object Gen {
x <- if(i == 0) g1 else if(i == 1) g2 else gs(i-2)
} yield x
- /** Chooses one of the given values, with a weighted random distribution.
- * @deprecated Use <code>frequency</code> with constant generators
- * instead. */
- @deprecated("Use 'frequency' with constant generators instead.", "1.6")
- def elementsFreq[T](vs: (Int, T)*): Gen[T] =
- frequency(vs.map { case (w,v) => (w, value(v)) } : _*)
-
- /** A generator that returns a random element from a list
- * @deprecated Use <code>oneOf</code> with constant generators instead. */
- @deprecated("Use 'oneOf' with constant generators instead.", "1.6")
- def elements[T](xs: T*): Gen[T] = if(xs.isEmpty) fail else for {
- i <- choose(0,xs.length-1)
- } yield xs(i)
-
//// List Generators ////
@@ -368,12 +348,6 @@ object Gen {
* <code>containerOfN[List,T](n,g)</code>. */
def listOfN[T](n: Int, g: Gen[T]) = containerOfN[List,T](n,g)
- /** Generates a list of the given length. This method is equal to calling
- * <code>containerOfN[List,T](n,g)</code>.
- * @deprecated Use the method <code>listOfN</code> instead. */
- @deprecated("Use 'listOfN' instead.", "1.6")
- def vectorOf[T](n: Int, g: Gen[T]) = containerOfN[List,T](n,g)
-
/** A generator that picks a random number of elements from a list */
def someOf[T](l: Iterable[T]) = choose(0,l.size) flatMap (pick(_,l))
@@ -438,16 +412,6 @@ object Gen {
//// Number Generators ////
- /* Generates positive integers
- * @deprecated Use <code>posNum[Int]code> instead */
- @deprecated("Use posNum[Int] instead", "1.7")
- def posInt: Gen[Int] = sized(max => choose(1, max))
-
- /* Generates negative integers
- * @deprecated Use <code>negNum[Int]code> instead */
- @deprecated("Use negNum[Int] instead", "1.7")
- def negInt: Gen[Int] = sized(max => choose(-max, -1))
-
/** Generates positive numbers of uniform distribution, with an
* upper bound of the generation size parameter. */
def posNum[T](implicit num: Numeric[T], c: Choose[T]): Gen[T] = {
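Each Gen helper deleted above names its replacement in the deprecation message it carried. As a quick reference, a minimal migration sketch following those messages (the value names are illustrative, not part of the patch):

    import org.scalacheck.{Gen, Prop}
    import Gen.{choose, frequency, listOfN, negNum, oneOf, posNum, value}

    // elementsFreq((2, "a"), (1, "b"))  ->  frequency with constant generators
    val weighted: Gen[String] = frequency((2, value("a")), (1, value("b")))

    // elements("x", "y", "z")           ->  oneOf with constant generators
    val picked: Gen[String] = oneOf(value("x"), value("y"), value("z"))

    // vectorOf(5, choose(0, 9))         ->  listOfN
    val digits: Gen[List[Int]] = listOfN(5, choose(0, 9))

    // posInt / negInt                   ->  posNum[Int] / negNum[Int]
    val pos: Gen[Int] = posNum[Int]
    val neg: Gen[Int] = negNum[Int]

    // g1 === g2                         ->  g1 == g2
    val sameResults: Prop = weighted == weighted

The `==` property at the end uses the remaining Gen.== overload, which compares what the two generators produce for the same parameters rather than the generator instances themselves.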
diff --git a/src/scalacheck/org/scalacheck/Pretty.scala b/src/scalacheck/org/scalacheck/Pretty.scala
index f59ac315c7..c40e4aa718 100644
--- a/src/scalacheck/org/scalacheck/Pretty.scala
+++ b/src/scalacheck/org/scalacheck/Pretty.scala
@@ -5,7 +5,7 @@
** **
** This software is released under the terms of the Revised BSD License. **
** There is NO WARRANTY. See the file LICENSE for the full text. **
-\*-------------------------------------------------------------------------*/
+\*------------------------------------------------------------------------ */
package org.scalacheck
@@ -49,6 +49,8 @@ object Pretty {
implicit def prettyAny(t: Any) = Pretty { p => t.toString }
+ implicit def prettyString(t: String) = Pretty { p => "\""++t++"\"" }
+
implicit def prettyList(l: List[Any]) = Pretty { p =>
l.map("\""+_+"\"").mkString("List(", ", ", ")")
}
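The new prettyString implicit quotes plain strings the same way prettyList already quotes its elements. A tiny sketch, assuming Pretty#apply and Pretty.Params as they are used elsewhere in this file:

    import org.scalacheck.Pretty
    import org.scalacheck.Pretty.{Params, prettyString}

    // Raw strings are now rendered with surrounding quotes.
    val p: Pretty = prettyString("hello")
    assert(p(Params(0)) == "\"hello\"")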
diff --git a/src/scalacheck/org/scalacheck/Prop.scala b/src/scalacheck/org/scalacheck/Prop.scala
index 3ae9f22234..dfd85a832a 100644
--- a/src/scalacheck/org/scalacheck/Prop.scala
+++ b/src/scalacheck/org/scalacheck/Prop.scala
@@ -5,12 +5,13 @@
** **
** This software is released under the terms of the Revised BSD License. **
** There is NO WARRANTY. See the file LICENSE for the full text. **
-\*-------------------------------------------------------------------------*/
+\*------------------------------------------------------------------------ */
package org.scalacheck
import util.{FreqMap,Buildable}
import scala.collection._
+import scala.annotation.tailrec
/** A property is a generator that generates a property result */
trait Prop {
@@ -102,15 +103,6 @@ trait Prop {
}
}
- /** Returns a new property that holds if and only if both this
- * and the given property generates a result with the exact
- * same status. Note that this means that if one of the properties is
- * proved, and the other one passed, then the resulting property
- * will fail.
- * @deprecated Use <code>==</code> instead */
- @deprecated("Use == instead.", "1.7")
- def ===(p: Prop): Prop = this == p
-
override def toString = "Prop"
/** Put a label on the property to make test reports clearer */
@@ -201,7 +193,7 @@ object Prop {
case (_,Undecided) => r
case (_,Proof) => merge(this, r, this.status)
- case (Proof,_) => merge(this, r, this.status)
+ case (Proof,_) => merge(this, r, r.status)
case (True,True) => merge(this, r, True)
}
@@ -337,15 +329,12 @@ object Prop {
/** A property that depends on the generator size */
def sizedProp(f: Int => Prop): Prop = Prop { prms =>
+ // provedToTrue since if the property is proved for
+ // one size, it shouldn't be regarded as proved for
+ // all sizes.
provedToTrue(f(prms.genPrms.size)(prms))
}
- /** Implication
- * @deprecated Use the implication operator of the Prop class instead
- */
- @deprecated("Use the implication operator of the Prop class instead", "1.7")
- def ==>(b: => Boolean, p: => Prop): Prop = (b: Prop) ==> p
-
/** Implication with several conditions */
def imply[T](x: T, f: PartialFunction[T,Prop]): Prop =
secure(if(f.isDefinedAt(x)) f(x) else undecided)
@@ -758,4 +747,17 @@ object Prop {
a8: Arbitrary[A8], s8: Shrink[A8], pp8: A8 => Pretty
): Prop = forAll((a: A1) => forAll(f(a, _:A2, _:A3, _:A4, _:A5, _:A6, _:A7, _:A8)))
+ /** Ensures that the property expression passed in completes within the given space of time. */
+ def within(maximumMs: Long)(wrappedProp: => Prop): Prop = new Prop {
+ @tailrec private def attempt(prms: Params, endTime: Long): Result = {
+ val result = wrappedProp.apply(prms)
+ if (System.currentTimeMillis > endTime) {
+ (if (result.failure) result else Result(False)).label("Timeout")
+ } else {
+ if (result.success) result
+ else attempt(prms, endTime)
+ }
+ }
+ def apply(prms: Params) = attempt(prms, System.currentTimeMillis + maximumMs)
+ }
}
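The new within combinator keeps re-evaluating the wrapped property until one evaluation succeeds or the deadline passes; once time runs out, the last result is labelled "Timeout" and, unless it already failed on its own, reported as False. A sketch of intended use (the wrapped property is illustrative):

    import org.scalacheck.Prop
    import org.scalacheck.Prop.{forAll, within}

    // Fails with the label "Timeout" if no single evaluation of the
    // wrapped property succeeds within 500 ms.
    val bounded: Prop = within(500) {
      forAll { (n: Int) => n + 0 == n }
    }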
diff --git a/src/scalacheck/org/scalacheck/Properties.scala b/src/scalacheck/org/scalacheck/Properties.scala
index 8a5b3febc9..26059231d6 100644
--- a/src/scalacheck/org/scalacheck/Properties.scala
+++ b/src/scalacheck/org/scalacheck/Properties.scala
@@ -5,7 +5,7 @@
** **
** This software is released under the terms of the Revised BSD License. **
** There is NO WARRANTY. See the file LICENSE for the full text. **
-\*-------------------------------------------------------------------------*/
+\*------------------------------------------------------------------------ */
package org.scalacheck
diff --git a/src/scalacheck/org/scalacheck/Shrink.scala b/src/scalacheck/org/scalacheck/Shrink.scala
index a077f21573..ae15bd9616 100644
--- a/src/scalacheck/org/scalacheck/Shrink.scala
+++ b/src/scalacheck/org/scalacheck/Shrink.scala
@@ -5,7 +5,7 @@
** **
** This software is released under the terms of the Revised BSD License. **
** There is NO WARRANTY. See the file LICENSE for the full text. **
-\*-------------------------------------------------------------------------*/
+\*------------------------------------------------------------------------ */
package org.scalacheck
diff --git a/src/scalacheck/org/scalacheck/Test.scala b/src/scalacheck/org/scalacheck/Test.scala
index 48b0a151a1..4368184823 100644
--- a/src/scalacheck/org/scalacheck/Test.scala
+++ b/src/scalacheck/org/scalacheck/Test.scala
@@ -5,7 +5,7 @@
** **
** This software is released under the terms of the Revised BSD License. **
** There is NO WARRANTY. See the file LICENSE for the full text. **
-\*-------------------------------------------------------------------------*/
+\*------------------------------------------------------------------------ */
package org.scalacheck
@@ -19,12 +19,18 @@ object Test {
/** Test parameters */
case class Params(
minSuccessfulTests: Int = 100,
- maxDiscardedTests: Int = 500,
+
+ /** @deprecated Use maxDiscardRatio instead. */
+ @deprecated("Use maxDiscardRatio instead.", "1.10")
+ maxDiscardedTests: Int = -1,
+
minSize: Int = 0,
maxSize: Int = Gen.Params().size,
rng: java.util.Random = Gen.Params().rng,
workers: Int = 1,
- testCallback: TestCallback = new TestCallback {}
+ testCallback: TestCallback = new TestCallback {},
+ maxDiscardRatio: Float = 5,
+ customClassLoader: Option[ClassLoader] = None
)
/** Test statistics */
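The absolute maxDiscardedTests bound becomes a ratio of discarded to successful tests; check() below still honours the deprecated field when it is set (non-negative) by converting it to maxDiscardedTests / minSuccessfulTests. A minimal sketch of the new style (the property and the ratio value are illustrative):

    import org.scalacheck.{Prop, Test}
    import org.scalacheck.Prop.forAll

    val prop: Prop = forAll { (n: Int) => n == n }

    // Old style: Test.check(Test.Params(maxDiscardedTests = 500), prop)
    // New style: allow at most 10 discarded tests per successful test
    // (the default ratio is 5).
    val res = Test.check(Test.Params(maxDiscardRatio = 10f), prop)
    assert(res.passed)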
@@ -90,7 +96,7 @@ object Test {
import prms._
if(
minSuccessfulTests <= 0 ||
- maxDiscardedTests < 0 ||
+ maxDiscardRatio <= 0 ||
minSize < 0 ||
maxSize < minSize ||
workers <= 0
@@ -106,12 +112,13 @@ object Test {
val names = Set("minSuccessfulTests", "s")
val help = "Number of tests that must succeed in order to pass a property"
}
- object OptMaxDiscarded extends IntOpt {
- val default = Test.Params().maxDiscardedTests
- val names = Set("maxDiscardedTests", "d")
+ object OptMaxDiscardRatio extends FloatOpt {
+ val default = Test.Params().maxDiscardRatio
+ val names = Set("maxDiscardRatio", "r")
val help =
- "Number of tests that can be discarded before ScalaCheck stops " +
- "testing a property"
+ "The maximum ratio between discarded and succeeded tests " +
+ "allowed before ScalaCheck stops testing a property. At " +
+ "least minSuccessfulTests will always be tested, though."
}
object OptMinSize extends IntOpt {
val default = Test.Params().minSize
@@ -135,45 +142,54 @@ object Test {
}
val opts = Set[Opt[_]](
- OptMinSuccess, OptMaxDiscarded, OptMinSize,
+ OptMinSuccess, OptMaxDiscardRatio, OptMinSize,
OptMaxSize, OptWorkers, OptVerbosity
)
def parseParams(args: Array[String]) = parseArgs(args) {
optMap => Test.Params(
- optMap(OptMinSuccess),
- optMap(OptMaxDiscarded),
- optMap(OptMinSize),
- optMap(OptMaxSize),
- Test.Params().rng,
- optMap(OptWorkers),
- ConsoleReporter(optMap(OptVerbosity))
+ minSuccessfulTests = optMap(OptMinSuccess),
+ maxDiscardRatio = optMap(OptMaxDiscardRatio),
+ minSize = optMap(OptMinSize),
+ maxSize = optMap(OptMaxSize),
+ rng = Test.Params().rng,
+ workers = optMap(OptWorkers),
+ testCallback = ConsoleReporter(optMap(OptVerbosity))
)
}
}
/** Tests a property with the given testing parameters, and returns
* the test results. */
- def check(prms: Params, p: Prop): Result = {
+ def check(params: Params, p: Prop): Result = {
+
+ // maxDiscardedTests is deprecated, but if someone
+ // uses it let it override maxDiscardRatio
+ val mdr =
+ if(params.maxDiscardedTests < 0) params.maxDiscardRatio
+ else (params.maxDiscardedTests: Float)/(params.minSuccessfulTests: Float)
+ val prms = params.copy( maxDiscardRatio = mdr)
+
import prms._
- import actors.Futures.future
+ import scala.actors.Futures.future
assertParams(prms)
if(workers > 1)
assert(!p.isInstanceOf[Commands], "Commands cannot be checked multi-threaded")
- val iterations = minSuccessfulTests / workers
- val sizeStep = (maxSize-minSize) / (minSuccessfulTests: Float)
+ val iterations = math.ceil(minSuccessfulTests / (workers: Double))
+ val sizeStep = (maxSize-minSize) / (iterations*workers)
var stop = false
- def worker(workerdIdx: Int) = future {
- var n = 0
- var d = 0
- var size = minSize + (workerdIdx*sizeStep*iterations)
+ def worker(workerIdx: Int) = future {
+ params.customClassLoader.map(Thread.currentThread.setContextClassLoader(_))
+ var n = 0 // passed tests
+ var d = 0 // discarded tests
var res: Result = null
var fm = FreqMap.empty[immutable.Set[Any]]
while(!stop && res == null && n < iterations) {
- val propPrms = Prop.Params(Gen.Params(size.round, prms.rng), fm)
+ val size = (minSize: Double) + (sizeStep * (workerIdx + (workers*(n+d))))
+ val propPrms = Prop.Params(Gen.Params(size.round.toInt, prms.rng), fm)
secure(p(propPrms)) match {
case Right(e) => res =
Result(GenException(e), n, d, FreqMap.empty[immutable.Set[Any]])
@@ -184,35 +200,48 @@ object Test {
propRes.status match {
case Prop.Undecided =>
d += 1
- testCallback.onPropEval("", workerdIdx, n, d)
- if(d >= maxDiscardedTests) res = Result(Exhausted, n, d, fm)
+ testCallback.onPropEval("", workerIdx, n, d)
+ // The below condition is kind of hacky. We have to have
+ // some margin, otherwise workers might stop testing too
+ // early because they have been exhausted, but the overall
+ // test has not.
+ if (n+d > minSuccessfulTests && 1+workers*maxDiscardRatio*n < d)
+ res = Result(Exhausted, n, d, fm)
case Prop.True =>
n += 1
- testCallback.onPropEval("", workerdIdx, n, d)
+ testCallback.onPropEval("", workerIdx, n, d)
case Prop.Proof =>
n += 1
res = Result(Proved(propRes.args), n, d, fm)
- case Prop.False => res =
- Result(Failed(propRes.args, propRes.labels), n, d, fm)
- case Prop.Exception(e) => res =
- Result(PropException(propRes.args, e, propRes.labels), n, d, fm)
+ stop = true
+ case Prop.False =>
+ res = Result(Failed(propRes.args,propRes.labels), n, d, fm)
+ stop = true
+ case Prop.Exception(e) =>
+ res = Result(PropException(propRes.args,e,propRes.labels), n, d, fm)
+ stop = true
}
}
- size += sizeStep
}
- if(res != null) stop = true
- else res = Result(Passed, n, d, fm)
- res
+ if (res == null) {
+ if (maxDiscardRatio*n > d) Result(Passed, n, d, fm)
+ else Result(Exhausted, n, d, fm)
+ } else res
}
- def mergeResults(r1: () => Result, r2: () => Result) = r1() match {
- case Result(Passed, s1, d1, fm1, t) => r2() match {
- case Result(Passed, s2, d2, fm2, t) if d1+d2 >= maxDiscardedTests =>
- () => Result(Exhausted, s1+s2, d1+d2, fm1++fm2, t)
- case Result(st, s2, d2, fm2, t) =>
- () => Result(st, s1+s2, d1+d2, fm1++fm2, t)
+ def mergeResults(r1: () => Result, r2: () => Result) = {
+ val Result(st1, s1, d1, fm1, _) = r1()
+ val Result(st2, s2, d2, fm2, _) = r2()
+ if (st1 != Passed && st1 != Exhausted)
+ () => Result(st1, s1+s2, d1+d2, fm1++fm2, 0)
+ else if (st2 != Passed && st2 != Exhausted)
+ () => Result(st2, s1+s2, d1+d2, fm1++fm2, 0)
+ else {
+ if (s1+s2 >= minSuccessfulTests && maxDiscardRatio*(s1+s2) >= (d1+d2))
+ () => Result(Passed, s1+s2, d1+d2, fm1++fm2, 0)
+ else
+ () => Result(Exhausted, s1+s2, d1+d2, fm1++fm2, 0)
}
- case r => () => r
}
val start = System.currentTimeMillis
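With this change every evaluation, passed or discarded, advances the generation size, and sizes are interleaved across workers instead of each worker owning a contiguous block. A small sketch that just reproduces the arithmetic of the worker loop above (all values illustrative):

    // Same formulas as the worker loop above, evaluated by hand.
    val minSize = 0
    val maxSize = 100
    val minSuccessfulTests = 100
    val workers = 4

    val iterations = math.ceil(minSuccessfulTests / (workers: Double)) // 25 tests per worker
    val sizeStep = (maxSize - minSize) / (iterations * workers)        // 1.0

    // Worker 2, after 10 passed and 3 discarded evaluations:
    val size = (minSize: Double) + sizeStep * (2 + workers * (10 + 3))
    assert(size.round.toInt == 54)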
@@ -237,78 +266,4 @@ object Test {
(name,res)
}
-
- // Deprecated methods //
-
- /** Default testing parameters
- * @deprecated Use <code>Test.Params()</code> instead */
- @deprecated("Use Test.Params() instead", "1.8")
- val defaultParams = Params()
-
- /** Property evaluation callback. Takes number of passed and
- * discarded tests, respectively */
- @deprecated("(v1.8)", "1.8")
- type PropEvalCallback = (Int,Int) => Unit
-
- /** Property evaluation callback. Takes property name, and number of passed
- * and discarded tests, respectively */
- @deprecated("(v1.8)", "1.8")
- type NamedPropEvalCallback = (String,Int,Int) => Unit
-
- /** Test callback. Takes property name, and test results. */
- @deprecated("(v1.8)", "1.8")
- type TestResCallback = (String,Result) => Unit
-
- /** @deprecated (v1.8) Use <code>check(prms.copy(testCallback = myCallback), p)</code> instead. */
- @deprecated("Use check(prms.copy(testCallback = myCallback), p) instead", "1.8")
- def check(prms: Params, p: Prop, propCallb: PropEvalCallback): Result = {
- val testCallback = new TestCallback {
- override def onPropEval(n: String, t: Int, s: Int, d: Int) = propCallb(s,d)
- }
- check(prms copy (testCallback = testCallback), p)
- }
-
- /** Tests a property and prints results to the console. The
- * <code>maxDiscarded</code> parameter specifies how many
- * discarded tests that should be allowed before ScalaCheck
- * @deprecated (v1.8) Use <code>check(Params(maxDiscardedTests = n, testCallback = ConsoleReporter()), p)</code> instead. */
- @deprecated("Use check(Params(maxDiscardedTests = n, testCallback = ConsoleReporter()), p) instead.", "1.8")
- def check(p: Prop, maxDiscarded: Int): Result =
- check(Params(maxDiscardedTests = maxDiscarded, testCallback = ConsoleReporter()), p)
-
- /** Tests a property and prints results to the console
- * @deprecated (v1.8) Use <code>check(Params(testCallback = ConsoleReporter()), p)</code> instead. */
- @deprecated("Use check(Params(testCallback = ConsoleReporter()), p) instead.", "1.8")
- def check(p: Prop): Result = check(Params(testCallback = ConsoleReporter()), p)
-
- /** Tests all properties with the given testing parameters, and returns
- * the test results. <code>f</code> is a function which is called each
- * time a property is evaluted. <code>g</code> is a function called each
- * time a property has been fully tested.
- * @deprecated (v1.8) Use <code>checkProperties(prms.copy(testCallback = myCallback), ps)</code> instead. */
- @deprecated("Use checkProperties(prms.copy(testCallback = myCallback), ps) instead.", "1.8")
- def checkProperties(ps: Properties, prms: Params,
- propCallb: NamedPropEvalCallback, testCallb: TestResCallback
- ): Seq[(String,Result)] = {
- val testCallback = new TestCallback {
- override def onPropEval(n: String, t: Int, s: Int, d: Int) = propCallb(n,s,d)
- override def onTestResult(n: String, r: Result) = testCallb(n,r)
- }
- checkProperties(prms copy (testCallback = testCallback), ps)
- }
-
- /** Tests all properties with the given testing parameters, and returns
- * the test results.
- * @deprecated (v1.8) Use checkProperties(prms, ps) instead */
- @deprecated("Use checkProperties(prms, ps) instead", "1.8")
- def checkProperties(ps: Properties, prms: Params): Seq[(String,Result)] =
- checkProperties(ps, prms, (n,s,d) => (), (n,s) => ())
-
- /** Tests all properties with default testing parameters, and returns
- * the test results. The results are also printed on the console during
- * testing.
- * @deprecated (v1.8) Use <code>checkProperties(Params(), ps)</code> instead. */
- @deprecated("Use checkProperties(Params(), ps) instead.", "1.8")
- def checkProperties(ps: Properties): Seq[(String,Result)] =
- checkProperties(Params(), ps)
}
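The removed overloads all funnel into the two remaining entry points, with reporting moved onto Params.testCallback. A hedged migration sketch following the replacements named in the deprecation messages (myCallback and run are illustrative names):

    import org.scalacheck.{ConsoleReporter, Prop, Properties, Test}
    import org.scalacheck.Test.TestCallback

    val myCallback = new TestCallback {
      override def onTestResult(name: String, res: Test.Result) =
        println(name + ": " + res.status)
    }

    def run(p: Prop, ps: Properties, prms: Test.Params) = {
      // check(p)                 -> check(Params(testCallback = ConsoleReporter()), p)
      Test.check(Test.Params(testCallback = ConsoleReporter()), p)
      // check(prms, p, callback) -> check(prms.copy(testCallback = ...), p)
      Test.check(prms.copy(testCallback = myCallback), p)
      // checkProperties(ps)      -> checkProperties(Params(), ps)
      Test.checkProperties(Test.Params(), ps)
    }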
diff --git a/src/scalacheck/org/scalacheck/util/Buildable.scala b/src/scalacheck/org/scalacheck/util/Buildable.scala
index 5c960c3ba8..221b8a61c3 100644
--- a/src/scalacheck/org/scalacheck/util/Buildable.scala
+++ b/src/scalacheck/org/scalacheck/util/Buildable.scala
@@ -5,7 +5,7 @@
** **
** This software is released under the terms of the Revised BSD License. **
** There is NO WARRANTY. See the file LICENSE for the full text. **
-\*-------------------------------------------------------------------------*/
+\*------------------------------------------------------------------------ */
package org.scalacheck.util
@@ -31,7 +31,7 @@ object Buildable {
def builder = (new mutable.ListBuffer[T]).mapResult(_.toStream)
}
- implicit def buildableArray[T](implicit t: ClassTag[T]) =
+ implicit def buildableArray[T](implicit cm: ClassTag[T]) =
new Buildable[T,Array] {
def builder = mutable.ArrayBuilder.make[T]
}
diff --git a/src/scalacheck/org/scalacheck/util/CmdLineParser.scala b/src/scalacheck/org/scalacheck/util/CmdLineParser.scala
index a63e4ba10e..16ac1940b2 100644
--- a/src/scalacheck/org/scalacheck/util/CmdLineParser.scala
+++ b/src/scalacheck/org/scalacheck/util/CmdLineParser.scala
@@ -5,7 +5,7 @@
** **
** This software is released under the terms of the Revised BSD License. **
** There is NO WARRANTY. See the file LICENSE for the full text. **
-\*-------------------------------------------------------------------------*/
+\*------------------------------------------------------------------------ */
package org.scalacheck.util
@@ -26,6 +26,7 @@ trait CmdLineParser extends Parsers {
}
trait Flag extends Opt[Unit]
trait IntOpt extends Opt[Int]
+ trait FloatOpt extends Opt[Float]
trait StrOpt extends Opt[String]
class OptMap {
@@ -68,11 +69,17 @@ trait CmdLineParser extends Parsers {
case s if s != null && s.length > 0 && s.forall(_.isDigit) => s.toInt
})
+ private val floatVal: Parser[Float] = accept("float", {
+ case s if s != null && s.matches("[0987654321]+\\.?[0987654321]*")
+ => s.toFloat
+ })
+
private case class OptVal[T](o: Opt[T], v: T)
private val optVal: Parser[OptVal[Any]] = opt into {
case o: Flag => success(OptVal(o, ()))
case o: IntOpt => intVal ^^ (v => OptVal(o, v))
+ case o: FloatOpt => floatVal ^^ (v => OptVal(o, v))
case o: StrOpt => strVal ^^ (v => OptVal(o, v))
}
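The new floatVal parser (needed for the maxDiscardRatio option in Test.scala above) accepts a sign-less, digit-first number with an optional fractional part. A few checks of that pattern in isolation, simply restating the regular expression from the hunk above:

    // The pattern from floatVal above, tested on its own.
    val floatPattern = "[0987654321]+\\.?[0987654321]*"
    assert("5".matches(floatPattern))     // plain integers parse as floats
    assert("2.5".matches(floatPattern))
    assert("7.".matches(floatPattern))    // trailing dot is allowed
    assert(!".5".matches(floatPattern))   // must start with a digit
    assert(!"-1".matches(floatPattern))   // no sign accepted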
diff --git a/src/scalacheck/org/scalacheck/util/FreqMap.scala b/src/scalacheck/org/scalacheck/util/FreqMap.scala
index 902c148d67..c7474d3b87 100644
--- a/src/scalacheck/org/scalacheck/util/FreqMap.scala
+++ b/src/scalacheck/org/scalacheck/util/FreqMap.scala
@@ -5,7 +5,7 @@
** **
** This software is released under the terms of the Revised BSD License. **
** There is NO WARRANTY. See the file LICENSE for the full text. **
-\*-------------------------------------------------------------------------*/
+\*------------------------------------------------------------------------ */
package org.scalacheck.util
diff --git a/src/scalacheck/org/scalacheck/util/StdRand.scala b/src/scalacheck/org/scalacheck/util/StdRand.scala
index 4cc83a4172..317b0ccd10 100644
--- a/src/scalacheck/org/scalacheck/util/StdRand.scala
+++ b/src/scalacheck/org/scalacheck/util/StdRand.scala
@@ -5,7 +5,7 @@
** **
** This software is released under the terms of the Revised BSD License. **
** There is NO WARRANTY. See the file LICENSE for the full text. **
-\*-------------------------------------------------------------------------*/
+\*------------------------------------------------------------------------ */
package org.scalacheck.util