Diffstat (limited to 'src')
 src/actors/scala/actors/Actor.scala | 8
 src/actors/scala/actors/Debug.scala | 9
 src/actors/scala/actors/Reactor.scala | 22
 src/actors/scala/actors/ReactorTask.scala | 1
 src/actors/scala/actors/ReplyReactor.scala | 8
 src/compiler/scala/tools/nsc/Global.scala | 16
 src/compiler/scala/tools/nsc/Main.scala | 1
 src/compiler/scala/tools/nsc/Settings.scala | 31
 src/compiler/scala/tools/nsc/ast/Trees.scala | 5
 src/compiler/scala/tools/nsc/ast/parser/Parsers.scala | 1
 src/compiler/scala/tools/nsc/ast/parser/Scanners.scala | 2
 src/compiler/scala/tools/nsc/backend/icode/GenICode.scala | 13
 src/compiler/scala/tools/nsc/backend/opt/Inliners.scala | 27
 src/compiler/scala/tools/nsc/dependencies/Changes.scala | 14
 src/compiler/scala/tools/nsc/dependencies/DependencyAnalysis.scala | 40
 src/compiler/scala/tools/nsc/interactive/RefinedBuildManager.scala | 44
 src/compiler/scala/tools/nsc/symtab/BaseTypeSeqs.scala | 4
 src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala | 13
 src/compiler/scala/tools/nsc/symtab/Symbols.scala | 16
 src/compiler/scala/tools/nsc/symtab/Types.scala | 212
 src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala | 12
 src/compiler/scala/tools/nsc/transform/Erasure.scala | 4
 src/compiler/scala/tools/nsc/transform/LazyVals.scala | 4
 src/compiler/scala/tools/nsc/transform/UnCurry.scala | 16
 src/compiler/scala/tools/nsc/typechecker/Analyzer.scala | 9
 src/compiler/scala/tools/nsc/typechecker/Implicits.scala | 342
 src/compiler/scala/tools/nsc/typechecker/Infer.scala | 141
 src/compiler/scala/tools/nsc/typechecker/Namers.scala | 9
 src/compiler/scala/tools/nsc/typechecker/RefChecks.scala | 4
 src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala | 22
 src/compiler/scala/tools/nsc/typechecker/Typers.scala | 156
 src/compiler/scala/tools/nsc/typechecker/Unapplies.scala | 4
 src/compiler/scala/tools/nsc/util/Statistics.scala | 253
 src/library/scala/Predef.scala | 6
 src/library/scala/collection/IndexedSeqViewLike.scala | 70
 src/library/scala/collection/IterableViewLike.scala | 28
 src/library/scala/collection/SeqViewLike.scala | 26
 src/library/scala/collection/TraversableViewLike.scala | 23
 src/library/scala/collection/immutable/StringOps.scala | 2
 src/library/scala/collection/mutable/ArrayBuffer.scala | 2
 src/library/scala/collection/mutable/IndexedSeqView.scala | 5
 src/library/scala/collection/mutable/MultiMap.scala | 4
 src/library/scala/collection/views/Transformed.scala | 128
 src/library/scala/ref/PhantomReference.scala | 8
 src/library/scala/ref/ReferenceQueue.scala | 23
 src/library/scala/ref/ReferenceWrapper.scala | 7
 src/library/scala/ref/SoftReference.scala | 11
 src/library/scala/ref/WeakReference.scala | 11
 src/library/scala/xml/factory/XMLLoader.scala | 4
 src/library/scala/xml/parsing/MarkupParser.scala | 2
 src/partest/scala/tools/partest/PartestTask.scala | 2
 51 files changed, 1198 insertions(+), 627 deletions(-)
diff --git a/src/actors/scala/actors/Actor.scala b/src/actors/scala/actors/Actor.scala
index fb90cb9c46..907389b9f0 100644
--- a/src/actors/scala/actors/Actor.scala
+++ b/src/actors/scala/actors/Actor.scala
@@ -646,6 +646,7 @@ trait Actor extends AbstractActor with ReplyReactor with ReplyableActor {
}
// guarded by lock of this
+ // never throws SuspendActorException
private[actors] override def scheduleActor(f: Any =>? Unit, msg: Any) =
if ((f eq null) && (continuation eq null)) {
// do nothing (timeout is handled instead)
@@ -825,6 +826,13 @@ trait Actor extends AbstractActor with ReplyReactor with ReplyableActor {
resumeActor()
else if (waitingFor ne waitingForNone) {
scheduleActor(continuation, null)
+ /* Here we should not throw a SuspendActorException,
+ since the current method is called from an actor that
+ is in the process of exiting.
+
+ Therefore, the contract for scheduleActor is that
+ it never throws a SuspendActorException.
+ */
}
}
}
diff --git a/src/actors/scala/actors/Debug.scala b/src/actors/scala/actors/Debug.scala
index 481b68d7f4..bad19b8aeb 100644
--- a/src/actors/scala/actors/Debug.scala
+++ b/src/actors/scala/actors/Debug.scala
@@ -27,6 +27,15 @@ object Debug {
def error(s: String) =
if (lev > 0) System.err.println("Error: " + s)
+
+ def doInfo(b: => Unit) =
+ if (lev > 2) b
+
+ def doWarning(b: => Unit) =
+ if (lev > 1) b
+
+ def doError(b: => Unit) =
+ if (lev > 0) b
}
class Debug(tag: String) {
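
The new doInfo, doWarning and doError helpers take their block by name, so a potentially expensive body (such as printing a stack trace) is only evaluated when the corresponding level is enabled. A minimal usage sketch, assuming the Debug object above and an exception in scope:

    // hypothetical call site, in the spirit of the ReactorTask change below
    val ex = new RuntimeException("boom")
    Debug.doInfo { ex.printStackTrace() }   // body runs only when lev > 2
    Debug.info("caught " + ex)              // eager variant: the message string is always built
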
diff --git a/src/actors/scala/actors/Reactor.scala b/src/actors/scala/actors/Reactor.scala
index d641f54eb6..8545b92d1e 100644
--- a/src/actors/scala/actors/Reactor.scala
+++ b/src/actors/scala/actors/Reactor.scala
@@ -108,8 +108,17 @@ trait Reactor extends OutputChannel[Any] {
// assert continuation != null
if (onSameThread)
continuation(item._1)
- else
+ else {
scheduleActor(continuation, item._1)
+ /* Here, we throw a SuspendActorException to avoid
+ terminating this actor when the current ReactorTask
+ is finished.
+
+ The SuspendActorException skips the termination code
+ in ReactorTask.
+ */
+ throw Actor.suspendException
+ }
}
def !(msg: Any) {
@@ -149,7 +158,14 @@ trait Reactor extends OutputChannel[Any] {
// keep going
} else {
waitingFor = handlesMessage
- done = true
+ /* Here, we throw a SuspendActorException to avoid
+ terminating this actor when the current ReactorTask
+ is finished.
+
+ The SuspendActorException skips the termination code
+ in ReactorTask.
+ */
+ throw Actor.suspendException
}
}
} else {
@@ -171,6 +187,8 @@ trait Reactor extends OutputChannel[Any] {
* an actors act method.
*
* assume handler != null
+ *
+ * never throws SuspendActorException
*/
private[actors] def scheduleActor(handler: Any =>? Unit, msg: Any) = {
val fun = () => handler(msg)
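
The comments above describe a control-flow convention: scheduleActor itself never throws, while its callers throw Actor.suspendException so that the enclosing ReactorTask skips its termination code once the continuation has been handed to the scheduler. A minimal sketch of that pattern, with hypothetical simplified names (SuspendTaskException and Task are illustrative, not the actual library classes):

    class SuspendTaskException extends Throwable

    class Task(body: () => Unit) extends Runnable {
      def run() {
        try {
          body()         // may throw SuspendTaskException after scheduling a continuation
          terminated()   // normal termination; skipped when the body suspends
        } catch {
          case _: SuspendTaskException => () // suspended: keep the actor alive
        }
      }
      def terminated() { println("actor terminated") }
    }
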
diff --git a/src/actors/scala/actors/ReactorTask.scala b/src/actors/scala/actors/ReactorTask.scala
index 28e93bbbff..f6ec67e94c 100644
--- a/src/actors/scala/actors/ReactorTask.scala
+++ b/src/actors/scala/actors/ReactorTask.scala
@@ -46,6 +46,7 @@ private[actors] class ReactorTask[T >: Null <: Reactor](var reactor: T, var fun:
case e: Exception =>
Debug.info(reactor+": caught "+e)
+ Debug.doInfo { e.printStackTrace() }
reactor.terminated()
afterExecuting(e)
} finally {
diff --git a/src/actors/scala/actors/ReplyReactor.scala b/src/actors/scala/actors/ReplyReactor.scala
index 4b31369db4..64860f4d38 100644
--- a/src/actors/scala/actors/ReplyReactor.scala
+++ b/src/actors/scala/actors/ReplyReactor.scala
@@ -57,8 +57,11 @@ trait ReplyReactor extends Reactor with ReplyableReactor {
// assert continuation != null
if (onSameThread)
continuation(item._1)
- else
+ else {
scheduleActor(continuation, item._1)
+ // see Reactor.resumeReceiver
+ throw Actor.suspendException
+ }
}
// assume continuation != null
@@ -83,7 +86,8 @@ trait ReplyReactor extends Reactor with ReplyableReactor {
// keep going
} else {
waitingFor = handlesMessage
- done = true
+ // see Reactor.searchMailbox
+ throw Actor.suspendException
}
}
} else {
diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala
index dfa8dcbef4..1128150cb8 100644
--- a/src/compiler/scala/tools/nsc/Global.scala
+++ b/src/compiler/scala/tools/nsc/Global.scala
@@ -673,9 +673,9 @@ class Global(var settings: Settings, var reporter: Reporter) extends SymbolTable
refreshProgress
}
private def refreshProgress =
- if (fileset.size > 0)
- progress((phasec * fileset.size) + unitc,
- (phaseDescriptors.length-1) * fileset.size) // terminal phase not part of the progress display
+ if (compiledFiles.size > 0)
+ progress((phasec * compiledFiles.size) + unitc,
+ (phaseDescriptors.length-1) * compiledFiles.size) // terminal phase not part of the progress display
// ----- finding phases --------------------------------------------
@@ -704,12 +704,12 @@ class Global(var settings: Settings, var reporter: Reporter) extends SymbolTable
// ----------- Units and top-level classes and objects --------
private var unitbuf = new ListBuffer[CompilationUnit]
- private var fileset = new HashSet[AbstractFile]
+ var compiledFiles = new HashSet[AbstractFile]
/** add unit to be compiled in this run */
private def addUnit(unit: CompilationUnit) {
unitbuf += unit
- fileset += unit.source.file
+ compiledFiles += unit.source.file
}
/* An iterator returning all the units being compiled in this run */
@@ -847,12 +847,12 @@ class Global(var settings: Settings, var reporter: Reporter) extends SymbolTable
* to phase "namer".
*/
def compileLate(file: AbstractFile) {
- if (fileset eq null) {
+ if (compiledFiles eq null) {
val msg = "No class file for " + file +
" was found\n(This file cannot be loaded as a source file)"
inform(msg)
throw new FatalError(msg)
- } else if (!(fileset contains file)) {
+ } else if (!(compiledFiles contains file)) {
compileLate(new CompilationUnit(getSourceFile(file)))
}
}
@@ -863,7 +863,7 @@ class Global(var settings: Settings, var reporter: Reporter) extends SymbolTable
def compileLate(unit: CompilationUnit) {
addUnit(unit)
var localPhase = firstPhase.asInstanceOf[GlobalPhase]
- while (localPhase != null && (localPhase.id < globalPhase.id || localPhase.id <= namerPhase.id)/* && !reporter.hasErrors*/) {
+ while (localPhase != null && (localPhase.id < globalPhase.id || localPhase.id < typerPhase.id)/* && !reporter.hasErrors*/) {
val oldSource = reporter.getSource
reporter.withSource(unit.source) {
atPhase(localPhase)(localPhase.applyPhase(unit))
diff --git a/src/compiler/scala/tools/nsc/Main.scala b/src/compiler/scala/tools/nsc/Main.scala
index 315d94d7d4..577439990d 100644
--- a/src/compiler/scala/tools/nsc/Main.scala
+++ b/src/compiler/scala/tools/nsc/Main.scala
@@ -40,6 +40,7 @@ object Main extends AnyRef with EvalLoop {
loop { line =>
val args = line.split(' ').toList
val command = new CompilerCommand(args, new Settings(error), error, true)
+ compiler.reporter.reset
new compiler.Run() compile command.files
}
}
diff --git a/src/compiler/scala/tools/nsc/Settings.scala b/src/compiler/scala/tools/nsc/Settings.scala
index 23ceb37c95..b039a9e90d 100644
--- a/src/compiler/scala/tools/nsc/Settings.scala
+++ b/src/compiler/scala/tools/nsc/Settings.scala
@@ -372,6 +372,37 @@ object Settings {
}
}
}
+
+ /** Return the source file path(s) which correspond to the given
+ * classfile path and SourceFile attribute value, subject to the
+ * condition that source files are arranged in the filesystem
+ * according to Java package layout conventions.
+ *
+ * The given classfile path must be contained in at least one of
+ * the specified output directories. If it is not, this
+ * method returns Nil.
+ *
+ * Note that the source file is not required to exist, so assuming
+ * a valid classfile path this method will always return a list
+ * containing at least one element.
+ *
+ * Note also that if two or more source path elements target the
+ * same output directory, there will be two or more candidate
+ * source file paths.
+ */
+ def srcFilesFor(classFile : AbstractFile, srcPath : String) : List[AbstractFile] = {
+ def isBelow(srcDir: AbstractFile, outDir: AbstractFile) =
+ classFile.path.startsWith(outDir.path)
+
+ singleOutDir match {
+ case Some(d) => Nil
+ case None =>
+ (outputs filter (isBelow _).tuple) match {
+ case Nil => Nil
+ case matches => matches.map(_._1.lookupPath(srcPath, false))
+ }
+ }
+ }
}
// The Setting companion object holds all the factory methods
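
A rough illustration of what srcFilesFor is expected to compute when several output directories are registered (all paths here are hypothetical):

    // suppose outputs maps source directory "a/src" -> output directory "a/build"
    //   classFile.path == "a/build/scala/collection/Seq.class"
    //   srcPath        == "scala/collection/Seq.scala"   (from the classfile's SourceFile attribute)
    // then isBelow selects the ("a/src", "a/build") pair and the candidate returned is
    //   a/src/scala/collection/Seq.scala
    // whether or not that file actually exists on disk.
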
diff --git a/src/compiler/scala/tools/nsc/ast/Trees.scala b/src/compiler/scala/tools/nsc/ast/Trees.scala
index 9adad689f1..c4c9af5294 100644
--- a/src/compiler/scala/tools/nsc/ast/Trees.scala
+++ b/src/compiler/scala/tools/nsc/ast/Trees.scala
@@ -102,9 +102,8 @@ trait Trees {
//def kindingIrrelevant(tp: Type) = (tp eq null) || phase.name == "erasure" || phase.erasedTypes
abstract class Tree extends Product {
- {
- import util.Statistics
- if (Statistics.enabled) nodeCount += 1
+ if (util.Statistics.enabled) {
+ util.Statistics.nodeByType(getClass) += 1
}
val id = nodeCount
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
index a8118242df..a89f4f01c3 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
@@ -2165,6 +2165,7 @@ self =>
atPos(accept(THIS)) {
newLineOptWhenFollowedBy(LBRACE)
var t = Apply(Ident(nme.CONSTRUCTOR), argumentExprs())
+ newLineOptWhenFollowedBy(LBRACE)
while (in.token == LPAREN || in.token == LBRACE) {
t = Apply(t, argumentExprs())
newLineOptWhenFollowedBy(LBRACE)
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
index a9a3ded6b4..be90a835f5 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
@@ -332,7 +332,7 @@ trait Scanners {
nextChar()
if (isIdentifierStart(ch))
charLitOr(getIdentRest)
- else if (isSpecial(ch))
+ else if (isOperatorPart(ch) && (ch != '\\'))
charLitOr(getOperatorRest)
else {
getLitChar()
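
With this change, a quote followed by an operator character (other than backslash) is first scanned as an operator-named symbol literal, with charLitOr falling back to a character literal when a closing quote follows; backslash is excluded so escape sequences keep scanning as character literals. A small sketch, assuming Scala 2 symbol-literal syntax:

    val sym  = '+      // symbol literal with an operator name: Symbol("+")
    val ch   = '+'     // still a character literal, via charLitOr's fallback
    val back = '\\'    // backslash is excluded above, so this remains a character literal
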
diff --git a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala
index 4e6507f9e3..cd690097e8 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala
@@ -1621,8 +1621,9 @@ abstract class GenICode extends SubComponent {
override def equals(other: Any) = f == other;
}
- def duplicateFinalizer(ctx: Context, finalizer: Tree) =
- (new DuplicateLabels(ctx.labels.keySet))(ctx, finalizer)
+ def duplicateFinalizer(boundLabels: collection.Set[Symbol], targetCtx: Context, finalizer: Tree) = {
+ (new DuplicateLabels(boundLabels))(targetCtx, finalizer)
+ }
/**
* The Context class keeps information relative to the current state
@@ -1863,6 +1864,10 @@ abstract class GenICode extends SubComponent {
var tmp: Local = null
val kind = toTypeKind(tree.tpe)
val guardResult = kind != UNIT && mayCleanStack(finalizer)
+ // we need to save bound labels before any code generation is performed on
+ // the current context (otherwise, any new labels in the finalizer that need to
+ // be duplicated would be incorrectly considered bound -- see #2850).
+ val boundLabels: collection.Set[Symbol] = Set.empty ++ labels.keySet
if (guardResult) {
tmp = this.makeLocal(tree.pos, tree.tpe, "tmp")
@@ -1875,11 +1880,11 @@ abstract class GenICode extends SubComponent {
if (guardResult) {
ctx1.bb.emit(STORE_LOCAL(tmp))
- val ctx2 = genLoad(duplicateFinalizer(ctx1, finalizer), ctx1, UNIT)
+ val ctx2 = genLoad(duplicateFinalizer(boundLabels, ctx1, finalizer), ctx1, UNIT)
ctx2.bb.emit(LOAD_LOCAL(tmp))
ctx2
} else
- genLoad(duplicateFinalizer(ctx1, finalizer), ctx1, UNIT)
+ genLoad(duplicateFinalizer(boundLabels, ctx1, finalizer), ctx1, UNIT)
} else ctx
diff --git a/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala b/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala
index 8e77c493dd..4c9a996cec 100644
--- a/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala
+++ b/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala
@@ -278,10 +278,13 @@ abstract class Inliners extends SubComponent {
assert(pending.isEmpty, "Pending NEW elements: " + pending)
}
+ /** The current iclass */
+ private var currentIClazz: IClass = _
+
def analyzeClass(cls: IClass): Unit = if (settings.inline.value) {
if (settings.debug.value)
log("Analyzing " + cls);
-
+ this.currentIClazz = cls
cls.methods filterNot (_.symbol.isConstructor) foreach analyzeMethod
}
@@ -311,6 +314,12 @@ abstract class Inliners extends SubComponent {
if (!retry) {
i match {
case CALL_METHOD(msym, Dynamic) =>
+ def warnNoInline(reason: String) = {
+ if (msym.hasAnnotation(ScalaInlineAttr))
+ currentIClazz.cunit.warning(i.pos,
+ "Could not inline required method %s because %s.".format(msym.originalName.decode, reason))
+ }
+
val receiver = info.stack.types.drop(msym.info.paramTypes.length).head match {
case REFERENCE(s) => s;
case _ => NoSymbol;
@@ -361,12 +370,18 @@ abstract class Inliners extends SubComponent {
+ "\n\tinc.code ne null: " + (inc.code ne null) + (if (inc.code ne null)
"\n\tisSafeToInline(m, inc, info.stack): " + isSafeToInline(m, inc, info.stack)
+ "\n\tshouldInline heuristics: " + shouldInline(m, inc) else ""));
+ warnNoInline(
+ if (inc.code eq null) "bytecode was unavailable"
+ else if (!isSafeToInline(m, inc, info.stack)) "it is unsafe (target may reference private fields)"
+ else "a bug (run with -Ylog:inline -Ydebug for more information)")
}
case None =>
+ warnNoInline("bytecode was not available")
if (settings.debug.value)
log("could not find icode\n\treceiver: " + receiver + "\n\tmethod: " + concreteMethod)
}
- }
+ } else
+ warnNoInline(if (icodes.available(receiver)) "it is not final" else "bytecode was not available")
case _ => ();
}
@@ -375,14 +390,6 @@ abstract class Inliners extends SubComponent {
if (tfa.stat) log(m.symbol.fullNameString + " iterations: " + tfa.iterations + " (size: " + m.code.blocks.length + ")")
}} while (retry && count < 15)
m.normalize
-// } catch {
-// case e =>
-// Console.println("############# Caught exception: " + e + " #################");
-// Console.println("\nMethod: " + m +
-// "\nMethod owner: " + m.symbol.owner);
-// e.printStackTrace();
-// m.dump
-// throw e
}
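
warnNoInline only fires for methods carrying the Scala @inline annotation (ScalaInlineAttr), so a failed mandatory inline now surfaces as a compile-time warning instead of being silently dropped. A hedged illustration (class and method names are made up; the wording follows the format string above):

    class Hot {
      @inline final def twice(x: Int) = x * 2
    }
    // Compiling a caller with -optimise when the inliner cannot see the bytecode of
    // `twice` would now warn along the lines of:
    //   Could not inline required method twice because bytecode was not available.
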
diff --git a/src/compiler/scala/tools/nsc/dependencies/Changes.scala b/src/compiler/scala/tools/nsc/dependencies/Changes.scala
index a7953dfb9f..87e38d8909 100644
--- a/src/compiler/scala/tools/nsc/dependencies/Changes.scala
+++ b/src/compiler/scala/tools/nsc/dependencies/Changes.scala
@@ -21,7 +21,8 @@ abstract class Changes {
/** Are the new modifiers more restrictive than the old ones? */
private def moreRestrictive(from: Long, to: Long): Boolean =
((((to & PRIVATE) != 0L) && (from & PRIVATE) == 0L)
- || (((to & PROTECTED) != 0L) && (from & PROTECTED) == 0L))
+ || (((to & PROTECTED) != 0L) && (from & PROTECTED) == 0L)) ||
+ ((from & IMPLICIT) != (to & IMPLICIT))
/** An entity in source code, either a class or a member definition.
* Name is fully-qualified.
@@ -38,6 +39,8 @@ abstract class Changes {
private def sameSymbol(sym1: Symbol, sym2: Symbol): Boolean =
sym1.fullNameString == sym2.fullNameString
+ private def sameFlags(sym1: Symbol, sym2: Symbol): Boolean =
+ sym1.flags == sym2.flags
private def sameType(tp1: Type, tp2: Type) = {
def typeOf(tp: Type): String = tp.toString + "[" + tp.getClass + "]"
@@ -88,9 +91,10 @@ abstract class Changes {
case (MethodType(params1, res1), MethodType(params2, res2)) =>
// new dependent types: probably fix this, use substSym as done for PolyType
(sameTypes(tp1.paramTypes, tp2.paramTypes) &&
- ((tp1.params, tp2.params).zipped forall sameSymbol) &&
- sameType(res1, res2) &&
- tp1.isInstanceOf[ImplicitMethodType] == tp2.isInstanceOf[ImplicitMethodType])
+ ((tp1.params, tp2.params).zipped forall ((t1, t2) =>
+ (sameSymbol(t1, t2) && sameFlags(t1, t2)))) &&
+ sameType(res1, res2) &&
+ tp1.isInstanceOf[ImplicitMethodType] == tp2.isInstanceOf[ImplicitMethodType])
case (PolyType(tparams1, res1), PolyType(tparams2, res2)) =>
sameTypeParams(tparams1, tparams2) && sameType(res1, res2)
@@ -141,7 +145,7 @@ abstract class Changes {
// println("changeSet " + from + "(" + from.info + ")"
// + " vs " + to + "(" + to.info + ")")
- def omitSymbols(s: Symbol): Boolean = !s.hasFlag(Flags.LOCAL | Flags.LIFTED | Flags.PRIVATE)
+ def omitSymbols(s: Symbol): Boolean = !s.hasFlag(LOCAL | LIFTED | PRIVATE)
val cs = new mutable.ListBuffer[Change]
if ((from.info.parents zip to.info.parents) exists { case (t1, t2) => !sameType(t1, t2) })
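
Including the IMPLICIT flag in moreRestrictive, together with the new sameFlags check on parameters, means that API changes which leave all types intact but flip implicitness are now detected. A made-up example of a change this catches:

    // old version of some dependency:
    //   def render(x: Int)(implicit ctx: Context): String
    // new version:
    //   def render(x: Int)(ctx: Context): String
    // The parameter types are unchanged, but call sites relying on the implicit must be
    // recompiled, so the build manager now records a change here.
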
diff --git a/src/compiler/scala/tools/nsc/dependencies/DependencyAnalysis.scala b/src/compiler/scala/tools/nsc/dependencies/DependencyAnalysis.scala
index 32d94d9ee9..5ce0cfdb1c 100644
--- a/src/compiler/scala/tools/nsc/dependencies/DependencyAnalysis.scala
+++ b/src/compiler/scala/tools/nsc/dependencies/DependencyAnalysis.scala
@@ -149,18 +149,52 @@ trait DependencyAnalysis extends SubComponent with Files {
&& (!tree.symbol.isPackage)
&& (!tree.symbol.hasFlag(Flags.JAVA))
&& ((tree.symbol.sourceFile eq null)
- || (tree.symbol.sourceFile.path != file.path))) {
- references += file -> (references(file) + tree.symbol.fullNameString)
+ || (tree.symbol.sourceFile.path != file.path))
+ && (!tree.symbol.isClassConstructor)) {
+ updateReferences(tree.symbol.fullNameString)
}
+
tree match {
case cdef: ClassDef if !cdef.symbol.hasFlag(Flags.PACKAGE) =>
buf += cdef.symbol
+ atPhase(currentRun.erasurePhase.prev) {
+ for (s <- cdef.symbol.info.decls)
+ s match {
+ case ts: TypeSymbol if !ts.isClass =>
+ checkType(s.tpe)
+ case _ =>
+ }
+ }
super.traverse(tree)
- case _ =>
+ case ddef: DefDef =>
+ atPhase(currentRun.typerPhase.prev) {
+ checkType(ddef.symbol.tpe)
+ }
+ super.traverse(tree)
+
+ case _ =>
super.traverse(tree)
}
}
+
+ def checkType(tpe: Type): Unit =
+ tpe match {
+ case t: MethodType =>
+ checkType(t.resultType)
+ for (s <- t.params) checkType(s.tpe)
+
+ case t: TypeRef =>
+ updateReferences(t.typeSymbol.fullNameString)
+ for (tp <- t.args) checkType(tp)
+
+ case t =>
+ updateReferences(t.typeSymbol.fullNameString)
+ }
+
+ def updateReferences(s: String): Unit =
+ references += file -> (references(file) + s)
+
}).apply(unit.body)
definitions(unit.source.file) = buf.toList
diff --git a/src/compiler/scala/tools/nsc/interactive/RefinedBuildManager.scala b/src/compiler/scala/tools/nsc/interactive/RefinedBuildManager.scala
index b445ec245e..5414b53e0c 100644
--- a/src/compiler/scala/tools/nsc/interactive/RefinedBuildManager.scala
+++ b/src/compiler/scala/tools/nsc/interactive/RefinedBuildManager.scala
@@ -40,13 +40,15 @@ class RefinedBuildManager(val settings: Settings) extends Changes with BuildMana
protected def newCompiler(settings: Settings) = new BuilderGlobal(settings)
val compiler = newCompiler(settings)
- import compiler.Symbol
+ import compiler.{Symbol, atPhase, currentRun}
+
+ private case class Symbols(sym: Symbol, symBefErasure: Symbol)
/** Managed source files. */
private val sources: mutable.Set[AbstractFile] = new mutable.HashSet[AbstractFile]
- private val definitions: mutable.Map[AbstractFile, List[Symbol]] =
- new mutable.HashMap[AbstractFile, List[Symbol]] {
+ private val definitions: mutable.Map[AbstractFile, List[Symbols]] =
+ new mutable.HashMap[AbstractFile, List[Symbols]] {
override def default(key: AbstractFile) = Nil
}
@@ -70,7 +72,7 @@ class RefinedBuildManager(val settings: Settings) extends Changes with BuildMana
*/
private def invalidatedByRemove(files: Set[AbstractFile]): Set[AbstractFile] = {
val changes = new mutable.HashMap[Symbol, List[Change]]
- for (f <- files; sym <- definitions(f))
+ for (f <- files; Symbols(sym, _) <- definitions(f))
changes += sym -> List(Removed(Class(sym.fullNameString)))
invalidated(files, changes)
}
@@ -121,10 +123,16 @@ class RefinedBuildManager(val settings: Settings) extends Changes with BuildMana
val syms = defs(src)
for (sym <- syms) {
definitions(src).find(
- s => (s.fullNameString == sym.fullNameString) &&
- isCorrespondingSym(s, sym)) match {
- case Some(oldSym) =>
- changesOf(oldSym) = changeSet(oldSym, sym)
+ s => (s.sym.fullNameString == sym.fullNameString) &&
+ isCorrespondingSym(s.sym, sym)) match {
+ case Some(Symbols(oldSym, oldSymEras)) =>
+ val changes = changeSet(oldSym, sym)
+ val changesErasure =
+ atPhase(currentRun.erasurePhase.prev) {
+ changeSet(oldSymEras, sym)
+ }
+
+ changesOf(oldSym) = (changes ++ changesErasure).removeDuplicates
case _ =>
// a new top level definition
changesOf(sym) =
@@ -134,10 +142,10 @@ class RefinedBuildManager(val settings: Settings) extends Changes with BuildMana
}
}
// Create a change for the top level classes that were removed
- val removed = definitions(src) filterNot ((s: Symbol) =>
- syms.find(_.fullNameString == s.fullNameString) != None)
- for (sym <- removed) {
- changesOf(sym) = List(removeChangeSet(sym))
+ val removed = definitions(src) filterNot ((s:Symbols) =>
+ syms.find(_.fullNameString == (s.sym.fullNameString)) != None)
+ for (s <- removed) {
+ changesOf(s.sym) = List(removeChangeSet(s.sym))
}
}
}
@@ -192,8 +200,8 @@ class RefinedBuildManager(val settings: Settings) extends Changes with BuildMana
println("invalidate " + file + " because " + reason + " [" + change + "]")
buf += file
directDeps -= file
- for (sym <- definitions(file)) // fixes #2557
- newChangesOf(sym) = List(change)
+ for (syms <- definitions(file)) // fixes #2557
+ newChangesOf(syms.sym) = List(change)
break
}
@@ -255,8 +263,8 @@ class RefinedBuildManager(val settings: Settings) extends Changes with BuildMana
for (file <- directDeps) {
breakable {
- for (cls <- definitions(file)) checkParents(cls, file)
- for (cls <- definitions(file)) checkInterface(cls, file)
+ for (cls <- definitions(file)) checkParents(cls.sym, file)
+ for (cls <- definitions(file)) checkInterface(cls.sym, file)
checkReferences(file)
}
}
@@ -270,7 +278,9 @@ class RefinedBuildManager(val settings: Settings) extends Changes with BuildMana
/** Update the map of definitions per source file */
private def updateDefinitions(files: Set[AbstractFile]) {
for (src <- files; val localDefs = compiler.dependencyAnalysis.definitions(src)) {
- definitions(src) = (localDefs map (_.cloneSymbol))
+ definitions(src) = (localDefs map (s => {
+ Symbols(s.cloneSymbol, atPhase(currentRun.erasurePhase.prev) {s.cloneSymbol})
+ }))
}
this.references = compiler.dependencyAnalysis.references
}
diff --git a/src/compiler/scala/tools/nsc/symtab/BaseTypeSeqs.scala b/src/compiler/scala/tools/nsc/symtab/BaseTypeSeqs.scala
index 8fc01a91fb..c83138e9bc 100644
--- a/src/compiler/scala/tools/nsc/symtab/BaseTypeSeqs.scala
+++ b/src/compiler/scala/tools/nsc/symtab/BaseTypeSeqs.scala
@@ -9,6 +9,7 @@ package symtab
import scala.collection.mutable.ListBuffer
import scala.collection.immutable.Map
import math.max
+import util.Statistics._
/** A base type sequence (BaseTypeSeq) is an ordered sequence spanning all the base types
* of a type. It characterized by the following two laws:
@@ -32,6 +33,9 @@ trait BaseTypeSeqs {
class BaseTypeSeq(parents: List[Type], elems: Array[Type]) {
self =>
+ incCounter(baseTypeSeqCount)
+ incCounter(baseTypeSeqLenTotal, elems.length)
+
/** The number of types in the sequence */
def length: Int = elems.length
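
incCounter, startTimer and stopTimer come from the reworked util.Statistics (its 253-line diff is counted in the stat above but not shown in this excerpt). The calling convention visible at the call sites is: bump a counter, or take a start value, do the work, then charge the elapsed time to a timer. A minimal sketch of that convention, assuming the real Statistics only records when enabled:

    // Illustrative only -- mirrors the call sites, not the actual util.Statistics code.
    class Counter { var value = 0 }
    class Timer { var nanos = 0L }

    var enabled = false
    def incCounter(c: Counter, delta: Int = 1) { if (enabled) c.value += delta }
    def startTimer(t: Timer): Long = if (enabled) System.nanoTime() else 0L
    def stopTimer(t: Timer, start: Long) { if (enabled) t.nanos += System.nanoTime() - start }

    val baseTypeSeqCount = new Counter
    val baseTypeSeqLenTotal = new Counter
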
diff --git a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala
index 7e4693c7a1..3daa4e8ac2 100644
--- a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala
+++ b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala
@@ -18,6 +18,8 @@ import scala.tools.nsc.util.{Position, NoPosition, ClassPath, ClassRep, JavaClas
import classfile.ClassfileParser
import Flags._
+import util.Statistics._
+
/** This class ...
*
* @author Martin Odersky
@@ -243,11 +245,18 @@ abstract class SymbolLoaders {
protected def description = "class file "+ classfile.toString
protected def doComplete(root: Symbol) {
+ val start = startTimer(classReadNanos)
classfileParser.parse(classfile, root)
+ stopTimer(classReadNanos, start)
}
+ override protected def sourcefile = classfileParser.srcfile
}
class MSILTypeLoader(typ: MSILType) extends SymbolLoader {
+ private object typeParser extends clr.TypeParser {
+ val global: SymbolLoaders.this.global.type = SymbolLoaders.this.global
+ }
+
protected def description = "MSILType "+ typ.FullName + ", assembly "+ typ.Assembly.FullName
protected def doComplete(root: Symbol) { typeParser.parse(typ, root) }
}
@@ -263,10 +272,6 @@ abstract class SymbolLoaders {
protected def doComplete(root: Symbol) { root.sourceModule.initialize }
}
- private object typeParser extends clr.TypeParser {
- val global: SymbolLoaders.this.global.type = SymbolLoaders.this.global
- }
-
object clrTypes extends clr.CLRTypes {
val global: SymbolLoaders.this.global.type = SymbolLoaders.this.global
if (global.forMSIL) init()
diff --git a/src/compiler/scala/tools/nsc/symtab/Symbols.scala b/src/compiler/scala/tools/nsc/symtab/Symbols.scala
index 91f26b1810..9f22bc54f7 100644
--- a/src/compiler/scala/tools/nsc/symtab/Symbols.scala
+++ b/src/compiler/scala/tools/nsc/symtab/Symbols.scala
@@ -10,8 +10,9 @@ package symtab
import scala.collection.mutable.ListBuffer
import scala.collection.immutable.Map
-import scala.tools.nsc.io.AbstractFile
-import scala.tools.nsc.util.{Position, NoPosition, BatchSourceFile}
+import io.AbstractFile
+import util.{Position, NoPosition, BatchSourceFile}
+import util.Statistics._
import Flags._
//todo: get rid of MONOMORPHIC flag
@@ -21,11 +22,7 @@ trait Symbols {
import definitions._
private var ids = 0
-
- //for statistics:
- def symbolCount = ids
- var typeSymbolCount = 0
- var classSymbolCount = 0
+ def symbolCount = ids // statistics
val emptySymbolArray = new Array[Symbol](0)
val emptySymbolSet = Set.empty[Symbol]
@@ -1872,7 +1869,7 @@ trait Symbols {
def cloneSymbolImpl(owner: Symbol): Symbol =
new TypeSymbol(owner, pos, name)
- if (util.Statistics.enabled) typeSymbolCount = typeSymbolCount + 1
+ incCounter(typeSymbolCount)
}
/** A class for type parameters viewed from inside their scopes
@@ -2009,7 +2006,7 @@ trait Symbols {
override def children: Set[Symbol] = childSet
override def addChild(sym: Symbol) { childSet = childSet + sym }
- if (util.Statistics.enabled) classSymbolCount = classSymbolCount + 1
+ incCounter(classSymbolCount)
}
/** A class for module class symbols
@@ -2025,6 +2022,7 @@ trait Symbols {
setSourceModule(module)
}
override def sourceModule = module
+ lazy val implicitMembers = info.implicitMembers
def setSourceModule(module: Symbol) { this.module = module }
}
diff --git a/src/compiler/scala/tools/nsc/symtab/Types.scala b/src/compiler/scala/tools/nsc/symtab/Types.scala
index d87a30f847..be537010f6 100644
--- a/src/compiler/scala/tools/nsc/symtab/Types.scala
+++ b/src/compiler/scala/tools/nsc/symtab/Types.scala
@@ -9,10 +9,12 @@ package symtab
import scala.collection.immutable
import scala.collection.mutable.{ListBuffer, HashMap, WeakHashMap}
-import scala.tools.nsc.ast.TreeGen
-import scala.tools.nsc.util.{HashSet, Position, NoPosition}
+import ast.TreeGen
+import util.{HashSet, Position, NoPosition}
+import util.Statistics._
import Flags._
+
/* A standard type pattern match:
case ErrorType =>
// internal: error
@@ -64,16 +66,7 @@ trait Types {
//statistics
- var singletonBaseTypeSeqCount = 0
- var compoundBaseTypeSeqCount = 0
- var typerefBaseTypeSeqCount = 0
- var findMemberCount = 0
- var noMemberCount = 0
- var multMemberCount = 0
- var findMemberNanos = 0l
- var subtypeCount = 0
- var sametypeCount = 0
- var subtypeNanos = 0l
+ def uniqueTypeCount = if (uniques == null) 0 else uniques.size
private var explainSwitch = false
@@ -499,9 +492,13 @@ trait Types {
*/
def asSeenFrom(pre: Type, clazz: Symbol): Type =
if (!isTrivial && (!phase.erasedTypes || pre.typeSymbol == ArrayClass)) {
+ incCounter(asSeenFromCount)
+ val start = startTimer(asSeenFromNanos)
val m = new AsSeenFromMap(pre.normalize, clazz)
val tp = m apply this
- existentialAbstraction(m.capturedParams, tp)
+ val result = existentialAbstraction(m.capturedParams, tp)
+ stopTimer(asSeenFromNanos, start)
+ result
} else this
/** The info of `sym', seen as a member of this type.
@@ -597,31 +594,36 @@ trait Types {
/** Is this type a subtype of that type? */
def <:<(that: Type): Boolean = {
-// val startTime = if (util.Statistics.enabled) System.nanoTime() else 0l
-// val result =
- ((this eq that) ||
- (if (explainSwitch) explain("<:", isSubType, this, that)
- else isSubType(this, that, AnyDepth)))
-// if (util.Statistics.enabled) {
-// subtypeNanos += System.nanoTime() - startTime
-// subtypeCount += 1
-// }
-// result
+ if (util.Statistics.enabled) stat_<:<(that)
+ else
+ (this eq that) ||
+ (if (explainSwitch) explain("<:", isSubType, this, that)
+ else isSubType(this, that, AnyDepth))
+ }
+
+ def stat_<:<(that: Type): Boolean = {
+ incCounter(subtypeCount)
+ val start = startTimer(subtypeNanos)
+ val result =
+ (this eq that) ||
+ (if (explainSwitch) explain("<:", isSubType, this, that)
+ else isSubType(this, that, AnyDepth))
+ stopTimer(subtypeNanos, start)
+ result
}
/** Is this type a weak subtype of that type? True also for numeric types, i.e. Int weak_<:< Long.
*/
- def weak_<:<(that: Type): Boolean =
-// val startTime = if (util.Statistics.enabled) System.nanoTime() else 0l
-// val result =
+ def weak_<:<(that: Type): Boolean = {
+ incCounter(subtypeCount)
+ val start = startTimer(subtypeNanos)
+ val result =
((this eq that) ||
(if (explainSwitch) explain("weak_<:", isWeakSubType, this, that)
else isWeakSubType(this, that)))
-// if (util.Statistics.enabled) {
-// subtypeNanos += System.nanoTime() - startTime
-// subtypeCount += 1
-// }
-// result
+ stopTimer(subtypeNanos, start)
+ result
+ }
/** Is this type equivalent to that type? */
def =:=(that: Type): Boolean = (
@@ -785,15 +787,17 @@ trait Types {
// See (t0851) for a situation where this happens.
if (!this.isGround)
return typeVarToOriginMap(this).findMember(name, excludedFlags, requiredFlags, stableOnly)
- if (util.Statistics.enabled) findMemberCount += 1
-// val startTime = if (util.Statistics.enabled) System.nanoTime() else 0l
+
+ incCounter(findMemberCount)
+ val start = startTimer(findMemberNanos)
//Console.println("find member " + name.decode + " in " + this + ":" + this.baseClasses)//DEBUG
var members: Scope = null
var member: Symbol = NoSymbol
var excluded = excludedFlags | DEFERRED
- var self: Type = null
var continue = true
+ lazy val self: Type = this.narrow
+ lazy val membertpe = self.memberType(member)
while (continue) {
continue = false
val bcs0 = baseClasses
@@ -812,29 +816,27 @@ trait Types {
sym.getFlag(PRIVATE | LOCAL) != (PRIVATE | LOCAL).toLong ||
(bcs0.head.hasTransOwner(bcs.head)))) {
if (name.isTypeName || stableOnly && sym.isStable) {
-// if (util.Statistics.enabled) findMemberNanos += System.nanoTime() - startTime
+ stopTimer(findMemberNanos, start)
return sym
} else if (member == NoSymbol) {
member = sym
} else if (members eq null) {
+// val start = startTimer(timer1)
if (member.name != sym.name ||
!(member == sym ||
member.owner != sym.owner &&
- !sym.hasFlag(PRIVATE) && {
- if (self eq null) self = this.narrow;
- (self.memberType(member) matches self.memberType(sym))
- })) {
+ !sym.hasFlag(PRIVATE) &&
+ (membertpe matches self.memberType(sym)))) {
members = new Scope(List(member, sym))
}
+// stopTimer(timer1, start)
} else {
var prevEntry = members.lookupEntry(sym.name)
while ((prevEntry ne null) &&
!(prevEntry.sym == sym ||
prevEntry.sym.owner != sym.owner &&
- !sym.hasFlag(PRIVATE) && {
- if (self eq null) self = this.narrow;
- (self.memberType(prevEntry.sym) matches self.memberType(sym))
- })) {
+ !sym.hasFlag(PRIVATE) &&
+ (self.memberType(prevEntry.sym) matches self.memberType(sym)))) {
prevEntry = members lookupNextEntry prevEntry
}
if (prevEntry eq null) {
@@ -852,14 +854,13 @@ trait Types {
} // while (!bcs.isEmpty)
excluded = excludedFlags
} // while (continue)
-// if (util.Statistics.enabled) findMemberNanos += System.nanoTime() - startTime
+ stopTimer(findMemberNanos, start)
if (members eq null) {
- if (util.Statistics.enabled) if (member == NoSymbol) noMemberCount += 1;
+ if (member == NoSymbol) incCounter(noMemberCount)
member
} else {
- if (util.Statistics.enabled) multMemberCount += 1;
- //val pre = if (this.typeSymbol.isClass) this.typeSymbol.thisType else this;
- (baseClasses.head.newOverloaded(this, members.toList))
+ incCounter(multMemberCount)
+ baseClasses.head.newOverloaded(this, members.toList)
}
}
@@ -970,7 +971,7 @@ trait Types {
override def isVolatile = underlying.isVolatile
override def widen: Type = underlying.widen
override def baseTypeSeq: BaseTypeSeq = {
- if (util.Statistics.enabled) singletonBaseTypeSeqCount += 1
+ incCounter(singletonBaseTypeSeqCount)
underlying.baseTypeSeq prepend this
}
override def safeToString: String = prefixString + "type"
@@ -1179,8 +1180,7 @@ trait Types {
val bts = copyRefinedType(this.asInstanceOf[RefinedType], parents map varToParam, varToParam mapOver decls).baseTypeSeq
baseTypeSeqCache = bts lateMap paramToVar
} else {
- if (util.Statistics.enabled)
- compoundBaseTypeSeqCount += 1
+ incCounter(compoundBaseTypeSeqCount)
baseTypeSeqCache = undetBaseTypeSeq
baseTypeSeqCache = memo(compoundBaseTypeSeq(this))(_.baseTypeSeq updateHead typeSymbol.tpe)
}
@@ -1711,13 +1711,11 @@ A type's typeSymbol should never be inspected directly.
if (period != currentPeriod) {
baseTypeSeqPeriod = currentPeriod
if (!isValidForBaseClasses(period)) {
- if (util.Statistics.enabled)
- typerefBaseTypeSeqCount += 1
+ incCounter(typerefBaseTypeSeqCount)
baseTypeSeqCache = undetBaseTypeSeq
baseTypeSeqCache =
if (sym.isAbstractType) transform(bounds.hi).baseTypeSeq prepend this
else sym.info.baseTypeSeq map transform
-
}
}
if (baseTypeSeqCache == undetBaseTypeSeq)
@@ -1796,7 +1794,7 @@ A type's typeSymbol should never be inspected directly.
case class MethodType(override val params: List[Symbol],
override val resultType: Type) extends Type {
override val isTrivial: Boolean =
- paramTypes.forall(_.isTrivial) && resultType.isTrivial
+ params.forall(_.tpe.isTrivial) && resultType.isTrivial
//assert(paramTypes forall (pt => !pt.typeSymbol.isImplClass))//DEBUG
override def paramSectionCount: Int = resultType.paramSectionCount + 1
@@ -1910,6 +1908,10 @@ A type's typeSymbol should never be inspected directly.
override def boundSyms: List[Symbol] = quantified
override def prefix = maybeRewrap(underlying.prefix)
override def typeArgs = underlying.typeArgs map maybeRewrap
+ override def params = underlying.params mapConserve { param =>
+ val tpe1 = rewrap(param.tpe)
+ if (tpe1 eq param.tpe) param else param.cloneSymbol.setInfo(tpe1)
+ }
override def paramTypes = underlying.paramTypes map maybeRewrap
override def instantiateTypeParams(formals: List[Symbol], actuals: List[Type]) = {
// maybeRewrap(underlying.instantiateTypeParams(formals, actuals))
@@ -2642,9 +2644,8 @@ A type's typeSymbol should never be inspected directly.
private var uniques: HashSet[AnyRef] = _
private var uniqueRunId = NoRunId
- def uniqueTypeCount = if (uniques == null) 0 else uniques.size // for statistics
-
private def unique[T <: AnyRef](tp: T): T = {
+ incCounter(rawTypeCount)
if (uniqueRunId != currentRunId) {
uniques = new HashSet("uniques", initialUniquesCapacity)
uniqueRunId = currentRunId
@@ -3531,6 +3532,7 @@ A type's typeSymbol should never be inspected directly.
class MissingAliasException extends Exception
val missingAliasException = new MissingAliasException
+ class MissingTypeException extends Exception
object adaptToNewRunMap extends TypeMap {
private def adaptToNewRun(pre: Type, sym: Symbol): Symbol = {
@@ -3542,7 +3544,8 @@ A type's typeSymbol should never be inspected directly.
var rebind0 = pre.findMember(sym.name, BRIDGE, 0, true)
if (rebind0 == NoSymbol) {
if (sym.isAliasType) throw missingAliasException
- assert(false, pre+"."+sym+" does no longer exist, phase = "+phase)
+ throw new MissingTypeException // For build manager purposes
+ //assert(false, pre+"."+sym+" does no longer exist, phase = "+phase)
}
/** The two symbols have the same fully qualified name */
def corresponds(sym1: Symbol, sym2: Symbol): Boolean =
@@ -3588,6 +3591,8 @@ A type's typeSymbol should never be inspected directly.
} catch {
case ex: MissingAliasException =>
apply(tp.dealias)
+ case _: MissingTypeException =>
+ NoType
}
}
case MethodType(params, restp) =>
@@ -3757,7 +3762,7 @@ A type's typeSymbol should never be inspected directly.
/** Do `tp1' and `tp2' denote equivalent types?
*/
def isSameType(tp1: Type, tp2: Type): Boolean = try {
- sametypeCount += 1
+ incCounter(sametypeCount)
subsametypeRecursions += 1
undoLog undoUnless {
isSameType0(tp1, tp2)
@@ -4149,7 +4154,7 @@ A type's typeSymbol should never be inspected directly.
tp1 match {
case MethodType(params1, res1) =>
(params1.length == params2.length &&
- matchingParams(tp1.paramTypes, tp2.paramTypes, tp1.isInstanceOf[JavaMethodType], tp2.isInstanceOf[JavaMethodType]) &&
+ matchingParams(params1, params2, tp1.isInstanceOf[JavaMethodType], tp2.isInstanceOf[JavaMethodType]) &&
(res1 <:< res2) &&
tp1.isInstanceOf[ImplicitMethodType] == tp2.isInstanceOf[ImplicitMethodType])
case _ =>
@@ -4242,13 +4247,71 @@ A type's typeSymbol should never be inspected directly.
}
/** A function implementing `tp1' matches `tp2' */
- def matchesType(tp1: Type, tp2: Type, alwaysMatchSimple: Boolean): Boolean = {
+ final def matchesType(tp1: Type, tp2: Type, alwaysMatchSimple: Boolean): Boolean = {
+ def matchesQuantified(tparams1: List[Symbol], tparams2: List[Symbol], res1: Type, res2: Type): Boolean =
+ tparams1.length == tparams2.length &&
+ matchesType(res1, res2.substSym(tparams2, tparams1), alwaysMatchSimple)
+ def lastTry =
+ tp2 match {
+ case ExistentialType(_, res2) if alwaysMatchSimple =>
+ matchesType(tp1, res2, true)
+ case MethodType(_, _) =>
+ false
+ case PolyType(tparams2, res2) =>
+ tparams2.isEmpty && matchesType(tp1, res2, alwaysMatchSimple)
+ case _ =>
+ alwaysMatchSimple || tp1 =:= tp2
+ }
+ tp1 match {
+ case MethodType(params1, res1) =>
+ tp2 match {
+ case MethodType(params2, res2) =>
+ params1.length == params2.length && // useful pre-screening optimization
+ matchingParams(params1, params2, tp1.isInstanceOf[JavaMethodType], tp2.isInstanceOf[JavaMethodType]) &&
+ matchesType(res1, res2, alwaysMatchSimple) &&
+ tp1.isInstanceOf[ImplicitMethodType] == tp2.isInstanceOf[ImplicitMethodType]
+ case PolyType(List(), res2) =>
+ if (params1.isEmpty) matchesType(res1, res2, alwaysMatchSimple)
+ else matchesType(tp1, res2, alwaysMatchSimple)
+ case ExistentialType(_, res2) =>
+ alwaysMatchSimple && matchesType(tp1, res2, true)
+ case _ =>
+ false
+ }
+ case PolyType(tparams1, res1) =>
+ tp2 match {
+ case PolyType(tparams2, res2) =>
+ matchesQuantified(tparams1, tparams2, res1, res2)
+ case MethodType(List(), res2) if (tparams1.isEmpty) =>
+ matchesType(res1, res2, alwaysMatchSimple)
+ case ExistentialType(_, res2) =>
+ alwaysMatchSimple && matchesType(tp1, res2, true)
+ case _ =>
+ tparams1.isEmpty && matchesType(res1, tp2, alwaysMatchSimple)
+ }
+ case ExistentialType(tparams1, res1) =>
+ tp2 match {
+ case ExistentialType(tparams2, res2) =>
+ matchesQuantified(tparams1, tparams2, res1, res2)
+ case _ =>
+ if (alwaysMatchSimple) matchesType(res1, tp2, true)
+ else lastTry
+ }
+ case _ =>
+ lastTry
+ }
+ }
+
+/** matchesType above is an optimized version of the following implementation:
+
+ def matchesType2(tp1: Type, tp2: Type, alwaysMatchSimple: Boolean): Boolean = {
def matchesQuantified(tparams1: List[Symbol], tparams2: List[Symbol], res1: Type, res2: Type): Boolean =
tparams1.length == tparams2.length &&
matchesType(res1, res2.substSym(tparams2, tparams1), alwaysMatchSimple)
(tp1, tp2) match {
case (MethodType(params1, res1), MethodType(params2, res2)) =>
- matchingParams(tp1.paramTypes, tp2.paramTypes, tp1.isInstanceOf[JavaMethodType], tp2.isInstanceOf[JavaMethodType]) &&
+ params1.length == params2.length && // useful pre-screening optimization
+ matchingParams(params1, params2, tp1.isInstanceOf[JavaMethodType], tp2.isInstanceOf[JavaMethodType]) &&
matchesType(res1, res2, alwaysMatchSimple) &&
tp1.isInstanceOf[ImplicitMethodType] == tp2.isInstanceOf[ImplicitMethodType]
case (PolyType(tparams1, res1), PolyType(tparams2, res2)) =>
@@ -4275,14 +4338,25 @@ A type's typeSymbol should never be inspected directly.
alwaysMatchSimple || tp1 =:= tp2
}
}
+*/
- /** Are `tps1' and `tps2' lists of pairwise equivalent types? */
- private def matchingParams(tps1: List[Type], tps2: List[Type], tps1isJava: Boolean, tps2isJava: Boolean): Boolean =
- (tps1.length == tps2.length) &&
- ((tps1, tps2).zipped forall ((tp1, tp2) =>
- (tp1 =:= tp2) ||
- tps1isJava && tp2.typeSymbol == ObjectClass && tp1.typeSymbol == AnyClass ||
- tps2isJava && tp1.typeSymbol == ObjectClass && tp2.typeSymbol == AnyClass))
+ /** Are `syms1' and `syms2' parameter lists with pairwise equivalent types? */
+ private def matchingParams(syms1: List[Symbol], syms2: List[Symbol], syms1isJava: Boolean, syms2isJava: Boolean): Boolean = syms1 match {
+ case Nil =>
+ syms2.isEmpty
+ case sym1 :: rest1 =>
+ syms2 match {
+ case Nil =>
+ false
+ case sym2 :: rest2 =>
+ val tp1 = sym1.tpe
+ val tp2 = sym2.tpe
+ (tp1 =:= tp2 ||
+ syms1isJava && tp2.typeSymbol == ObjectClass && tp1.typeSymbol == AnyClass ||
+ syms2isJava && tp1.typeSymbol == ObjectClass && tp2.typeSymbol == AnyClass) &&
+ matchingParams(rest1, rest2, syms1isJava, syms2isJava)
+ }
+ }
/** like map2, but returns list `xs' itself - instead of a copy - if function
* `f' maps all elements to themselves.
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
index da4e0aaa49..5c4679625b 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
@@ -8,7 +8,7 @@ package scala.tools.nsc
package symtab
package classfile
-import java.io.IOException
+import java.io.{ File, IOException }
import java.lang.Integer.toHexString
import scala.collection.immutable.{Map, ListMap}
@@ -41,6 +41,9 @@ abstract class ClassfileParser {
protected var busy: Option[Symbol] = None // lock to detect recursive reads
private var externalName: Name = _ // JVM name of the current class
protected var classTParams = Map[Name,Symbol]()
+ protected var srcfile0 : Option[AbstractFile] = None
+
+ def srcfile = srcfile0
private object metaParser extends MetaParser {
val global: ClassfileParser.this.global.type = ClassfileParser.this.global
@@ -813,6 +816,13 @@ abstract class ClassfileParser {
case nme.ExceptionsATTR if (!isScala) =>
parseExceptions(attrLen)
+ case nme.SourceFileATTR =>
+ val srcfileLeaf = pool.getName(in.nextChar).toString.trim
+ val srcpath = sym.enclosingPackage match {
+ case NoSymbol => srcfileLeaf
+ case pkg => pkg.fullNameString(File.separatorChar)+File.separator+srcfileLeaf
+ }
+ srcfile0 = settings.outputDirs.srcFilesFor(in.file, srcpath).find(_.exists)
case _ =>
in.skip(attrLen)
}
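
The JVM SourceFile attribute only stores the leaf file name, so the code above rebuilds a package-relative path before asking outputDirs.srcFilesFor for an existing source file. A worked example with hypothetical values:

    // while parsing scala/collection/Seq.class:
    //   srcfileLeaf                            == "Seq.scala"
    //   sym.enclosingPackage                   == package scala.collection
    //   pkg.fullNameString(File.separatorChar) == "scala/collection"   (on Unix)
    //   srcpath                                == "scala/collection/Seq.scala"
    // srcFilesFor resolves srcpath against each matching output/source mapping, and
    // the first candidate that exists becomes srcfile0.
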
diff --git a/src/compiler/scala/tools/nsc/transform/Erasure.scala b/src/compiler/scala/tools/nsc/transform/Erasure.scala
index 1fa81c8776..de9dadbd1f 100644
--- a/src/compiler/scala/tools/nsc/transform/Erasure.scala
+++ b/src/compiler/scala/tools/nsc/transform/Erasure.scala
@@ -497,7 +497,7 @@ abstract class Erasure extends AddInterfaces with typechecker.Analyzer with ast.
tree
else if (isValueClass(tree.tpe.typeSymbol) && !isValueClass(pt.typeSymbol))
adaptToType(box(tree), pt)
- else if (tree.tpe.isInstanceOf[MethodType] && tree.tpe.paramTypes.isEmpty) {
+ else if (tree.tpe.isInstanceOf[MethodType] && tree.tpe.params.isEmpty) {
if (!tree.symbol.isStable) assert(false, "adapt "+tree+":"+tree.tpe+" to "+pt)
adaptToType(Apply(tree, List()) setPos tree.pos setType tree.tpe.resultType, pt)
} else if (pt <:< tree.tpe)
@@ -597,7 +597,7 @@ abstract class Erasure extends AddInterfaces with typechecker.Analyzer with ast.
if (isValueClass(tree.symbol.owner) && !isValueClass(qual1.tpe.typeSymbol))
tree.symbol = NoSymbol
- else if (qual1.tpe.isInstanceOf[MethodType] && qual1.tpe.paramTypes.isEmpty) {
+ else if (qual1.tpe.isInstanceOf[MethodType] && qual1.tpe.params.isEmpty) {
assert(qual1.symbol.isStable, qual1.symbol);
qual1 = Apply(qual1, List()) setPos qual1.pos setType qual1.tpe.resultType
} else if (!(qual1.isInstanceOf[Super] || (qual1.tpe.typeSymbol isSubClass tree.symbol.owner))) {
diff --git a/src/compiler/scala/tools/nsc/transform/LazyVals.scala b/src/compiler/scala/tools/nsc/transform/LazyVals.scala
index b2f9489480..03339163a1 100644
--- a/src/compiler/scala/tools/nsc/transform/LazyVals.scala
+++ b/src/compiler/scala/tools/nsc/transform/LazyVals.scala
@@ -94,7 +94,9 @@ abstract class LazyVals extends Transform with ast.TreeDSL {
}
val bmps = bitmaps(methSym) map (ValDef(_, ZERO))
- def isMatch(params: List[Ident]) = (params.tail, methSym.tpe.paramTypes).zipped forall (_.tpe == _)
+
+ // Martin to Iulian: Don't we need to compare lengths here?
+ def isMatch(params: List[Ident]) = (params.tail, methSym.tpe.params).zipped forall (_.tpe == _.tpe)
if (bmps.isEmpty) rhs else rhs match {
case Block(assign, l @ LabelDef(name, params, rhs1))
diff --git a/src/compiler/scala/tools/nsc/transform/UnCurry.scala b/src/compiler/scala/tools/nsc/transform/UnCurry.scala
index 8ea0b69049..fd5dd0f9a3 100644
--- a/src/compiler/scala/tools/nsc/transform/UnCurry.scala
+++ b/src/compiler/scala/tools/nsc/transform/UnCurry.scala
@@ -61,7 +61,7 @@ abstract class UnCurry extends InfoTransform with TypingTransformers {
tp match {
case MethodType(params, MethodType(params1, restpe)) =>
apply(MethodType(params ::: params1, restpe))
- case MethodType(formals, ExistentialType(tparams, restpe @ MethodType(_, _))) =>
+ case MethodType(params, ExistentialType(tparams, restpe @ MethodType(_, _))) =>
assert(false, "unexpected curried method types with intervening exitential")
tp0
case mt: ImplicitMethodType =>
@@ -189,8 +189,8 @@ abstract class UnCurry extends InfoTransform with TypingTransformers {
* additional parameter sections of a case class are skipped.
*/
def uncurryTreeType(tp: Type): Type = tp match {
- case MethodType(formals, MethodType(formals1, restpe)) if (inPattern) =>
- uncurryTreeType(MethodType(formals, restpe))
+ case MethodType(params, MethodType(params1, restpe)) if (inPattern) =>
+ uncurryTreeType(MethodType(params, restpe))
case _ =>
uncurry(tp)
}
@@ -451,13 +451,15 @@ abstract class UnCurry extends InfoTransform with TypingTransformers {
if (isJava &&
suffix.tpe.typeSymbol == ArrayClass &&
isValueClass(suffix.tpe.typeArgs.head.typeSymbol) &&
- fun.tpe.paramTypes.last.typeSymbol == ArrayClass &&
- fun.tpe.paramTypes.last.typeArgs.head.typeSymbol == ObjectClass)
+ { val lastFormal2 = fun.tpe.params.last.tpe
+ lastFormal2.typeSymbol == ArrayClass &&
+ lastFormal2.typeArgs.head.typeSymbol == ObjectClass
+ })
suffix = localTyper.typedPos(pos) {
gen.mkRuntimeCall("toObjectArray", List(suffix))
}
}
- args.take(formals.length - 1) ::: List(suffix setType formals.last)
+ args.take(formals.length - 1) ::: List(suffix setType lastFormal)
case _ =>
args
}
@@ -591,7 +593,7 @@ abstract class UnCurry extends InfoTransform with TypingTransformers {
transform(treeCopy.Apply(tree, fn, List(liftTree(args.head))))
} else {
withNeedLift(true) {
- val formals = fn.tpe.paramTypes;
+ val formals = fn.tpe.paramTypes
treeCopy.Apply(tree, transform(fn), transformTrees(transformArgs(tree.pos, fn.symbol, args, formals)))
}
}
diff --git a/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala b/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala
index ead20f1cb8..519ad1b0bf 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala
@@ -7,6 +7,8 @@
package scala.tools.nsc
package typechecker
+import util.Statistics._
+
/** The main attribution phase.
*/
trait Analyzer extends AnyRef
@@ -63,8 +65,6 @@ trait Analyzer extends AnyRef
}
}
- var typerTime = 0L
-
object typerFactory extends SubComponent {
val global: Analyzer.this.global.type = Analyzer.this.global
val phaseName = "typer"
@@ -73,10 +73,9 @@ trait Analyzer extends AnyRef
def newPhase(_prev: Phase): StdPhase = new StdPhase(_prev) {
resetTyper()
override def run {
- val start = if (util.Statistics.enabled) System.nanoTime() else 0L
+ val start = startTimer(typerNanos)
currentRun.units foreach applyPhase
- if (util.Statistics.enabled)
- typerTime += System.nanoTime() - start
+ stopTimer(typerNanos, start)
}
def apply(unit: CompilationUnit) {
try {
diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
index eea5be32b7..9657cea101 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
@@ -14,6 +14,7 @@ package typechecker
import scala.collection.mutable.{LinkedHashMap, ListBuffer}
import scala.tools.nsc.util.{ HashSet, Position, Set, NoPosition, SourceFile }
import symtab.Flags._
+import util.Statistics._
/** This trait provides methods to find various kinds of implicits.
*
@@ -28,16 +29,6 @@ self: Analyzer =>
def traceImplicits = printTypings
- var implicitTime = 0L
- var inscopeSucceed = 0L
- var inscopeFail = 0L
- var oftypeSucceed = 0L
- var oftypeFail = 0L
- var manifSucceed = 0L
- var manifFail = 0L
- var hits = 0
- var misses = 0
-
/** Search for an implicit value. See the comment on `result` at the end of class `ImplicitSearch`
* for more info how the search is conducted.
* @param tree The tree for which the implicit needs to be inserted.
@@ -52,10 +43,18 @@ self: Analyzer =>
* @return A search result
*/
def inferImplicit(tree: Tree, pt: Type, reportAmbiguous: Boolean, isView: Boolean, context: Context): SearchResult = {
+ val rawTypeStart = startCounter(rawTypeImpl)
+ val findMemberStart = startCounter(findMemberImpl)
+ val subtypeStart = startCounter(subtypeImpl)
+ val start = startTimer(implicitNanos)
if (traceImplicits && !tree.isEmpty && !context.undetparams.isEmpty)
println("typing implicit with undetermined type params: "+context.undetparams+"\n"+tree)
val result = new ImplicitSearch(tree, pt, isView, context.makeImplicit(reportAmbiguous)).bestImplicit
context.undetparams = context.undetparams filterNot (result.subst.from contains _)
+ stopTimer(implicitNanos, start)
+ stopCounter(rawTypeImpl, rawTypeStart)
+ stopCounter(findMemberImpl, findMemberStart)
+ stopCounter(subtypeImpl, subtypeStart)
result
}
@@ -103,9 +102,14 @@ self: Analyzer =>
/** Does type `tp` contain an Error type as parameter or result?
*/
private def containsError(tp: Type): Boolean = tp match {
- case PolyType(tparams, restpe) => containsError(restpe)
- case MethodType(params, restpe) => (params map (_.tpe) exists (_.isError)) || containsError(restpe)
- case _ => tp.isError
+ case PolyType(tparams, restpe) =>
+ containsError(restpe)
+ case MethodType(params, restpe) =>
+ for (p <- params)
+ if (p.tpe.isError) return true
+ containsError(restpe)
+ case _ =>
+ tp.isError
}
def isCyclicOrErroneous = try {
@@ -214,10 +218,12 @@ self: Analyzer =>
/** Is implicit info `info1` better than implicit info `info2`?
*/
- def improves(info1: ImplicitInfo, info2: ImplicitInfo) =
+ def improves(info1: ImplicitInfo, info2: ImplicitInfo) = {
+ incCounter(improvesCount)
(info2 == NoImplicitInfo) ||
(info1 != NoImplicitInfo) &&
isStrictlyMoreSpecific(info1.tpe, info2.tpe, info1.sym, info2.sym)
+ }
/** Map all type params in given list to WildcardType
* @param tp The type in which to do the mapping
@@ -282,7 +288,7 @@ self: Analyzer =>
overlaps(dtor1, dted1) && (dtor1 =:= dted1 || complexity(dtor1) > complexity(dted1))
}
- if (util.Statistics.enabled) implcnt += 1
+ incCounter(implicitSearchCount)
/** Issues an error signalling ambiguous implicits */
private def ambiguousImplicitError(info1: ImplicitInfo, info2: ImplicitInfo,
@@ -307,6 +313,12 @@ self: Analyzer =>
/** The type parameters to instantiate */
val undetParams = if (isView) List() else context.outer.undetparams
+ /** Replace undetParams in type `tp` by Any/Nothing, according to variance */
+ def approximate(tp: Type) =
+ tp.instantiateTypeParams(undetParams, undetParams map (_ => WildcardType))
+
+ val wildPt = approximate(pt)
+
/** Try to construct a typed tree from given implicit info with given
* expected type.
* Detect infinite search trees for implicits.
@@ -353,48 +365,56 @@ self: Analyzer =>
case _ => tp.isStable
}
- /** Replace undetParams in type `tp` by Any/Nothing, according to variance */
- def approximate(tp: Type) =
- tp.instantiateTypeParams(undetParams, undetParams map (_ => WildcardType))
-
- /** Instantiated `pt' so that undetermined type parameters are replaced by wildcards
- */
- val wildPt = approximate(pt)
-
/** Does type `tp' match expected type `pt'
* This is the case if either `pt' is a unary function type with a
* HasMethodMatching type as result, and `tp' is a unary function
* or method type whose result type has a method whose name and type
* correspond to the HasMethodMatching type,
* or otherwise if `tp' is compatible with `pt'.
+ * This method is performance critical: 5-8% of typechecking time.
*/
def matchesPt(tp: Type, pt: Type, undet: List[Symbol]) = {
- isCompatible(tp, pt) ||
- isView && {
+ val start = startTimer(matchesPtNanos)
+ val result = normSubType(tp, pt) || isView && {
pt match {
case Function1(arg, res) =>
- normalize(tp) match {
- case Function1(arg1, res1) =>
- (arg.deconst weak_<:< arg1) && {
- res match {
- case HasMethodMatching(name, argtpes, restpe) =>
- (res1.member(name) filter (m =>
- isApplicableSafe(undet, m.tpe, argtpes, restpe))) != NoSymbol
- case _ =>
- res1 <:< res
- }
- }
- case _ => false
- }
- case _ => false
+ matchesPtView(tp, arg, res, undet)
+ case _ =>
+ false
}
}
+ stopTimer(matchesPtNanos, start)
+ result
}
- //if (traceImplicits) println("typed impl for "+wildPt+"? "+info.name+":"+depoly(info.tpe)+"/"+undetParams+"/"+isPlausiblyCompatible(info.tpe, wildPt)+"/"+matchesPt(depoly(info.tpe), wildPt, List()))
- if (isPlausiblyCompatible(info.tpe, wildPt) &&
- matchesPt(depoly(info.tpe), wildPt, List()) &&
- isStable(info.pre)) {
+ def matchesPtView(tp: Type, ptarg: Type, ptres: Type, undet: List[Symbol]): Boolean = tp match {
+ case MethodType(params, restpe) =>
+ if (tp.isInstanceOf[ImplicitMethodType]) matchesPtView(restpe, ptarg, ptres, undet)
+ else params.length == 1 && matchesArgRes(params.head.tpe, restpe, ptarg, ptres, undet)
+ case ExistentialType(tparams, qtpe) =>
+ matchesPtView(normalize(tp), ptarg, ptres, undet)
+ case Function1(arg1, res1) =>
+ matchesArgRes(arg1, res1, ptarg, ptres, undet)
+ case _ => false
+ }
+
+ def matchesArgRes(tparg: Type, tpres: Type, ptarg: Type, ptres: Type, undet: List[Symbol]): Boolean =
+ (ptarg weak_<:< tparg) && {
+ ptres match {
+ case HasMethodMatching(name, argtpes, restpe) =>
+ (tpres.member(name) filter (m =>
+ isApplicableSafe(undet, m.tpe, argtpes, restpe))) != NoSymbol
+ case _ =>
+ tpres <:< ptres
+ }
+ }
+
+ incCounter(plausiblyCompatibleImplicits)
+
+ //if (traceImplicits) println("typed impl for "+wildPt+"? "+info.name+":"+depoly(info.tpe)+"/"+undetParams+"/"+isPlausiblyCompatible(info.tpe, wildPt)+"/"+matchesPt(depoly(info.tpe), wildPt, List())+"/"+info.pre+"/"+isStable(info.pre))
+ if (matchesPt(depoly(info.tpe), wildPt, List()) && isStable(info.pre)) {
+
+ incCounter(matchingImplicits)
val itree = atPos(tree.pos.focus) {
if (info.pre == NoPrefix) Ident(info.name)
@@ -417,6 +437,8 @@ self: Analyzer =>
else
typed1(itree, EXPRmode, wildPt)
+ incCounter(typedImplicits)
+
if (traceImplicits) println("typed implicit "+itree1+":"+itree1.tpe+", pt = "+wildPt)
val itree2 = if (isView) (itree1: @unchecked) match { case Apply(fun, _) => fun }
else adapt(itree1, EXPRmode, wildPt)
@@ -451,6 +473,7 @@ self: Analyzer =>
subst traverse itree2
val result = new SearchResult(itree2, subst)
+ incCounter(foundImplicits)
if (traceImplicits) println("RESULT = "+result)
// println("RESULT = "+itree+"///"+itree1+"///"+itree2)//DEBUG
result
@@ -517,9 +540,19 @@ self: Analyzer =>
isLocal: Boolean,
invalidImplicits: ListBuffer[Symbol]): Map[ImplicitInfo, SearchResult] = {
+ val start = startCounter(subtypeAppInfos)
+
/** A set containing names that are shadowed by implicit infos */
lazy val shadowed = new HashSet[Name]("shadowed", 512)
+ /** Is `sym' the standard conforms method in Predef?
+ * Note: DON't replace this by sym == Predef_conforms, as Predef_conforms is a `def'
+ * which does a member lookup (it can't be a lazy val because we might reload Predef
+ * during resident compilations).
+ */
+ def isConformsMethod(sym: Symbol) =
+ sym.name == nme.conforms && sym.owner == PredefModule.moduleClass
+
/** Try implicit `info` to see whether it is applicable for expected type `pt`.
* This is the case if all of the following hold:
* - the info's type is not erroneous,
@@ -528,18 +561,23 @@ self: Analyzer =>
* - the result of typedImplicit is non-empty.
* @return A search result with an attributed tree containing the implicit if succeeded,
* SearchFailure if not.
+ * @note Extreme hotspot!
*/
- def tryImplicit(info: ImplicitInfo): SearchResult =
+ def tryImplicit(info: ImplicitInfo): SearchResult = {
+ incCounter(triedImplicits)
if (info.isCyclicOrErroneous ||
(isLocal && shadowed.contains(info.name)) ||
- (isView && (info.sym == Predef_identity || info.sym == Predef_conforms)) //@M this condition prevents no-op conversions, which are a problem (besides efficiency),
- // TODO: remove `info.sym == Predef_identity` once we have a new STARR that only has conforms as an implicit
+ (isView && isConformsMethod(info.sym)) ||
+ //@M this condition prevents no-op conversions, which are a problem (besides efficiency),
// one example is removeNames in NamesDefaults, which relies on the type checker failing in case of ambiguity between an assignment/named arg
- ) SearchFailure
- else typedImplicit(info)
+ !isPlausiblyCompatible(info.tpe, wildPt))
+ SearchFailure
+ else
+ typedImplicit(info)
+ }
- def appInfos(is: List[ImplicitInfo]): Map[ImplicitInfo, SearchResult] = {
- var applicable = Map[ImplicitInfo, SearchResult]()
+ def addAppInfos(is: List[ImplicitInfo], m: Map[ImplicitInfo, SearchResult]): Map[ImplicitInfo, SearchResult] = {
+ var applicable = m
for (i <- is)
if (!isValid(i.sym)) invalidImplicits += i.sym
else {
@@ -551,7 +589,11 @@ self: Analyzer =>
applicable
}
- (Map[ImplicitInfo, SearchResult]() /: (iss map appInfos))(_ ++ _)
+ var applicable = Map[ImplicitInfo, SearchResult]()
+ for (is <- iss) applicable = addAppInfos(is, applicable)
+
+ stopCounter(subtypeAppInfos, start)
+ applicable
}
/** Search list of implicit info lists for one matching prototype
@@ -580,6 +622,8 @@ self: Analyzer =>
"\n because it comes after the application point and it lacks an explicit result type")
}
+ val start = startCounter(subtypeImprovCount)
+
/** A candidate for best applicable info wrt `improves` */
val best = (NoImplicitInfo /: applicable.keysIterator) (
(best, alt) => if (improves(alt, best)) alt else best)
@@ -589,67 +633,49 @@ self: Analyzer =>
val competing = applicable.keySet dropWhile (alt => best == alt || improves(best, alt))
if (!competing.isEmpty) ambiguousImplicitError(best, competing.head, "both", "and", "")
- // Also check that applicable infos that did not get selected are not
- // in (a companion object of) a subclass of (a companion object of) the class
- // containing the winning info.
- // (no longer needed; rules have changed)
- /*
- for (alt <- applicable.keySet) {
- if (isProperSubClassOrObject(alt.sym.owner, best.sym.owner)) {
- ambiguousImplicitError(best, alt,
- "most specific definition is:",
- "yet alternative definition ",
- "is defined in a subclass.\n Both definitions ")
- }
- }
- */
+ stopCounter(subtypeImprovCount, start)
applicable(best)
}
} // end searchImplicit
- /** The implicits made available directly by class type `tp`.
- * If `tp` refers to class C, these are all implicit members of the companion object of C.
- */
- private def implicitsOfClass(tp: Type): List[ImplicitInfo] = tp match {
- case TypeRef(pre, clazz, _) =>
- clazz.initialize.linkedClassOfClass.info.members.toList.filter(_.hasFlag(IMPLICIT)) map
- (sym => new ImplicitInfo(sym.name, pre.memberType(clazz.linkedModuleOfClass), sym))
- case _ =>
- List()
- }
-
/** The parts of a type form the smallest set of types that contains
* - the type itself
* - the parts of its immediate components (prefix and argument)
* - the parts of its base types
+ * - for alias types and abstract types, we take instead the parts
+ *   of their upper bounds.
+ * @return For those parts that refer to classes with companion objects that
+ * can be accessed with unambiguous stable prefixes, the implicits infos
+ * which are members of these companion objects.
*/
- private def parts(tp: Type): List[Type] = {
- val partMap = new collection.mutable.LinkedHashMap[Symbol, List[Type]]
- /** Add a new type to partMap, unless a subtype of it with the same
- * type symbol exists already.
- */
- def addType(newtp: Type): Boolean = {
- val tsym = newtp.typeSymbol
- partMap.get(tsym) match {
- case Some(ts) =>
- if (ts exists (_ <:< newtp)) false
- else { partMap.put(tsym, newtp :: ts); true }
- case None =>
- partMap.put(tsym, List(newtp)); true
- }
- }
- /** Enter all parts of `tp` into `partMap`
+ private def companionImplicits(tp: Type): List[List[ImplicitInfo]] = {
+
+ val partMap = new LinkedHashMap[Symbol, Type]
+
+ /** Enter all parts of `tp` into `partMap`.
+ * This method is performance critical: about 2-4% of all type checking is spent here
*/
def getParts(tp: Type) {
tp match {
- case TypeRef(pre, sym, args) if (!sym.isPackageClass) =>
- if (sym.isClass && !sym.isRefinementClass && !sym.isAnonymousClass) {
- if (addType(tp)) {
- for (bc <- sym.ancestors)
- getParts(tp.baseType(bc))
- getParts(pre)
- args foreach getParts
- }
+ case TypeRef(pre, sym, args) =>
+ if (sym.isClass) {
+ if (!((sym.name == nme.REFINE_CLASS_NAME.toTypeName) ||
+ (sym.name startsWith nme.ANON_CLASS_NAME) ||
+ (sym.name == nme.ROOT.toTypeName)))
+ partMap get sym match {
+ case Some(pre1) =>
+ if (!(pre =:= pre1)) partMap(sym) = NoType // ambiguous prefix - ignore implicit members
+ case None =>
+ if (pre.isStable) partMap(sym) = pre
+ val bts = tp.baseTypeSeq
+ var i = 1
+ while (i < bts.length) {
+ getParts(bts(i))
+ i += 1
+ }
+ getParts(pre)
+ args foreach getParts
+ }
} else if (sym.isAliasType) {
getParts(tp.normalize)
} else if (sym.isAbstractType) {
@@ -663,23 +689,29 @@ self: Analyzer =>
for (p <- ps) getParts(p)
case AnnotatedType(_, t, _) =>
getParts(t)
- case ExistentialType(tparams, t) =>
+ case ExistentialType(_, t) =>
+ getParts(t)
+ case PolyType(_, t) =>
getParts(t)
case _ =>
}
}
- /** Gives a list of typerefs with the same type symbol,
- * remove all those that have a prefix which is a supertype
- * of some other elements's prefix.
- */
- def compactify(ts: List[Type]): List[Type] = ts match {
- case List() => ts
- case (t @ TypeRef(pre, _, _)) :: ts1 =>
- if (ts1 exists (_.prefix <:< pre)) compactify(ts1)
- else t :: compactify(ts1 filterNot (pre <:< _.prefix))
- }
+
getParts(tp)
- for ((k, ts) <- partMap.iterator.toList; t <- compactify(ts)) yield t
+ val buf = new ListBuffer[List[ImplicitInfo]]
+ for ((clazz, pre) <- partMap) {
+ if (pre != NoType) {
+ val companion = clazz.linkedModuleOfClass
+ companion.moduleClass match {
+ case mc: ModuleClassSymbol =>
+ buf += (mc.implicitMembers map (im =>
+ new ImplicitInfo(im.name, SingleType(pre, companion), im)))
+ case _ =>
+ }
+ }
+ }
+ //println("companion implicits of "+tp+" = "+buf.toList) // DEBUG
+ buf.toList
}
/** The implicits made available by type `pt`.
@@ -687,32 +719,18 @@ self: Analyzer =>
* such that some part of `tp` has C as one of its superclasses.
*/
private def implicitsOfExpectedType: List[List[ImplicitInfo]] = implicitsCache get pt match {
- case Some(implicitInfoss) => hits += 1; implicitInfoss
- case None => {
- misses += 1
- val implicitInfoss = parts(pt).iterator.map(implicitsOfClass).toList
+ case Some(implicitInfoss) =>
+ incCounter(implicitCacheHits)
+ implicitInfoss
+ case None =>
+ incCounter(implicitCacheMisses)
+ val start = startTimer(subtypeETNanos)
+ val implicitInfoss = companionImplicits(pt)
+ stopTimer(subtypeETNanos, start)
implicitsCache(pt) = implicitInfoss
if (implicitsCache.size >= sizeLimit)
implicitsCache -= implicitsCache.keysIterator.next
implicitInfoss
- }
- }
-
-
- /** The manifest corresponding to type `pt`, provided `pt` is an instance of Manifest.
- */
- private def implicitManifest(pt: Type): Tree = pt.dealias match {
- case TypeRef(_, FullManifestClass, List(arg)) =>
- manifestOfType(arg, true)
- case TypeRef(_, PartialManifestClass, List(arg)) =>
- manifestOfType(arg, false)
- case TypeRef(_, OptManifestClass, List(arg)) =>
- val itree = manifestOfType(arg, false)
- if (itree == EmptyTree) gen.mkAttributedRef(NoManifest) else itree
- case TypeRef(_, tsym, _) if (tsym.isAbstractType) =>
- implicitManifest(pt.bounds.lo)
- case _ =>
- EmptyTree
}
/** Creates a tree that calls the relevant factory method in object
@@ -804,6 +822,26 @@ self: Analyzer =>
mot(tp)
}
+ def wrapResult(tree: Tree): SearchResult =
+ if (tree == EmptyTree) SearchFailure else new SearchResult(tree, EmptyTreeTypeSubstituter)
+
+ /** The manifest corresponding to type `pt`, provided `pt` is an instance of Manifest.
+ */
+ private def implicitManifestOrOfExpectedType(pt: Type): SearchResult = pt.dealias match {
+ case TypeRef(_, FullManifestClass, List(arg)) =>
+ wrapResult(manifestOfType(arg, true))
+ case TypeRef(_, PartialManifestClass, List(arg)) =>
+ wrapResult(manifestOfType(arg, false))
+ case TypeRef(_, OptManifestClass, List(arg)) =>
+ val itree = manifestOfType(arg, false)
+ wrapResult(if (itree == EmptyTree) gen.mkAttributedRef(NoManifest)
+ else itree)
+ case TypeRef(_, tsym, _) if (tsym.isAbstractType) =>
+ implicitManifestOrOfExpectedType(pt.bounds.lo)
+ case _ =>
+ searchImplicit(implicitsOfExpectedType, false)
+ }
+
/** The result of the implicit search:
* First search implicits visible in current context.
* If that fails, search implicits in expected type `pt`.
@@ -811,24 +849,34 @@ self: Analyzer =>
* If all fails return SearchFailure
*/
def bestImplicit: SearchResult = {
- val start = System.nanoTime()
+ val failstart = startTimer(inscopeFailNanos)
+ val succstart = startTimer(inscopeSucceedNanos)
+
var result = searchImplicit(context.implicitss, true)
- val timer1 = System.nanoTime()
- if (result == SearchFailure) inscopeFail += timer1 - start else inscopeSucceed += timer1 - start
- if (result == SearchFailure)
- result = searchImplicit(implicitsOfExpectedType, false)
- val timer2 = System.nanoTime()
- if (result == SearchFailure) oftypeFail += timer2 - timer1 else oftypeSucceed += timer2 - timer1
if (result == SearchFailure) {
- val resultTree = implicitManifest(pt)
- if (resultTree != EmptyTree) result = new SearchResult(resultTree, EmptyTreeTypeSubstituter)
+ stopTimer(inscopeFailNanos, failstart)
+ } else {
+ stopTimer(inscopeSucceedNanos, succstart)
+ incCounter(inscopeImplicitHits)
}
- val timer3 = System.nanoTime()
- if (result == SearchFailure) manifFail += timer3 - timer2 else manifSucceed += timer3 - timer2
+ if (result == SearchFailure) {
+ val failstart = startTimer(oftypeFailNanos)
+ val succstart = startTimer(oftypeSucceedNanos)
+
+ result = implicitManifestOrOfExpectedType(pt)
+
+ if (result == SearchFailure) {
+ stopTimer(oftypeFailNanos, failstart)
+ } else {
+ stopTimer(oftypeSucceedNanos, succstart)
+ incCounter(oftypeImplicitHits)
+ }
+ }
+
if (result == SearchFailure && settings.debug.value)
- log("no implicits found for "+pt+" "+pt.typeSymbol.info.baseClasses+" "+parts(pt)+implicitsOfExpectedType)
- implicitTime += System.nanoTime() - start
+ log("no implicits found for "+pt+" "+pt.typeSymbol.info.baseClasses+" "+implicitsOfExpectedType)
+
result
}
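
For illustration, a minimal, self-contained sketch of the success/failure timing pattern that bestImplicit now follows: both timers are started up front and only the one matching the outcome is stopped, so each timer accumulates exactly the time attributable to that outcome. Timer, startTimer and stopTimer below are simplified stand-ins for the helpers added to util.Statistics later in this diff, and timedSearch is a hypothetical wrapper, not compiler API.

    object TimingSketch {
      class Timer { var nanos = 0L }
      def startTimer(t: Timer): Long = System.nanoTime()
      def stopTimer(t: Timer, start: Long) { t.nanos += System.nanoTime() - start }

      val failNanos    = new Timer
      val succeedNanos = new Timer

      // Charge the elapsed time to exactly one timer, depending on the outcome.
      def timedSearch[T](search: => Option[T]): Option[T] = {
        val failStart = startTimer(failNanos)
        val succStart = startTimer(succeedNanos)
        val result    = search
        if (result.isEmpty) stopTimer(failNanos, failStart)
        else stopTimer(succeedNanos, succStart)
        result
      }
    }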
diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala
index 76ff6734d4..5c6788f0f6 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala
@@ -21,11 +21,6 @@ trait Infer {
import global._
import definitions._
- // statistics
- var normM = 0
- var normP = 0
- var normO = 0
-
private final val inferInfo = false //@MDEBUG
/* -- Type parameter inference utility functions --------------------------- */
@@ -33,7 +28,8 @@ trait Infer {
private def assertNonCyclic(tvar: TypeVar) =
assert(tvar.constr.inst != tvar, tvar.origin)
- def isVarArgs(formals: List[Type]) = !formals.isEmpty && isRepeatedParamType(formals.last)
+ def isVarArgs(params: List[Symbol]) = !params.isEmpty && isRepeatedParamType(params.last.tpe)
+ def isVarArgTpes(formals: List[Type]) = !formals.isEmpty && isRepeatedParamType(formals.last)
def isWildcard(tp: Type) = tp match {
case WildcardType | BoundedWildcardType(_) => true
@@ -53,11 +49,11 @@ trait Infer {
/** This variant allows keeping ByName parameters. Used in NamesDefaults. */
def formalTypes(formals: List[Type], nargs: Int, removeByName: Boolean): List[Type] = {
- val formals1 = if (removeByName) formals map {
+ val formals1 = if (removeByName) formals mapConserve {
case TypeRef(_, sym, List(arg)) if (sym == ByNameParamClass) => arg
case formal => formal
} else formals
- if (isVarArgs(formals1)) {
+ if (isVarArgTpes(formals1)) {
val ft = formals1.last.normalize.typeArgs.head
formals1.init ::: (for (i <- List.range(formals1.length - 1, nargs)) yield ft)
} else formals1
@@ -82,9 +78,11 @@ trait Infer {
//todo: remove comments around following privates; right now they cause an IllegalAccess
// error when built with scalac
- /*private*/ class NoInstance(msg: String) extends RuntimeException(msg) with ControlException
+ /*private*/
+ class NoInstance(msg: String) extends RuntimeException(msg) with ControlException
- /*private*/ class DeferredNoInstance(getmsg: () => String) extends NoInstance("") {
+ /*private*/
+ class DeferredNoInstance(getmsg: () => String) extends NoInstance("") {
override def getMessage(): String = getmsg()
}
@@ -181,18 +179,17 @@ trait Infer {
* A method type becomes the corresponding function type.
* A nullary method type becomes its result type.
* Implicit parameters are skipped.
+ * This method seems to be performance critical.
*/
- def normalize(tp: Type): Type = skipImplicit(tp) match {
+ def normalize(tp: Type): Type = tp match {
case MethodType(params, restpe) if (!restpe.isDependent) =>
- if (util.Statistics.enabled) normM += 1
- functionType(params map (_.tpe), normalize(restpe))
+ if (tp.isInstanceOf[ImplicitMethodType]) normalize(restpe)
+ else functionType(params map (_.tpe), normalize(restpe))
case PolyType(List(), restpe) => // nullary method type
- if (util.Statistics.enabled) normP += 1
normalize(restpe)
case ExistentialType(tparams, qtpe) =>
ExistentialType(tparams, normalize(qtpe))
case tp1 =>
- if (util.Statistics.enabled) normO += 1
tp1 // @MAT aliases already handled by subtyping
}
@@ -401,32 +398,51 @@ trait Infer {
isPlausiblyCompatible(mt.resultType, pt)
case ExistentialType(tparams, qtpe) =>
isPlausiblyCompatible(qtpe, pt)
- case MethodType(params, _) =>
- val formals = tp.paramTypes
- pt.normalize match {
+ case MethodType(params, restpe) =>
+ if (tp.isInstanceOf[ImplicitMethodType]) isPlausiblyCompatible(restpe, pt)
+ else pt match {
case TypeRef(pre, sym, args) =>
- !sym.isClass || {
+ if (sym.isAliasType) {
+ isPlausiblyCompatible(tp, pt.dealias)
+ } else if (sym.isAbstractType) {
+ isPlausiblyCompatible(tp, pt.bounds.lo)
+ } else {
val l = args.length - 1
- l == formals.length &&
- sym == FunctionClass(l) &&
- ((args, formals).zipped forall isPlausiblySubType) &&
- isPlausiblySubType(tp.resultApprox, args.last)
+ l == params.length &&
+ sym == FunctionClass(l) && {
+ var curargs = args
+ var curparams = params
+ while (curparams.nonEmpty) {
+ if (!isPlausiblySubType(curargs.head, curparams.head.tpe))
+ return false
+ curargs = curargs.tail
+ curparams = curparams.tail
+ }
+ isPlausiblySubType(restpe, curargs.head)
+ }
}
case _ =>
- true
+ false
}
case _ =>
- true
+ isPlausiblySubType(tp, pt)
}
- private def isPlausiblySubType(tp1: Type, tp2: Type): Boolean = tp1.normalize match {
+ private def isPlausiblySubType(tp1: Type, tp2: Type): Boolean = tp1 match {
case TypeRef(_, sym1, _) =>
- !sym1.isClass || {
- tp2.normalize match {
- case TypeRef(_, sym2, _) =>
- !sym2.isClass || (sym1 isSubClass sym2) || isNumericSubType(tp1, tp2)
- case _ => true
- }
+ if (sym1.isAliasType) isPlausiblySubType(tp1.dealias, tp2)
+ else if (!sym1.isClass) true
+ else tp2 match {
+ case TypeRef(_, sym2, _) =>
+ if (sym2.isAliasType) isPlausiblySubType(tp1, tp2.dealias)
+ else if (!sym2.isClass) true
+ else if (sym1 isSubClass sym2) true
+ else
+ isNumericValueClass(sym1) &&
+ isNumericValueClass(sym2) &&
+ (sym1 == sym2 || numericWidth(sym1) < numericWidth(sym2))
+ case _ =>
+ true
}
case _ =>
true
@@ -437,6 +453,41 @@ trait Infer {
(tp1 <:< pt) || isCoercible(tp1, pt)
}
+ final def normSubType(tp: Type, pt: Type): Boolean = tp match {
+ case MethodType(params, restpe) =>
+ if (tp.isInstanceOf[ImplicitMethodType]) normSubType(restpe, pt)
+ else pt match {
+ case TypeRef(pre, sym, args) =>
+ if (sym.isAliasType) {
+ normSubType(tp, pt.dealias)
+ } else if (sym.isAbstractType) {
+ normSubType(tp, pt.bounds.lo)
+ } else {
+ val l = args.length - 1
+ l == params.length &&
+ sym == FunctionClass(l) && {
+ var curargs = args
+ var curparams = params
+ while (curparams.nonEmpty) {
+ if (!(curargs.head <:< curparams.head.tpe))
+ return false
+ curargs = curargs.tail
+ curparams = curparams.tail
+ }
+ normSubType(restpe, curargs.head)
+ }
+ }
+ case _ =>
+ tp <:< pt
+ }
+ case PolyType(List(), restpe) => // nullary method type
+ normSubType(restpe, pt)
+ case ExistentialType(tparams, qtpe) =>
+ normalize(tp) <:< pt
+ case _ =>
+ tp <:< pt
+ }
+
def isCompatibleArg(tp: Type, pt: Type): Boolean = {
val tp1 = normalize(tp)
(tp1 weak_<:< pt) || isCoercible(tp1, pt)
@@ -446,7 +497,7 @@ trait Infer {
pt.typeSymbol == UnitClass || // can perform unit coercion
isCompatible(tp, pt) ||
tp.isInstanceOf[MethodType] && // can perform implicit () instantiation
- tp.paramTypes.length == 0 && isCompatible(tp.resultType, pt)
+ tp.params.isEmpty && isCompatible(tp.resultType, pt)
/** Like weakly compatible but don't apply any implicit conversions yet.
* Used when comparing the result type of a method with its prototype.
@@ -700,8 +751,10 @@ trait Infer {
case OverloadedType(pre, alts) =>
alts exists (alt => hasExactlyNumParams(pre.memberType(alt), n))
case _ =>
- formalTypes(tp.paramTypes, n).length == n
+ val len = tp.params.length
+ len == n || isVarArgs(tp.params) && len <= n + 1
}
+
/**
* Verifies whether the named application is valid. The logic is very
* similar to the one in NamesDefaults.removeNames.
@@ -767,13 +820,13 @@ trait Infer {
case ExistentialType(tparams, qtpe) =>
isApplicable(undetparams, qtpe, argtpes0, pt)
case MethodType(params, _) =>
- def paramType(param: Symbol) = param.tpe match {
- case TypeRef(_, sym, List(tpe)) if sym isNonBottomSubClass CodeClass =>
- tpe
- case tpe =>
- tpe
+ val formals0 = params map { param =>
+ param.tpe match {
+ case TypeRef(_, sym, List(tpe)) if sym isNonBottomSubClass CodeClass => tpe
+ case tpe => tpe
+ }
}
- val formals = formalTypes(params map paramType, argtpes0.length)
+ val formals = formalTypes(formals0, argtpes0.length)
def tryTupleApply: Boolean = {
// if 1 formal, 1 argtpe (a tuple), otherwise unmodified argtpes0
@@ -883,7 +936,7 @@ trait Infer {
isAsSpecific(ftpe1.resultType, ftpe2)
case MethodType(params @ (x :: xs), _) =>
var argtpes = params map (_.tpe)
- if (isVarArgs(argtpes) && isVarArgs(ftpe2.paramTypes))
+ if (isVarArgs(params) && isVarArgs(ftpe2.params))
argtpes = argtpes map (argtpe =>
if (isRepeatedParamType(argtpe)) argtpe.typeArgs.head else argtpe)
isApplicable(List(), ftpe2, argtpes, WildcardType)
@@ -994,7 +1047,7 @@ trait Infer {
def isStrictlyBetter(tpe1: Type, tpe2: Type) = {
def isNullary(tpe: Type): Boolean = tpe match {
case tp: RewrappingTypeProxy => isNullary(tp.underlying)
- case _ => tpe.paramSectionCount == 0 || tpe.paramTypes.isEmpty
+ case _ => tpe.paramSectionCount == 0 || tpe.params.isEmpty
}
def isMethod(tpe: Type): Boolean = tpe match {
case tp: RewrappingTypeProxy => isMethod(tp.underlying)
@@ -1640,7 +1693,7 @@ trait Infer {
isApplicable(undetparams, followApply(pre.memberType(alt)), argtpes, pt))
if (varArgsOnly)
- allApplicable = allApplicable filter (alt => isVarArgs(alt.tpe.paramTypes))
+ allApplicable = allApplicable filter (alt => isVarArgs(alt.tpe.params))
// if there are multiple, drop those that use a default
// (keep those that use vararg / tupling conversion)
@@ -1653,7 +1706,7 @@ trait Infer {
alts map (_.tpe)
case t => List(t)
}
- mtypes.exists(t => t.paramTypes.length < argtpes.length || // tupling (*)
+ mtypes.exists(t => t.params.length < argtpes.length || // tupling (*)
hasExactlyNumParams(t, argtpes.length)) // same nb or vararg
// (*) more arguments than parameters, but still applicable: tupling conversion works.
// todo: should not return "false" when paramTypes = (Unit) and no argument is given
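
The rewritten isPlausiblyCompatible and normSubType above replace `(args, formals).zipped forall ...` with a hand-rolled lock-step walk over the two lists that bails out on the first mismatch. A stand-alone sketch of that walk (the helper name is illustrative, not compiler API):

    @annotation.tailrec
    def pairwiseForall[A, B](xs: List[A], ys: List[B])(p: (A, B) => Boolean): Boolean =
      (xs, ys) match {
        case (x :: xt, y :: yt) => if (p(x, y)) pairwiseForall(xt, yt)(p) else false
        case (Nil, Nil)         => true
        case _                  => false     // the lists have different lengths
      }

    // e.g. check that each string has the length announced by the matching Int
    pairwiseForall(List(1, 2), List("a", "bb"))((n, s) => s.length == n)   // true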
diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala
index 40eb72aaeb..1955348f91 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala
@@ -874,14 +874,15 @@ trait Namers { self: Analyzer =>
}
for (vparams <- vparamss) {
- var pfs = resultPt.paramTypes
+ var pps = resultPt.params
for (vparam <- vparams) {
if (vparam.tpt.isEmpty) {
- vparam.tpt defineType pfs.head
+ val paramtpe = pps.head.tpe
+ vparam.symbol setInfo paramtpe
+ vparam.tpt defineType paramtpe
vparam.tpt setPos vparam.pos.focus
- vparam.symbol setInfo pfs.head
}
- pfs = pfs.tail
+ pps = pps.tail
}
resultPt = resultPt.resultType
}
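
The Namers hunk above walks the expected parameter symbols (`resultPt.params`) in lock-step with the declared parameters, filling in missing type annotations from the expected side. A toy version of that walk, with plain case classes standing in for compiler symbols and trees:

    case class Expected(name: String, tpe: String)
    case class Declared(name: String, var tpe: Option[String])

    // Fill each missing declared type from the corresponding expected parameter.
    def fillInferredTypes(declared: List[Declared], expected: List[Expected]) {
      var exp = expected
      for (d <- declared) {
        if (d.tpe.isEmpty) d.tpe = Some(exp.head.tpe)
        exp = exp.tail
      }
    }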
diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
index 835b6f2024..7b41c7b249 100644
--- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
@@ -740,7 +740,7 @@ abstract class RefChecks extends InfoTransform {
sym = sym.info.bounds.hi.widen.typeSymbol
sym
}
- val formal = underlyingClass(fn.tpe.paramTypes.head)
+ val formal = underlyingClass(fn.tpe.params.head.tpe)
val actual = underlyingClass(args.head.tpe)
val receiver = underlyingClass(qual.tpe)
def nonSensibleWarning(what: String, alwaysEqual: Boolean) =
@@ -948,7 +948,7 @@ abstract class RefChecks extends InfoTransform {
private def isRepeatedParamArg(tree: Tree) = currentApplication match {
case Apply(fn, args) =>
!args.isEmpty && (args.last eq tree) &&
- fn.tpe.paramTypes.length == args.length && isRepeatedParamType(fn.tpe.paramTypes.last)
+ fn.tpe.params.length == args.length && isRepeatedParamType(fn.tpe.params.last.tpe)
case _ =>
false
}
diff --git a/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala b/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala
index d09cd85137..e59b469057 100644
--- a/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala
@@ -42,22 +42,14 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
case Some((_, buf)) => buf
case None => throw new AssertionError("no acc def buf for "+clazz)
}
-/*
- private def transformArgs(args: List[Tree], formals: List[Type]) = {
- if (!formals.isEmpty && formals.last.symbol == definitions.ByNameParamClass)
- ((args take (formals.length - 1) map transform) :::
- withInvalidOwner { args drop (formals.length - 1) map transform })
- else
- args map transform
- }
-*/
- private def transformArgs(args: List[Tree], formals: List[Type]) =
- ((args, formals).zipped map { (arg, formal) =>
- if (formal.typeSymbol == definitions.ByNameParamClass)
- withInvalidOwner { checkPackedConforms(transform(arg), formal.typeArgs.head) }
+
+ private def transformArgs(args: List[Tree], params: List[Symbol]) =
+ ((args, params).zipped map { (arg, param) =>
+ if (param.tpe.typeSymbol == definitions.ByNameParamClass)
+ withInvalidOwner { checkPackedConforms(transform(arg), param.tpe.typeArgs.head) }
else transform(arg)
}) :::
- (args drop formals.length map transform)
+ (args drop params.length map transform)
private def checkPackedConforms(tree: Tree, pt: Type): Tree = {
if (tree.tpe exists (_.typeSymbol.isExistentialSkolem)) {
@@ -223,7 +215,7 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
case Apply(fn, args) =>
assert(fn.tpe != null, tree)
- treeCopy.Apply(tree, transform(fn), transformArgs(args, fn.tpe.paramTypes))
+ treeCopy.Apply(tree, transform(fn), transformArgs(args, fn.tpe.params))
case Function(vparams, body) =>
withInvalidOwner {
treeCopy.Function(tree, vparams, transform(body))
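
transformArgs above now pairs each argument with its parameter symbol, so by-name arguments can be handled specially (under withInvalidOwner) while any arguments beyond the formal parameter list fall through to the plain transform. A simplified model of that pairing, with booleans standing in for the by-name test on a parameter's type:

    // params(i) == true means the i-th formal is by-name; extra args use `normal`.
    def transformAll[A](args: List[A], params: List[Boolean])
                       (byNameCase: A => A, normal: A => A): List[A] =
      (args zip params).map { case (arg, isByName) =>
        if (isByName) byNameCase(arg) else normal(arg)
      } ::: (args drop params.length).map(normal)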
diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
index c43d9c8788..f29f6fa7a3 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
@@ -13,11 +13,12 @@ package typechecker
import scala.collection.mutable.{HashMap, ListBuffer}
import scala.util.control.ControlException
-import scala.compat.Platform.currentTime
import scala.tools.nsc.interactive.RangePositions
import scala.tools.nsc.util.{ Position, Set, NoPosition, SourceFile, BatchSourceFile }
import symtab.Flags._
+import util.Statistics._
+
// Suggestion check whether we can do without priming scopes with symbols of outer scopes,
// like the IDE does.
/** This trait provides methods to assign types to trees.
@@ -29,16 +30,6 @@ trait Typers { self: Analyzer =>
import global._
import definitions._
- var appcnt = 0
- var idcnt = 0
- var selcnt = 0
- var implcnt = 0
- var impltime = 0l
-
- var failedApplies = 0L
- var failedOpEqs = 0L
- var failedSilent = 0L
-
// namer calls typer.computeType(rhs) on DefDef / ValDef when tpt is empty. the result
// is cached here and re-used in typedDefDef / typedValDef
private val transformed = new HashMap[Tree, Tree]
@@ -46,6 +37,8 @@ trait Typers { self: Analyzer =>
// currently not used at all (March 09)
private val superDefs = new HashMap[Symbol, ListBuffer[Tree]]
+ final val shortenImports = false
+
def resetTyper() {
resetContexts
resetNamer()
@@ -545,6 +538,24 @@ trait Typers { self: Analyzer =>
final val xtypes = false
+ /** Is symbol defined and not stale?
+ */
+ def reallyExists(sym: Symbol) = {
+ if (isStale(sym)) sym.setInfo(NoType)
+ sym.exists
+ }
+
+ /** A symbol is stale if it is toplevel, to be loaded from a classfile, and
+ * the classfile is produced from a sourcefile which is compiled in the current run.
+ */
+ def isStale(sym: Symbol): Boolean = {
+ sym.rawInfo.isInstanceOf[loaders.ClassfileLoader] && {
+ sym.rawInfo.load(sym)
+ (sym.sourceFile ne null) &&
+ (currentRun.compiledFiles contains sym.sourceFile)
+ }
+ }
+
/** Does the context of tree <code>tree</code> require a stable type?
*/
private def isStableContext(tree: Tree, mode: Int, pt: Type) =
@@ -674,7 +685,7 @@ trait Typers { self: Analyzer =>
case Select(qual, _) => qual.tpe
case _ => NoPrefix
}
- if (tree.tpe.isInstanceOf[MethodType] && pre.isStable && sym.tpe.paramTypes.isEmpty &&
+ if (tree.tpe.isInstanceOf[MethodType] && pre.isStable && sym.tpe.params.isEmpty &&
(isStableContext(tree, mode, pt) || sym.isModule))
tree.setType(MethodType(List(), singleType(pre, sym)))
else tree
@@ -690,28 +701,35 @@ trait Typers { self: Analyzer =>
}
def silent[T](op: Typer => T): Any /* in fact, TypeError or T */ = {
-// val start = System.nanoTime()
+ val rawTypeStart = startCounter(rawTypeFailed)
+ val findMemberStart = startCounter(findMemberFailed)
+ val subtypeStart = startCounter(subtypeFailed)
+ val failedSilentStart = startTimer(failedSilentNanos)
try {
- if (context.reportGeneralErrors) {
- val context1 = context.makeSilent(context.reportAmbiguousErrors)
- context1.undetparams = context.undetparams
- context1.savedTypeBounds = context.savedTypeBounds
- context1.namedApplyBlockInfo = context.namedApplyBlockInfo
- val typer1 = newTyper(context1)
- val result = op(typer1)
- context.undetparams = context1.undetparams
- context.savedTypeBounds = context1.savedTypeBounds
- context.namedApplyBlockInfo = context1.namedApplyBlockInfo
- result
- } else {
- op(this)
+ if (context.reportGeneralErrors) {
+ val context1 = context.makeSilent(context.reportAmbiguousErrors)
+ context1.undetparams = context.undetparams
+ context1.savedTypeBounds = context.savedTypeBounds
+ context1.namedApplyBlockInfo = context.namedApplyBlockInfo
+ val typer1 = newTyper(context1)
+ val result = op(typer1)
+ context.undetparams = context1.undetparams
+ context.savedTypeBounds = context1.savedTypeBounds
+ context.namedApplyBlockInfo = context1.namedApplyBlockInfo
+ result
+ } else {
+ op(this)
+ }
+ } catch {
+ case ex: CyclicReference => throw ex
+ case ex: TypeError =>
+ stopCounter(rawTypeFailed, rawTypeStart)
+ stopCounter(findMemberFailed, findMemberStart)
+ stopCounter(subtypeFailed, subtypeStart)
+ stopTimer(failedSilentNanos, failedSilentStart)
+ ex
}
- } catch {
- case ex: CyclicReference => throw ex
- case ex: TypeError =>
-// failedSilent += System.nanoTime() - start
- ex
- }}
+ }
/** Utility method: Try op1 on tree. If that gives an error try op2 instead.
*/
@@ -802,7 +820,7 @@ trait Typers { self: Analyzer =>
if (!context.undetparams.isEmpty/* && (mode & POLYmode) == 0 disabled to make implicits in new collection work; we should revisit this. */) { // (9)
// println("adapt IMT: "+(context.undetparams, pt)) //@MDEBUG
context.undetparams = inferExprInstance(
- tree, context.extractUndetparams(), pt, mt.paramTypes exists isManifest)
+ tree, context.extractUndetparams(), pt, mt.params exists (p => isManifest(p.tpe)))
// if we are looking for a manifest, instantiate type to Nothing anyway,
// as we would get ambiguity errors otherwise. Example:
// Looking for a manifest of Nil: This has many potential types,
@@ -895,7 +913,7 @@ trait Typers { self: Analyzer =>
case _ => TypeTree(tree.tpe) setOriginal(tree)
}
} else if ((mode & (PATTERNmode | FUNmode)) == (PATTERNmode | FUNmode)) { // (5)
- val extractor = tree.symbol.filter(sym => unapplyMember(sym.tpe).exists)
+ val extractor = tree.symbol.filter(sym => reallyExists(unapplyMember(sym.tpe)))
if (extractor != NoSymbol) {
tree setSymbol extractor
val unapply = unapplyMember(extractor.tpe)
@@ -1215,7 +1233,8 @@ trait Typers { self: Analyzer =>
if (!(selfType <:< parent.tpe.typeOfThis) &&
!phase.erasedTypes &&
!(context.owner hasFlag SYNTHETIC) && // don't do this check for synthetic concrete classes for virtuals (part of DEVIRTUALIZE)
- !(settings.suppressVTWarn.value))
+ !(settings.suppressVTWarn.value) &&
+ !selfType.isErroneous && !parent.tpe.isErroneous)
{
//Console.println(context.owner);//DEBUG
//Console.println(context.owner.unsafeTypeParams);//DEBUG
@@ -1558,11 +1577,11 @@ trait Typers { self: Analyzer =>
def decompose(call: Tree): (Tree, List[Tree]) = call match {
case Apply(fn, args) =>
val (superConstr, args1) = decompose(fn)
- val formals = fn.tpe.paramTypes
- val args2 = if (formals.isEmpty || !isRepeatedParamType(formals.last)) args
- else args.take(formals.length - 1) ::: List(EmptyTree)
- if (args2.length != formals.length)
- assert(false, "mismatch " + clazz + " " + formals + " " + args2);//debug
+ val params = fn.tpe.params
+ val args2 = if (params.isEmpty || !isRepeatedParamType(params.last.tpe)) args
+ else args.take(params.length - 1) ::: List(EmptyTree)
+ if (args2.length != params.length)
+ assert(false, "mismatch " + clazz + " " + (params map (_.tpe)) + " " + args2);//debug
(superConstr, args1 ::: args2)
case Block(stats, expr) if !stats.isEmpty =>
decompose(stats.last)
@@ -1655,11 +1674,11 @@ trait Typers { self: Analyzer =>
while (cx != NoContext) {
val pre = cx.enclClass.prefix
val defEntry = cx.scope.lookupEntry(name)
- if ((defEntry ne null) && defEntry.sym.exists)
+ if ((defEntry ne null) && reallyExists(defEntry.sym))
return Some(defEntry.sym)
cx = cx.enclClass
- (pre member name filter (sym => sym.exists && context.isAccessible(sym, pre, false))) match {
+ (pre member name filter (sym => reallyExists(sym) && context.isAccessible(sym, pre, false))) match {
case NoSymbol => cx = cx.outer
case other => return Some(other)
}
@@ -1807,8 +1826,6 @@ trait Typers { self: Analyzer =>
transformedOrTyped(ddef.rhs, tpt1.tpe)
}
- checkMethodStructuralCompatible(meth)
-
if (meth.isPrimaryConstructor && meth.isClassConstructor &&
phase.id <= currentRun.typerPhase.id && !reporter.hasErrors)
computeParamAliases(meth.owner, vparamss1, rhs1)
@@ -2158,7 +2175,7 @@ trait Typers { self: Analyzer =>
def typedArgs(args: List[Tree], mode: Int, originalFormals: List[Type], adaptedFormals: List[Type]) = {
def newmode(i: Int) =
- if (isVarArgs(originalFormals) && i >= originalFormals.length - 1) STARmode else 0
+ if (isVarArgTpes(originalFormals) && i >= originalFormals.length - 1) STARmode else 0
for (((arg, formal), i) <- (args zip adaptedFormals).zipWithIndex) yield
typedArg(arg, mode, newmode(i), formal)
@@ -2194,14 +2211,6 @@ trait Typers { self: Analyzer =>
def isNamedApplyBlock(tree: Tree) =
context.namedApplyBlockInfo exists (_._1 == tree)
- /**
- * @param tree ...
- * @param fun0 ...
- * @param args ...
- * @param mode ...
- * @param pt ...
- * @return ...
- */
def doTypedApply(tree: Tree, fun0: Tree, args: List[Tree], mode: Int, pt: Type): Tree = {
var fun = fun0
if (fun.hasSymbol && (fun.symbol hasFlag OVERLOADED)) {
@@ -2262,8 +2271,9 @@ trait Typers { self: Analyzer =>
doTypedApply(tree, adapt(fun, funMode(mode), WildcardType), args1, mode, pt)
case mt @ MethodType(params, _) =>
+ val paramTypes = mt.paramTypes
// repeat vararg as often as needed, remove by-name
- val formals = formalTypes(mt.paramTypes, args.length)
+ val formals = formalTypes(paramTypes, args.length)
/** Try packing all arguments into a Tuple and apply `fun'
* to that. This is the last thing which is tried (after
@@ -2364,7 +2374,7 @@ trait Typers { self: Analyzer =>
} else {
val tparams = context.extractUndetparams()
if (tparams.isEmpty) { // all type params are defined
- val args1 = typedArgs(args, argMode(fun, mode), mt.paramTypes, formals)
+ val args1 = typedArgs(args, argMode(fun, mode), paramTypes, formals)
val restpe = mt.resultType(args1 map (_.tpe)) // instantiate dependent method types
def ifPatternSkipFormals(tp: Type) = tp match {
case MethodType(_, rtp) if ((mode & PATTERNmode) != 0) => rtp
@@ -3243,12 +3253,12 @@ trait Typers { self: Analyzer =>
* insert an implicit conversion.
*/
def tryTypedApply(fun: Tree, args: List[Tree]): Tree = {
- val start = System.nanoTime()
+ val start = startTimer(failedApplyNanos)
silent(_.doTypedApply(tree, fun, args, mode, pt)) match {
case t: Tree =>
t
case ex: TypeError =>
- failedApplies += System.nanoTime() - start
+ stopTimer(failedApplyNanos, start)
def errorInResult(tree: Tree): Boolean = tree.pos == ex.pos || {
tree match {
case Block(_, r) => errorInResult(r)
@@ -3287,11 +3297,12 @@ trait Typers { self: Analyzer =>
typed1(tree, mode & ~PATTERNmode | EXPRmode, pt)
} else {
val funpt = if ((mode & PATTERNmode) != 0) pt else WildcardType
- val start = System.nanoTime()
+ val appStart = startTimer(failedApplyNanos)
+ val opeqStart = startTimer(failedOpEqNanos)
silent(_.typed(fun, funMode(mode), funpt)) match {
case fun1: Tree =>
val fun2 = if (stableApplication) stabilizeFun(fun1, mode, pt) else fun1
- if (util.Statistics.enabled) appcnt += 1
+ incCounter(typedApplyCount)
val res =
if (phase.id <= currentRun.typerPhase.id &&
fun2.isInstanceOf[Select] &&
@@ -3320,14 +3331,15 @@ trait Typers { self: Analyzer =>
else res
*/
case ex: TypeError =>
- failedOpEqs += System.nanoTime() - start
fun match {
case Select(qual, name)
if (mode & PATTERNmode) == 0 && nme.isOpAssignmentName(name.decode) =>
val qual1 = typedQualifier(qual)
if (treeInfo.isVariableOrGetter(qual1)) {
+ stopTimer(failedOpEqNanos, opeqStart)
convertToAssignment(fun, qual1, name, args, ex)
} else {
+ stopTimer(failedApplyNanos, appStart)
if ((qual1.symbol ne null) && qual1.symbol.isValue)
error(tree.pos, "reassignment to val")
else
@@ -3335,6 +3347,7 @@ trait Typers { self: Analyzer =>
setError(tree)
}
case _ =>
+ stopTimer(failedApplyNanos, appStart)
reportTypeError(fun.pos, ex)
setError(tree)
}
@@ -3467,7 +3480,7 @@ trait Typers { self: Analyzer =>
val qual1 = adaptToName(qual, name)
if (qual1 ne qual) return typed(treeCopy.Select(tree, qual1, name), mode, pt)
}
- if (!sym.exists) {
+ if (!reallyExists(sym)) {
if (settings.debug.value) Console.err.println("qual = "+qual+":"+qual.tpe+"\nSymbol="+qual.tpe.termSymbol+"\nsymbol-info = "+qual.tpe.termSymbol.info+"\nscope-id = "+qual.tpe.termSymbol.info.decls.hashCode()+"\nmembers = "+qual.tpe.members+"\nname = "+name+"\nfound = "+sym+"\nowner = "+context.enclClass.owner)
if (!qual.tpe.widen.isErroneous) {
error(tree.pos,
@@ -3534,12 +3547,15 @@ trait Typers { self: Analyzer =>
var pre: Type = NoPrefix // the prefix type of defSym, if a class member
var qual: Tree = EmptyTree // the qualififier tree if transformed tree is a select
- // if we are in a constructor of a pattern, ignore all definitions
+ // A symbol qualifies if it exists and is not stale. Stale symbols
+ // are made to disappear here. In addition,
+ // if we are in a constructor of a pattern, we ignore all definitions
// which are methods (note: if we don't do that
// case x :: xs in class List would return the :: method).
- def qualifies(sym: Symbol): Boolean =
- sym.exists &&
+ def qualifies(sym: Symbol): Boolean = {
+ reallyExists(sym) &&
((mode & PATTERNmode | FUNmode) != (PATTERNmode | FUNmode) || !sym.isSourceMethod)
+ }
if (defSym == NoSymbol) {
var defEntry: ScopeEntry = null // the scope entry of defSym, if defined in a local scope
@@ -3569,7 +3585,7 @@ trait Typers { self: Analyzer =>
else cx.depth - (cx.scope.nestingLevel - defEntry.owner.nestingLevel)
var impSym: Symbol = NoSymbol; // the imported symbol
var imports = context.imports; // impSym != NoSymbol => it is imported from imports.head
- while (!impSym.exists && !imports.isEmpty && imports.head.depth > symDepth) {
+ while (!reallyExists(impSym) && !imports.isEmpty && imports.head.depth > symDepth) {
impSym = imports.head.importedSymbol(name)
if (!impSym.exists) imports = imports.tail
}
@@ -3611,7 +3627,7 @@ trait Typers { self: Analyzer =>
(!imports.head.isExplicitImport(name) ||
imports1.head.depth == imports.head.depth)) {
var impSym1 = imports1.head.importedSymbol(name)
- if (impSym1.exists) {
+ if (reallyExists(impSym1)) {
if (imports1.head.isExplicitImport(name)) {
if (imports.head.isExplicitImport(name) ||
imports1.head.depth != imports.head.depth) ambiguousImport()
@@ -3623,7 +3639,9 @@ trait Typers { self: Analyzer =>
imports1 = imports1.tail
}
defSym = impSym
- qual = atPos(tree.pos.focusStart)(resetPos(imports.head.qual.duplicate))
+ val qual0 = imports.head.qual
+ if (!(shortenImports && qual0.symbol.isPackage)) // optimization: don't write out package prefixes
+ qual = atPos(tree.pos.focusStart)(resetPos(qual0.duplicate))
pre = qual.tpe
} else {
if (settings.debug.value) {
@@ -3949,7 +3967,7 @@ trait Typers { self: Analyzer =>
typedSelect(qual1, nme.CONSTRUCTOR)
case Select(qual, name) =>
- if (util.Statistics.enabled) selcnt += 1
+ incCounter(typedSelectCount)
var qual1 = checkDead(typedQualifier(qual, mode))
if (name.isTypeName) qual1 = checkStable(qual1)
@@ -3977,7 +3995,7 @@ trait Typers { self: Analyzer =>
else tree1
case Ident(name) =>
- if (util.Statistics.enabled) idcnt += 1
+ incCounter(typedIdentCount)
if ((name == nme.WILDCARD && (mode & (PATTERNmode | FUNmode)) == PATTERNmode) ||
(name == nme.WILDCARD.toTypeName && (mode & TYPEmode) != 0))
tree setType makeFullyDefined(pt)
diff --git a/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala b/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala
index 812276612f..3898e46c0b 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala
@@ -94,8 +94,8 @@ trait Unapplies extends ast.TreeDSL
}
/** returns unapply member's parameter type. */
def unapplyParameterType(extractor: Symbol) = {
- val tps = extractor.tpe.paramTypes
- if (tps.length == 1) tps.head.typeSymbol
+ val ps = extractor.tpe.params
+ if (ps.length == 1) ps.head.tpe.typeSymbol
else NoSymbol
}
diff --git a/src/compiler/scala/tools/nsc/util/Statistics.scala b/src/compiler/scala/tools/nsc/util/Statistics.scala
index e9e9f65058..eb8a25a6ed 100644
--- a/src/compiler/scala/tools/nsc/util/Statistics.scala
+++ b/src/compiler/scala/tools/nsc/util/Statistics.scala
@@ -9,57 +9,230 @@ package scala.tools.nsc
package util
object Statistics {
+
var enabled = false
+ var phasesShown = List("parser", "typer", "erasure", "cleanup")
+
+ def currentTime() =
+ if (enabled) System.nanoTime() else 0L
+
+ private def showPercent(x: Double, base: Double) =
+ if (base == 0) "" else " ("+"%2.1f".format(x / base * 100)+"%)"
+
+ def incCounter(c: Counter) {
+ if (enabled) c.value += 1
+ }
+
+ def incCounter(c: Counter, delta: Int) {
+ if (enabled) c.value += delta
+ }
+
+ def startCounter(sc: SubCounter): IntPair =
+ if (enabled) sc.start() else null
+
+ def stopCounter(sc: SubCounter, start: IntPair) {
+ if (enabled) sc.stop(start)
+ }
+
+ def startTimer(tm: Timer): LongPair =
+ if (enabled) tm.start() else null
+
+ def stopTimer(tm: Timer, start: LongPair) {
+ if (enabled) tm.stop(start)
+ }
+
+ case class IntPair(x: Int, y: Int)
+ case class LongPair(x: Long, y: Long)
+
+ class Counter {
+ var value: Int = 0
+ override def toString = value.toString
+ }
+
+ class SubCounter(c: Counter) {
+ var value: Int = 0
+ def start(): IntPair =
+ if (enabled) IntPair(value, c.value) else null
+ def stop(prev: IntPair) {
+ if (enabled) {
+ val IntPair(value0, cvalue0) = prev
+ value = value0 + c.value - cvalue0
+ }
+ }
+ override def toString =
+ value+showPercent(value, c.value)
+ }
+
+ class Timer {
+ var nanos: Long = 0L
+ def start(): LongPair =
+ if (enabled) LongPair(nanos, System.nanoTime()) else null
+ def stop(prev: LongPair) {
+ if (enabled) {
+ val LongPair(nanos0, start) = prev
+ nanos = nanos0 + System.nanoTime() - start
+ }
+ }
+ override def toString = nanos.toString+"ns"
+ }
+
+ class ClassCounts extends scala.collection.mutable.HashMap[Class[_], Int] {
+ override def default(key: Class[_]) = 0
+ }
+
+ var nodeByType = new ClassCounts
+
+ val singletonBaseTypeSeqCount = new Counter
+ val compoundBaseTypeSeqCount = new Counter
+ val typerefBaseTypeSeqCount = new Counter
+ val findMemberCount = new Counter
+ val noMemberCount = new Counter
+ val multMemberCount = new Counter
+ val findMemberNanos = new Timer
+ val asSeenFromCount = new Counter
+ val asSeenFromNanos = new Timer
+ val subtypeCount = new Counter
+ val subtypeNanos = new Timer
+ val sametypeCount = new Counter
+ val rawTypeCount = new Counter
+ val rawTypeFailed = new SubCounter(rawTypeCount)
+ val findMemberFailed = new SubCounter(findMemberCount)
+ val subtypeFailed = new SubCounter(subtypeCount)
+ val rawTypeImpl = new SubCounter(rawTypeCount)
+ val findMemberImpl = new SubCounter(findMemberCount)
+ val subtypeImpl = new SubCounter(subtypeCount)
+ val baseTypeSeqCount = new Counter
+ val baseTypeSeqLenTotal = new Counter
+ val typeSymbolCount = new Counter
+ val classSymbolCount = new Counter
+ val typedApplyCount = new Counter
+ val typedIdentCount = new Counter
+ val typedSelectCount = new Counter
+ val typerNanos = new Timer
+ val classReadNanos = new Timer
+
+ val failedApplyNanos = new Timer
+ val failedOpEqNanos = new Timer
+ val failedSilentNanos = new Timer
+
+ val implicitSearchCount = new Counter
+ val implicitNanos = new Timer
+ val oftypeImplicitHits = new Counter
+ val inscopeImplicitHits = new Counter
+
+ val triedImplicits = new Counter
+ val plausiblyCompatibleImplicits = new Counter
+ val matchingImplicits = new Counter
+ val typedImplicits = new Counter
+ val foundImplicits = new Counter
+
+ val inscopeSucceedNanos = new Timer
+ val inscopeFailNanos = new Timer
+ val oftypeSucceedNanos = new Timer
+ val oftypeFailNanos = new Timer
+ val implicitCacheHits = new Counter
+ val implicitCacheMisses = new Counter
+ val improvesCount = new Counter
+ val subtypeAppInfos = new SubCounter(subtypeCount)
+ val subtypeImprovCount = new SubCounter(subtypeCount)
+ val subtypeETNanos = new Timer
+ val matchesPtNanos = new Timer
+ val counter1: SubCounter = new SubCounter(findMemberCount)
+ val counter2: SubCounter = new SubCounter(findMemberCount)
+ val timer1: Timer = new Timer
+ val timer2: Timer = new Timer
}
abstract class Statistics {
+ import Statistics._
+
val global: Global
import global._
- def showRelative(base: Long)(time: Long) = "%2.1f".format(time.toDouble / base * 100)+" / "+time+"ns"
- def showRelTyper(time: Long) = showRelative(analyzer.typerTime)(time)
-
- def print(phase: Phase) = {
- if (List("typer", "erasure", "cleanup") contains phase.name) {
- inform("*** Cumulative statistics at phase " + phase)
- inform("#tree nodes : " + nodeCount)
- inform("#identifiers : " + analyzer.idcnt)
- inform("#selections : " + analyzer.selcnt)
- inform("#applications: " + analyzer.appcnt)
- inform("#implicits : " + analyzer.implcnt)
- inform("#uniquetypes : " + uniqueTypeCount)
- inform("#symbols : " + symbolCount)
- inform("#type symbols: " + typeSymbolCount)
- inform("#class symbols: " + classSymbolCount)
- inform("#singleton closures: " + singletonBaseTypeSeqCount)
- inform("#compound closures : " + compoundBaseTypeSeqCount)
- inform("#typeref closures : " + typerefBaseTypeSeqCount)
- inform("#findMember : " + findMemberCount)
- inform("#notfound member: " + noMemberCount)
- inform("#multiple member: " + multMemberCount)
- inform("time findMember: " + findMemberNanos)
- inform("#norm meth : " + analyzer.normM)
- inform("#norm poly : " + analyzer.normP)
- inform("#norm other : " + analyzer.normO)
- inform("#subtype : " + subtypeCount)
- inform("ns subtype : " + subtypeNanos)
- inform("#sametype : " + sametypeCount)
+ def countNodes(tree: Tree, counts: ClassCounts) {
+ for (t <- tree) counts(t.getClass) += 1
+ counts
+ }
+
+ def showRelative(base: Long)(value: Long) =
+ value+showPercent(value, base)
+
+ def showRelTyper(timer: Timer) =
+ timer.nanos+"ns"+showPercent(timer.nanos, typerNanos.nanos)
+
+ def showCounts(counts: ClassCounts) =
+ counts.toSeq.sortWith(_._2 > _._2).map {
+ case (cls, cnt) =>
+ cls.toString.substring(cls.toString.lastIndexOf("$") + 1)+": "+cnt
+ }
+
+ def print(phase: Phase) = if (phasesShown contains phase.name) {
+ inform("*** Cumulative statistics at phase " + phase)
+ inform("#created tree nodes : " + nodeCount)
+ inform("#created tree nodes by type: "+showCounts(nodeByType))
+ if (phase.name != "parser") {
+ val counts = new ClassCounts
+ for (u <- currentRun.units; t <- u.body) counts(t.getClass) += 1
+ inform("#retained nodes : " + counts.valuesIterable.sum)
+ inform("#retained nodes by type : " + showCounts(counts))
+ inform("#typechecked identifiers : " + typedIdentCount)
+ inform("#typechecked selections : " + typedSelectCount)
+ inform("#typechecked applications: " + typedApplyCount)
+ inform("#raw type creations : " + rawTypeCount)
+ inform(" of which in failed : " + rawTypeFailed)
+ inform(" of which in implicits : " + rawTypeImpl)
+ inform("#unique types : " + uniqueTypeCount)
+ inform("#symbols : " + symbolCount)
+ inform(" of which type symbols : " + typeSymbolCount)
+ inform(" of which class symbols : " + classSymbolCount)
+ inform("#base type seqs : " + baseTypeSeqCount)
+ inform("avg base type seq length : " + baseTypeSeqLenTotal.value.toFloat / baseTypeSeqCount.value)
+ inform("#singleton base type seqs: " + singletonBaseTypeSeqCount)
+ inform("#compound base type seqs : " + compoundBaseTypeSeqCount)
+ inform("#typeref base type seqs : " + typerefBaseTypeSeqCount)
+ inform("#findMember ops : " + findMemberCount)
+ inform(" of which in failed : " + findMemberFailed)
+ inform(" of which in implicits : " + findMemberImpl)
+ inform("#notfound member : " + noMemberCount)
+ inform("#multiple member : " + multMemberCount)
+ inform("#asSeenFrom ops : " + asSeenFromCount)
+ inform("#subtype : " + subtypeCount)
+ inform(" of which in failed : " + subtypeFailed)
+ inform(" of which in implicits : " + subtypeImpl)
+ inform(" of which in app impl : " + subtypeAppInfos)
+ inform(" of which in improv : " + subtypeImprovCount)
+ inform("#sametype : " + sametypeCount)
inform("ms type-flow-analysis: " + analysis.timer.millis)
+
if (phase.name == "typer") {
- inform("time spent typechecking: "+showRelTyper(analyzer.typerTime))
- inform("time spent in implicits: "+showRelTyper(analyzer.implicitTime))
- inform(" successful in scope: "+showRelTyper(analyzer.inscopeSucceed))
- inform(" failed in scope: "+showRelTyper(analyzer.inscopeFail))
- inform(" successful of type: "+showRelTyper(analyzer.oftypeSucceed))
- inform(" failed of type: "+showRelTyper(analyzer.oftypeFail))
- inform(" successful manifest: "+showRelTyper(analyzer.manifSucceed))
- inform(" failed manifest: "+showRelTyper(analyzer.manifFail))
- inform("implicit cache hitratio: "+"%2.1f".format(analyzer.hits.toDouble / (analyzer.hits + analyzer.misses) * 100))
- inform("time spent in failed : "+showRelTyper(analyzer.failedSilent))
- inform(" failed op= : "+showRelTyper(analyzer.failedOpEqs))
- inform(" failed apply : "+showRelTyper(analyzer.failedApplies))
+ inform("time spent typechecking : "+showRelTyper(typerNanos))
+ inform("time classfilereading : "+showRelTyper(classReadNanos))
+ inform("time spent in implicits : "+showRelTyper(implicitNanos))
+ inform(" successful in scope : "+showRelTyper(inscopeSucceedNanos))
+ inform(" failed in scope : "+showRelTyper(inscopeFailNanos))
+ inform(" successful of type : "+showRelTyper(oftypeSucceedNanos))
+ inform(" failed of type : "+showRelTyper(oftypeFailNanos))
+ inform(" assembling parts : "+showRelTyper(subtypeETNanos))
+ inform(" matchesPT : "+showRelTyper(matchesPtNanos))
+ inform("implicit cache hits : "+showRelative(implicitCacheHits.value + implicitCacheMisses.value)(implicitCacheHits.value))
+ inform("time spent in failed : "+showRelTyper(failedSilentNanos))
+ inform(" failed apply : "+showRelTyper(failedApplyNanos))
+ inform(" failed op= : "+showRelTyper(failedOpEqNanos))
+ inform("time spent in <:< : "+showRelTyper(subtypeNanos))
+ inform("time spent in findmember : "+showRelTyper(findMemberNanos))
+ inform("time spent in asSeenFrom : "+showRelTyper(asSeenFromNanos))
+ inform("#implicit searches : " + implicitSearchCount)
+ inform("#tried, plausible, matching, typed, found implicits: "+triedImplicits+", "+plausiblyCompatibleImplicits+", "+matchingImplicits+", "+typedImplicits+", "+foundImplicits)
+ inform("#implicit improves tests : " + improvesCount)
+ inform("#implicit inscope hits : " + inscopeImplicitHits)
+ inform("#implicit oftype hits : " + oftypeImplicitHits)
}
+
+ if (counter1 != null) inform("#counter1 : " + counter1)
+ if (counter2 != null) inform("#counter2 : " + counter2)
+ if (timer1 != null) inform("#timer1 : " + timer1)
+ if (timer2 != null) inform("#timer2 : " + timer2)
//for (t <- uniques.iterator) println("unique: "+t)
}
}
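
A self-contained usage sketch of the Counter/SubCounter pattern defined above: a SubCounter records how much its parent counter grew between start() and stop(), which is how, for example, subtypeFailed attributes subtype checks to failed typing attempts. The classes below are simplified copies of the ones in util.Statistics, with the `enabled` guard left out:

    class Counter { var value = 0 }
    class SubCounter(c: Counter) {
      var value = 0
      def start(): (Int, Int) = (value, c.value)     // snapshot both counters
      def stop(prev: (Int, Int)) {
        val (value0, cvalue0) = prev
        value = value0 + c.value - cvalue0           // add the parent's growth
      }
    }

    val subtypeCount  = new Counter
    val subtypeFailed = new SubCounter(subtypeCount)

    val snapshot = subtypeFailed.start()
    subtypeCount.value += 5          // five subtype checks happen inside the guarded region
    subtypeFailed.stop(snapshot)     // subtypeFailed.value is now 5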
diff --git a/src/library/scala/Predef.scala b/src/library/scala/Predef.scala
index 73512e2688..5684c91aaa 100644
--- a/src/library/scala/Predef.scala
+++ b/src/library/scala/Predef.scala
@@ -117,7 +117,7 @@ object Predef extends LowPriorityImplicits {
throw new IllegalArgumentException("requirement failed: "+ message)
}
- class Ensuring[A](x: A) {
+ final class Ensuring[A](val x: A) {
def ensuring(cond: Boolean): A = { assert(cond); x }
def ensuring(cond: Boolean, msg: Any): A = { assert(cond, msg); x }
def ensuring(cond: A => Boolean): A = { assert(cond(x)); x }
@@ -139,8 +139,8 @@ object Predef extends LowPriorityImplicits {
def unapply[A, B, C](x: Tuple3[A, B, C]): Option[Tuple3[A, B, C]] = Some(x)
}
- class ArrowAssoc[A](x: A) {
- def -> [B](y: B): Tuple2[A, B] = Tuple2(x, y)
+ final class ArrowAssoc[A](val x: A) {
+ @inline def -> [B](y: B): Tuple2[A, B] = Tuple2(x, y)
def →[B](y: B): Tuple2[A, B] = ->(y)
}
implicit def any2ArrowAssoc[A](x: A): ArrowAssoc[A] = new ArrowAssoc(x)
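
For reference, the two wrappers made final above back the familiar `->` and `ensuring` conveniences; making them final (and `->` inlinable) is purely an optimization, and their use is unchanged:

    val pair: (Int, String) = 1 -> "one"            // ArrowAssoc builds the tuple
    val total = List(1, 2, 3).sum ensuring (_ > 0)  // Ensuring returns the value after asserting the condition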
diff --git a/src/library/scala/collection/IndexedSeqViewLike.scala b/src/library/scala/collection/IndexedSeqViewLike.scala
index b27383424f..06fa6c8953 100644
--- a/src/library/scala/collection/IndexedSeqViewLike.scala
+++ b/src/library/scala/collection/IndexedSeqViewLike.scala
@@ -24,47 +24,24 @@ import TraversableView.NoBuilder
trait IndexedSeqViewLike[+A,
+Coll,
+This <: IndexedSeqView[A, Coll] with IndexedSeqViewLike[A, Coll, This]]
- extends IndexedSeq[A] with IndexedSeqLike[A, This] with SeqView[A, Coll] with SeqViewLike[A, Coll, This]
+ extends IndexedSeq[A]
+ with IndexedSeqLike[A, This]
+ with SeqView[A, Coll]
+ with SeqViewLike[A, Coll, This]
+ with views.IndexedSeqTransformations[A, Coll, This]
{ self =>
- trait Transformed[+B] extends IndexedSeqView[B, Coll] with super.Transformed[B]
+ trait Transformed[+B] extends views.IndexedSeqLike[B, Coll] with super.Transformed[B]
- trait Sliced extends Transformed[A] with super.Sliced {
- /** Override to use IndexedSeq's foreach; todo: see whether this is really faster */
- override def foreach[U](f: A => U) = super[Transformed].foreach(f)
- }
-
- trait Mapped[B] extends Transformed[B] with super.Mapped[B] {
- override def foreach[U](f: B => U) = super[Transformed].foreach(f)
- }
-
- trait FlatMapped[B] extends Transformed[B] with super.FlatMapped[B] {
- override def foreach[U](f: B => U) = super[Transformed].foreach(f)
- }
-
- trait Appended[B >: A] extends Transformed[B] with super.Appended[B] {
- override def foreach[U](f: B => U) = super[Transformed].foreach(f)
- }
-
- trait Filtered extends Transformed[A] with super.Filtered {
- override def foreach[U](f: A => U) = super[Transformed].foreach(f)
- }
-
- trait TakenWhile extends Transformed[A] with super.TakenWhile {
- override def foreach[U](f: A => U) = super[Transformed].foreach(f)
- }
-
- trait DroppedWhile extends Transformed[A] with super.DroppedWhile {
- override def foreach[U](f: A => U) = super[Transformed].foreach(f)
- }
-
- trait Reversed extends Transformed[A] with super.Reversed {
- override def foreach[U](f: A => U) = super[Transformed].foreach(f)
- }
-
- trait Patched[B >: A] extends Transformed[B] with super.Patched[B] {
- override def foreach[U](f: B => U) = super[Transformed].foreach(f)
- }
+ trait Sliced extends Transformed[A] with super.Sliced
+ trait Mapped[B] extends Transformed[B] with super.Mapped[B]
+ trait FlatMapped[B] extends Transformed[B] with super.FlatMapped[B]
+ trait Appended[B >: A] extends Transformed[B] with super.Appended[B]
+ trait Filtered extends Transformed[A] with super.Filtered
+ trait TakenWhile extends Transformed[A] with super.TakenWhile
+ trait DroppedWhile extends Transformed[A] with super.DroppedWhile
+ trait Reversed extends Transformed[A] with super.Reversed
+ trait Patched[B >: A] extends Transformed[B] with super.Patched[B]
trait Zipped[B] extends Transformed[(A, B)] {
protected[this] val other: Iterable[B]
@@ -88,22 +65,5 @@ trait IndexedSeqViewLike[+A,
}
override def stringPrefix = self.stringPrefix+"Z"
}
-
- /** Boilerplate method, to override in each subclass
- * This method could be eliminated if Scala had virtual classes
- */
- protected override def newAppended[B >: A](that: Traversable[B]): Transformed[B] = new Appended[B] { val rest = that }
- protected override def newMapped[B](f: A => B): Transformed[B] = new Mapped[B] { val mapping = f }
- protected override def newFlatMapped[B](f: A => Traversable[B]): Transformed[B] = new FlatMapped[B] { val mapping = f }
- protected override def newFiltered(p: A => Boolean): Transformed[A] = new Filtered { val pred = p }
- protected override def newSliced(_from: Int, _until: Int): Transformed[A] = new Sliced { val from = _from; val until = _until }
- protected override def newDroppedWhile(p: A => Boolean): Transformed[A] = new DroppedWhile { val pred = p }
- protected override def newTakenWhile(p: A => Boolean): Transformed[A] = new TakenWhile { val pred = p }
- protected override def newZipped[B](that: Iterable[B]): Transformed[(A, B)] = new Zipped[B] { val other = that }
- protected override def newZippedAll[A1 >: A, B](that: Iterable[B], _thisElem: A1, _thatElem: B): Transformed[(A1, B)] = new ZippedAll[A1, B] { val other = that; val thisElem = _thisElem; val thatElem = _thatElem }
- protected override def newReversed: Transformed[A] = new Reversed { }
- protected override def newPatched[B >: A](_from: Int, _patch: Seq[B], _replaced: Int): Transformed[B] = new Patched[B] {
- val from = _from; val patch = _patch; val replaced = _replaced
- }
override def stringPrefix = "IndexedSeqView"
}
diff --git a/src/library/scala/collection/IterableViewLike.scala b/src/library/scala/collection/IterableViewLike.scala
index 27323294c4..831a244352 100644
--- a/src/library/scala/collection/IterableViewLike.scala
+++ b/src/library/scala/collection/IterableViewLike.scala
@@ -24,10 +24,14 @@ import TraversableView.NoBuilder
trait IterableViewLike[+A,
+Coll,
+This <: IterableView[A, Coll] with IterableViewLike[A, Coll, This]]
-extends Iterable[A] with IterableLike[A, This] with TraversableView[A, Coll] with TraversableViewLike[A, Coll, This]
+ extends Iterable[A]
+ with IterableLike[A, This]
+ with TraversableView[A, Coll]
+ with TraversableViewLike[A, Coll, This]
+ with views.IterableTransformations[A, Coll, This]
{ self =>
- trait Transformed[+B] extends IterableView[B, Coll] with super.Transformed[B]
+ trait Transformed[+B] extends views.IterableLike[B, Coll] with super.Transformed[B]
trait Sliced extends Transformed[A] with super.Sliced {
override def iterator = self.iterator slice (from, until)
@@ -84,25 +88,5 @@ extends Iterable[A] with IterableLike[A, This] with TraversableView[A, Coll] wit
override def zipAll[B, A1 >: A, That](that: Iterable[B], thisElem: A1, thatElem: B)(implicit bf: CanBuildFrom[This, (A1, B), That]): That =
newZippedAll(that, thisElem, thatElem).asInstanceOf[That]
- protected def newZipped[B](that: Iterable[B]): Transformed[(A, B)] = new Zipped[B] {
- val other = that
- }
- protected def newZippedAll[A1 >: A, B](that: Iterable[B], _thisElem: A1, _thatElem: B): Transformed[(A1, B)] = new ZippedAll[A1, B] {
- val other: Iterable[B] = that
- val thisElem = _thisElem
- val thatElem = _thatElem
- }
-
- /** Boilerplate method, to override in each subclass
- * This method could be eliminated if Scala had virtual classes
- */
- protected override def newAppended[B >: A](that: Traversable[B]): Transformed[B] = new Appended[B] { val rest = that }
- protected override def newMapped[B](f: A => B): Transformed[B] = new Mapped[B] { val mapping = f }
- protected override def newFlatMapped[B](f: A => Traversable[B]): Transformed[B] = new FlatMapped[B] { val mapping = f }
- protected override def newFiltered(p: A => Boolean): Transformed[A] = new Filtered { val pred = p }
- protected override def newSliced(_from: Int, _until: Int): Transformed[A] = new Sliced { val from = _from; val until = _until }
- protected override def newDroppedWhile(p: A => Boolean): Transformed[A] = new DroppedWhile { val pred = p }
- protected override def newTakenWhile(p: A => Boolean): Transformed[A] = new TakenWhile { val pred = p }
-
override def stringPrefix = "IterableView"
}
diff --git a/src/library/scala/collection/SeqViewLike.scala b/src/library/scala/collection/SeqViewLike.scala
index 1a8cd20013..6f677616e7 100644
--- a/src/library/scala/collection/SeqViewLike.scala
+++ b/src/library/scala/collection/SeqViewLike.scala
@@ -23,13 +23,14 @@ import TraversableView.NoBuilder
trait SeqViewLike[+A,
+Coll,
+This <: SeqView[A, Coll] with SeqViewLike[A, Coll, This]]
- extends Seq[A] with SeqLike[A, This] with IterableView[A, Coll] with IterableViewLike[A, Coll, This]
+ extends Seq[A]
+ with SeqLike[A, This]
+ with IterableView[A, Coll]
+ with IterableViewLike[A, Coll, This]
+ with views.SeqTransformations[A, Coll, This]
{ self =>
- trait Transformed[+B] extends SeqView[B, Coll] with super.Transformed[B] {
- override def length: Int
- override def apply(idx: Int): B
- }
+ trait Transformed[+B] extends views.SeqLike[B, Coll] with super.Transformed[B]
trait Sliced extends Transformed[A] with super.Sliced {
override def length = ((until min self.length) - from) max 0
@@ -143,21 +144,6 @@ trait SeqViewLike[+A,
override def stringPrefix = self.stringPrefix+"P"
}
- /** Boilerplate method, to override in each subclass
- * This method could be eliminated if Scala had virtual classes
- */
- protected override def newAppended[B >: A](that: Traversable[B]): Transformed[B] = new Appended[B] { val rest = that }
- protected override def newMapped[B](f: A => B): Transformed[B] = new Mapped[B] { val mapping = f }
- protected override def newFlatMapped[B](f: A => Traversable[B]): Transformed[B] = new FlatMapped[B] { val mapping = f }
- protected override def newFiltered(p: A => Boolean): Transformed[A] = new Filtered { val pred = p }
- protected override def newSliced(_from: Int, _until: Int): Transformed[A] = new Sliced { val from = _from; val until = _until }
- protected override def newDroppedWhile(p: A => Boolean): Transformed[A] = new DroppedWhile { val pred = p }
- protected override def newTakenWhile(p: A => Boolean): Transformed[A] = new TakenWhile { val pred = p }
- protected override def newZipped[B](that: Iterable[B]): Transformed[(A, B)] = new Zipped[B] { val other = that }
- protected override def newZippedAll[A1 >: A, B](that: Iterable[B], _thisElem: A1, _thatElem: B): Transformed[(A1, B)] = new ZippedAll[A1, B] { val other = that; val thisElem = _thisElem; val thatElem = _thatElem }
- protected def newReversed: Transformed[A] = new Reversed { }
- protected def newPatched[B >: A](_from: Int, _patch: Seq[B], _replaced: Int): Transformed[B] = new Patched[B] { val from = _from; val patch = _patch; val replaced = _replaced }
-
override def reverse: This = newReversed.asInstanceOf[This]
override def patch[B >: A, That](from: Int, patch: Seq[B], replaced: Int)(implicit bf: CanBuildFrom[This, B, That]): That = {
diff --git a/src/library/scala/collection/TraversableViewLike.scala b/src/library/scala/collection/TraversableViewLike.scala
index 84c33296db..7f4d0ebd71 100644
--- a/src/library/scala/collection/TraversableViewLike.scala
+++ b/src/library/scala/collection/TraversableViewLike.scala
@@ -33,7 +33,9 @@ import TraversableView.NoBuilder
trait TraversableViewLike[+A,
+Coll,
+This <: TraversableView[A, Coll] with TraversableViewLike[A, Coll, This]]
- extends Traversable[A] with TraversableLike[A, This] {
+ extends Traversable[A]
+ with TraversableLike[A, This]
+ with views.TraversableTransformations[A, Coll, This] {
self =>
override protected[this] def newBuilder: Builder[A, This] =
@@ -41,16 +43,16 @@ self =>
protected def underlying: Coll
+ trait Transformed[+B] extends views.TraversableLike[B, Coll] {
+ lazy val underlying = self.underlying
+ }
+
def force[B >: A, That](implicit bf: CanBuildFrom[Coll, B, That]) = {
val b = bf(underlying)
b ++= this
b.result()
}
- trait Transformed[+B] extends TraversableView[B, Coll] {
- lazy val underlying = self.underlying
- }
-
/** pre: from >= 0
*/
trait Sliced extends Transformed[A] {
@@ -131,17 +133,6 @@ self =>
override def stringPrefix = self.stringPrefix+"D"
}
- /** Boilerplate method, to override in each subclass
- * This method could be eliminated if Scala had virtual classes
- */
- protected def newAppended[B >: A](that: Traversable[B]): Transformed[B] = new Appended[B] { val rest = that }
- protected def newMapped[B](f: A => B): Transformed[B] = new Mapped[B] { val mapping = f }
- protected def newFlatMapped[B](f: A => Traversable[B]): Transformed[B] = new FlatMapped[B] { val mapping = f }
- protected def newFiltered(p: A => Boolean): Transformed[A] = new Filtered { val pred = p }
- protected def newSliced(_from: Int, _until: Int): Transformed[A] = new Sliced { val from = _from; val until = _until }
- protected def newDroppedWhile(p: A => Boolean): Transformed[A] = new DroppedWhile { val pred = p }
- protected def newTakenWhile(p: A => Boolean): Transformed[A] = new TakenWhile { val pred = p }
-
override def ++[B >: A, That](that: Traversable[B])(implicit bf: CanBuildFrom[This, B, That]): That = {
newAppended(that).asInstanceOf[That]
// was: if (bf.isInstanceOf[ByPassCanBuildFrom]) newAppended(that).asInstanceOf[That]
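
The ++ shown in context delegates to newAppended, which after this patch comes from views.TraversableTransformations instead of being defined inline; the result is still a lazy Appended view. A sketch, assuming the usual view CanBuildFrom picks a view result type:

    val appended = List(1, 2, 3).view ++ List(4, 5)   // a lazy Appended view; nothing copied yet
    val strict   = appended.toList                    // List(1, 2, 3, 4, 5)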
diff --git a/src/library/scala/collection/immutable/StringOps.scala b/src/library/scala/collection/immutable/StringOps.scala
index 9138c2bbac..db6479742b 100644
--- a/src/library/scala/collection/immutable/StringOps.scala
+++ b/src/library/scala/collection/immutable/StringOps.scala
@@ -17,7 +17,7 @@ import mutable.StringBuilder
/**
* @since 2.8
*/
-class StringOps(override val repr: String) extends StringLike[String] {
+final class StringOps(override val repr: String) extends StringLike[String] {
override protected[this] def thisCollection: WrappedString = new WrappedString(repr)
override protected[this] def toCollection(repr: String): WrappedString = new WrappedString(repr)
diff --git a/src/library/scala/collection/mutable/ArrayBuffer.scala b/src/library/scala/collection/mutable/ArrayBuffer.scala
index c60cbcd68c..c88b9d3374 100644
--- a/src/library/scala/collection/mutable/ArrayBuffer.scala
+++ b/src/library/scala/collection/mutable/ArrayBuffer.scala
@@ -132,7 +132,7 @@ class ArrayBuffer[A](override protected val initialSize: Int)
require(count >= 0, "removing negative number of elements")
if (n < 0 || n > size0 - count) throw new IndexOutOfBoundsException(n.toString)
copy(n + count, n, size0 - (n + count))
- size0 -= count
+ reduceToSize(size0 - count)
}
/** Removes the element on a given index position
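
Calling reduceToSize (inherited from ResizableArray) instead of decrementing size0 directly keeps the behaviour of remove(n, count) the same while clearing the trailing array slots, so the removed elements can be garbage collected. A small usage sketch:

    import scala.collection.mutable.ArrayBuffer

    val buf = ArrayBuffer(1, 2, 3, 4, 5)
    buf.remove(1, 2)                       // drops the elements at indices 1 and 2
    assert(buf == ArrayBuffer(1, 4, 5))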
diff --git a/src/library/scala/collection/mutable/IndexedSeqView.scala b/src/library/scala/collection/mutable/IndexedSeqView.scala
index e864845455..db1735b543 100644
--- a/src/library/scala/collection/mutable/IndexedSeqView.scala
+++ b/src/library/scala/collection/mutable/IndexedSeqView.scala
@@ -13,7 +13,6 @@ package scala.collection
package mutable
import generic._
-
import TraversableView.NoBuilder
/** A non-strict view of a mutable IndexedSeq.
@@ -30,9 +29,7 @@ self =>
def update(idx: Int, elem: A)
- trait Transformed[B] extends IndexedSeqView[B, Coll] with super.Transformed[B] {
- def update(idx: Int, elem: B)
- }
+ trait Transformed[B] extends views.MutableIndexedSeq[B, Coll] with IndexedSeqView[B, Coll] with super.Transformed[B]
trait Sliced extends Transformed[A] with super.Sliced {
override def update(idx: Int, elem: A) =
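
Only where the Transformed trait's members live changes here; a mutable view still exposes update and writes through to the underlying sequence. A short sketch of that write-through behaviour, using an ArrayBuffer as the underlying collection:

    import scala.collection.mutable.ArrayBuffer

    val buf  = ArrayBuffer(1, 2, 3, 4)
    val view = buf.view                    // a mutable IndexedSeqView over buf
    view(1) = 99                           // writes through to the buffer
    assert(buf(1) == 99)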
diff --git a/src/library/scala/collection/mutable/MultiMap.scala b/src/library/scala/collection/mutable/MultiMap.scala
index 8279bd53f8..e335500349 100644
--- a/src/library/scala/collection/mutable/MultiMap.scala
+++ b/src/library/scala/collection/mutable/MultiMap.scala
@@ -40,7 +40,9 @@ trait MultiMap[A, B] extends Map[A, Set[B]] {
def removeBinding(key: A, value: B): this.type = {
get(key) match {
case None =>
- case Some(set) => set -= value
+ case Some(set) =>
+ set -= value
+ if (set.isEmpty) this -= key
}
this
}
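
removeBinding now also drops the key once its last value is gone, so an emptied binding no longer lingers as an empty Set. A usage sketch with the usual mutable HashMap/Set mixin:

    import scala.collection.mutable.{ HashMap, Set, MultiMap }

    val mm = new HashMap[String, Set[Int]] with MultiMap[String, Int]
    mm.addBinding("k", 1)
    mm.removeBinding("k", 1)
    assert(mm.get("k") == None)            // the key disappears with its last value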
diff --git a/src/library/scala/collection/views/Transformed.scala b/src/library/scala/collection/views/Transformed.scala
new file mode 100644
index 0000000000..189ca127c8
--- /dev/null
+++ b/src/library/scala/collection/views/Transformed.scala
@@ -0,0 +1,128 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+// $Id$
+
+
+package scala.collection
+package views
+
+import generic.CanBuildFrom
+
+/** These classes act as accumulators for the majority of methods in the
+ * collections hierarchy. By creating abstract classes rather than using
+ * the traits exclusively, we avoid creating forwarders in dozens of distinct
+ * anonymous classes and reduce the size of scala-library.jar by over 200K.
+ */
+private[collection] trait Transformed
+private[collection] abstract class TraversableLike[+B, +Coll] extends TraversableView[B, Coll] with Transformed {
+ override def foreach[C](f: B => C): Unit
+}
+private[collection] abstract class IterableLike[+B, +Coll] extends TraversableLike[B, Coll] with IterableView[B, Coll] {
+ override def iterator: Iterator[B]
+}
+private[collection] abstract class SeqLike[+B, +Coll] extends IterableLike[B, Coll] with SeqView[B, Coll] {
+ override def length: Int
+ override def apply(idx: Int): B
+}
+private[collection] abstract class IndexedSeqLike[+B, +Coll] extends SeqLike[B, Coll] with IndexedSeqView[B, Coll] {
+ /** Override to use IndexedSeq's foreach; todo: see whether this is really faster */
+ override def foreach[U](f: B => U) = super[IndexedSeqView].foreach(f)
+}
+private[collection] abstract class MutableIndexedSeq[B, +Coll] extends IndexedSeqLike[B, Coll] {
+ def update(idx: Int, elem: B)
+}
+
+/** The boilerplate in the following traits is factored out of the *ViewLike classes
+ * to reduce noise. It exists only to specialize the return type of each method.
+ * It would be unnecessary if Scala had virtual classes, because the inner classes
+ * of subtraits would then subclass the parent trait's inner classes and the same
+ * method would suffice for both.
+ */
+private[collection] trait TraversableTransformations[+A, +Coll, +This <: TraversableView[A, Coll] with TraversableViewLike[A, Coll, This]] {
+ self: TraversableViewLike[A, Coll, This] =>
+
+ /** Boilerplate methods, to override in each subclass. */
+ protected def newAppended[B >: A](that: Traversable[B]): Transformed[B] = new Appended[B] { val rest = that }
+ protected def newMapped[B](f: A => B): Transformed[B] = new Mapped[B] { val mapping = f }
+ protected def newFlatMapped[B](f: A => Traversable[B]): Transformed[B] = new FlatMapped[B] { val mapping = f }
+ protected def newFiltered(p: A => Boolean): Transformed[A] = new Filtered { val pred = p }
+ protected def newSliced(_from: Int, _until: Int): Transformed[A] = new Sliced { val from = _from; val until = _until }
+ protected def newDroppedWhile(p: A => Boolean): Transformed[A] = new DroppedWhile { val pred = p }
+ protected def newTakenWhile(p: A => Boolean): Transformed[A] = new TakenWhile { val pred = p }
+}
+
+private[collection] trait IterableTransformations[+A, +Coll, +This <: IterableView[A, Coll] with IterableViewLike[A, Coll, This]]
+ extends TraversableTransformations[A, Coll, This]
+{
+ self: IterableViewLike[A, Coll, This] =>
+
+ /** Inherited from TraversableView */
+ protected override def newAppended[B >: A](that: Traversable[B]): Transformed[B] = new Appended[B] { val rest = that }
+ protected override def newMapped[B](f: A => B): Transformed[B] = new Mapped[B] { val mapping = f }
+ protected override def newFlatMapped[B](f: A => Traversable[B]): Transformed[B] = new FlatMapped[B] { val mapping = f }
+ protected override def newFiltered(p: A => Boolean): Transformed[A] = new Filtered { val pred = p }
+ protected override def newSliced(_from: Int, _until: Int): Transformed[A] = new Sliced { val from = _from; val until = _until }
+ protected override def newDroppedWhile(p: A => Boolean): Transformed[A] = new DroppedWhile { val pred = p }
+ protected override def newTakenWhile(p: A => Boolean): Transformed[A] = new TakenWhile { val pred = p }
+
+ /** IterableView boilerplate contribution */
+ protected def newZipped[B](that: Iterable[B]): Transformed[(A, B)] = new Zipped[B] {
+ val other = that
+ }
+ protected def newZippedAll[A1 >: A, B](that: Iterable[B], _thisElem: A1, _thatElem: B): Transformed[(A1, B)] = new ZippedAll[A1, B] {
+ val other: Iterable[B] = that
+ val thisElem = _thisElem
+ val thatElem = _thatElem
+ }
+}
+
+private[collection] trait SeqTransformations[+A, +Coll, +This <: SeqView[A, Coll] with SeqViewLike[A, Coll, This]]
+ extends IterableTransformations[A, Coll, This]
+{
+ self: SeqViewLike[A, Coll, This] =>
+
+ /** Inherited from IterableView */
+ protected override def newAppended[B >: A](that: Traversable[B]): Transformed[B] = new Appended[B] { val rest = that }
+ protected override def newMapped[B](f: A => B): Transformed[B] = new Mapped[B] { val mapping = f }
+ protected override def newFlatMapped[B](f: A => Traversable[B]): Transformed[B] = new FlatMapped[B] { val mapping = f }
+ protected override def newFiltered(p: A => Boolean): Transformed[A] = new Filtered { val pred = p }
+ protected override def newSliced(_from: Int, _until: Int): Transformed[A] = new Sliced { val from = _from; val until = _until }
+ protected override def newDroppedWhile(p: A => Boolean): Transformed[A] = new DroppedWhile { val pred = p }
+ protected override def newTakenWhile(p: A => Boolean): Transformed[A] = new TakenWhile { val pred = p }
+ protected override def newZipped[B](that: Iterable[B]): Transformed[(A, B)] = new Zipped[B] { val other = that }
+ protected override def newZippedAll[A1 >: A, B](that: Iterable[B], _thisElem: A1, _thatElem: B): Transformed[(A1, B)] =
+ new ZippedAll[A1, B] { val other = that; val thisElem = _thisElem; val thatElem = _thatElem }
+
+ /** SeqView boilerplate contribution */
+ protected def newReversed: Transformed[A] = new Reversed { }
+ protected def newPatched[B >: A](_from: Int, _patch: Seq[B], _replaced: Int): Transformed[B] =
+ new Patched[B] { val from = _from; val patch = _patch; val replaced = _replaced }
+}
+
+private[collection] trait IndexedSeqTransformations[+A, +Coll, +This <: IndexedSeqView[A, Coll] with IndexedSeqViewLike[A, Coll, This]]
+ extends SeqTransformations[A, Coll, This]
+{
+ self: IndexedSeqViewLike[A, Coll, This] =>
+
+ /** Inherited from SeqView */
+ protected override def newAppended[B >: A](that: Traversable[B]): Transformed[B] = new Appended[B] { val rest = that }
+ protected override def newMapped[B](f: A => B): Transformed[B] = new Mapped[B] { val mapping = f }
+ protected override def newFlatMapped[B](f: A => Traversable[B]): Transformed[B] = new FlatMapped[B] { val mapping = f }
+ protected override def newFiltered(p: A => Boolean): Transformed[A] = new Filtered { val pred = p }
+ protected override def newSliced(_from: Int, _until: Int): Transformed[A] = new Sliced { val from = _from; val until = _until }
+ protected override def newDroppedWhile(p: A => Boolean): Transformed[A] = new DroppedWhile { val pred = p }
+ protected override def newTakenWhile(p: A => Boolean): Transformed[A] = new TakenWhile { val pred = p }
+
+ protected override def newZipped[B](that: Iterable[B]): Transformed[(A, B)] = new Zipped[B] { val other = that }
+ protected override def newZippedAll[A1 >: A, B](that: Iterable[B], _thisElem: A1, _thatElem: B): Transformed[(A1, B)] =
+ new ZippedAll[A1, B] { val other = that; val thisElem = _thisElem; val thatElem = _thatElem }
+ protected override def newReversed: Transformed[A] = new Reversed { }
+ protected override def newPatched[B >: A](_from: Int, _patch: Seq[B], _replaced: Int): Transformed[B] =
+ new Patched[B] { val from = _from; val patch = _patch; val replaced = _replaced }
+}
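
Nothing changes for users of views; these abstract classes only give the anonymous Transformed instances a shared superclass so their forwarder methods exist once rather than per anonymous class. A sketch of the laziness they back, assuming the usual view behaviour (the println only shows when elements get computed):

    val v = List(1, 2, 3).view map { i => println("mapping " + i); i * 2 }
    // nothing has been printed yet; the Mapped view is non-strict
    val materialised = v.toList            // now "mapping 1" .. "mapping 3" appear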
diff --git a/src/library/scala/ref/PhantomReference.scala b/src/library/scala/ref/PhantomReference.scala
index 7ba26389ff..73746a948f 100644
--- a/src/library/scala/ref/PhantomReference.scala
+++ b/src/library/scala/ref/PhantomReference.scala
@@ -15,7 +15,11 @@ package scala.ref
*/
class PhantomReference[+T <: AnyRef](value: T, queue: ReferenceQueue[T]) extends ReferenceWrapper[T] {
val underlying: java.lang.ref.PhantomReference[_ <: T] =
- new java.lang.ref.PhantomReference[T](value, queue.underlying.asInstanceOf[java.lang.ref.ReferenceQueue[T]])
- queue.register(this)
+ new PhantomReferenceWithWrapper[T](value, queue, this)
}
+/**
+ * @author Philipp Haller
+ */
+private class PhantomReferenceWithWrapper[T <: AnyRef](value: T, queue: ReferenceQueue[T], val wrapper: PhantomReference[T])
+ extends java.lang.ref.PhantomReference[T](value, queue.underlying.asInstanceOf[java.lang.ref.ReferenceQueue[T]]) with ReferenceWithWrapper[T]
diff --git a/src/library/scala/ref/ReferenceQueue.scala b/src/library/scala/ref/ReferenceQueue.scala
index c7fae4eb79..5ef74bbc95 100644
--- a/src/library/scala/ref/ReferenceQueue.scala
+++ b/src/library/scala/ref/ReferenceQueue.scala
@@ -10,30 +10,23 @@
package scala.ref
-import scala.collection.mutable.HashMap
-
/**
- * @author Sean McDirmid, Philipp Haller
+ * @author Sean McDirmid
+ * @author Philipp Haller
*/
class ReferenceQueue[+T <: AnyRef] {
+
private[ref] val underlying: java.lang.ref.ReferenceQueue[_ <: T] = new java.lang.ref.ReferenceQueue[T]
override def toString = underlying.toString
- protected def Wrapper(jref: java.lang.ref.Reference[_]) = jref match {
- case null => None
- case ref =>
- val refWrapper = wrappers(ref)
- wrappers -= ref
- Some(refWrapper.asInstanceOf[Reference[T]])
- }
+ protected def Wrapper(jref: java.lang.ref.Reference[_]): Option[Reference[T]] =
+ jref match {
+ case null => None
+ case ref => Some(ref.asInstanceOf[ReferenceWithWrapper[T]].wrapper)
+ }
def poll: Option[Reference[T]] = Wrapper(underlying.poll)
def remove: Option[Reference[T]] = Wrapper(underlying.remove)
def remove(timeout: Long): Option[Reference[T]] = Wrapper(underlying.remove(timeout))
- protected val wrappers = new HashMap[java.lang.ref.Reference[_],
- ReferenceWrapper[_ <: AnyRef]]
- def register(ref: ReferenceWrapper[_ <: AnyRef]) {
- wrappers += ((ref.underlying, ref))
- }
}
diff --git a/src/library/scala/ref/ReferenceWrapper.scala b/src/library/scala/ref/ReferenceWrapper.scala
index 7c3e8686e4..5661737053 100644
--- a/src/library/scala/ref/ReferenceWrapper.scala
+++ b/src/library/scala/ref/ReferenceWrapper.scala
@@ -30,3 +30,10 @@ trait ReferenceWrapper[+T <: AnyRef] extends Reference[T] with Proxy {
def self = underlying
}
+
+/**
+ * @author Philipp Haller
+ */
+private trait ReferenceWithWrapper[T <: AnyRef] {
+ val wrapper: ReferenceWrapper[T]
+}
diff --git a/src/library/scala/ref/SoftReference.scala b/src/library/scala/ref/SoftReference.scala
index 5d4d470c6a..1026a85023 100644
--- a/src/library/scala/ref/SoftReference.scala
+++ b/src/library/scala/ref/SoftReference.scala
@@ -16,8 +16,11 @@ package scala.ref
class SoftReference[+T <: AnyRef](value : T, queue : ReferenceQueue[T]) extends ReferenceWrapper[T] {
def this(value : T) = this(value, null);
val underlying: java.lang.ref.SoftReference[_ <: T] =
- if (queue == null) new java.lang.ref.SoftReference[T](value);
- else new java.lang.ref.SoftReference[T](value, queue.underlying.asInstanceOf[java.lang.ref.ReferenceQueue[T]])
- if (queue != null)
- queue.register(this)
+ new SoftReferenceWithWrapper[T](value, queue, this)
}
+
+/**
+ * @author Philipp Haller
+ */
+private class SoftReferenceWithWrapper[T <: AnyRef](value: T, queue: ReferenceQueue[T], val wrapper: SoftReference[T])
+ extends java.lang.ref.SoftReference[T](value, if (queue == null) null else queue.underlying.asInstanceOf[java.lang.ref.ReferenceQueue[T]]) with ReferenceWithWrapper[T]
diff --git a/src/library/scala/ref/WeakReference.scala b/src/library/scala/ref/WeakReference.scala
index 661b2db60a..e6386d9f24 100644
--- a/src/library/scala/ref/WeakReference.scala
+++ b/src/library/scala/ref/WeakReference.scala
@@ -16,8 +16,11 @@ package scala.ref
class WeakReference[+T <: AnyRef](value: T, queue: ReferenceQueue[T]) extends ReferenceWrapper[T] {
def this(value: T) = this(value, null)
val underlying: java.lang.ref.WeakReference[_ <: T] =
- if (queue == null) new java.lang.ref.WeakReference[T](value)
- else new java.lang.ref.WeakReference[T](value, queue.underlying.asInstanceOf[java.lang.ref.ReferenceQueue[T]])
- if (queue != null)
- queue.register(this)
+ new WeakReferenceWithWrapper[T](value, queue, this)
}
+
+/**
+ * @author Philipp Haller
+ */
+private class WeakReferenceWithWrapper[T <: AnyRef](value: T, queue: ReferenceQueue[T], val wrapper: WeakReference[T])
+ extends java.lang.ref.WeakReference[T](value, if (queue == null) null else queue.underlying.asInstanceOf[java.lang.ref.ReferenceQueue[T]]) with ReferenceWithWrapper[T]
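
Taken together, the ref changes drop the HashMap-based registry in ReferenceQueue: each underlying java.lang.ref reference now carries its Scala wrapper via the private ReferenceWithWrapper trait, and poll/remove simply read that field back. Client code is unaffected; a minimal sketch:

    import scala.ref.{ Reference, ReferenceQueue, WeakReference }

    val queue = new ReferenceQueue[AnyRef]
    val ref   = new WeakReference(new AnyRef, queue)
    // once the referent has been collected and enqueued, poll yields the same wrapper
    val recovered: Option[Reference[AnyRef]] = queue.poll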
diff --git a/src/library/scala/xml/factory/XMLLoader.scala b/src/library/scala/xml/factory/XMLLoader.scala
index b167029907..a1bca21b40 100644
--- a/src/library/scala/xml/factory/XMLLoader.scala
+++ b/src/library/scala/xml/factory/XMLLoader.scala
@@ -15,6 +15,7 @@ import parsing.{ FactoryAdapter, NoBindingFactoryAdapter }
import org.xml.sax.InputSource
import java.io.{ InputStream, Reader, StringReader, File, FileDescriptor, FileInputStream }
import javax.xml.parsers.{ SAXParser, SAXParserFactory }
+import java.net.URL
/** Presents collection of XML loading methods which use the parser
* created by "def parser".
@@ -53,11 +54,12 @@ trait XMLLoader[T <: Node]
def loadFile(fd: FileDescriptor): T = loadXML(fromFile(fd), parser)
def loadFile(name: String): T = loadXML(fromFile(name), parser)
- /** loads XML from given InputStream, Reader, sysID, or InputSource. */
+ /** loads XML from given InputStream, Reader, sysID, InputSource, or URL. */
def load(is: InputStream): T = loadXML(fromInputStream(is), parser)
def load(reader: Reader): T = loadXML(fromReader(reader), parser)
def load(sysID: String): T = loadXML(fromSysId(sysID), parser)
def load(source: InputSource): T = loadXML(source, parser)
+ def load(url: URL): T = loadXML(fromInputStream(url.openStream()), parser)
/** Loads XML from the given String. */
def loadString(string: String): T = loadXML(fromString(string), parser)
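
The new overload saves callers from opening the stream themselves when the source is a java.net.URL. For instance, through the scala.xml.XML object, which mixes in this loader (the URL below is only a placeholder):

    import java.net.URL
    import scala.xml.XML

    val doc = XML.load(new URL("http://www.scala-lang.org/index.html"))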
diff --git a/src/library/scala/xml/parsing/MarkupParser.scala b/src/library/scala/xml/parsing/MarkupParser.scala
index ff19bd3ed5..2779fe1d7c 100644
--- a/src/library/scala/xml/parsing/MarkupParser.scala
+++ b/src/library/scala/xml/parsing/MarkupParser.scala
@@ -581,7 +581,7 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
val pos = this.pos
val (qname, aMap, scope) = xTag(pscope)
val (pre, local) = Utility.prefix(qname) match {
- case Some(p) => (p, qname drop p.length)
+ case Some(p) => (p, qname drop p.length+1)
case _ => (null, qname)
}
val ts = {
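
The +1 also drops the ':' separating prefix and local name: for a qualified name such as "svg:rect" with prefix "svg", dropping p.length left ":rect", whereas dropping p.length+1 yields "rect". The arithmetic, spelled out:

    val qname = "svg:rect"
    val p     = "svg"
    assert((qname drop p.length)     == ":rect")   // old behaviour: colon retained
    assert((qname drop p.length + 1) == "rect")    // fixed: local name only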
diff --git a/src/partest/scala/tools/partest/PartestTask.scala b/src/partest/scala/tools/partest/PartestTask.scala
index a5eb991cf7..59781b0aa2 100644
--- a/src/partest/scala/tools/partest/PartestTask.scala
+++ b/src/partest/scala/tools/partest/PartestTask.scala
@@ -145,7 +145,7 @@ class PartestTask extends Task {
Array()
private def getPosFiles = getFilesAndDirs(posFiles)
- private def getNegFiles = getFiles(negFiles)
+ private def getNegFiles = getFilesAndDirs(negFiles)
private def getRunFiles = getFiles(runFiles)
private def getJvmFiles = getFilesAndDirs(jvmFiles)
private def getResidentFiles = getFiles(residentFiles)