summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorMartin Odersky <odersky@gmail.com>2011-05-09 08:14:40 +0000
committerMartin Odersky <odersky@gmail.com>2011-05-09 08:14:40 +0000
commite6c4b6b3e5515ee1d82b41bf03449b4b947f73b9 (patch)
treea3ee33bf49d110cf16e7049ac424700561e3ff6d
parentc3d61ced61841d2d02c53be47349ebef92ea41d3 (diff)
downloadscala-e6c4b6b3e5515ee1d82b41bf03449b4b947f73b9.tar.gz
scala-e6c4b6b3e5515ee1d82b41bf03449b4b947f73b9.tar.bz2
scala-e6c4b6b3e5515ee1d82b41bf03449b4b947f73b9.zip
Made 2.8 branch compatible with new presentatio...
Made 2.8 branch compatible with new presentation compiler
-rw-r--r--src/compiler/scala/tools/nsc/Global.scala15
-rw-r--r--src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala2
-rw-r--r--src/compiler/scala/tools/nsc/ast/parser/Parsers.scala139
-rw-r--r--src/compiler/scala/tools/nsc/ast/parser/Scanners.scala37
-rw-r--r--src/compiler/scala/tools/nsc/interactive/BuildManager.scala4
-rw-r--r--src/compiler/scala/tools/nsc/interactive/CompilerControl.scala312
-rw-r--r--src/compiler/scala/tools/nsc/interactive/Global.scala848
-rw-r--r--src/compiler/scala/tools/nsc/interactive/InteractiveReporter.scala43
-rw-r--r--src/compiler/scala/tools/nsc/interactive/Picklers.scala175
-rw-r--r--src/compiler/scala/tools/nsc/interactive/PresentationCompilerThread.scala55
-rw-r--r--src/compiler/scala/tools/nsc/interactive/REPL.scala27
-rw-r--r--src/compiler/scala/tools/nsc/interactive/RangePositions.scala36
-rw-r--r--src/compiler/scala/tools/nsc/interactive/RefinedBuildManager.scala15
-rw-r--r--src/compiler/scala/tools/nsc/interactive/Response.scala6
-rw-r--r--src/compiler/scala/tools/nsc/interactive/RichCompilationUnits.scala14
-rw-r--r--src/compiler/scala/tools/nsc/io/Lexer.scala301
-rw-r--r--src/compiler/scala/tools/nsc/io/Pickler.scala455
-rw-r--r--src/compiler/scala/tools/nsc/io/PrettyWriter.scala41
-rw-r--r--src/compiler/scala/tools/nsc/io/Replayer.scala74
-rw-r--r--src/compiler/scala/tools/nsc/reporters/Reporter.scala17
-rw-r--r--src/compiler/scala/tools/nsc/settings/ScalaSettings.scala9
-rw-r--r--src/compiler/scala/tools/nsc/symtab/BrowsingLoaders.scala118
-rw-r--r--src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala119
-rw-r--r--src/compiler/scala/tools/nsc/symtab/Symbols.scala60
-rw-r--r--src/compiler/scala/tools/nsc/symtab/Types.scala92
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Analyzer.scala12
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Contexts.scala39
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Implicits.scala4
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Namers.scala108
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala9
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Typers.scala103
-rw-r--r--src/compiler/scala/tools/nsc/util/MultiHashMap.scala10
-rw-r--r--src/compiler/scala/tools/nsc/util/Position.scala16
-rw-r--r--src/compiler/scala/tools/nsc/util/SourceFile.scala8
-rw-r--r--src/compiler/scala/tools/nsc/util/WorkScheduler.scala12
-rw-r--r--test/files/neg/bug1286.check5
-rw-r--r--test/files/neg/names-defaults-neg.check22
37 files changed, 2701 insertions, 661 deletions
diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala
index 94d3245d7a..ef3f596d93 100644
--- a/src/compiler/scala/tools/nsc/Global.scala
+++ b/src/compiler/scala/tools/nsc/Global.scala
@@ -124,6 +124,10 @@ class Global(var settings: Settings, var reporter: Reporter) extends SymbolTable
// ------------ Hooks for interactive mode-------------------------
+ /** Called from parser, which signals hereby that a method definition has been parsed.
+ */
+ def signalParseProgress(pos: Position) {}
+
/** Called every time an AST node is successfully typechecked in typerPhase.
*/
def signalDone(context: analyzer.Context, old: Tree, result: Tree) {}
@@ -573,6 +577,14 @@ class Global(var settings: Settings, var reporter: Reporter) extends SymbolTable
// ----------- Runs ---------------------------------------
+ /** Remove the current run when not needed anymore. Used by the build
+ * manager to save on the memory foot print. The current run holds on
+ * to all compilation units, which in turn hold on to trees.
+ */
+ private [nsc] def dropRun() {
+ curRun = null
+ }
+
private var curRun: Run = null
private var curRunId = 0
@@ -982,6 +994,9 @@ class Global(var settings: Settings, var reporter: Reporter) extends SymbolTable
def forJVM : Boolean = settings.target.value startsWith "jvm"
def forMSIL: Boolean = settings.target.value == "msil"
+ def forInteractive = onlyPresentation
+ def forScaladoc = onlyPresentation
+ @deprecated("Use forInteractive or forScaladoc, depending on what you're after")
def onlyPresentation = false
def createJavadoc = false
}
diff --git a/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala b/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala
index 60df523d92..cd68d4aa32 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala
@@ -48,7 +48,7 @@ trait MarkupParsers
import global._
- class MarkupParser(parser: UnitParser, final val preserveWS: Boolean) extends scala.xml.parsing.MarkupParserCommon {
+ class MarkupParser(parser: SourceFileParser, final val preserveWS: Boolean) extends scala.xml.parsing.MarkupParserCommon {
import Tokens.{ EMPTY, LBRACE, RBRACE }
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
index 61d45114a9..6d35ef4199 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2010 LAMP/EPFL
+ * Copyright 2005-2011 LAMP/EPFL
* @author Martin Odersky
*/
//todo: allow infix type patterns
@@ -9,7 +9,7 @@ package scala.tools.nsc
package ast.parser
import scala.collection.mutable.ListBuffer
-import util.{ OffsetPosition, BatchSourceFile }
+import util.{ SourceFile, OffsetPosition, FreshNameCreator }
import symtab.Flags
import Tokens._
@@ -59,80 +59,121 @@ self =>
case class OpInfo(operand: Tree, operator: Name, offset: Offset)
- class UnitParser(val unit: global.CompilationUnit, patches: List[BracePatch]) extends Parser {
-
- def this(unit: global.CompilationUnit) = this(unit, List())
+ class SourceFileParser(val source: SourceFile) extends Parser {
/** The parse starting point depends on whether the source file is self-contained:
* if not, the AST will be supplemented.
*/
def parseStartRule =
- if (unit.source.isSelfContained) () => compilationUnit()
+ if (source.isSelfContained) () => compilationUnit()
else () => scriptBody()
- val in = new UnitScanner(unit, patches)
+ def newScanner = new SourceFileScanner(source)
+
+ val in = newScanner
in.init()
- def freshName(pos: Position, prefix: String): Name =
- unit.fresh.newName(pos, prefix)
+ private val globalFresh = new FreshNameCreator.Default
+
+ override def freshName(pos: Position, prefix: String): Name = newTermName(globalFresh.newName(prefix))
+
+ def o2p(offset: Int): Position = new OffsetPosition(source, offset)
+ def r2p(start: Int, mid: Int, end: Int): Position = rangePos(source, start, mid, end)
+
+ // suppress warnings; silent abort on errors
+ def warning(offset: Int, msg: String) {}
+ def deprecationWarning(offset: Int, msg: String) {}
+
+ def syntaxError(offset: Int, msg: String): Unit = throw new MalformedInput(offset, msg)
+ def incompleteInputError(msg: String): Unit = throw new MalformedInput(source.content.length - 1, msg)
+
+ /** the markup parser */
+ lazy val xmlp = new MarkupParser(this, true)
+
+ object symbXMLBuilder extends SymbolicXMLBuilder(this, true) { // DEBUG choices
+ val global: self.global.type = self.global
+ def freshName(prefix: String): Name = SourceFileParser.this.freshName(prefix)
+ }
+
+ def xmlLiteral : Tree = xmlp.xLiteral
+ def xmlLiteralPattern : Tree = xmlp.xLiteralPattern
+ }
+
+ class OutlineParser(source: SourceFile) extends SourceFileParser(source) {
+
+ def skipBraces[T](body: T): T = {
+ accept(LBRACE)
+ var openBraces = 1
+ while (in.token != EOF && openBraces > 0) {
+ if (in.token == XMLSTART) xmlLiteral()
+ else {
+ if (in.token == LBRACE) openBraces += 1
+ else if (in.token == RBRACE) openBraces -= 1
+ in.nextToken()
+ }
+ }
+ body
+ }
+
+ override def blockExpr(): Tree = skipBraces(EmptyTree)
+
+ override def templateBody(isPre: Boolean) = skipBraces(emptyValDef, List(EmptyTree))
+ }
+
+ class UnitParser(val unit: global.CompilationUnit, patches: List[BracePatch]) extends SourceFileParser(unit.source) {
+
+ def this(unit: global.CompilationUnit) = this(unit, List())
+
+ override def newScanner = new UnitScanner(unit, patches)
- def o2p(offset: Int): Position = new OffsetPosition(unit.source,offset)
- def r2p(start: Int, mid: Int, end: Int): Position = rangePos(unit.source, start, mid, end)
- def warning(offset: Int, msg: String) { unit.warning(o2p(offset), msg) }
- def deprecationWarning(offset: Int,
- msg: String) {
+
+ override def warning(offset: Int, msg: String) {
+ unit.warning(o2p(offset), msg)
+ }
+
+ override def deprecationWarning(offset: Int, msg: String) {
unit.deprecationWarning(o2p(offset), msg)
}
- var smartParsing = false
+ private var smartParsing = false
+ private def withSmartParsing[T](body: => T): T = {
+ val saved = smartParsing
+ try {
+ smartParsing = true
+ body
+ }
+ finally smartParsing = saved // false
+ }
val syntaxErrors = new ListBuffer[(Int, String)]
+ def showSyntaxErrors() =
+ for ((offset, msg) <- syntaxErrors)
+ unit.error(o2p(offset), msg)
- def incompleteInputError(msg: String) {
- val offset = unit.source.asInstanceOf[BatchSourceFile].content.length - 1
+ override def syntaxError(offset: Int, msg: String) {
if (smartParsing) syntaxErrors += ((offset, msg))
- else unit.incompleteInputError(o2p(offset), msg)
+ else unit.error(o2p(offset), msg)
}
- def syntaxError(offset: Int, msg: String) {
+ override def incompleteInputError(msg: String) {
+ val offset = source.content.length - 1
if (smartParsing) syntaxErrors += ((offset, msg))
- else unit.error(o2p(offset), msg)
+ else unit.incompleteInputError(o2p(offset), msg)
}
/** parse unit. If there are inbalanced braces,
* try to correct them and reparse.
*/
- def smartParse(): Tree = try {
- smartParsing = true
+ def smartParse(): Tree = withSmartParsing {
val firstTry = parse()
if (syntaxErrors.isEmpty) firstTry
- else {
- val patches = in.healBraces()
- if (patches.isEmpty) {
- for ((offset, msg) <- syntaxErrors) unit.error(o2p(offset), msg)
- firstTry
- } else {
-// println(patches)
- new UnitParser(unit, patches).parse()
- }
+ else in.healBraces() match {
+ case Nil => showSyntaxErrors() ; firstTry
+ case patches => new UnitParser(unit, patches).parse()
}
- } finally {
- smartParsing = false
- }
-
- /** the markup parser */
- lazy val xmlp = new MarkupParser(this, true)
-
- object symbXMLBuilder extends SymbolicXMLBuilder(this, true) { // DEBUG choices
- val global: self.global.type = self.global
- def freshName(prefix: String): Name = UnitParser.this.freshName(prefix)
}
-
- def xmlLiteral : Tree = xmlp.xLiteral
-
- def xmlLiteralPattern : Tree = xmlp.xLiteralPattern
}
final val Local = 0
@@ -1456,7 +1497,9 @@ self =>
if (tok == EQUALS && eqOK) in.nextToken()
else accept(LARROW)
val rhs = expr()
- enums += makeGenerator(r2p(start, point, in.lastOffset), pat, tok == EQUALS, rhs)
+ enums += makeGenerator(r2p(start, point, in.lastOffset max start), pat, tok == EQUALS, rhs)
+ // why max above? IDE stress tests have shown that lastOffset could be less than start,
+ // I guess this happens if instead of a for-expression we sit on a closing paren.
while (in.token == IF) enums += makeFilter(in.offset, guard())
}
@@ -2201,7 +2244,7 @@ self =>
var newmods = mods
val nameOffset = in.offset
val name = ident()
- atPos(start, if (name == nme.ERROR) start else nameOffset) {
+ val result = atPos(start, if (name == nme.ERROR) start else nameOffset) {
// contextBoundBuf is for context bounded type parameters of the form
// [T : B] or [T : => B]; it contains the equivalent implicit parameter type,
// i.e. (B[T] or T => B)
@@ -2229,6 +2272,8 @@ self =>
}
DefDef(newmods, name, tparams, vparamss, restype, rhs)
}
+ signalParseProgress(result.pos)
+ result
}
}
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
index 445d869681..4605beb5cf 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
@@ -58,7 +58,9 @@ trait Scanners {
def resume(lastCode: Int) = {
token = lastCode
- assert(next.token == EMPTY || reporter.hasErrors)
+ if (next.token != EMPTY && !reporter.hasErrors)
+ syntaxError("unexpected end of input: possible missing '}' in XML block")
+
nextToken()
}
@@ -242,7 +244,8 @@ trait Scanners {
// println("blank line found at "+lastOffset+":"+(lastOffset to idx).map(buf(_)).toList)
return true
}
- } while (idx < end && ch <= ' ')
+ if (idx == end) return false
+ } while (ch <= ' ')
}
idx += 1; ch = buf(idx)
}
@@ -783,7 +786,7 @@ trait Scanners {
/** Backquoted idents like 22.`foo`. */
case '`' =>
- return setStrVal() /** Note the early return **/
+ return setStrVal() /** Note the early return */
/** These letters may be part of a literal, or a method invocation on an Int */
case 'd' | 'D' | 'f' | 'F' =>
@@ -1011,17 +1014,31 @@ trait Scanners {
else "'<" + token + ">'"
}
+ class MalformedInput(val offset: Int, val msg: String) extends Exception
+
+ /** A scanner for a given source file not necessarily attached to a compilation unit.
+ * Useful for looking inside source files that are not currently compiled to see what's there
+ */
+ class SourceFileScanner(val source: SourceFile) extends Scanner {
+ val buf = source.content
+ override val decodeUni: Boolean = !settings.nouescape.value
+
+ // suppress warnings, throw exception on errors
+ def warning(off: Offset, msg: String): Unit = {}
+ def deprecationWarning(off: Offset, msg: String) = {}
+ def error (off: Offset, msg: String): Unit = throw new MalformedInput(off, msg)
+ def incompleteInputError(off: Offset, msg: String): Unit = throw new MalformedInput(off, msg)
+ }
+
/** A scanner over a given compilation unit
*/
- class UnitScanner(unit: CompilationUnit, patches: List[BracePatch]) extends Scanner {
+ class UnitScanner(unit: CompilationUnit, patches: List[BracePatch]) extends SourceFileScanner(unit.source) {
def this(unit: CompilationUnit) = this(unit, List())
- val buf = unit.source.asInstanceOf[BatchSourceFile].content
- override val decodeUni: Boolean = !settings.nouescape.value
- def warning(off: Offset, msg: String) = unit.warning(unit.position(off), msg)
- def error (off: Offset, msg: String) = unit.error(unit.position(off), msg)
- def incompleteInputError(off: Offset, msg: String) = unit.incompleteInputError(unit.position(off), msg)
- def deprecationWarning(off: Offset, msg: String) = unit.deprecationWarning(unit.position(off), msg)
+ override def warning(off: Offset, msg: String) = unit.warning(unit.position(off), msg)
+ override def deprecationWarning(off: Offset, msg: String) = unit.deprecationWarning(unit.position(off), msg)
+ override def error (off: Offset, msg: String) = unit.error(unit.position(off), msg)
+ override def incompleteInputError(off: Offset, msg: String) = unit.incompleteInputError(unit.position(off), msg)
private var bracePatches: List[BracePatch] = patches
diff --git a/src/compiler/scala/tools/nsc/interactive/BuildManager.scala b/src/compiler/scala/tools/nsc/interactive/BuildManager.scala
index 302bba0e07..2827d97ae2 100644
--- a/src/compiler/scala/tools/nsc/interactive/BuildManager.scala
+++ b/src/compiler/scala/tools/nsc/interactive/BuildManager.scala
@@ -51,7 +51,7 @@ object BuildManagerTest extends EvalLoop {
def prompt = "builder > "
- def error(msg: String) {
+ private def buildError(msg: String) {
println(msg + "\n scalac -help gives more information")
}
@@ -67,7 +67,7 @@ object BuildManagerTest extends EvalLoop {
Set.empty ++ result._1
}
- val settings = new Settings(error)
+ val settings = new Settings(buildError)
settings.Ybuildmanagerdebug.value = true
val command = new CompilerCommand(args.toList, settings)
// settings.make.value = "off"
diff --git a/src/compiler/scala/tools/nsc/interactive/CompilerControl.scala b/src/compiler/scala/tools/nsc/interactive/CompilerControl.scala
index 1772f6f722..aefdece905 100644
--- a/src/compiler/scala/tools/nsc/interactive/CompilerControl.scala
+++ b/src/compiler/scala/tools/nsc/interactive/CompilerControl.scala
@@ -8,62 +8,73 @@ import scala.tools.nsc.symtab._
import scala.tools.nsc.ast._
/** Interface of interactive compiler to a client such as an IDE
+ * The model of the presentation compiler consists of the following parts:
+ *
+ * unitOfFile: The map from sourcefiles to loaded units. A sourcefile/unit is loaded if it occurs in that map.
+ *
+ * manipulated by: removeUnitOf, reloadSources.
+ *
+ * A call to reloadSources will add the given sources to the loaded units, and
+ * start a new background compiler pass to compile all loaded units (with the indicated sources first).
+ * Each background compiler pass has its own typer run.
+ * The background compiler thread can be interrupted each time an AST node is
+ * completely typechecked in the following ways:
+
+ * 1. by a new call to reloadSources. This starts a new background compiler pass with a new typer run.
+ * 2. by a call to askTypeTree. This starts a new typer run if the forceReload parameter = true
+ * 3. by a call to askTypeAt, askTypeCompletion, askScopeCompletion, askToDoFirst, askLinkPos, askLastType.
+ * 4. by raising an exception in the scheduler.
+ * 5. by passing a high-priority action wrapped in ask { ... }.
+ *
+ * Actions under 1-3 can themselves be interrupted if they involve typechecking
+ * AST nodes. High-priority actions under 5 cannot; they always run to completion.
+ * So these high-priority actions should be short.
+ *
+ * Normally, an interrupted action continues after the interrupting action is finished.
+ * However, if the interrupting action created a new typer run, the interrupted
+ * action is aborted. If there's an outstanding response, it will be set to
+ * a Right value with a FreshRunReq exception.
*/
trait CompilerControl { self: Global =>
- abstract class WorkItem extends (() => Unit)
-
- /** Info given for every member found by completion
- */
- abstract class Member {
- val sym: Symbol
- val tpe: Type
- val accessible: Boolean
- }
-
- case class TypeMember(
- sym: Symbol,
- tpe: Type,
- accessible: Boolean,
- inherited: Boolean,
- viaView: Symbol) extends Member
-
- case class ScopeMember(
- sym: Symbol,
- tpe: Type,
- accessible: Boolean,
- viaImport: Tree) extends Member
+ import syntaxAnalyzer.UnitParser
type Response[T] = scala.tools.nsc.interactive.Response[T]
/** The scheduler by which client and compiler communicate
* Must be initialized before starting compilerRunner
*/
- protected val scheduler = new WorkScheduler
+ protected[interactive] val scheduler = new WorkScheduler
+
+ /** Return the compilation unit attached to a source file, or None
+ * if source is not loaded.
+ */
+ def getUnitOf(s: SourceFile): Option[RichCompilationUnit] = getUnit(s)
+
+ /** Run operation `op` on a compilation unit associated with given `source`.
+ * If source has a loaded compilation unit, this one is passed to `op`.
+ * Otherwise a new compilation unit is created, but not added to the set of loaded units.
+ */
+ def onUnitOf[T](source: SourceFile)(op: RichCompilationUnit => T): T =
+ op(unitOfFile.getOrElse(source.file, new RichCompilationUnit(source)))
/** The compilation unit corresponding to a source file
- * if it does not yet exist creat a new one atomically
- */
- def unitOf(s: SourceFile): RichCompilationUnit = unitOfFile.synchronized {
- unitOfFile get s.file match {
- case Some(unit) =>
- unit
- case None =>
- val unit = new RichCompilationUnit(s)
- unitOfFile(s.file) = unit
- unit
- }
- }
+ * if it does not yet exist create a new one atomically
+ * Note: We want to get rid of this operation as it messes with compiler invariants.
+ */
+ @deprecated("use getUnitOf(s) or onUnitOf(s) instead")
+ def unitOf(s: SourceFile): RichCompilationUnit = getOrCreateUnitOf(s)
/** The compilation unit corresponding to a position */
- def unitOf(pos: Position): RichCompilationUnit = unitOf(pos.source)
+ @deprecated("use getUnitOf(pos.source) or onUnitOf(pos.source) instead")
+ def unitOf(pos: Position): RichCompilationUnit = getOrCreateUnitOf(pos.source)
- /** Remove the CompilationUnit corresponding to the given SourceFile
+ /** Removes the CompilationUnit corresponding to the given SourceFile
* from consideration for recompilation.
*/
- def removeUnitOf(s: SourceFile) = unitOfFile remove s.file
+ def removeUnitOf(s: SourceFile): Option[RichCompilationUnit] = { toBeRemoved += s.file; unitOfFile get s.file }
- /* returns the top level classes and objects that were deleted
+ /** Returns the top level classes and objects that were deleted
* in the editor since last time recentlyDeleted() was called.
*/
def recentlyDeleted(): List[Symbol] = deletedTopLevelSyms.synchronized {
@@ -73,14 +84,14 @@ trait CompilerControl { self: Global =>
}
/** Locate smallest tree that encloses position
+ * @pre Position must be loaded
*/
- def locateTree(pos: Position): Tree =
- new Locator(pos) locateIn unitOf(pos).body
+ def locateTree(pos: Position): Tree = onUnitOf(pos.source) { unit => new Locator(pos) locateIn unit.body }
/** Locates smallest context that encloses position as an optional value.
*/
def locateContext(pos: Position): Option[Context] =
- locateContext(unitOf(pos).contexts, pos)
+ for (unit <- getUnit(pos.source); cx <- locateContext(unit.contexts, pos)) yield cx
/** Returns the smallest context that contains given `pos`, throws FatalError if none exists.
*/
@@ -88,70 +99,203 @@ trait CompilerControl { self: Global =>
throw new FatalError("no context found for "+pos)
}
- /** Make sure a set of compilation units is loaded and parsed.
- * Return () to syncvar `result` on completion.
+ /** Makes sure a set of compilation units is loaded and parsed.
+ * Returns () to syncvar `response` on completion.
+ * Afterwards a new background compiler run is started with
+ * the given sources at the head of the list of to-be-compiled sources.
*/
- def askReload(sources: List[SourceFile], result: Response[Unit]) =
- scheduler postWorkItem new WorkItem {
- def apply() = reload(sources, result)
- override def toString = "reload "+sources
+ def askReload(sources: List[SourceFile], response: Response[Unit]) = {
+ val superseeded = scheduler.dequeueAll {
+ case ri: ReloadItem if ri.sources == sources => Some(ri)
+ case _ => None
}
+ superseeded foreach (_.response.set())
+ scheduler postWorkItem new ReloadItem(sources, response)
+ }
- /** Set sync var `result` to the smallest fully attributed tree that encloses position `pos`.
+ /** Sets sync var `response` to the smallest fully attributed tree that encloses position `pos`.
+ * Note: Unlike for most other ask... operations, the source file belonging to `pos` need not be loaded.
*/
- def askTypeAt(pos: Position, result: Response[Tree]) =
- scheduler postWorkItem new WorkItem {
- def apply() = self.getTypedTreeAt(pos, result)
- override def toString = "typeat "+pos.source+" "+pos.show
- }
+ def askTypeAt(pos: Position, response: Response[Tree]) =
+ scheduler postWorkItem new AskTypeAtItem(pos, response)
- /** Set sync var `result` to the fully attributed & typechecked tree contained in `source`.
+ /** Sets sync var `response` to the fully attributed & typechecked tree contained in `source`.
+ * @pre `source` needs to be loaded.
*/
- def askType(source: SourceFile, forceReload: Boolean, result: Response[Tree]) =
- scheduler postWorkItem new WorkItem {
- def apply() = self.getTypedTree(source, forceReload, result)
- override def toString = "typecheck"
+ def askType(source: SourceFile, forceReload: Boolean, response: Response[Tree]) = {
+ if (debugIDE) {
+ println("ask type called")
+ new Exception().printStackTrace()
+ }
+ scheduler postWorkItem new AskTypeItem(source, forceReload, response)
}
- /** Set sync var `result' to list of members that are visible
+ /** Sets sync var `response` to the position of the definition of the given link in
+ * the given sourcefile.
+ *
+ * @param sym The symbol referenced by the link (might come from a classfile)
+ * @param source The source file that's supposed to contain the definition
+ * @param response A response that will be set to the following:
+ * If `source` contains a definition that is referenced by the given link
+ * the position of that definition, otherwise NoPosition.
+ * Note: This operation does not automatically load `source`. If `source`
+ * is unloaded, it stays that way.
+ */
+ def askLinkPos(sym: Symbol, source: SourceFile, response: Response[Position]) =
+ scheduler postWorkItem new AskLinkPosItem(sym, source, response)
+
+ /** Sets sync var `response' to list of members that are visible
* as members of the tree enclosing `pos`, possibly reachable by an implicit.
+ * @pre source is loaded
*/
- def askTypeCompletion(pos: Position, result: Response[List[Member]]) =
- scheduler postWorkItem new WorkItem {
- def apply() = self.getTypeCompletion(pos, result)
- override def toString = "type completion "+pos.source+" "+pos.show
- }
+ def askTypeCompletion(pos: Position, response: Response[List[Member]]) =
+ scheduler postWorkItem new AskTypeCompletionItem(pos, response)
- /** Set sync var `result' to list of members that are visible
+ /** Sets sync var `response' to list of members that are visible
* as members of the scope enclosing `pos`.
+ * @pre source is loaded
*/
- def askScopeCompletion(pos: Position, result: Response[List[Member]]) =
- scheduler postWorkItem new WorkItem {
- def apply() = self.getScopeCompletion(pos, result)
- override def toString = "scope completion "+pos.source+" "+pos.show
- }
+ def askScopeCompletion(pos: Position, response: Response[List[Member]]) =
+ scheduler postWorkItem new AskScopeCompletionItem(pos, response)
- /** Ask to do unit first on present and subsequent type checking passes */
- def askToDoFirst(f: SourceFile) = {
- scheduler postWorkItem new WorkItem {
- def apply() = moveToFront(List(f))
- override def toString = "dofirst "+f
- }
- }
+ /** Asks to do unit corresponding to given source file on present and subsequent type checking passes */
+ def askToDoFirst(source: SourceFile) =
+ scheduler postWorkItem new AskToDoFirstItem(source)
+
+ /** If source is not yet loaded, loads it, and starts a new run, otherwise
+ * continues with current pass.
+ * Waits until source is fully type checked and returns body in response.
+ * @param source The source file that needs to be fully typed.
+ * @param response The response, which is set to the fully attributed tree of `source`.
+ * If the unit corresponding to `source` has been removed in the meantime
+ * then a NoSuchUnitError is raised in the response.
+ */
+ def askLoadedTyped(source: SourceFile, response: Response[Tree]) =
+ scheduler postWorkItem new AskLoadedTypedItem(source, response)
- /** Cancel current compiler run and start a fresh one where everything will be re-typechecked
+ /** Set sync var `response` to the parse tree of `source` with all top-level symbols entered.
+ * @param source The source file to be analyzed
+ * @param keepLoaded If set to `true`, source file will be kept as a loaded unit afterwards.
+ * If keepLoaded is `false` the operation is run at low priority, only after
+ * everything is brought up to date in a regular type checker run.
+ * @param response The response.
+ */
+ def askParsedEntered(source: SourceFile, keepLoaded: Boolean, response: Response[Tree]) =
+ scheduler postWorkItem new AskParsedEnteredItem(source, keepLoaded, response)
+
+ /** Cancels current compiler run and start a fresh one where everything will be re-typechecked
* (but not re-loaded).
*/
def askReset() = scheduler raise FreshRunReq
- /** Tell the compile server to shutdown, and do not restart again */
+ /** Tells the compile server to shutdown, and not to restart again */
def askShutdown() = scheduler raise ShutdownReq
- /** Ask for a computation to be done quickly on the presentation compiler thread */
+ @deprecated("use parseTree(source) instead")
+ def askParse(source: SourceFile, response: Response[Tree]) = respond(response) {
+ parseTree(source)
+ }
+
+ /** Returns parse tree for source `source`. No symbols are entered. Syntax errors are reported.
+ * Can be called asynchronously from presentation compiler.
+ */
+ def parseTree(source: SourceFile): Tree = ask { () =>
+ getUnit(source) match {
+ case Some(unit) if unit.status >= JustParsed =>
+ unit.body
+ case _ =>
+ new UnitParser(new CompilationUnit(source)).parse()
+ }
+ }
+
+ /** Asks for a computation to be done quickly on the presentation compiler thread */
def ask[A](op: () => A): A = scheduler doQuickly op
- // ---------------- Interpreted exceptions -------------------
+ /** Info given for every member found by completion
+ */
+ abstract class Member {
+ val sym: Symbol
+ val tpe: Type
+ val accessible: Boolean
+ def implicitlyAdded = false
+ }
+
+ case class TypeMember(
+ sym: Symbol,
+ tpe: Type,
+ accessible: Boolean,
+ inherited: Boolean,
+ viaView: Symbol) extends Member {
+ override def implicitlyAdded = viaView != NoSymbol
+ }
+
+ case class ScopeMember(
+ sym: Symbol,
+ tpe: Type,
+ accessible: Boolean,
+ viaImport: Tree) extends Member
+
+ // items that get sent to scheduler
+
+ abstract class WorkItem extends (() => Unit)
+
+ case class ReloadItem(sources: List[SourceFile], response: Response[Unit]) extends WorkItem {
+ def apply() = reload(sources, response)
+ override def toString = "reload "+sources
+ }
+
+ class AskTypeAtItem(val pos: Position, response: Response[Tree]) extends WorkItem {
+ def apply() = self.getTypedTreeAt(pos, response)
+ override def toString = "typeat "+pos.source+" "+pos.show
+ }
+
+ class AskTypeItem(val source: SourceFile, val forceReload: Boolean, response: Response[Tree]) extends WorkItem {
+ def apply() = self.getTypedTree(source, forceReload, response)
+ override def toString = "typecheck"
+ }
+
+ class AskTypeCompletionItem(val pos: Position, response: Response[List[Member]]) extends WorkItem {
+ def apply() = self.getTypeCompletion(pos, response)
+ override def toString = "type completion "+pos.source+" "+pos.show
+ }
+
+ class AskScopeCompletionItem(val pos: Position, response: Response[List[Member]]) extends WorkItem {
+ def apply() = self.getScopeCompletion(pos, response)
+ override def toString = "scope completion "+pos.source+" "+pos.show
+ }
+
+ class AskToDoFirstItem(val source: SourceFile) extends WorkItem {
+ def apply() = moveToFront(List(source))
+ override def toString = "dofirst "+source
+ }
+
+ class AskLinkPosItem(val sym: Symbol, val source: SourceFile, response: Response[Position]) extends WorkItem {
+ def apply() = self.getLinkPos(sym, source, response)
+ override def toString = "linkpos "+sym+" in "+source
+ }
+
+ class AskLoadedTypedItem(val source: SourceFile, response: Response[Tree]) extends WorkItem {
+ def apply() = self.waitLoadedTyped(source, response)
+ override def toString = "wait loaded & typed "+source
+ }
- object FreshRunReq extends ControlThrowable
- object ShutdownReq extends ControlThrowable
+ class AskParsedEnteredItem(val source: SourceFile, val keepLoaded: Boolean, response: Response[Tree]) extends WorkItem {
+ def apply() = self.getParsedEntered(source, keepLoaded, response)
+ override def toString = "getParsedEntered "+source+", keepLoaded = "+keepLoaded
+ }
}
+
+ // ---------------- Interpreted exceptions -------------------
+
+/** Signals a request for a fresh background compiler run.
+ * Note: The object has to stay top-level so that the PresentationCompilerThread may access it.
+ */
+object FreshRunReq extends ControlThrowable
+
+/** Signals a request for a shutdown of the presentation compiler.
+ * Note: The object has to stay top-level so that the PresentationCompilerThread may access it.
+ */
+object ShutdownReq extends ControlThrowable
+
+class NoSuchUnitError(file: AbstractFile) extends Exception("no unit found for file "+file)
+
diff --git a/src/compiler/scala/tools/nsc/interactive/Global.scala b/src/compiler/scala/tools/nsc/interactive/Global.scala
index 260ebf2351..e0189b99f6 100644
--- a/src/compiler/scala/tools/nsc/interactive/Global.scala
+++ b/src/compiler/scala/tools/nsc/interactive/Global.scala
@@ -1,17 +1,22 @@
package scala.tools.nsc
package interactive
-import java.io.{ PrintWriter, StringWriter }
+import java.io.{ PrintWriter, StringWriter, FileReader, FileWriter }
+import collection.mutable.{ArrayBuffer, ListBuffer, SynchronizedBuffer, HashMap}
import scala.collection.mutable
import mutable.{LinkedHashMap, SynchronizedMap,LinkedHashSet, SynchronizedSet}
import scala.concurrent.SyncVar
import scala.util.control.ControlThrowable
-import scala.tools.nsc.io.AbstractFile
-import scala.tools.nsc.util.{SourceFile, Position, RangePosition, NoPosition, WorkScheduler}
+import scala.tools.nsc.io.{ AbstractFile, LogReplay, Logger, NullLogger, Replayer }
+import scala.tools.nsc.util.{ SourceFile, BatchSourceFile, Position, RangePosition, NoPosition, WorkScheduler, MultiHashMap }
import scala.tools.nsc.reporters._
import scala.tools.nsc.symtab._
import scala.tools.nsc.ast._
+import scala.tools.nsc.io.Pickler._
+import scala.tools.nsc.typechecker.DivergentImplicit
+import scala.annotation.tailrec
+import scala.reflect.generic.Flags.{ACCESSOR, PARAMACCESSOR}
/** The main class of the presentation compiler in an interactive environment such as an IDE
*/
@@ -20,94 +25,148 @@ class Global(settings: Settings, reporter: Reporter)
with CompilerControl
with RangePositions
with ContextTrees
- with RichCompilationUnits {
-self =>
+ with RichCompilationUnits
+ with Picklers {
import definitions._
- val debugIDE = false
+ val debugIDE: Boolean = settings.YpresentationDebug.value
+ val verboseIDE: Boolean = settings.YpresentationVerbose.value
+
+ private def replayName = settings.YpresentationReplay.value
+ private def logName = settings.YpresentationLog.value
+ private def afterTypeDelay = settings.YpresentationDelay.value
+ private final val SleepTime = 10
+
+ val log =
+ if (replayName != "") new Replayer(new FileReader(replayName))
+ else if (logName != "") new Logger(new FileWriter(logName))
+ else NullLogger
+
+ import log.logreplay
+ debugLog("interactive compiler from 20 Feb")
+ debugLog("logger: " + log.getClass + " writing to " + (new java.io.File(logName)).getAbsolutePath)
+ debugLog("classpath: "+classPath)
+
+ private var curTime = System.nanoTime
+ private def timeStep = {
+ val last = curTime
+ curTime = System.nanoTime
+ ", delay = " + (curTime - last) / 1000000 + "ms"
+ }
- override def onlyPresentation = true
+ /** Print msg only when debugIDE is true. */
+ @inline final def debugLog(msg: => String) =
+ if (debugIDE) println(msg)
- /** A list indicating in which order some units should be typechecked.
- * All units in firsts are typechecked before any unit not in this list
- * Modified by askToDoFirst, reload, typeAtTree.
- */
- var firsts: List[SourceFile] = List()
+ /** Inform with msg only when verboseIDE is true. */
+ @inline final def informIDE(msg: => String) =
+ if (verboseIDE) println("["+msg+"]")
+
+ override def forInteractive = true
/** A map of all loaded files to the rich compilation units that correspond to them.
*/
val unitOfFile = new LinkedHashMap[AbstractFile, RichCompilationUnit] with
SynchronizedMap[AbstractFile, RichCompilationUnit]
+ /** A list containing all those files that need to be removed
+ * Units are removed by getUnit, typically once a unit has finished compiling.
+ */
+ protected val toBeRemoved = new ArrayBuffer[AbstractFile] with SynchronizedBuffer[AbstractFile]
+
+ /** A map that associates with each abstract file the set of responses that are waiting
+ * (via waitLoadedTyped) for the unit associated with the abstract file to be loaded and completely typechecked.
+ */
+ protected val waitLoadedTypeResponses = new MultiHashMap[SourceFile, Response[Tree]]
+
+ /** A map that associates with each abstract file the set of responses that are waiting
+ * (via build) for the unit associated with the abstract file to be parsed and entered
+ */
+ protected var getParsedEnteredResponses = new MultiHashMap[SourceFile, Response[Tree]]
+
+ /** The compilation unit corresponding to a source file
+ * if it does not yet exist create a new one atomically
+ * Note: We want to remove this.
+ */
+ protected[interactive] def getOrCreateUnitOf(source: SourceFile): RichCompilationUnit =
+ unitOfFile.getOrElse(source.file, { println("precondition violated: "+source+" is not loaded"); new Exception().printStackTrace(); new RichCompilationUnit(source) })
+
+ /** Work through toBeRemoved list to remove any units.
+ * Then optionally return the unit associated with the given source.
+ */
+ protected[interactive] def getUnit(s: SourceFile): Option[RichCompilationUnit] = {
+ toBeRemoved.synchronized {
+ for (f <- toBeRemoved) {
+ unitOfFile -= f
+ allSources = allSources filter (_.file != f)
+ }
+ toBeRemoved.clear()
+ }
+ unitOfFile get s.file
+ }
+
+ /** A list giving all files to be typechecked in the order they should be checked.
+ */
+ protected var allSources: List[SourceFile] = List()
+
/** The currently active typer run */
private var currentTyperRun: TyperRun = _
+ newTyperRun()
/** Is a background compiler run needed?
- * Note: outOfDate is true as long as there is a backgroud compile scheduled or going on.
+ * Note: outOfDate is true as long as there is a background compile scheduled or going on.
*/
- private var outOfDate = false
+ protected[interactive] var outOfDate = false
/** Units compiled by a run with id >= minRunId are considered up-to-date */
- private[interactive] var minRunId = 1
+ protected[interactive] var minRunId = 1
+
+ private var interruptsEnabled = true
private val NoResponse: Response[_] = new Response[Any]
- private var pendingResponse: Response[_] = NoResponse
- /** Is a reload/background compiler currently running? */
- private var acting = false
+ /** The response that is currently pending, i.e. the compiler
+ * is working on providing an answer for it.
+ */
+ private var pendingResponse: Response[_] = NoResponse
// ----------- Overriding hooks in nsc.Global -----------------------
- /** Called from typechecker, which signal hereby that a node has been completely typechecked.
- * If the node is included in unit.targetPos, abandons run and returns newly attributed tree.
+ /** Called from parser, which signals hereby that a method definition has been parsed.
+ */
+ override def signalParseProgress(pos: Position) {
+ checkForMoreWork(pos)
+ }
+
+ /** Called from typechecker, which signals hereby that a node has been completely typechecked.
+ * If the node includes unit.targetPos, abandons run and returns newly attributed tree.
* Otherwise, if there's some higher priority work to be done, also abandons run with a FreshRunReq.
* @param context The context that typechecked the node
* @param old The original node
* @param result The transformed node
*/
override def signalDone(context: Context, old: Tree, result: Tree) {
- def integrateNew() {
- context.unit.body = new TreeReplacer(old, result) transform context.unit.body
- }
- if (activeLocks == 0) {
+ if (interruptsEnabled && analyzer.lockedCount == 0) {
if (context.unit != null &&
result.pos.isOpaqueRange &&
(result.pos includes context.unit.targetPos)) {
- integrateNew()
- var located = new Locator(context.unit.targetPos) locateIn result
+ var located = new TypedLocator(context.unit.targetPos) locateIn result
if (located == EmptyTree) {
println("something's wrong: no "+context.unit+" in "+result+result.pos)
located = result
}
throw new TyperResult(located)
}
- val typerRun = currentTyperRun
-
- while(true)
- try {
- try {
- pollForWork()
- } catch {
- case ex : Throwable =>
- if (context.unit != null) integrateNew()
- throw ex
- }
- if (typerRun == currentTyperRun)
- return
-
- // @Martin
- // Guard against NPEs in integrateNew if context.unit == null here.
- // But why are we doing this at all? If it was non-null previously
- // integrateNew will already have been called. If it was null previously
- // it will still be null now?
- if (context.unit != null)
- integrateNew()
- throw FreshRunReq
- }
- catch {
- case ex : ValidateException => // Ignore, this will have been reported elsewhere
- }
+ try {
+ checkForMoreWork(old.pos)
+ } catch {
+ case ex: ValidateException => // Ignore, this will have been reported elsewhere
+ debugLog("validate exception caught: "+ex)
+ case ex: Throwable =>
+ log.flush()
+ throw ex
+ }
}
}
@@ -131,43 +190,102 @@ self =>
*/
override def registerTopLevelSym(sym: Symbol) { currentTopLevelSyms += sym }
+ /** Symbol loaders in the IDE parse all source files loaded from a package for
+ * top-level idents. Therefore, we can detect top-level symbols that have a name
+ * different from their source file
+ */
+ override lazy val loaders = new BrowsingLoaders {
+ val global: Global.this.type = Global.this
+ }
+
// ----------------- Polling ---------------------------------------
+ case class WorkEvent(atNode: Int, atMillis: Long)
+
+ private var moreWorkAtNode: Int = -1
+ private var nodesSeen = 0
+ private var lastWasReload = false
+
+ /** The number of pollForWorks after which the presentation compiler yields.
+ * Yielding improves responsiveness on systems with few cores because it
+ * gives the UI thread a chance to get new tasks and interrupt the presentation
+ * compiler with them.
+ */
+ private final val yieldPeriod = 10
+
/** Called from runner thread and signalDone:
* Poll for interrupts and execute them immediately.
* Then, poll for exceptions and execute them.
* Then, poll for work reload/typedTreeAt/doFirst commands during background checking.
+ * @param pos The position of the tree if polling while typechecking, NoPosition otherwise
+ *
*/
- def pollForWork() {
- scheduler.pollInterrupt() match {
- case Some(ir) =>
- ir.execute(); pollForWork()
- case _ =>
- }
- if (pendingResponse.isCancelled)
- throw CancelException
- scheduler.pollThrowable() match {
- case Some(ex @ FreshRunReq) =>
- currentTyperRun = newTyperRun
- minRunId = currentRunId
- if (outOfDate) throw ex
- else outOfDate = true
- case Some(ex: Throwable) => throw ex
- case _ =>
- }
- scheduler.nextWorkItem() match {
- case Some(action) =>
- try {
- acting = true
- if (debugIDE) println("picked up work item: "+action)
- action()
- if (debugIDE) println("done with work item: "+action)
- } finally {
- if (debugIDE) println("quitting work item: "+action)
- acting = false
- }
+ protected[interactive] def pollForWork(pos: Position) {
+ if (pos == NoPosition || nodesSeen % yieldPeriod == 0)
+ Thread.`yield`()
+
+ def nodeWithWork(): Option[WorkEvent] =
+ if (scheduler.moreWork || pendingResponse.isCancelled) Some(new WorkEvent(nodesSeen, System.currentTimeMillis))
+ else None
+
+ nodesSeen += 1
+ logreplay("atnode", nodeWithWork()) match {
+ case Some(WorkEvent(id, _)) =>
+ debugLog("some work at node "+id+" current = "+nodesSeen)
+// assert(id >= nodesSeen)
+ moreWorkAtNode = id
case None =>
}
+
+ if (nodesSeen >= moreWorkAtNode) {
+
+ logreplay("asked", scheduler.pollInterrupt()) match {
+ case Some(ir) =>
+ try {
+ interruptsEnabled = false
+ debugLog("ask started"+timeStep)
+ ir.execute()
+ } finally {
+ debugLog("ask finished"+timeStep)
+ interruptsEnabled = true
+ }
+ pollForWork(pos)
+ case _ =>
+ }
+
+ if (logreplay("cancelled", pendingResponse.isCancelled)) {
+ throw CancelException
+ }
+
+ logreplay("exception thrown", scheduler.pollThrowable()) match {
+ case Some(ex @ FreshRunReq) =>
+ newTyperRun()
+ minRunId = currentRunId
+ demandNewCompilerRun()
+ case Some(ex: Throwable) => log.flush(); throw ex
+ case _ =>
+ }
+
+ lastWasReload = false
+
+ logreplay("workitem", scheduler.nextWorkItem()) match {
+ case Some(action) =>
+ try {
+ debugLog("picked up work item at "+pos+": "+action+timeStep)
+ action()
+ debugLog("done with work item: "+action)
+ } finally {
+ debugLog("quitting work item: "+action+timeStep)
+ }
+ case None =>
+ }
+ }
+ }
+
+ protected def checkForMoreWork(pos: Position) {
+ val typerRun = currentTyperRun
+ pollForWork(pos)
+ if (typerRun != currentTyperRun) demandNewCompilerRun()
}
def debugInfo(source : SourceFile, start : Int, length : Int): String = {
@@ -207,104 +325,118 @@ self =>
// ----------------- The Background Runner Thread -----------------------
- /** The current presentation compiler runner */
- private var compileRunner = newRunnerThread
+ private var threadId = 0
- private var threadId = 1
+ /** The current presentation compiler runner */
+ @volatile protected[interactive] var compileRunner = newRunnerThread()
/** Create a new presentation compiler runner.
*/
- def newRunnerThread: Thread = new Thread("Scala Presentation Compiler V"+threadId) {
- override def run() {
- try {
- while (true) {
- scheduler.waitForMoreWork()
- pollForWork()
- while (outOfDate) {
- try {
- backgroundCompile()
- outOfDate = false
- } catch {
- case FreshRunReq =>
- }
- }
- }
- } catch {
- case ShutdownReq =>
- ;
- case ex =>
- outOfDate = false
- compileRunner = newRunnerThread
- ex match {
- case FreshRunReq => // This shouldn't be reported
- case _ : ValidateException => // This will have been reported elsewhere
- case _ => ex.printStackTrace(); inform("Fatal Error: "+ex)
- }
- }
- }
+ protected[interactive] def newRunnerThread(): Thread = {
threadId += 1
- start()
+ compileRunner = new PresentationCompilerThread(this, threadId)
+ compileRunner.start()
+ compileRunner
}
- /** Compile all given units
- */
- private def backgroundCompile() {
- if (debugIDE) inform("Starting new presentation compiler type checking pass")
- reporter.reset
+ def demandNewCompilerRun() = {
+ if (outOfDate) throw FreshRunReq // cancel background compile
+ else outOfDate = true // proceed normally and enable new background compile
+ }
+ /** Compile all loaded source files in the order given by `allSources`.
+ */
+ protected[interactive] def backgroundCompile() {
+ informIDE("Starting new presentation compiler type checking pass")
+ reporter.reset()
// remove any files in first that are no longer maintained by presentation compiler (i.e. closed)
- firsts = firsts filter (s => unitOfFile contains (s.file))
+ allSources = allSources filter (s => unitOfFile contains (s.file))
- val prefix = firsts map unitOf
+ for (s <- allSources; unit <- getUnit(s)) {
+ checkForMoreWork(NoPosition)
+ if (!unit.isUpToDate && unit.status != JustParsed) reset(unit) // reparse previously typechecked units.
+ parseAndEnter(unit)
+ serviceParsedEntered()
+ }
+
+ /** Sleep window */
+ if (afterTypeDelay > 0 && lastWasReload) {
+ val limit = System.currentTimeMillis() + afterTypeDelay
+ while (System.currentTimeMillis() < limit) {
+ Thread.sleep(SleepTime)
+ checkForMoreWork(NoPosition)
+ }
+ }
- val units = prefix ::: (unitOfFile.values.toList diff prefix) filter (!_.isUpToDate)
+ for (s <- allSources; unit <- getUnit(s)) {
+ if (!unit.isUpToDate) typeCheck(unit)
+ else debugLog("already up to date: "+unit)
+ for (r <- waitLoadedTypeResponses(unit.source))
+ r set unit.body
+ serviceParsedEntered()
+ }
- recompile(units)
+ informIDE("Everything is now up to date")
- if (debugIDE) inform("Everything is now up to date")
+ for ((source, rs) <- waitLoadedTypeResponses; r <- rs) r raise new NoSuchUnitError(source.file)
+ waitLoadedTypeResponses.clear()
}
- /** Reset unit to just-parsed state */
- def reset(unit: RichCompilationUnit): Unit =
- if (unit.status > JustParsed) {
- unit.depends.clear()
- unit.defined.clear()
- unit.synthetics.clear()
- unit.toCheck.clear()
- unit.targetPos = NoPosition
- unit.contexts.clear()
- unit.body = EmptyTree
- unit.status = NotLoaded
+ /** Service all pending getParsedEntered requests
+ */
+ def serviceParsedEntered() {
+ var atOldRun = true
+ for ((source, rs) <- getParsedEnteredResponses; r <- rs) {
+ if (atOldRun) { newTyperRun(); atOldRun = false }
+ getParsedEnteredNow(source, r)
}
+ getParsedEnteredResponses.clear()
+ }
- /** Parse unit and create a name index. */
- def parse(unit: RichCompilationUnit): Unit = {
- currentTyperRun.compileLate(unit)
- if (!reporter.hasErrors) validatePositions(unit.body)
- //println("parsed: [["+unit.body+"]]")
- unit.status = JustParsed
+ /** Reset unit to unloaded state */
+ protected def reset(unit: RichCompilationUnit): Unit = {
+ unit.depends.clear()
+ unit.defined.clear()
+ unit.synthetics.clear()
+ unit.toCheck.clear()
+ unit.targetPos = NoPosition
+ unit.contexts.clear()
+ unit.problems.clear()
+ unit.body = EmptyTree
+ unit.status = NotLoaded
}
- /** Make sure symbol and type attributes are reset and recompile units.
- */
- def recompile(units: List[RichCompilationUnit]) {
- for (unit <- units) {
- reset(unit)
- if (debugIDE) inform("parsing: "+unit)
- parse(unit)
- }
- for (unit <- units) {
- if (debugIDE) inform("type checking: "+unit)
- activeLocks = 0
- currentTyperRun.typeCheck(unit)
- unit.status = currentRunId
+ /** Parse unit and create a name index, unless this has already been done before */
+ protected def parseAndEnter(unit: RichCompilationUnit): Unit =
+ if (unit.status == NotLoaded) {
+ debugLog("parsing: "+unit)
+ currentTyperRun.compileLate(unit)
+ if (debugIDE && !reporter.hasErrors) validatePositions(unit.body)
if (!unit.isJava) syncTopLevelSyms(unit)
+ unit.status = JustParsed
}
+
+ /** Make sure unit is typechecked
+ */
+ protected def typeCheck(unit: RichCompilationUnit) {
+ debugLog("type checking: "+unit)
+ parseAndEnter(unit)
+ unit.status = PartiallyChecked
+ currentTyperRun.typeCheck(unit)
+ unit.lastBody = unit.body
+ unit.status = currentRunId
}
+ /** Update deleted and current top-level symbols sets */
def syncTopLevelSyms(unit: RichCompilationUnit) {
val deleted = currentTopLevelSyms filter { sym =>
- sym.sourceFile == unit.source.file && runId(sym.validTo) < currentRunId
+ /** We sync after namer phase and it resets all the top-level symbols
+ * that survive the new parsing
+ * round to NoPeriod.
+ */
+ sym.sourceFile == unit.source.file &&
+ sym.validTo != NoPeriod &&
+ runId(sym.validTo) < currentRunId
}
for (d <- deleted) {
d.owner.info.decls unlink d
@@ -313,12 +445,12 @@ self =>
}
}
- /** Move list of files to front of firsts */
+ /** Move list of files to front of allSources */
def moveToFront(fs: List[SourceFile]) {
- firsts = fs ::: (firsts diff fs)
+ allSources = fs ::: (allSources diff fs)
}
- // ----------------- Implementations of client commands -----------------------
+ // ----------------- Implementations of client commands -----------------------
def respond[T](result: Response[T])(op: => T): Unit =
respondGradually(result)(Stream(op))
@@ -332,74 +464,159 @@ self =>
while (!response.isCancelled && results.nonEmpty) {
val result = results.head
results = results.tail
- if (results.isEmpty) response set result
- else response setProvisionally result
+ if (results.isEmpty) {
+ response set result
+ debugLog("responded"+timeStep)
+ } else response setProvisionally result
}
}
} catch {
case CancelException =>
- ;
+ debugLog("cancelled")
+/* Commented out. Typing should always cancel requests
case ex @ FreshRunReq =>
scheduler.postWorkItem(() => respondGradually(response)(op))
throw ex
+*/
case ex =>
+ if (debugIDE) {
+ println("exception thrown during response: "+ex)
+ ex.printStackTrace()
+ }
response raise ex
- throw ex
} finally {
pendingResponse = prevResponse
}
}
+ protected def reloadSource(source: SourceFile) {
+ val unit = new RichCompilationUnit(source)
+ unitOfFile(source.file) = unit
+ reset(unit)
+ //parseAndEnter(unit)
+ }
+
/** Make sure a set of compilation units is loaded and parsed */
- def reloadSources(sources: List[SourceFile]) {
- currentTyperRun = newTyperRun
- for (source <- sources) {
- val unit = new RichCompilationUnit(source)
- unitOfFile(source.file) = unit
- parse(unit)
- }
+ protected def reloadSources(sources: List[SourceFile]) {
+ newTyperRun()
+ minRunId = currentRunId
+ sources foreach reloadSource
moveToFront(sources)
}
/** Make sure a set of compilation units is loaded and parsed */
- def reload(sources: List[SourceFile], response: Response[Unit]) {
+ protected def reload(sources: List[SourceFile], response: Response[Unit]) {
+ informIDE("reload: " + sources)
+ lastWasReload = true
respond(response)(reloadSources(sources))
- if (outOfDate) throw FreshRunReq // cancel background compile
- else outOfDate = true // proceed normally and enable new background compile
+ demandNewCompilerRun()
}
- /** A fully attributed tree located at position `pos` */
- def typedTreeAt(pos: Position): Tree = {
- val unit = unitOf(pos)
- val sources = List(unit.source)
- if (unit.status == NotLoaded) reloadSources(sources)
- moveToFront(sources)
- val typedTree = currentTyperRun.typedTreeAt(pos)
- new Locator(pos) locateIn typedTree
+ /** A fully attributed tree located at position `pos` */
+ protected def typedTreeAt(pos: Position): Tree = getUnit(pos.source) match {
+ case None =>
+ reloadSources(List(pos.source))
+ val result = typedTreeAt(pos)
+ removeUnitOf(pos.source)
+ result
+ case Some(unit) =>
+ informIDE("typedTreeAt " + pos)
+ parseAndEnter(unit)
+ val tree = locateTree(pos)
+ debugLog("at pos "+pos+" was found: "+tree.getClass+" "+tree.pos.show)
+ tree match {
+ case Import(expr, _) =>
+ debugLog("import found"+expr.tpe+" "+expr.tpe.members)
+ case _ =>
+ }
+ if (stabilizedType(tree) ne null) {
+ debugLog("already attributed")
+ tree
+ } else {
+ unit.targetPos = pos
+ try {
+ debugLog("starting targeted type check")
+ typeCheck(unit)
+ println("tree not found at "+pos)
+ EmptyTree
+ } catch {
+ case ex: TyperResult => new Locator(pos) locateIn ex.tree
+ } finally {
+ unit.targetPos = NoPosition
+ }
+ }
}
/** A fully attributed tree corresponding to the entire compilation unit */
- def typedTree(source: SourceFile, forceReload: Boolean): Tree = {
- val unit = unitOf(source)
- val sources = List(source)
- if (unit.status == NotLoaded || forceReload) reloadSources(sources)
- moveToFront(sources)
- currentTyperRun.typedTree(unitOf(source))
+ protected def typedTree(source: SourceFile, forceReload: Boolean): Tree = {
+ informIDE("typedTree " + source + " forceReload: " + forceReload)
+ val unit = getOrCreateUnitOf(source)
+ if (forceReload) reset(unit)
+ parseAndEnter(unit)
+ if (unit.status <= PartiallyChecked) {
+ //newTyperRun() // not needed for idempotent type checker phase
+ typeCheck(unit)
+ }
+ unit.body
}
/** Set sync var `response` to a fully attributed tree located at position `pos` */
- def getTypedTreeAt(pos: Position, response: Response[Tree]) {
+ protected def getTypedTreeAt(pos: Position, response: Response[Tree]) {
respond(response)(typedTreeAt(pos))
}
- /** Set sync var `response` to a fully attributed tree corresponding to the entire compilation unit */
- def getTypedTree(source : SourceFile, forceReload: Boolean, response: Response[Tree]) {
+ /** Set sync var `response` to a fully attributed tree corresponding to the
+ * entire compilation unit */
+ protected def getTypedTree(source: SourceFile, forceReload: Boolean, response: Response[Tree]) {
respond(response)(typedTree(source, forceReload))
}
+ /** Implements CompilerControl.askLinkPos */
+ protected def getLinkPos(sym: Symbol, source: SourceFile, response: Response[Position]) {
+ informIDE("getLinkPos "+sym+" "+source)
+ respond(response) {
+ val preExisting = unitOfFile isDefinedAt source.file
+ reloadSources(List(source))
+ parseAndEnter(getUnit(source).get)
+ val owner = sym.owner
+ if (owner.isClass) {
+ val pre = adaptToNewRunMap(ThisType(owner))
+ val newsym = pre.decl(sym.name) filter { alt =>
+ sym.isType || {
+ try {
+ val tp1 = pre.memberType(alt) onTypeError NoType
+ val tp2 = adaptToNewRunMap(sym.tpe)
+ matchesType(tp1, tp2, false)
+ } catch {
+ case ex: Throwable =>
+ println("error in hyperlinking: "+ex)
+ ex.printStackTrace()
+ false
+ }
+ }
+ }
+ if (!preExisting) removeUnitOf(source)
+ if (newsym == NoSymbol) {
+ debugLog("link not found "+sym+" "+source+" "+pre)
+ NoPosition
+ } else if (newsym.isOverloaded) {
+ debugLog("link ambiguous "+sym+" "+source+" "+pre+" "+newsym.alternatives)
+ NoPosition
+ } else {
+ debugLog("link found for "+newsym+": "+newsym.pos)
+ newsym.pos
+ }
+ } else
+ debugLog("link not in class "+sym+" "+source+" "+owner)
+ NoPosition
+ }
+ }
+
def stabilizedType(tree: Tree): Type = tree match {
- case Ident(_) if tree.symbol.isStable => singleType(NoPrefix, tree.symbol)
- case Select(qual, _) if qual.tpe != null && tree.symbol.isStable => singleType(qual.tpe, tree.symbol)
+ case Ident(_) if tree.symbol.isStable =>
+ singleType(NoPrefix, tree.symbol)
+ case Select(qual, _) if qual.tpe != null && tree.symbol.isStable =>
+ singleType(qual.tpe, tree.symbol)
case Import(expr, selectors) =>
tree.symbol.info match {
case analyzer.ImportType(expr) => expr match {
@@ -415,66 +632,89 @@ self =>
import analyzer.{SearchResult, ImplicitSearch}
- def getScopeCompletion(pos: Position, response: Response[List[Member]]) {
+ protected def getScopeCompletion(pos: Position, response: Response[List[Member]]) {
+ informIDE("getScopeCompletion" + pos)
respond(response) { scopeMembers(pos) }
}
- val Dollar = newTermName("$")
+ private val Dollar = newTermName("$")
+
+ private class Members[M <: Member] extends LinkedHashMap[Name, Set[M]] {
+ override def default(key: Name) = Set()
+
+ private def matching(sym: Symbol, symtpe: Type, ms: Set[M]): Option[M] = ms.find { m =>
+ (m.sym.name == sym.name) && (m.sym.isType || (m.tpe matches symtpe))
+ }
+
+ private def keepSecond(m: M, sym: Symbol, implicitlyAdded: Boolean): Boolean =
+ m.sym.hasFlag(ACCESSOR | PARAMACCESSOR) &&
+ !sym.hasFlag(ACCESSOR | PARAMACCESSOR) &&
+ (!implicitlyAdded || m.implicitlyAdded)
+
+ def add(sym: Symbol, pre: Type, implicitlyAdded: Boolean)(toMember: (Symbol, Type) => M) {
+ if ((sym.isGetter || sym.isSetter) && sym.accessed != NoSymbol) {
+ add(sym.accessed, pre, implicitlyAdded)(toMember)
+ } else if (!sym.name.decode.containsName(Dollar) && !sym.isSynthetic && sym.hasRawInfo) {
+ val symtpe = pre.memberType(sym) onTypeError ErrorType
+ matching(sym, symtpe, this(sym.name)) match {
+ case Some(m) =>
+ if (keepSecond(m, sym, implicitlyAdded)) {
+ //print(" -+ "+sym.name)
+ this(sym.name) = this(sym.name) - m + toMember(sym, symtpe)
+ }
+ case None =>
+ //print(" + "+sym.name)
+ this(sym.name) = this(sym.name) + toMember(sym, symtpe)
+ }
+ }
+ }
+
+ def allMembers: List[M] = values.toList.flatten
+ }
/** Return all members visible without prefix in context enclosing `pos`. */
- def scopeMembers(pos: Position): List[ScopeMember] = {
+ protected def scopeMembers(pos: Position): List[ScopeMember] = {
typedTreeAt(pos) // to make sure context is entered
val context = doLocateContext(pos)
- val locals = new LinkedHashMap[Name, ScopeMember]
+ val locals = new Members[ScopeMember]
def addScopeMember(sym: Symbol, pre: Type, viaImport: Tree) =
- if (!sym.name.decode.containsName(Dollar) &&
- !sym.hasFlag(Flags.SYNTHETIC) &&
- !locals.contains(sym.name)) {
- //println("adding scope member: "+pre+" "+sym)
- locals(sym.name) = new ScopeMember(
- sym,
- pre.memberType(sym),
- context.isAccessible(sym, pre, false),
- viaImport)
+ locals.add(sym, pre, false) { (s, st) =>
+ new ScopeMember(s, st, context.isAccessible(s, pre, false), viaImport)
}
+ //print("add scope members")
var cx = context
while (cx != NoContext) {
for (sym <- cx.scope)
addScopeMember(sym, NoPrefix, EmptyTree)
if (cx == cx.enclClass) {
- val pre = cx.prefix
- for (sym <- pre.members)
- addScopeMember(sym, pre, EmptyTree)
+ val pre = cx.prefix
+ for (sym <- pre.members)
+ addScopeMember(sym, pre, EmptyTree)
}
cx = cx.outer
}
-
+ //print("\nadd imported members")
for (imp <- context.imports) {
val pre = imp.qual.tpe
for (sym <- imp.allImportedSymbols) {
addScopeMember(sym, pre, imp.qual)
}
}
- val result = locals.values.toList
- if (debugIDE) for (m <- result) println(m)
+ // println()
+ val result = locals.allMembers
+// if (debugIDE) for (m <- result) println(m)
result
}
- def getTypeCompletion(pos: Position, response: Response[List[Member]]) {
+ protected def getTypeCompletion(pos: Position, response: Response[List[Member]]) {
+ informIDE("getTypeCompletion " + pos)
respondGradually(response) { typeMembers(pos) }
- if (debugIDE) scopeMembers(pos)
+ //if (debugIDE) typeMembers(pos)
}
- def typeMembers(pos: Position): Stream[List[TypeMember]] = {
+ protected def typeMembers(pos: Position): Stream[List[TypeMember]] = {
var tree = typedTreeAt(pos)
- // Let's say you have something like val x: List[Int] and ypu want to get completion after List
- // Then the tree found at first is a TypeTree, ????
- tree match {
- case tt : TypeTree if tt.original != null => tree = tt.original // ???
- case _ =>
- }
-
// if tree consists of just x. or x.fo where fo is not yet a full member name
// ignore the selection and look in just x.
tree match {
@@ -485,59 +725,97 @@ self =>
val context = doLocateContext(pos)
if (tree.tpe == null)
+ // TODO: guard with try/catch to deal with ill-typed qualifiers.
tree = analyzer.newTyper(context).typedQualifier(tree)
- println("typeMembers at "+tree+" "+tree.tpe)
+ debugLog("typeMembers at "+tree+" "+tree.tpe)
val superAccess = tree.isInstanceOf[Super]
- val scope = new Scope
- val members = new LinkedHashMap[Symbol, TypeMember]
-
- def addTypeMember(sym: Symbol, pre: Type, inherited: Boolean, viaView: Symbol) {
- val symtpe = pre.memberType(sym)
- if (scope.lookupAll(sym.name) forall (sym => !(members(sym).tpe matches symtpe))) {
- scope enter sym
- members(sym) = new TypeMember(
- sym,
- symtpe,
- context.isAccessible(sym, pre, superAccess && (viaView == NoSymbol)),
+ val members = new Members[TypeMember]
+
+ def addTypeMember(sym: Symbol, pre: Type, inherited: Boolean, viaView: Symbol) = {
+ val implicitlyAdded = viaView != NoSymbol
+ members.add(sym, pre, implicitlyAdded) { (s, st) =>
+ new TypeMember(s, st,
+ context.isAccessible(s, pre, superAccess && !implicitlyAdded),
inherited,
viaView)
}
}
- /** Create a fucntion application of a given view function to `tree` and typechecked it.
+ /** Create a function application of a given view function to `tree` and typecheck it.
*/
def viewApply(view: SearchResult): Tree = {
assert(view.tree != EmptyTree)
- try {
- analyzer.newTyper(context.makeImplicit(reportAmbiguousErrors = false))
- .typed(Apply(view.tree, List(tree)) setPos tree.pos)
- } catch {
- case ex: TypeError => EmptyTree
- }
+ analyzer.newTyper(context.makeImplicit(reportAmbiguousErrors = false))
+ .typed(Apply(view.tree, List(tree)) setPos tree.pos)
+ .onTypeError(EmptyTree)
}
val pre = stabilizedType(tree)
- val ownerTpe = if (tree.tpe != null) tree.tpe else pre
-
- for (sym <- ownerTpe.decls)
- addTypeMember(sym, pre, false, NoSymbol)
- members.values.toList #:: {
- for (sym <- ownerTpe.members)
- addTypeMember(sym, pre, true, NoSymbol)
- members.values.toList #:: {
- val applicableViews: List[SearchResult] =
- new ImplicitSearch(tree, functionType(List(ownerTpe), AnyClass.tpe), isView = true, context.makeImplicit(reportAmbiguousErrors = false))
- .allImplicits
- for (view <- applicableViews) {
- val vtree = viewApply(view)
- val vpre = stabilizedType(vtree)
- for (sym <- vtree.tpe.members) {
- addTypeMember(sym, vpre, false, view.tree.symbol)
- }
+ val ownerTpe = tree.tpe match {
+ case analyzer.ImportType(expr) => expr.tpe
+ case null => pre
+ case _ => tree.tpe
+ }
+
+ //print("add members")
+ for (sym <- ownerTpe.members)
+ addTypeMember(sym, pre, sym.owner != ownerTpe.typeSymbol, NoSymbol)
+ members.allMembers #:: {
+ //print("\nadd pimped")
+ val applicableViews: List[SearchResult] =
+ if (ownerTpe.isErroneous) List()
+ else new ImplicitSearch(
+ tree, functionType(List(ownerTpe), AnyClass.tpe), isView = true,
+ context.makeImplicit(reportAmbiguousErrors = false)).allImplicits
+ for (view <- applicableViews) {
+ val vtree = viewApply(view)
+ val vpre = stabilizedType(vtree)
+ for (sym <- vtree.tpe.members) {
+ addTypeMember(sym, vpre, false, view.tree.symbol)
}
- Stream(members.values.toList)
+ }
+ //println()
+ Stream(members.allMembers)
+ }
+ }
+
+ /** Implements CompilerControl.askLoadedTyped */
+ protected def waitLoadedTyped(source: SourceFile, response: Response[Tree]) {
+ getUnit(source) match {
+ case Some(unit) =>
+ if (unit.isUpToDate) response set unit.body
+ else waitLoadedTypeResponses(source) += response
+ case None =>
+ reloadSources(List(source))
+ waitLoadedTyped(source, response)
+ }
+ }
+
+ /** Implements CompilerControl.askParsedEntered */
+ protected def getParsedEntered(source: SourceFile, keepLoaded: Boolean, response: Response[Tree]) {
+ getUnit(source) match {
+ case Some(unit) =>
+ getParsedEnteredNow(source, response)
+ case None =>
+ if (keepLoaded) {
+ reloadSources(List(source))
+ getParsedEnteredNow(source, response)
+ } else if (outOfDate) {
+ getParsedEnteredResponses(source) += response
+ } else {
+ getParsedEnteredNow(source, response)
+ }
+ }
+ }
+
+ /** Parses and enters the given source file, storing the parse tree in the response */
+ protected def getParsedEnteredNow(source: SourceFile, response: Response[Tree]) {
+ respond(response) {
+ onUnitOf(source) { unit =>
+ parseAndEnter(unit)
+ unit.body
}
}
}
@@ -557,7 +835,8 @@ self =>
class TyperRun extends Run {
// units is always empty
- /** canRedefine is used to detect double declarations in multiple source files.
+ /** canRedefine is used to detect double declarations of classes and objects
+ * in multiple source files.
* Since the IDE rechecks units several times in the same run, these tests
* are disabled by always returning true here.
*/
@@ -567,44 +846,6 @@ self =>
applyPhase(typerPhase, unit)
}
- def enterNames(unit: CompilationUnit): Unit = {
- applyPhase(namerPhase, unit)
- }
-
- /** Return fully attributed tree at given position
- * (i.e. largest tree that's contained by position)
- */
- def typedTreeAt(pos: Position): Tree = {
- println("starting typedTreeAt")
- val tree = locateTree(pos)
- println("at pos "+pos+" was found: "+tree+tree.pos.show)
- if (stabilizedType(tree) ne null) {
- println("already attributed")
- tree
- } else {
- val unit = unitOf(pos)
- assert(unit.isParsed)
- unit.targetPos = pos
- try {
- println("starting targeted type check")
- typeCheck(unit)
- throw new FatalError("tree not found")
- } catch {
- case ex: TyperResult =>
- ex.tree
- } finally {
- unit.targetPos = NoPosition
- }
- }
- }
-
- def typedTree(unit: RichCompilationUnit): Tree = {
- assert(unit.isParsed)
- unit.targetPos = NoPosition
- typeCheck(unit)
- unit.body
- }
-
/** Apply a phase to a compilation unit
* @return true iff typechecked correctly
*/
@@ -616,11 +857,28 @@ self =>
}
}
- def newTyperRun = new TyperRun
+ def newTyperRun() {
+ currentTyperRun = new TyperRun
+ }
class TyperResult(val tree: Tree) extends ControlThrowable
assert(globalPhase.id == 0)
+
+ implicit def addOnTypeError[T](x: => T): OnTypeError[T] = new OnTypeError(x)
+
+ class OnTypeError[T](op: => T) {
+ def onTypeError(alt: => T) = try {
+ op
+ } catch {
+ case ex: TypeError =>
+ debugLog("type error caught: "+ex)
+ alt
+ case ex: DivergentImplicit =>
+ debugLog("divergent implicit caught: "+ex)
+ alt
+ }
+ }
}
object CancelException extends Exception
diff --git a/src/compiler/scala/tools/nsc/interactive/InteractiveReporter.scala b/src/compiler/scala/tools/nsc/interactive/InteractiveReporter.scala
new file mode 100644
index 0000000000..3c3d986f81
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/interactive/InteractiveReporter.scala
@@ -0,0 +1,43 @@
+package scala.tools.nsc
+package interactive
+
+import collection.mutable.ArrayBuffer
+import util.Position
+import reporters.Reporter
+
+case class Problem(pos: Position, msg: String, severityLevel: Int)
+
+abstract class InteractiveReporter extends Reporter {
+
+ def compiler: Global
+
+ val otherProblems = new ArrayBuffer[Problem]
+
+ override def info0(pos: Position, msg: String, severity: Severity, force: Boolean): Unit = try {
+ severity.count += 1
+ val problems =
+ if (compiler eq null) {
+ otherProblems
+ } else if (pos.isDefined) {
+ compiler.getUnit(pos.source) match {
+ case Some(unit) =>
+ compiler.debugLog(pos.source.file.name + ":" + pos.line + ": " + msg)
+ unit.problems
+ case None =>
+ compiler.debugLog(pos.source.file.name + "[not loaded] :" + pos.line + ": " + msg)
+ otherProblems
+ }
+ } else {
+ compiler.debugLog("[no position] :" + msg)
+ otherProblems
+ }
+ problems += Problem(pos, msg, severity.id)
+ } catch {
+ case ex: UnsupportedOperationException =>
+ }
+
+ override def reset() {
+ super.reset()
+ otherProblems.clear()
+ }
+}
diff --git a/src/compiler/scala/tools/nsc/interactive/Picklers.scala b/src/compiler/scala/tools/nsc/interactive/Picklers.scala
new file mode 100644
index 0000000000..c22b0b8a29
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/interactive/Picklers.scala
@@ -0,0 +1,175 @@
+package scala.tools.nsc
+package interactive
+
+import util.{SourceFile, BatchSourceFile, InterruptReq}
+import io.{AbstractFile, PlainFile}
+
+import util.{Position, RangePosition, NoPosition, OffsetPosition, TransparentPosition, EmptyAction}
+import io.{Pickler, CondPickler}
+import io.Pickler._
+import collection.mutable
+import mutable.ListBuffer
+
+trait Picklers { self: Global =>
+
+ lazy val freshRunReq = singletonPickler(FreshRunReq)
+ lazy val shutdownReq = singletonPickler(ShutdownReq)
+
+ def defaultThrowable[T <: Throwable]: CondPickler[T] = javaInstancePickler[T] cond { _ => true }
+
+ implicit lazy val throwable: Pickler[Throwable] =
+ freshRunReq | shutdownReq | defaultThrowable
+
+ implicit def abstractFile: Pickler[AbstractFile] =
+ pkl[String]
+ .wrapped[AbstractFile] { new PlainFile(_) } { _.path }
+ .asClass (classOf[PlainFile])
+
+ private val sourceFilesSeen = new mutable.HashMap[AbstractFile, Array[Char]] {
+ override def default(key: AbstractFile) = Array()
+ }
+
+ type Diff = (Int /*start*/, Int /*end*/, String /*replacement*/)
+
+ def delta(f: AbstractFile, cs: Array[Char]): Diff = {
+ val bs = sourceFilesSeen(f)
+ var start = 0
+ while (start < bs.length && start < cs.length && bs(start) == cs(start)) start += 1
+ var end = bs.length
+ var end2 = cs.length
+ while (end > start && end2 > start && bs(end - 1) == cs(end2 - 1)) { end -= 1; end2 -= 1 }
+ sourceFilesSeen(f) = cs
+ (start, end, cs.slice(start, end2).mkString(""))
+ }
+
+ def patch(f: AbstractFile, d: Diff): Array[Char] = {
+ val (start, end, replacement) = d
+ val patched = sourceFilesSeen(f).patch(start, replacement, end - start)
+ sourceFilesSeen(f) = patched
+ patched
+ }
+
+ implicit lazy val sourceFile: Pickler[SourceFile] =
+ (pkl[AbstractFile] ~ pkl[Diff]).wrapped[SourceFile] {
+ case f ~ d => new BatchSourceFile(f, patch(f, d))
+ } {
+ f => f.file ~ delta(f.file, f.content)
+ }.asClass (classOf[BatchSourceFile])
+
+ lazy val offsetPosition: CondPickler[OffsetPosition] =
+ (pkl[SourceFile] ~ pkl[Int])
+ .wrapped { case x ~ y => new OffsetPosition(x, y) } { p => p.source ~ p.point }
+ .asClass (classOf[OffsetPosition])
+
+ lazy val rangePosition: CondPickler[RangePosition] =
+ (pkl[SourceFile] ~ pkl[Int] ~ pkl[Int] ~ pkl[Int])
+ .wrapped { case source ~ start ~ point ~ end => new RangePosition(source, start, point, end) } { p => p.source ~ p.start ~ p.point ~ p.end }
+ .asClass (classOf[RangePosition])
+
+ lazy val transparentPosition: CondPickler[TransparentPosition] =
+ (pkl[SourceFile] ~ pkl[Int] ~ pkl[Int] ~ pkl[Int])
+ .wrapped { case source ~ start ~ point ~ end => new TransparentPosition(source, start, point, end) } { p => p.source ~ p.start ~ p.point ~ p.end }
+ .asClass (classOf[TransparentPosition])
+
+ lazy val noPosition = singletonPickler(NoPosition)
+
+ implicit lazy val position: Pickler[Position] = transparentPosition | rangePosition | offsetPosition | noPosition
+
+ implicit lazy val namePickler: Pickler[Name] =
+ pkl[String] .wrapped {
+ str => if ((str.length > 1) && (str endsWith "!")) newTypeName(str.init) else newTermName(str)
+ } {
+ name => if (name.isTypeName) name.toString+"!" else name.toString
+ }
+
+ implicit lazy val symPickler: Pickler[Symbol] = {
+ def ownerNames(sym: Symbol, buf: ListBuffer[Name]): ListBuffer[Name] = {
+ if (!sym.isRoot) {
+ ownerNames(sym.owner, buf)
+ buf += (if (sym.isModuleClass) sym.sourceModule else sym).name
+ if (!sym.isType && !sym.isStable) {
+ val sym1 = sym.owner.info.decl(sym.name)
+ if (sym1.isOverloaded) {
+ val index = sym1.alternatives.indexOf(sym)
+ assert(index >= 0, sym1+" not found in alternatives "+sym1.alternatives)
+ buf += index.toString
+ }
+ }
+ }
+ buf
+ }
+ def makeSymbol(root: Symbol, names: List[Name]): Symbol = names match {
+ case List() =>
+ root
+ case name :: rest =>
+ val sym = root.info.decl(name)
+ if (sym.isOverloaded) makeSymbol(sym.alternatives(rest.head.toString.toInt), rest.tail)
+ else makeSymbol(sym, rest)
+ }
+ pkl[List[Name]] .wrapped { makeSymbol(definitions.RootClass, _) } { ownerNames(_, new ListBuffer).toList }
+ }
+
+ implicit def workEvent: Pickler[WorkEvent] = {
+ (pkl[Int] ~ pkl[Long])
+ .wrapped { case id ~ ms => WorkEvent(id, ms) } { w => w.atNode ~ w.atMillis }
+ }
+
+ implicit def interruptReq: Pickler[InterruptReq] = {
+ val emptyIR: InterruptReq = new InterruptReq { type R = Unit; val todo = () => () }
+ pkl[Unit] .wrapped { _ => emptyIR } { _ => () }
+ }
+
+ implicit def reloadItem: CondPickler[ReloadItem] =
+ pkl[List[SourceFile]]
+ .wrapped { ReloadItem(_, new Response) } { _.sources }
+ .asClass (classOf[ReloadItem])
+
+ implicit def askTypeAtItem: CondPickler[AskTypeAtItem] =
+ pkl[Position]
+ .wrapped { new AskTypeAtItem(_, new Response) } { _.pos }
+ .asClass (classOf[AskTypeAtItem])
+
+ implicit def askTypeItem: CondPickler[AskTypeItem] =
+ (pkl[SourceFile] ~ pkl[Boolean])
+ .wrapped { case source ~ forceReload => new AskTypeItem(source, forceReload, new Response) } { w => w.source ~ w.forceReload }
+ .asClass (classOf[AskTypeItem])
+
+ implicit def askTypeCompletionItem: CondPickler[AskTypeCompletionItem] =
+ pkl[Position]
+ .wrapped { new AskTypeCompletionItem(_, new Response) } { _.pos }
+ .asClass (classOf[AskTypeCompletionItem])
+
+ implicit def askScopeCompletionItem: CondPickler[AskScopeCompletionItem] =
+ pkl[Position]
+ .wrapped { new AskScopeCompletionItem(_, new Response) } { _.pos }
+ .asClass (classOf[AskScopeCompletionItem])
+
+ implicit def askToDoFirstItem: CondPickler[AskToDoFirstItem] =
+ pkl[SourceFile]
+ .wrapped { new AskToDoFirstItem(_) } { _.source }
+ .asClass (classOf[AskToDoFirstItem])
+
+ implicit def askLinkPosItem: CondPickler[AskLinkPosItem] =
+ (pkl[Symbol] ~ pkl[SourceFile])
+ .wrapped { case sym ~ source => new AskLinkPosItem(sym, source, new Response) } { item => item.sym ~ item.source }
+ .asClass (classOf[AskLinkPosItem])
+
+ implicit def askLoadedTypedItem: CondPickler[AskLoadedTypedItem] =
+ pkl[SourceFile]
+ .wrapped { new AskLoadedTypedItem(_, new Response) } { _.source }
+ .asClass (classOf[AskLoadedTypedItem])
+
+ implicit def askParsedEnteredItem: CondPickler[AskParsedEnteredItem] =
+ (pkl[SourceFile] ~ pkl[Boolean])
+ .wrapped { case source ~ keepLoaded => new AskParsedEnteredItem(source, keepLoaded, new Response) } { w => w.source ~ w.keepLoaded }
+ .asClass (classOf[AskParsedEnteredItem])
+
+ implicit def emptyAction: CondPickler[EmptyAction] =
+ pkl[Unit]
+ .wrapped { _ => new EmptyAction } { _ => () }
+ .asClass (classOf[EmptyAction])
+
+ implicit def action: Pickler[() => Unit] =
+ reloadItem | askTypeAtItem | askTypeItem | askTypeCompletionItem | askScopeCompletionItem |
+ askToDoFirstItem | askLinkPosItem | askLoadedTypedItem | askParsedEnteredItem | emptyAction
+}
diff --git a/src/compiler/scala/tools/nsc/interactive/PresentationCompilerThread.scala b/src/compiler/scala/tools/nsc/interactive/PresentationCompilerThread.scala
new file mode 100644
index 0000000000..f504427076
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/interactive/PresentationCompilerThread.scala
@@ -0,0 +1,55 @@
+package scala.tools.nsc.interactive
+
+/** A presentation compiler thread. This is a lightweight class, delegating most
+ * of its functionality to the compiler instance.
+ *
+ * @note This thread class may not be GCd, so it's important not to keep around
+ * large objects. For instance, the JDT weaving framework keeps threads around
+ * in a map, preventing them from being GCd. This prompted the separation between
+ * interactive.Global and this class.
+ */
+class PresentationCompilerThread(var compiler: Global, threadId: Int) extends Thread("Scala Presentation Compiler V"+threadId) {
+ /** The presentation compiler loop.
+ */
+ override def run() {
+ compiler.debugLog("starting new runner thread")
+ try {
+ while (true) {
+ compiler.log.logreplay("wait for more work", { compiler.scheduler.waitForMoreWork(); true })
+ compiler.pollForWork(compiler.NoPosition)
+ while (compiler.outOfDate) {
+ try {
+ compiler.backgroundCompile()
+ compiler.outOfDate = false
+ } catch {
+ case FreshRunReq =>
+ compiler.debugLog("fresh run req caught, starting new pass")
+ }
+ compiler.log.flush()
+ }
+ }
+ } catch {
+ case ex @ ShutdownReq =>
+ compiler.debugLog("exiting presentation compiler")
+ compiler.log.close()
+
+ // make sure we don't keep around stale instances
+ compiler = null
+ case ex =>
+ compiler.log.flush()
+ compiler.outOfDate = false
+ compiler.newRunnerThread()
+
+ ex match {
+ case FreshRunReq =>
+ compiler.debugLog("fresh run req caught outside presentation compiler loop; ignored") // This shouldn't be reported
+ case _ : Global#ValidateException => // This will have been reported elsewhere
+ compiler.debugLog("validate exception caught outside presentation compiler loop; ignored")
+ case _ => ex.printStackTrace(); compiler.informIDE("Fatal Error: "+ex)
+ }
+
+ // make sure we don't keep around stale instances
+ compiler = null
+ }
+ }
+}
diff --git a/src/compiler/scala/tools/nsc/interactive/REPL.scala b/src/compiler/scala/tools/nsc/interactive/REPL.scala
index 775b979851..a33a77cd09 100644
--- a/src/compiler/scala/tools/nsc/interactive/REPL.scala
+++ b/src/compiler/scala/tools/nsc/interactive/REPL.scala
@@ -20,13 +20,13 @@ object REPL {
var reporter: ConsoleReporter = _
- def error(msg: String) {
+ private def replError(msg: String) {
reporter.error(/*new Position */FakePos("scalac"),
msg + "\n scalac -help gives more information")
}
def process(args: Array[String]) {
- val settings = new Settings(error)
+ val settings = new Settings(replError)
reporter = new ConsoleReporter(settings)
val command = new CompilerCommand(args.toList, settings)
if (command.settings.version.value)
@@ -83,6 +83,9 @@ object REPL {
val reloadResult = new Response[Unit]
val typeatResult = new Response[comp.Tree]
val completeResult = new Response[List[comp.Member]]
+ val typedResult = new Response[comp.Tree]
+ val structureResult = new Response[comp.Tree]
+
def makePos(file: String, off1: String, off2: String) = {
val source = toSourceFile(file)
comp.rangePos(source, off1.toInt, off1.toInt, off2.toInt)
@@ -95,11 +98,28 @@ object REPL {
comp.askTypeCompletion(pos, completeResult)
show(completeResult)
}
+ def doTypedTree(file: String) {
+ comp.askType(toSourceFile(file), true, typedResult)
+ show(typedResult)
+ }
+ def doStructure(file: String) {
+ comp.askParsedEntered(toSourceFile(file), false, structureResult)
+ show(structureResult)
+ }
+
loop { line =>
(line split " ").toList match {
case "reload" :: args =>
comp.askReload(args map toSourceFile, reloadResult)
show(reloadResult)
+ case "reloadAndAskType" :: file :: millis :: Nil =>
+ comp.askReload(List(toSourceFile(file)), reloadResult)
+ Thread.sleep(millis.toInt)
+ println("ask type now")
+ comp.askType(toSourceFile(file), false, typedResult)
+ typedResult.get
+ case List("typed", file) =>
+ doTypedTree(file)
case List("typeat", file, off1, off2) =>
doTypeAt(makePos(file, off1, off2))
case List("typeat", file, off1) =>
@@ -109,7 +129,10 @@ object REPL {
case List("complete", file, off1) =>
doComplete(makePos(file, off1, off1))
case List("quit") =>
+ comp.askShutdown()
System.exit(1)
+ case List("structure", file) =>
+ doStructure(file)
case _ =>
println("unrecongized command")
}
diff --git a/src/compiler/scala/tools/nsc/interactive/RangePositions.scala b/src/compiler/scala/tools/nsc/interactive/RangePositions.scala
index 6ef85b2f59..c451e0da5f 100644
--- a/src/compiler/scala/tools/nsc/interactive/RangePositions.scala
+++ b/src/compiler/scala/tools/nsc/interactive/RangePositions.scala
@@ -190,7 +190,7 @@ self: scala.tools.nsc.Global =>
inform("")
}
- def error(msg: String)(body : => Unit) {
+ def positionError(msg: String)(body : => Unit) {
inform("======= Bad positions: "+msg)
inform("")
body
@@ -205,15 +205,15 @@ self: scala.tools.nsc.Global =>
def validate(tree: Tree, encltree: Tree): Unit = {
if (!tree.isEmpty) {
if (!tree.pos.isDefined)
- error("Unpositioned tree ["+tree.id+"]") { reportTree("Unpositioned", tree) }
+ positionError("Unpositioned tree ["+tree.id+"]") { reportTree("Unpositioned", tree) }
if (tree.pos.isRange) {
if (!encltree.pos.isRange)
- error("Synthetic tree ["+encltree.id+"] contains nonsynthetic tree ["+tree.id+"]") {
+ positionError("Synthetic tree ["+encltree.id+"] contains nonsynthetic tree ["+tree.id+"]") {
reportTree("Enclosing", encltree)
reportTree("Enclosed", tree)
}
if (!(encltree.pos includes tree.pos))
- error("Enclosing tree ["+encltree.id+"] does not include tree ["+tree.id+"]") {
+ positionError("Enclosing tree ["+encltree.id+"] does not include tree ["+tree.id+"]") {
reportTree("Enclosing", encltree)
reportTree("Enclosed", tree)
}
@@ -221,7 +221,7 @@ self: scala.tools.nsc.Global =>
findOverlapping(tree.children flatMap solidDescendants) match {
case List() => ;
case xs => {
- error("Overlapping trees "+xs.map { case (x, y) => (x.id, y.id) }.mkString("", ", ", "")) {
+ positionError("Overlapping trees "+xs.map { case (x, y) => (x.id, y.id) }.mkString("", ", ", "")) {
reportTree("Ancestor", tree)
for((x, y) <- xs) {
reportTree("First overlapping", x)
@@ -253,18 +253,24 @@ self: scala.tools.nsc.Global =>
traverse(root)
this.last
}
+ protected def isEligible(t: Tree) = !t.pos.isTransparent
override def traverse(t: Tree) {
- if (t.pos includes pos) {
- if (!t.pos.isTransparent) last = t
- super.traverse(t)
- } else if (t.symbol != null) {
- for(annot <- t.symbol.annotations if (annot.pos includes pos) && !annot.pos.isTransparent) {
- last = Annotated(TypeTree(annot.atp) setPos annot.pos, t)
- last.setType(annot.atp)
- last.setPos(annot.pos)
- traverseTrees(annot.args)
- }
+ t match {
+ case tt : TypeTree if tt.original != null => traverse(tt.original)
+ case _ =>
+ if (t.pos includes pos) {
+ if (isEligible(t)) last = t
+ super.traverse(t)
+ } else t match {
+ case mdef: MemberDef =>
+ traverseTrees(mdef.mods.annotations)
+ case _ =>
+ }
}
}
}
+
+ class TypedLocator(pos: Position) extends Locator(pos) {
+ override protected def isEligible(t: Tree) = super.isEligible(t) && t.tpe != null
+ }
}
diff --git a/src/compiler/scala/tools/nsc/interactive/RefinedBuildManager.scala b/src/compiler/scala/tools/nsc/interactive/RefinedBuildManager.scala
index bde90bc347..6b74a35518 100644
--- a/src/compiler/scala/tools/nsc/interactive/RefinedBuildManager.scala
+++ b/src/compiler/scala/tools/nsc/interactive/RefinedBuildManager.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2010 LAMP/EPFL
+ * Copyright 2005-2011 LAMP/EPFL
* @author Martin Odersky
*/
@@ -12,8 +12,9 @@ import scala.util.control.Breaks._
import scala.tools.nsc.symtab.Flags
import dependencies._
-import util.FakePos
+import util.{FakePos, ClassPath}
import io.AbstractFile
+import scala.tools.util.PathResolver
/** A more defined build manager, based on change sets. For each
* updated source file, it computes the set of changes to its
@@ -33,9 +34,15 @@ class RefinedBuildManager(val settings: Settings) extends Changes with BuildMana
phasesSet += dependencyAnalysis
}
+ override def classPath: ClassPath[_] = new NoSourcePathPathResolver(settings).result
+
def newRun() = new Run()
}
+ class NoSourcePathPathResolver(settings: Settings) extends PathResolver(settings) {
+ override def containers = Calculated.basis.dropRight(1).flatten.distinct
+ }
+
protected def newCompiler(settings: Settings) = new BuilderGlobal(settings)
val compiler = newCompiler(settings)
@@ -101,7 +108,7 @@ class RefinedBuildManager(val settings: Settings) extends Changes with BuildMana
private def update(files: Set[AbstractFile]) = {
val coll: mutable.Map[AbstractFile, immutable.Set[AbstractFile]] =
mutable.HashMap[AbstractFile, immutable.Set[AbstractFile]]()
- compiler.reporter.reset
+ compiler.reporter.reset()
// See if we really have corresponding symbols, not just those
// which share the name
@@ -180,6 +187,8 @@ class RefinedBuildManager(val settings: Settings) extends Changes with BuildMana
}
update0(files)
+ // remove the current run in order to save some memory
+ compiler.dropRun()
}
// Attempt to break the cycling reference deps as soon as possible and reduce
diff --git a/src/compiler/scala/tools/nsc/interactive/Response.scala b/src/compiler/scala/tools/nsc/interactive/Response.scala
index 56a48e44cd..67bb1633ad 100644
--- a/src/compiler/scala/tools/nsc/interactive/Response.scala
+++ b/src/compiler/scala/tools/nsc/interactive/Response.scala
@@ -62,7 +62,7 @@ class Response[T] {
* When timeout ends, will return last stored provisional result,
* or else None if no provisional result was stored.
*/
- def get(timeout: Long): Option[Either[T, Throwable]] = {
+ def get(timeout: Long): Option[Either[T, Throwable]] = synchronized {
val start = System.currentTimeMillis
var current = start
while (!complete && start + timeout > current) {
@@ -95,3 +95,7 @@ class Response[T] {
cancelled = false
}
}
+
+
+
+
diff --git a/src/compiler/scala/tools/nsc/interactive/RichCompilationUnits.scala b/src/compiler/scala/tools/nsc/interactive/RichCompilationUnits.scala
index 40f9209c1b..993277cad1 100644
--- a/src/compiler/scala/tools/nsc/interactive/RichCompilationUnits.scala
+++ b/src/compiler/scala/tools/nsc/interactive/RichCompilationUnits.scala
@@ -2,14 +2,18 @@ package scala.tools.nsc
package interactive
import scala.tools.nsc.util.{SourceFile, Position, NoPosition}
+import collection.mutable.ArrayBuffer
trait RichCompilationUnits { self: Global =>
/** The status value of a unit that has not yet been loaded */
- final val NotLoaded = -1
+ final val NotLoaded = -2
/** The status value of a unit that has not yet been typechecked */
- final val JustParsed = 0
+ final val JustParsed = -1
+
+ /** The status value of a unit that has been partially typechecked */
+ final val PartiallyChecked = 0
class RichCompilationUnit(source: SourceFile) extends CompilationUnit(source) {
@@ -30,6 +34,9 @@ trait RichCompilationUnits { self: Global =>
/** the current edit point offset */
var editPoint: Int = -1
+ /** The problems reported for this unit */
+ val problems = new ArrayBuffer[Problem]
+
/** The position of a targeted type check
* If this is different from NoPosition, the type checking
* will stop once a tree that contains this position range
@@ -40,5 +47,8 @@ trait RichCompilationUnits { self: Global =>
def targetPos_=(p: Position) { _targetPos = p }
var contexts: Contexts = new Contexts
+
+ /** The last fully type-checked body of this unit */
+ var lastBody: Tree = EmptyTree
}
}
diff --git a/src/compiler/scala/tools/nsc/io/Lexer.scala b/src/compiler/scala/tools/nsc/io/Lexer.scala
new file mode 100644
index 0000000000..262aac7809
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/io/Lexer.scala
@@ -0,0 +1,301 @@
+package scala.tools.nsc.io
+
+import java.io.{Reader, Writer, StringReader, StringWriter}
+import scala.collection.mutable.{Buffer, ArrayBuffer}
+import scala.math.BigInt
+
+/** Companion object of class `Lexer` which defines tokens and some utility concepts
+ * used for tokens and lexers
+ */
+object Lexer {
+
+ /** An exception raised if input does not correspond to what's expected
+ * @param rdr the lexer from which the bad input is read
+ * @param msg the error message
+ */
+ class MalformedInput(val rdr: Lexer, val msg: String) extends Exception("Malformed JSON input at "+rdr.tokenPos+": "+msg)
+
+ /** The class of tokens, i.e. descriptions of input words (or: lexemes).
+ * @param str the characters making up this token
+ */
+ class Token(val str: String) {
+ override def toString = str
+ }
+
+ /** A subclass of `Token` representing single-character delimiters
+ * @param char the delimiter character making up this token
+ */
+ case class Delim(char: Char) extends Token("'"+char.toString+"'")
+
+ /** A subclass of token representing integer literals */
+ case class IntLit(override val str: String) extends Token(str)
+
+ /** A subclass of token representaing floating point literals */
+ case class FloatLit(override val str: String) extends Token(str)
+
+ /** A subclass of token representing string literals */
+ case class StringLit(override val str: String) extends Token(str) {
+ override def toString = quoted(str)
+ }
+
+ /** The `true` token */
+ val TrueLit = new Token("true")
+
+ /** The `false` token */
+ val FalseLit = new Token("false")
+
+ /** The `null` token */
+ val NullLit = new Token("null")
+
+ /** The '`(`' token */
+ val LParen = new Delim('(')
+
+ /** The '`)`' token */
+ val RParen = new Delim(')')
+
+ /** The '`{`' token */
+ val LBrace = new Delim('{')
+
+ /** The '`}`' token */
+ val RBrace = new Delim('}')
+
+ /** The '`[`' token */
+ val LBracket = new Delim('[')
+
+ /** The '`]`' token */
+ val RBracket = new Delim(']')
+
+ /** The '`,`' token */
+ val Comma = new Delim(',')
+
+ /** The '`:`' token */
+ val Colon = new Delim(':')
+
+ /** The token representing end of input */
+ val EOF = new Token("<end of input>")
+
+ private def toUDigit(ch: Int): Char = {
+ val d = ch & 0xF
+ (if (d < 10) d + '0' else d - 10 + 'A').toChar
+ }
+
+ private def addToStr(buf: StringBuilder, ch: Char) {
+ ch match {
+ case '"' => buf ++= "\\\""
+ case '\b' => buf ++= "\\b"
+ case '\f' => buf ++= "\\f"
+ case '\n' => buf ++= "\\n"
+ case '\r' => buf ++= "\\r"
+ case '\t' => buf ++= "\\t"
+ case '\\' => buf ++= "\\\\"
+ case _ =>
+ if (' ' <= ch && ch < 128) buf += ch
+ else buf ++= "\\u" += toUDigit(ch >>> 12) += toUDigit(ch >>> 8) += toUDigit(ch >>> 4) += toUDigit(ch)
+ }
+ }
+
+ /** Returns given string enclosed in `"`-quotes with all string characters escaped
+ * so that they correspond to the JSON standard.
+ * Characters that are escaped are: `"`, `\b`, `\f`, `\n`, `\r`, `\t`, `\`.
+ * Furthermore, every other character which is not in the ASCII range 32-127 is
+ * escaped as a four hex-digit unicode character of the form `\ u x x x x`.
+ * @param str the string to be quoted
+ */
+ def quoted(str: String): String = {
+ val buf = new StringBuilder += '\"'
+ str foreach (addToStr(buf, _))
+ buf += '\"'
+ buf.toString
+ }
+
+ private val BUF_SIZE = 2 << 16
+}
+
+import Lexer._
+
+/** A simple lexer for tokens as they are used in JSON, plus parens `(`, `)`
+ * Tokens understood are:
+ *
+ * `(`, `)`, `[`, `]`, `{`, `}`, `:`, `,`, `true`, `false`, `null`,
+ * strings (syntax as in JSON),
+ * integer numbers (syntax as in JSON: -?(0|\d+)
+ * floating point numbers (syntax as in JSON: -?(0|\d+)(\.\d+)?((e|E)(+|-)?\d+)?)
+ * The end of input is represented as its own token, EOF.
+ * Lexers can keep one token lookahead
+ *
+ * @param rd the reader from which characters are read.
+ */
+class Lexer(rd: Reader) {
+
+ /** The last-read character */
+ var ch: Char = 0
+
+ /** The number of characters read so far */
+ var pos: Long = 0
+
+ /** The last-read token */
+ var token: Token = _
+
+ /** The number of characters read before the start of the last-read token */
+ var tokenPos: Long = 0
+
+ private var atEOF: Boolean = false
+ private val buf = new Array[Char](BUF_SIZE)
+ private var nread: Int = 0
+ private var bp = 0
+
+ /** Reads next character into `ch` */
+ def nextChar() {
+ assert(!atEOF)
+ if (bp == nread) {
+ nread = rd.read(buf)
+ bp = 0
+ if (nread <= 0) { ch = 0; atEOF = true; return }
+ }
+ ch = buf(bp)
+ bp += 1
+ pos += 1
+ }
+
+ /** If last-read character equals given character, reads next character,
+ * otherwise raises an error
+ * @param c the given character to compare with last-read character
+ * @throws MalformedInput if character does not match
+ */
+ def acceptChar(c: Char) = if (ch == c) nextChar() else error("'"+c+"' expected")
+
+ private val sb = new StringBuilder
+
+ private def putChar() {
+ sb += ch; nextChar()
+ }
+
+ private def putAcceptString(str: String) {
+ str foreach acceptChar
+ sb ++= str
+ }
+
+ /** Skips whitespace and reads next lexeme into `token`
+ * @throws MalformedInput if lexeme not recognized as a valid token
+ */
+ def nextToken() {
+ sb.clear()
+ while (!atEOF && ch <= ' ') nextChar()
+ tokenPos = pos - 1
+ if (atEOF) token = EOF
+ else ch match {
+ case '(' => putChar(); token = LParen
+ case ')' => putChar(); token = RParen
+ case '{' => putChar(); token = LBrace
+ case '}' => putChar(); token = RBrace
+ case '[' => putChar(); token = LBracket
+ case ']' => putChar(); token = RBracket
+ case ',' => putChar(); token = Comma
+ case ':' => putChar(); token = Colon
+ case 't' => putAcceptString("true"); token = TrueLit
+ case 'f' => putAcceptString("false"); token = FalseLit
+ case 'n' => putAcceptString("null"); token = NullLit
+ case '"' => getString()
+ case '-' | '0' | '1' | '2' | '3' | '4' | '5' | '6' | '7' | '8' | '9' => getNumber()
+ case _ => error("unrecoginezed start of token: '"+ch+"'")
+ }
+ //println("["+token+"]")
+ }
+
+ /** Reads a string literal, and forms a `StringLit` token from it.
+ * Last-read input character `ch` must be opening `"`-quote.
+ * @throws MalformedInput if lexeme not recognized as a string literal.
+ */
+ def getString() {
+ def udigit() = {
+ nextChar()
+ if ('0' <= ch && ch <= '9') ch - '9'
+ else if ('A' <= ch && ch <= 'F') ch - 'A' + 10
+ else if ('a' <= ch && ch <= 'f') ch - 'a' + 10
+ else error("illegal unicode escape character: '"+ch+"'")
+ }
+ val delim = ch
+ nextChar()
+ while (ch != delim && ch >= ' ') {
+ if (ch == '\\') {
+ nextChar()
+ ch match {
+ case '\'' => sb += '\''
+ case '"' => sb += '"'
+ case '\\' => sb += '\\'
+ case '/' => sb += '/'
+ case 'b' => sb += '\b'
+ case 'f' => sb += '\f'
+ case 'n' => sb += '\n'
+ case 'r' => sb += '\r'
+ case 't' => sb += '\t'
+ case 'u' => sb += (udigit() << 12 | udigit() << 8 | udigit() << 4 | udigit()).toChar
+ case _ => error("illegal escape character: '"+ch+"'")
+ }
+ nextChar()
+ } else {
+ putChar()
+ }
+ }
+ acceptChar(delim)
+ token = StringLit(sb.toString)
+ }
+
+ /** Reads a numeric literal, and forms an `IntLit` or `FloatLit` token from it.
+ * Last-read input character `ch` must be either `-` or a digit.
+ * @throws MalformedInput if lexeme not recognized as a numeric literal.
+ */
+ def getNumber() {
+ def digit() =
+ if ('0' <= ch && ch <= '9') putChar()
+ else error("<digit> expected")
+ def digits() =
+ do { digit() } while ('0' <= ch && ch <= '9')
+ var isFloating = false
+ if (ch == '-') putChar()
+ if (ch == '0') digit()
+ else digits()
+ if (ch == '.') {
+ isFloating = true
+ putChar()
+ digits()
+ }
+ if (ch == 'e' || ch == 'E') {
+ isFloating = true
+ putChar()
+ if (ch == '+' || ch == '-') putChar()
+ digits()
+ }
+ token = if (isFloating) FloatLit(sb.toString) else IntLit(sb.toString)
+ }
+
+ /** If current token equals given token, reads next token, otherwise raises an error.
+ * @param t the given token to compare current token with
+ * @throws MalformedInput if the two tokens do not match.
+ */
+ def accept(t: Token) {
+ if (token == t) nextToken()
+ else error(t+" expected, but "+token+" found")
+ }
+
+ /** If the current token is a delimiter consisting of the given character, reads next token,
+ * otherwise raises an error.
+ * @param c the given delimiter character to compare current token with
+ * @throws MalformedInput if the current token `token` is not a delimiter, or
+ * consists of a character different from `c`.
+ */
+ def accept(ch: Char) {
+ token match {
+ case Delim(`ch`) => nextToken()
+ case _ => accept(Delim(ch))
+ }
+ }
+
+ /** Always throws a `MalformedInput` exception with given error message.
+ * @param msg the error message
+ */
+ def error(msg: String) = throw new MalformedInput(this, msg)
+
+ nextChar()
+ nextToken()
+}
diff --git a/src/compiler/scala/tools/nsc/io/Pickler.scala b/src/compiler/scala/tools/nsc/io/Pickler.scala
new file mode 100644
index 0000000000..78c1369b64
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/io/Pickler.scala
@@ -0,0 +1,455 @@
+package scala.tools.nsc.io
+
+import annotation.unchecked
+import Lexer._
+import java.io.Writer
+
+/** An abstract class for writing and reading Scala objects to and
+ * from a legible representation. The presentation follows the following grammar:
+ * {{{
+ * Pickled = `true' | `false' | `null' | NumericLit | StringLit |
+ * Labelled | Pickled `,' Pickled
+ * Labelled = StringLit `(' Pickled? `)'
+ * }}}
+ *
+ * All ...Lit classes are as in JSON. @see scala.tools.nsc.io.Lexer
+ *
+ * Subclasses of `Pickler` each can write and read individual classes
+ * of values.
+ *
+ * @param T the type of values handled by this pickler.
+ *
+ * These Picklers build on the work of Andrew Kennedy. They are most closely inspired by
+ * Iulian Dragos' picklers for Scala to XML. See:
+ *
+ * <a href="http://code.google.com/p/gdata-scala-client/wiki/DevelopersGuide">
+ * http://code.google.com/p/gdata-scala-client/wiki/DevelopersGuide
+ * </a>
+ */
+abstract class Pickler[T] {
+
+ import Pickler._
+
+ /** Writes value in pickled form
+ * @param wr the writer to which pickled form is written
+ * @param x the value to write
+ */
+ def pickle(wr: Writer, x: T)
+
+ /** Reads value from pickled form.
+ *
+ * @param rd the lexer from which lexemes are read
+ * @return An `UnpickleSuccess` value if the current input corresponds to the
+ * kind of value that is unpickled by the current subclass of `Pickler`,
+ * an `UnpickleFailure` value otherwise.
+ * @throws `Lexer.MalformedInput` if input is invalid, or if
+ * an unrecoverable `UnpickleFailure` occurs.
+ */
+ def unpickle(rd: Lexer): Unpickled[T]
+
+ /** A pickler representing a `~`-pair of values as two consecutive pickled
+ * strings, separated by a comma.
+ * @param that the second pickler which together with the current pickler makes
+ * up the pair `this ~ that` to be pickled.
+ */
+ def ~ [U] (that: => Pickler[U]): Pickler[T ~ U] = seqPickler(this, that)
+
+ /** A pickler that adds a label to the current pickler, using the representation
+ * `label ( <current pickler> )`
+ *
+ * @param label the string to be added as a label.
+ */
+ def labelled(label: String): Pickler[T] = labelledPickler(label, this)
+
+ /** A pickler obtained from the current pickler by a pair of transformer functions
+ * @param in the function that maps values handled by the current pickler to
+ * values handled by the wrapped pickler.
+ * @param out the function that maps values handled by the wrapped pickler to
+ * values handled by the current pickler.
+ */
+ def wrapped [U] (in: T => U)(out: U => T): Pickler[U] = wrappedPickler(this)(in)(out)
+
+ /** A pickler obtained from the current pickler by also admitting `null` as
+ * a handled value, represented as the token `null`.
+ *
+ * @param fromNull an implicit evidence parameter ensuring that the type of values
+ * handled by this pickler contains `null`.
+ */
+ def orNull(implicit fromNull: Null <:< T): Pickler[T] = nullablePickler(this)
+
+ /** A conditional pickler obtained from the current pickler.
+ * @param cond the condition to test to find out whether pickler can handle
+ * some Scala value.
+ */
+ def cond(p: Any => Boolean): CondPickler[T] = conditionalPickler(this, p)
+
+ /** A conditional pickler handling values of some Scala class. It adds the
+ * class name as a label to the representation of the current pickler and
+ * @param c the class of values handled by this pickler.
+ */
+ def asClass[U <: T](c: Class[U]): CondPickler[T] = this.labelled(c.getName).cond(c isInstance _)
+}
+
+object Pickler {
+
+ var picklerDebugMode = false
+
+ /** A base class representing unpickler result. It has two subclasses:
+ * `UnpickleSuccess` for successful unpicklings and `UnpickleFailure` for failures,
+ * where a value of the given type `T` could not be unpickled from input.
+ * @param T the type of unpickled values in case of success.
+ */
+ abstract class Unpickled[+T] {
+ /** Transforms success values to success values using given function,
+ * leaves failures alone
+ * @param f the function to apply.
+ */
+ def map[U](f: T => U): Unpickled[U] = this match {
+ case UnpickleSuccess(x) => UnpickleSuccess(f(x))
+ case f: UnpickleFailure => f
+ }
+ /** Transforms success values to successes or failures using given function,
+ * leaves failures alone.
+ * @param f the function to apply.
+ */
+ def flatMap[U](f: T => Unpickled[U]): Unpickled[U] = this match {
+ case UnpickleSuccess(x) => f(x)
+ case f: UnpickleFailure => f
+ }
+ /** Tries alternate expression if current result is a failure
+ * @param alt the alternate expression to be tried in case of failure
+ */
+ def orElse[U >: T](alt: => Unpickled[U]): Unpickled[U] = this match {
+ case UnpickleSuccess(x) => this
+ case f: UnpickleFailure => alt
+ }
+
+ /** Transforms failures into thrown `MalformedInput` exceptions.
+ * @throws MalformedInput if current result is a failure
+ */
+ def requireSuccess: UnpickleSuccess[T] = this match {
+ case s @ UnpickleSuccess(x) => s
+ case f: UnpickleFailure =>
+ throw new MalformedInput(f.rd, "Unrecoverable unpickle failure:\n"+f.errMsg)
+ }
+ }
+
+ /** A class representing successful unpicklings
+ * @param T the type of the unpickled value
+ * @param result the unpickled value
+ */
+ case class UnpickleSuccess[+T](result: T) extends Unpickled[T]
+
+ /** A class representing unpickle failures
+ * @param msg an error message describing what failed.
+ * @param rd the lexer unpickled values were read from (can be used to get
+ * error position, for instance).
+ */
+ class UnpickleFailure(msg: => String, val rd: Lexer) extends Unpickled[Nothing] {
+ def errMsg = msg
+ override def toString = "Failure at "+rd.tokenPos+":\n"+msg
+ }
+
+ private def errorExpected(rd: Lexer, msg: => String) =
+ new UnpickleFailure("expected: "+msg+"\n" +
+ "found : "+rd.token,
+ rd)
+
+ private def nextSuccess[T](rd: Lexer, result: T) = {
+ rd.nextToken()
+ UnpickleSuccess(result)
+ }
+
+ /** The implicit `Pickler` value for type `T`. Equivalent to `implicitly[Pickler[T]]`.
+ */
+ def pkl[T: Pickler] = implicitly[Pickler[T]]
+
+ /** A class representing `~`-pairs */
+ case class ~[S, T](fst: S, snd: T)
+
+ /** A wrapper class to be able to use `~` as an infix method */
+ class TildeDecorator[S](x: S) {
+ /** Infix method that forms a `~`-pair. */
+ def ~ [T](y: T): S ~ T = new ~ (x, y)
+ }
+
+ /** An implicit wrapper that adds `~` as a method to any value. */
+ implicit def tildeDecorator[S](x: S): TildeDecorator[S] = new TildeDecorator(x)
+
+ /** A converter from binary functions to functions over `~`-pairs
+ */
+ implicit def fromTilde[T1, T2, R](f: (T1, T2) => R): T1 ~ T2 => R = { case x1 ~ x2 => f(x1, x2) }
+
+ /** A converter from functions returning Options over pairs to functions returning `~`-pairs
+ * The converted function will raise a `MatchError` where the original function returned
+ * a `None`. This converter is useful for turning `unapply` methods of case classes
+ * into wrapper methods that can be passed as second argument to `wrap`.
+ */
+ implicit def toTilde[T1, T2, S](f: S => Option[(T1, T2)]): S => T1 ~ T2 = { x => (f(x): @unchecked) match { case Some((x1, x2)) => x1 ~ x2 } }
+
+ /** Same as `p.labelled(label)`.
+ */
+ def labelledPickler[T](label: String, p: Pickler[T]): Pickler[T] = new Pickler[T] {
+ def pickle(wr: Writer, x: T) = {
+ wr.write(quoted(label));
+ wr.write("(")
+ p.pickle(wr, x)
+ wr.write(")")
+ }
+ def unpickle(rd: Lexer): Unpickled[T] =
+ rd.token match {
+ case StringLit(`label`) =>
+ rd.nextToken()
+ rd.accept('(')
+ val result = p.unpickle(rd).requireSuccess
+ rd.accept(')')
+ result
+ case _ =>
+ errorExpected(rd, quoted(label)+"(...)")
+ }
+ }
+
+ /** Same as `p.wrap(in)(out)`
+ */
+ def wrappedPickler[S, T](p: Pickler[S])(in: S => T)(out: T => S) = new Pickler[T] {
+ def pickle(wr: Writer, x: T) = p.pickle(wr, out(x))
+ def unpickle(rd: Lexer) = p.unpickle(rd) map in
+ }
+
+ /** Same as `p.cond(condition)`
+ */
+ def conditionalPickler[T](p: Pickler[T], condition: Any => Boolean) = new CondPickler[T](condition) {
+ def pickle(wr: Writer, x: T) = p.pickle(wr, x)
+ def unpickle(rd: Lexer) = p.unpickle(rd)
+ }
+
+ /** Same as `p ~ q`
+ */
+ def seqPickler[T, U](p: Pickler[T], q: => Pickler[U]) = new Pickler[T ~ U] {
+ lazy val qq = q
+ def pickle(wr: Writer, x: T ~ U) = {
+ p.pickle(wr, x.fst)
+ wr.write(',')
+ q.pickle(wr, x.snd)
+ }
+ def unpickle(rd: Lexer) =
+ for (x <- p.unpickle(rd); y <- { rd.accept(','); qq.unpickle(rd).requireSuccess })
+ yield x ~ y
+ }
+
+ /** Same as `p | q`
+ */
+ def eitherPickler[T, U <: T, V <: T](p: CondPickler[U], q: => CondPickler[V]) =
+ new CondPickler[T](x => p.canPickle(x) || q.canPickle(x)) {
+ lazy val qq = q
+ override def tryPickle(wr: Writer, x: Any): Boolean =
+ p.tryPickle(wr, x) || qq.tryPickle(wr, x)
+ def pickle(wr: Writer, x: T) =
+ require(tryPickle(wr, x),
+ "no pickler found for "+x+" of class "+x.asInstanceOf[AnyRef].getClass.getName)
+ def unpickle(rd: Lexer) = p.unpickle(rd) orElse qq.unpickle(rd)
+ }
+
+ /** Same as `p.orNull`
+ */
+ def nullablePickler[T](p: Pickler[T])(implicit fromNull: Null <:< T): Pickler[T] = new Pickler[T] {
+ def pickle(wr: Writer, x: T) =
+ if (x == null) wr.write("null") else p.pickle(wr, x)
+ def unpickle(rd: Lexer): Unpickled[T] =
+ if (rd.token == NullLit) nextSuccess(rd, fromNull(null))
+ else p.unpickle(rd)
+ }
+
+ /** A conditional pickler for singleton objects. It represents these
+ * with the object's underlying class as a label.
+ * Example: Object scala.None would be represented as `scala.None$()`.
+ */
+ def singletonPickler[T <: AnyRef](x: T): CondPickler[T] =
+ unitPickler
+ .wrapped { _ => x } { x => () }
+ .labelled (x.getClass.getName)
+ .cond (x eq _.asInstanceOf[AnyRef])
+
+ /** A pickler that handles instances of classes that have an empty constructor.
+ * It represents them as `$new ( <name of class> )`.
+ * When unpickling, a new instance of the class is created using the empty
+ * constructor of the class via `Class.forName(<name of class>).newInstance()`.
+ */
+ def javaInstancePickler[T <: AnyRef]: Pickler[T] =
+ (stringPickler labelled "$new")
+ .wrapped { name => Class.forName(name).newInstance().asInstanceOf[T] } { _.getClass.getName }
+
+ /** A pickler that handles iterators. It pickles all values
+ * returned by an iterator separated by commas.
+ * When unpickling, it always returns an `UnpickleSuccess` containing an iterator.
+ * This iterator returns 0 or more values that are obtained by unpickling
+ * until a closing parenthesis, bracket or brace or the end of input is encountered.
+ *
+ * This means that iterator picklers should not be directly followed by `~`
+ * because the pickler would also read any values belonging to the second
+ * part of the `~`-pair.
+ *
+ * What's usually done instead is that the iterator pickler is wrapped and labelled
+ * to handle other kinds of sequences.
+ */
+ implicit def iterPickler[T: Pickler]: Pickler[Iterator[T]] = new Pickler[Iterator[T]] {
+ lazy val p = pkl[T]
+ def pickle(wr: Writer, xs: Iterator[T]) {
+ var first = true
+ for (x <- xs) {
+ if (first) first = false else wr.write(',')
+ p.pickle(wr, x)
+ }
+ }
+ def unpickle(rd: Lexer): Unpickled[Iterator[T]] = UnpickleSuccess(new Iterator[T] {
+ var first = true
+ def hasNext = {
+ val t = rd.token
+ t != EOF && t != RParen && t != RBrace && t != RBracket
+ }
+ def next(): T = {
+ if (first) first = false else rd.accept(',')
+ p.unpickle(rd).requireSuccess.result
+ }
+ })
+ }
+
+ /** A pickler that handles values that can be represented as a single token.
+ * @param kind the kind of token representing the value, used in error messages
+ * for unpickling.
+ * @param matcher A partial function from tokens to handled values. Unpickling
+ * succeeds if the matcher function is defined on the current token.
+ */
+ private def tokenPickler[T](kind: String)(matcher: PartialFunction[Token, T]) = new Pickler[T] {
+ def pickle(wr: Writer, x: T) = wr.write(x.toString)
+ def unpickle(rd: Lexer) =
+ if (matcher isDefinedAt rd.token) nextSuccess(rd, matcher(rd.token))
+ else errorExpected(rd, kind)
+ }
+
+ /** A pickler for values of type `Long`, represented as integer literals */
+ implicit val longPickler: Pickler[Long] =
+ tokenPickler("integer literal") { case IntLit(s) => s.toLong }
+
+ /** A pickler for values of type `Double`, represented as floating point literals */
+ implicit val doublePickler: Pickler[Double] =
+ tokenPickler("floating point literal") { case FloatLit(s) => s.toDouble }
+
+ /** A pickler for values of type `Byte`, represented as integer literals */
+ implicit val bytePickler: Pickler[Byte] = longPickler.wrapped { _.toByte } { _.toLong }
+
+ /** A pickler for values of type `Short`, represented as integer literals */
+ implicit val shortPickler: Pickler[Short] = longPickler.wrapped { _.toShort } { _.toLong }
+
+ /** A pickler for values of type `Int`, represented as integer literals */
+ implicit val intPickler: Pickler[Int] = longPickler.wrapped { _.toInt } { _.toLong }
+
+ /** A pickler for values of type `Float`, represented as floating point literals */
+ implicit val floatPickler: Pickler[Float] = doublePickler.wrapped { _.toFloat } { _.toLong }
+
+ /** A conditional pickler for the boolean value `true` */
+ private val truePickler =
+ tokenPickler("boolean literal") { case TrueLit => true } cond { _ == true }
+
+ /** A conditional pickler for the boolean value `false` */
+ private val falsePickler =
+ tokenPickler("boolean literal") { case FalseLit => false } cond { _ == false }
+
+ /** A pickler for values of type `Boolean`, represented as the literals `true` or `false`. */
+ implicit def booleanPickler: Pickler[Boolean] = truePickler | falsePickler
+
+ /** A pickler for values of type `Unit`, represented by the empty character string */
+ implicit val unitPickler: Pickler[Unit] = new Pickler[Unit] {
+ def pickle(wr: Writer, x: Unit) {}
+ def unpickle(rd: Lexer): Unpickled[Unit] = UnpickleSuccess(())
+ }
+
+ /** A pickler for values of type `String`, represented as string literals */
+ implicit val stringPickler: Pickler[String] = new Pickler[String] {
+ def pickle(wr: Writer, x: String) = wr.write(if (x == null) "null" else quoted(x))
+ def unpickle(rd: Lexer) = rd.token match {
+ case StringLit(s) => nextSuccess(rd, s)
+ case NullLit => nextSuccess(rd, null)
+ case _ => errorExpected(rd, "string literal")
+ }
+ }
+
+ /** A pickler for values of type `Char`, represented as string literals of length 1 */
+ implicit val charPickler: Pickler[Char] =
+ stringPickler
+ .wrapped { s => require(s.length == 1, "single character string literal expected, but "+quoted(s)+" found"); s(0) } { _.toString }
+
+ /** A pickler for pairs, represented as `~`-pairs */
+ implicit def tuple2Pickler[T1: Pickler, T2: Pickler]: Pickler[(T1, T2)] =
+ (pkl[T1] ~ pkl[T2])
+ .wrapped { case x1 ~ x2 => (x1, x2) } { case (x1, x2) => x1 ~ x2 }
+ .labelled ("tuple2")
+
+ /** A pickler for 3-tuples, represented as `~`-tuples */
+ implicit def tuple3Pickler[T1, T2, T3](implicit p1: Pickler[T1], p2: Pickler[T2], p3: Pickler[T3]): Pickler[(T1, T2, T3)] =
+ (p1 ~ p2 ~ p3)
+ .wrapped { case x1 ~ x2 ~ x3 => (x1, x2, x3) } { case (x1, x2, x3) => x1 ~ x2 ~ x3 }
+ .labelled ("tuple3")
+
+ /** A pickler for 4-tuples, represented as `~`-tuples */
+ implicit def tuple4Pickler[T1, T2, T3, T4](implicit p1: Pickler[T1], p2: Pickler[T2], p3: Pickler[T3], p4: Pickler[T4]): Pickler[(T1, T2, T3, T4)] =
+ (p1 ~ p2 ~ p3 ~ p4)
+ .wrapped { case x1 ~ x2 ~ x3 ~ x4 => (x1, x2, x3, x4) } { case (x1, x2, x3, x4) => x1 ~ x2 ~ x3 ~ x4 }
+ .labelled ("tuple4")
+
+ /** A conditional pickler for the `scala.None` object */
+ implicit val nonePickler = singletonPickler(None)
+
+ /** A conditional pickler for instances of class `scala.Some` */
+ implicit def somePickler[T: Pickler]: CondPickler[Some[T]] =
+ pkl[T]
+ .wrapped { Some(_) } { _.get }
+ .asClass (classOf[Some[T]])
+
+ /** A pickler for optional values */
+ implicit def optionPickler[T: Pickler]: Pickler[Option[T]] = nonePickler | somePickler[T]
+
+ /** A pickler for list values */
+ implicit def listPickler[T: Pickler]: Pickler[List[T]] =
+ iterPickler[T] .wrapped { _.toList } { _.iterator } .labelled ("scala.List")
+
+ /** A pickler for vector values */
+ implicit def vectorPickler[T: Pickler]: Pickler[Vector[T]] =
+ iterPickler[T] .wrapped { Vector() ++ _ } { _.iterator } .labelled ("scala.Vector")
+
+ /** A pickler for array values */
+ implicit def array[T : ClassManifest : Pickler]: Pickler[Array[T]] =
+ iterPickler[T] .wrapped { _.toArray} { _.iterator } .labelled ("scala.Array")
+}
+
+/** A subclass of Pickler can indicate whether a particular value can be pickled by instances
+ * of this class.
+ * @param canPickle The predicate that indicates whether a given value
+ * can be pickled by instances of this class.
+ */
+abstract class CondPickler[T](val canPickle: Any => Boolean) extends Pickler[T] {
+ import Pickler._
+
+ /** Pickles given value `x` if possible, as indicated by `canPickle(x)`.
+ */
+ def tryPickle(wr: Writer, x: Any): Boolean = {
+ val result = canPickle(x)
+ if (result) pickle(wr, x.asInstanceOf[T])
+ result
+ }
+
+ /** A pickler obtained from this pickler and an alternative pickler.
+ * To pickle a value, this pickler is tried first. If it cannot handle
+ * the object (as indicated by its `canPickle` test), then the
+ * alternative pickler is tried.
+ * To unpickle a value, this unpickler is tried first. If it cannot read
+ * the input (as indicated by a `UnpickleFailure` result), then the
+ * alternative pickler is tried.
+ * @param V The handled type of the returned pickler.
+ * @param U The handled type of the alternative pickler.
+ * @param that The alternative pickler.
+ */
+ def | [V >: T, U <: V] (that: => CondPickler[U]): CondPickler[V] =
+ eitherPickler[V, T, U](this, that)
+}
+
diff --git a/src/compiler/scala/tools/nsc/io/PrettyWriter.scala b/src/compiler/scala/tools/nsc/io/PrettyWriter.scala
new file mode 100644
index 0000000000..acd4847469
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/io/PrettyWriter.scala
@@ -0,0 +1,41 @@
+package scala.tools.nsc.io
+
+import java.io.Writer
+
+class PrettyWriter(wr: Writer) extends Writer {
+ protected val indentStep = " "
+ private var indent = 0
+ private def newLine() {
+ wr.write('\n')
+ wr.write(indentStep * indent)
+ }
+ def close() = wr.close()
+ def flush() = wr.flush()
+ def write(str: Array[Char], off: Int, len: Int): Unit = {
+ if (off < str.length && off < len) {
+ str(off) match {
+ case '{' | '[' | '(' =>
+ indent += 1
+ wr.write(str(off))
+ newLine()
+ wr.write(str, off + 1, len - 1)
+ case '}' | ']' | ')' =>
+ wr.write(str, off, len)
+ indent -= 1
+ case ',' =>
+ wr.write(',')
+ newLine()
+ wr.write(str, off + 1, len - 1)
+ case ':' =>
+ wr.write(':')
+ wr.write(' ')
+ wr.write(str, off + 1, len - 1)
+ case _ =>
+ wr.write(str, off, len)
+ }
+ } else {
+ wr.write(str, off, len)
+ }
+ }
+ override def toString = wr.toString
+}
diff --git a/src/compiler/scala/tools/nsc/io/Replayer.scala b/src/compiler/scala/tools/nsc/io/Replayer.scala
new file mode 100644
index 0000000000..5cb61b6cb1
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/io/Replayer.scala
@@ -0,0 +1,74 @@
+package scala.tools.nsc.io
+
+import java.io.{Reader, Writer}
+
+import Pickler._
+import Lexer.{Token, EOF}
+
+abstract class LogReplay {
+ def logreplay(event: String, x: => Boolean): Boolean
+ def logreplay[T: Pickler](event: String, x: => Option[T]): Option[T]
+ def close()
+ def flush()
+}
+
+class Logger(wr0: Writer) extends LogReplay {
+ val wr = new PrettyWriter(wr0)
+ private var first = true
+ private def insertComma() = if (first) first = false else wr.write(",")
+
+ def logreplay(event: String, x: => Boolean) = {
+ val xx = x
+ if (xx) { insertComma(); pkl[Unit].labelled(event).pickle(wr, ()) }
+ xx
+ }
+ def logreplay[T: Pickler](event: String, x: => Option[T]) = {
+ val xx = x
+ xx match {
+ case Some(y) => insertComma(); pkl[T].labelled(event).pickle(wr, y)
+ case None =>
+ }
+ xx
+ }
+ def close() { wr.close() }
+ def flush() { wr.flush() }
+}
+
+object NullLogger extends LogReplay {
+ def logreplay(event: String, x: => Boolean) = x
+ def logreplay[T: Pickler](event: String, x: => Option[T]) = x
+ def close() {}
+ def flush() {}
+}
+
+class Replayer(raw: Reader) extends LogReplay {
+ private val rd = new Lexer(raw)
+ private var nextComma = false
+
+ private def eatComma() =
+ if (nextComma) { rd.accept(','); nextComma = false }
+
+ def logreplay(event: String, x: => Boolean) =
+ if (rd.token == EOF) NullLogger.logreplay(event, x)
+ else {
+ eatComma()
+ pkl[Unit].labelled(event).unpickle(rd) match {
+ case UnpickleSuccess(_) => nextComma = true; true
+ case _ => false
+ }
+ }
+
+ def logreplay[T: Pickler](event: String, x: => Option[T]) =
+ if (rd.token == EOF) NullLogger.logreplay(event, x)
+ else {
+ eatComma()
+ pkl[T].labelled(event).unpickle(rd) match {
+ case UnpickleSuccess(y) => nextComma = true; Some(y)
+ case _ => None
+ }
+ }
+
+ def close() { raw.close() }
+ def flush() {}
+}
+
diff --git a/src/compiler/scala/tools/nsc/reporters/Reporter.scala b/src/compiler/scala/tools/nsc/reporters/Reporter.scala
index 8dcb5183bb..cb8ef01ddd 100644
--- a/src/compiler/scala/tools/nsc/reporters/Reporter.scala
+++ b/src/compiler/scala/tools/nsc/reporters/Reporter.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2002-2010 LAMP/EPFL
+ * Copyright 2002-2011 LAMP/EPFL
* @author Martin Odersky
*/
@@ -22,7 +22,7 @@ abstract class Reporter {
val WARNING = new Severity(1)
val ERROR = new Severity(2)
- def reset {
+ def reset() {
INFO.count = 0
ERROR.count = 0
WARNING.count = 0
@@ -51,6 +51,19 @@ abstract class Reporter {
}
}
+ /** Whether very long lines can be truncated. This exists so important
+ * debugging information (like printing the classpath) is not rendered
+ * invisible due to the max message length.
+ */
+ private var _truncationOK: Boolean = true
+ def truncationOK = _truncationOK
+ def withoutTruncating[T](body: => T): T = {
+ val saved = _truncationOK
+ _truncationOK = false
+ try body
+ finally _truncationOK = saved
+ }
+
def info(pos: Position, msg: String, force: Boolean) { info0(pos, msg, INFO, force) }
def warning(pos: Position, msg: String ) { info0(pos, msg, WARNING, false) }
def error(pos: Position, msg: String ) { info0(pos, msg, ERROR, false) }
diff --git a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
index b175cb24ee..51401304b7 100644
--- a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
+++ b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
@@ -151,9 +151,14 @@ trait ScalaSettings extends AbsScalaSettings with StandardScalaSettings {
val Ywarndeadcode = BooleanSetting ("-Ywarn-dead-code", "Emit warnings for dead code")
/**
- * "fsc-specific" settings.
+ * IDE-specific settings
*/
- val fscShutdown = BooleanSetting ("-shutdown", "Shutdown the fsc daemon")
+ val YpresentationVerbose = BooleanSetting("-Ypresentation-verbose", "Print information about presentation compiler tasks.")
+ val YpresentationDebug = BooleanSetting("-Ypresentation-debug", "Enable debugging output for the presentation compiler.")
+
+ val YpresentationLog = StringSetting("-Ypresentation-log", "file", "Log presentation compiler events into file", "")
+ val YpresentationReplay = StringSetting("-Ypresentation-replay", "file", "Replay presentation compiler events from file", "")
+ val YpresentationDelay = IntSetting("-Ypresentation-delay", "Wait number of ms after typing before starting typechecking", 0, Some(0, 999), str => Some(str.toInt))
/**
* -P "Plugin" settings
diff --git a/src/compiler/scala/tools/nsc/symtab/BrowsingLoaders.scala b/src/compiler/scala/tools/nsc/symtab/BrowsingLoaders.scala
new file mode 100644
index 0000000000..b024d550b7
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/symtab/BrowsingLoaders.scala
@@ -0,0 +1,118 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2011 LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+package scala.tools.nsc
+package symtab
+
+import scala.tools.nsc.io.AbstractFile
+import scala.tools.nsc.util.BatchSourceFile
+
+/** A subclass of SymbolLoaders that implements browsing behavior.
+ * This class should be used whenever file dependencies and recompile sets
+ * are managed automatically.
+ */
+abstract class BrowsingLoaders extends SymbolLoaders {
+ import global._
+
+ import syntaxAnalyzer.{OutlineParser, MalformedInput}
+
+ /** In browse mode, it can happen that an encountered symbol is already
+ * present. For instance, if the source file has a name different from
+ * the classes and objects it contains, the symbol loader will always
+ * reparse the source file. The symbols it encounters might already be loaded
+ * as class files. In this case we return the one which has a sourcefile
+ * (and the other has not), and issue an error if both have sourcefiles.
+ */
+ override protected def enterIfNew(owner: Symbol, member: Symbol, completer: SymbolLoader): Symbol = {
+ completer.sourcefile match {
+ case Some(src) =>
+ (if (member.isModule) member.moduleClass else member).sourceFile = src
+ case _ =>
+ }
+ val decls = owner.info.decls
+ val existing = decls.lookup(member.name)
+ if (existing == NoSymbol) {
+ decls enter member
+ member
+ } else if (existing.sourceFile == null) {
+ decls unlink existing
+ decls enter member
+ member
+ } else {
+ if (member.sourceFile != null) {
+ if (existing.sourceFile != member.sourceFile)
+ error(member+"is defined twice,"+
+ "\n in "+existing.sourceFile+
+ "\n and also in "+member.sourceFile)
+ }
+ existing
+ }
+ }
+
+ /** Browse the top-level of given abstract file `src` and enter
+ * any encountered top-level classes and modules in `root`
+ */
+ def browseTopLevel(root: Symbol, src: AbstractFile) {
+
+ class BrowserTraverser extends Traverser {
+ var packagePrefix = ""
+ var entered = 0
+ def addPackagePrefix(pkg: Tree): Unit = pkg match {
+ case Select(pre, name) =>
+ addPackagePrefix(pre)
+ packagePrefix += ("." + name)
+ case Ident(name) =>
+ if (packagePrefix.length != 0) packagePrefix += "."
+ packagePrefix += name
+ case _ =>
+ throw new MalformedInput(pkg.pos.point, "illegal tree node in package prefix: "+pkg)
+ }
+ override def traverse(tree: Tree): Unit = tree match {
+ case PackageDef(pkg, body) =>
+ addPackagePrefix(pkg)
+ body foreach traverse
+ case ClassDef(_, name, _, _) =>
+ if (packagePrefix == root.fullName) {
+ enterClass(root, name.toString, new SourcefileLoader(src))
+ entered += 1
+ } else println("prefixes differ: "+packagePrefix+","+root.fullName)
+ case ModuleDef(_, name, _) =>
+ if (packagePrefix == root.fullName) {
+ val module = enterModule(root, name.toString, new SourcefileLoader(src))
+ entered += 1
+ if (name == nme.PACKAGEkw) {
+ println("open package module: "+module)
+ loaders.openPackageModule(module)()
+ }
+ } else println("prefixes differ: "+packagePrefix+","+root.fullName)
+ case _ =>
+ }
+ }
+
+// System.out.println("Browsing "+src)
+ val source = new BatchSourceFile(src)
+ val body = new OutlineParser(source).parse()
+// System.out.println(body)
+ val browser = new BrowserTraverser
+ browser.traverse(body)
+ if (browser.entered == 0)
+ warning("No classes or objects found in "+source+" that go in "+root)
+ }
+
+ /** Enter top-level symbols from a source file
+ */
+ override def enterToplevelsFromSource(root: Symbol, name: String, src: AbstractFile) {
+ try {
+ if (root == definitions.RootClass || root == definitions.EmptyPackageClass)
+ super.enterToplevelsFromSource(root, name, src)
+ else
+ browseTopLevel(root, src)
+ } catch {
+ case ex: syntaxAnalyzer.MalformedInput =>
+ println("caught malformed input exception at offset "+ex.offset+": "+ex.msg)
+ super.enterToplevelsFromSource(root, name, src)
+ }
+ }
+}
diff --git a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala
index a95c8ada8f..741aaa4718 100644
--- a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala
+++ b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala
@@ -1,22 +1,19 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2010 LAMP/EPFL
+ * Copyright 2005-2011 LAMP/EPFL
* @author Martin Odersky
*/
package scala.tools.nsc
package symtab
-import java.io.{File, IOException}
+import java.io.IOException
+import ch.epfl.lamp.compiler.msil.{ Type => MSILType, Attribute => MSILAttribute }
-import ch.epfl.lamp.compiler.msil.{Type => MSILType, Attribute => MSILAttribute}
-
-import scala.collection.mutable.{HashMap, HashSet, ListBuffer}
import scala.compat.Platform.currentTime
import scala.tools.nsc.io.AbstractFile
-import scala.tools.nsc.util.{ ClassPath, JavaClassPath }
+import scala.tools.nsc.util.{ ClassPath }
import classfile.ClassfileParser
import Flags._
-
import util.Statistics._
/** This class ...
@@ -28,6 +25,59 @@ abstract class SymbolLoaders {
val global: Global
import global._
+ protected def enterIfNew(owner: Symbol, member: Symbol, completer: SymbolLoader): Symbol = {
+ assert(owner.info.decls.lookup(member.name) == NoSymbol, owner.fullName + "." + member.name)
+ owner.info.decls enter member
+ member
+ }
+
+ private def realOwner(root: Symbol): Symbol = {
+ if (root.isRoot) definitions.EmptyPackageClass else root
+ }
+
+ /** Enter class with given `name` into scope of `root`
+ * and give them `completer` as type.
+ */
+ def enterClass(root: Symbol, name: String, completer: SymbolLoader): Symbol = {
+ val owner = realOwner(root)
+ val clazz = owner.newClass(NoPosition, newTypeName(name))
+ clazz setInfo completer
+ enterIfNew(owner, clazz, completer)
+ }
+
+ /** Enter module with given `name` into scope of `root`
+ * and give them `completer` as type.
+ */
+ def enterModule(root: Symbol, name: String, completer: SymbolLoader): Symbol = {
+ val owner = realOwner(root)
+ val module = owner.newModule(NoPosition, newTermName(name))
+ module setInfo completer
+ module.moduleClass setInfo moduleClassLoader
+ enterIfNew(owner, module, completer)
+ }
+
+ /** Enter class and module with given `name` into scope of `root`
+ * and give them `completer` as type.
+ */
+ def enterClassAndModule(root: Symbol, name: String, completer: SymbolLoader) {
+ val clazz = enterClass(root, name, completer)
+ val module = enterModule(root, name, completer)
+ if (!clazz.isAnonymousClass) {
+ assert(clazz.companionModule == module, module)
+ assert(module.companionClass == clazz, clazz)
+ }
+ }
+
+ /** In batch mode: Enter class and module with given `name` into scope of `root`
+ * and give them a source completer for given `src` as type.
+ * In IDE mode: Find all toplevel definitions in `src` and enter them into scope of `root`
+ * with source completer for given `src` as type.
+ * (overridden in interactive.Global).
+ */
+ def enterToplevelsFromSource(root: Symbol, name: String, src: AbstractFile) {
+ enterClassAndModule(root, name, new SourcefileLoader(src))
+ }
+
/**
* A lazy type that completes itself by calling parameter doComplete.
* Any linked modules/classes or module classes are also initialized.
@@ -37,7 +87,7 @@ abstract class SymbolLoaders {
/** Load source or class file for `root', return */
protected def doComplete(root: Symbol): Unit
- protected def sourcefile: Option[AbstractFile] = None
+ def sourcefile: Option[AbstractFile] = None
/**
* Description of the resource (ClassPath, AbstractFile, MSILType)
@@ -79,13 +129,17 @@ abstract class SymbolLoaders {
override def load(root: Symbol) { complete(root) }
+ private def markAbsent(sym: Symbol): Unit = {
+ val tpe: Type = if (ok) NoType else ErrorType
+
+ if (sym != NoSymbol)
+ sym setInfo tpe
+ }
private def initRoot(root: Symbol) {
- if (root.rawInfo == this) {
- def markAbsent(sym: Symbol) =
- if (sym != NoSymbol) sym.setInfo(if (ok) NoType else ErrorType);
- markAbsent(root)
- markAbsent(root.moduleClass)
- } else if (root.isClass && !root.isModuleClass) root.rawInfo.load(root)
+ if (root.rawInfo == this)
+ List(root, root.moduleClass) foreach markAbsent
+ else if (root.isClass && !root.isModuleClass)
+ root.rawInfo.load(root)
}
}
@@ -106,21 +160,6 @@ abstract class SymbolLoaders {
root.info.decls.enter(pkg)
}
- def enterClassAndModule(root: Symbol, name: String, completer: SymbolLoader) {
- val owner = if (root.isRoot) definitions.EmptyPackageClass else root
- val className = newTermName(name)
- assert(owner.info.decls.lookup(name) == NoSymbol, owner.fullName + "." + name)
- val clazz = owner.newClass(NoPosition, name.toTypeName)
- val module = owner.newModule(NoPosition, name)
- clazz setInfo completer
- module setInfo completer
- module.moduleClass setInfo moduleClassLoader
- owner.info.decls enter clazz
- owner.info.decls enter module
- assert(clazz.companionModule == module, module)
- assert(module.companionClass == clazz, clazz)
- }
-
/**
* Tells whether a class with both a binary and a source representation
* (found in classpath and in sourcepath) should be re-compiled. Behaves
@@ -147,15 +186,15 @@ abstract class SymbolLoaders {
val sourcepaths = classpath.sourcepaths
for (classRep <- classpath.classes if doLoad(classRep)) {
- if (classRep.binary.isDefined && classRep.source.isDefined) {
- val (bin, src) = (classRep.binary.get, classRep.source.get)
- val loader = if (needCompile(bin, src)) new SourcefileLoader(src)
- else newClassLoader(bin)
- enterClassAndModule(root, classRep.name, loader)
- } else if (classRep.binary.isDefined) {
- enterClassAndModule(root, classRep.name, newClassLoader(classRep.binary.get))
- } else if (classRep.source.isDefined) {
- enterClassAndModule(root, classRep.name, new SourcefileLoader(classRep.source.get))
+ ((classRep.binary, classRep.source) : @unchecked) match {
+ case (Some(bin), Some(src)) if needCompile(bin, src) =>
+ if (settings.verbose.value) inform("[symloader] picked up newer source file for " + src.path)
+ enterToplevelsFromSource(root, classRep.name, src)
+ case (None, Some(src)) =>
+ if (settings.verbose.value) inform("[symloader] no class, picked up source file for " + src.path)
+ enterToplevelsFromSource(root, classRep.name, src)
+ case (Some(bin), _) =>
+ enterClassAndModule(root, classRep.name, newClassLoader(bin))
}
}
@@ -247,7 +286,7 @@ abstract class SymbolLoaders {
classfileParser.parse(classfile, root)
stopTimer(classReadNanos, start)
}
- override protected def sourcefile = classfileParser.srcfile
+ override def sourcefile = classfileParser.srcfile
}
class MSILTypeLoader(typ: MSILType) extends SymbolLoader {
@@ -261,7 +300,7 @@ abstract class SymbolLoaders {
class SourcefileLoader(val srcfile: AbstractFile) extends SymbolLoader {
protected def description = "source file "+ srcfile.toString
- override protected def sourcefile = Some(srcfile)
+ override def sourcefile = Some(srcfile)
protected def doComplete(root: Symbol): Unit = global.currentRun.compileLate(srcfile)
}
diff --git a/src/compiler/scala/tools/nsc/symtab/Symbols.scala b/src/compiler/scala/tools/nsc/symtab/Symbols.scala
index 32931dde1b..046c5c596a 100644
--- a/src/compiler/scala/tools/nsc/symtab/Symbols.scala
+++ b/src/compiler/scala/tools/nsc/symtab/Symbols.scala
@@ -25,7 +25,10 @@ trait Symbols extends reflect.generic.Symbols { self: SymbolTable =>
val emptySymbolArray = new Array[Symbol](0)
/** Used for deciding in the IDE whether we can interrupt the compiler */
- protected var activeLocks = 0
+ //protected var activeLocks = 0
+
+ /** Used for debugging only */
+ //protected var lockedSyms = collection.immutable.Set[Symbol]()
/** Used to keep track of the recursion depth on locked symbols */
private var recursionTable = Map.empty[Symbol, Int]
@@ -312,7 +315,6 @@ trait Symbols extends reflect.generic.Symbols { self: SymbolTable =>
final def newAnonymousClass(pos: Position) =
newClass(pos, nme.ANON_CLASS_NAME.toTypeName)
-
final def newAnonymousFunctionClass(pos: Position) =
newClass(pos, nme.ANON_FUN_NAME.toTypeName)
@@ -369,14 +371,16 @@ trait Symbols extends reflect.generic.Symbols { self: SymbolTable =>
} else { handler }
} else {
rawflags |= LOCKED
- activeLocks += 1
+// activeLocks += 1
+// lockedSyms += this
}
}
// Unlock a symbol
def unlock() = {
if ((rawflags & LOCKED) != 0L) {
- activeLocks -= 1
+// activeLocks -= 1
+// lockedSyms -= this
rawflags = rawflags & ~LOCKED
if (settings.Yrecursion.value != 0)
recursionTable -= this
@@ -512,7 +516,6 @@ trait Symbols extends reflect.generic.Symbols { self: SymbolTable =>
final def isCaseApplyOrUnapply =
isMethod && hasFlag(CASE) && hasFlag(SYNTHETIC)
-
/** Is this symbol a trait which needs an implementation class? */
final def needsImplClass: Boolean =
isTrait && (!hasFlag(INTERFACE) || hasFlag(lateINTERFACE)) && !isImplClass
@@ -727,7 +730,8 @@ trait Symbols extends reflect.generic.Symbols { self: SymbolTable =>
}
} else {
rawflags |= LOCKED
- activeLocks += 1
+// activeLocks += 1
+ // lockedSyms += this
}
val current = phase
try {
@@ -1214,23 +1218,19 @@ trait Symbols extends reflect.generic.Symbols { self: SymbolTable =>
/** Is this symbol defined in the same scope and compilation unit as `that' symbol?
*/
def isCoDefinedWith(that: Symbol) =
- (this.rawInfo ne NoType) && {
- val res =
- !this.owner.isPackageClass ||
- (this.sourceFile eq null) ||
- (that.sourceFile eq null) ||
- (this.sourceFile eq that.sourceFile) ||
- (this.sourceFile == that.sourceFile)
-
- // recognize companion object in separate file and fail, else compilation
- // appears to succeed but highly opaque errors come later: see bug #1286
- if (res == false) {
- val (f1, f2) = (this.sourceFile, that.sourceFile)
- if (f1 != null && f2 != null && f1.path != f2.path)
- throw FatalError("Companions '" + this + "' and '" + that + "' must be defined in same file.")
+ (this.rawInfo ne NoType) &&
+ (this.owner == that.owner) && {
+ !this.owner.isPackageClass ||
+ (this.sourceFile eq null) ||
+ (that.sourceFile eq null) ||
+ (this.sourceFile == that.sourceFile) || {
+ // recognize companion object in separate file and fail, else compilation
+ // appears to succeed but highly opaque errors come later: see bug #1286
+ if (this.sourceFile.path != that.sourceFile.path)
+ throw InvalidCompanions(this, that)
+
+ false
}
-
- res
}
/** @PP: Added diagram because every time I come through here I end up
@@ -2033,7 +2033,16 @@ trait Symbols extends reflect.generic.Symbols { self: SymbolTable =>
sourceModule = module
}
override def sourceModule = module
- lazy val implicitMembers = info.implicitMembers
+ private var implicitMembersCacheValue: List[Symbol] = List()
+ private var implicitMembersCacheKey: Type = NoType
+ def implicitMembers: List[Symbol] = {
+ val tp = info
+ if (implicitMembersCacheKey ne tp) {
+ implicitMembersCacheKey = tp
+ implicitMembersCacheValue = tp.implicitMembers
+ }
+ implicitMembersCacheValue
+ }
override def sourceModule_=(module: Symbol) { this.module = module }
}
@@ -2083,6 +2092,11 @@ trait Symbols extends reflect.generic.Symbols { self: SymbolTable =>
// printStackTrace() // debug
}
+ case class InvalidCompanions(sym1: Symbol, sym2: Symbol)
+ extends Throwable("Companions '" + sym1 + "' and '" + sym2 + "' must be defined in same file") {
+ override def toString = getMessage
+ }
+
/** A class for type histories */
private sealed case class TypeHistory(var validFrom: Period, info: Type, prev: TypeHistory) {
assert((prev eq null) || phaseId(validFrom) > phaseId(prev.validFrom), this)
diff --git a/src/compiler/scala/tools/nsc/symtab/Types.scala b/src/compiler/scala/tools/nsc/symtab/Types.scala
index a7f8db28b2..5a5244123b 100644
--- a/src/compiler/scala/tools/nsc/symtab/Types.scala
+++ b/src/compiler/scala/tools/nsc/symtab/Types.scala
@@ -8,7 +8,9 @@ package scala.tools.nsc
package symtab
import scala.collection.immutable
-import scala.collection.mutable.{ListBuffer, HashMap, WeakHashMap}
+import scala.ref.WeakReference
+import scala.collection.mutable
+import scala.collection.mutable.{ListBuffer, HashMap}
import ast.TreeGen
import util.{HashSet, Position, NoPosition}
import util.Statistics._
@@ -106,7 +108,7 @@ trait Types extends reflect.generic.Types { self: SymbolTable =>
}
private[Types] def record(tv: TypeVar) = {log = (tv, tv.constr.cloneInternal) :: log}
- private[Types] def clear {log = List()}
+ private[nsc] def clear() {log = List()}
// `block` should not affect constraints on typevars
def undo[T](block: => T): T = {
@@ -137,7 +139,7 @@ trait Types extends reflect.generic.Types { self: SymbolTable =>
* It makes use of the fact that these two operations depend only on the parents,
* not on the refinement.
*/
- val intersectionWitness = new WeakHashMap[List[Type], Type]
+ val intersectionWitness = new mutable.WeakHashMap[List[Type], WeakReference[Type]]
private object gen extends {
val global : Types.this.type = Types.this
@@ -1293,12 +1295,21 @@ trait Types extends reflect.generic.Types { self: SymbolTable =>
baseClassesCache
}
- def memo[A](op1: => A)(op2: Type => A) = intersectionWitness get parents match {
- case Some(w) =>
- if (w eq this) op1 else op2(w)
- case none =>
- intersectionWitness(parents) = this
+ def memo[A](op1: => A)(op2: Type => A): A = {
+ def updateCache(): A = {
+ intersectionWitness(parents) = new WeakReference(this)
op1
+ }
+
+ intersectionWitness get parents match {
+ case Some(ref) =>
+ ref.get match {
+ case Some(w) => if (w eq this) op1 else op2(w)
+ case None => updateCache()
+ }
+ case None => updateCache()
+ }
+
}
override def baseType(sym: Symbol): Type = {
@@ -3172,10 +3183,19 @@ A type's typeSymbol should never be inspected directly.
* in ClassFileparser.sigToType (where it is usually done)
*/
object rawToExistential extends TypeMap {
+ private var expanded = immutable.Set[Symbol]()
def apply(tp: Type): Type = tp match {
case TypeRef(pre, sym, List()) if isRawIfWithoutArgs(sym) =>
- val eparams = typeParamsToExistentials(sym, sym.typeParams)
- existentialAbstraction(eparams, TypeRef(pre, sym, eparams map (_.tpe)))
+ if (expanded contains sym) AnyRefClass.tpe
+ else try {
+ expanded += sym
+ val eparams = mapOver(typeParamsToExistentials(sym, sym.typeParams))
+ existentialAbstraction(eparams, typeRef(apply(pre), sym, eparams map (_.tpe)))
+ } finally {
+ expanded -= sym
+ }
+ case ExistentialType(_, _) => // stop to avoid infinite expansions
+ tp
case _ =>
mapOver(tp)
}
@@ -3704,41 +3724,71 @@ A type's typeSymbol should never be inspected directly.
object adaptToNewRunMap extends TypeMap {
private def adaptToNewRun(pre: Type, sym: Symbol): Symbol = {
- if (sym.isModuleClass && !phase.flatClasses) {
- adaptToNewRun(pre, sym.sourceModule).moduleClass
- } else if ((pre eq NoPrefix) || (pre eq NoType) || sym.owner.isPackageClass) {
+ if (phase.flatClasses) {
+ sym
+ } else if (sym.isModuleClass) {
+ val adaptedSym = adaptToNewRun(pre, sym.sourceModule)
+ // Handle nested objects properly
+ val result0 = if (adaptedSym.isLazy) adaptedSym.lazyAccessor else adaptedSym.moduleClass
+ val result = if (result0 == NoSymbol)
+ // The only possible way we got here is when
+ // object is defined inside the method and unfortunately
+ // we have no way of retrieving that information (and using it)
+ // at this point, so just use the old symbol.
+ // This also means that sym.sourceModule == adaptedSym since
+ // pre == NoPrefix. see #4215
+ sym
+ else result0
+
+ result
+ } else if ((pre eq NoPrefix) || (pre eq NoType) || sym.isPackageClass) {
sym
} else {
var rebind0 = pre.findMember(sym.name, BRIDGE, 0, true)
if (rebind0 == NoSymbol) {
if (sym.isAliasType) throw missingAliasException
- throw new MissingTypeControl // For build manager purposes
+ if (settings.debug.value) println(pre+"."+sym+" does no longer exist, phase = "+phase)
+ throw new MissingTypeControl // For build manager and presentation compiler purposes
//assert(false, pre+"."+sym+" does no longer exist, phase = "+phase)
}
/** The two symbols have the same fully qualified name */
def corresponds(sym1: Symbol, sym2: Symbol): Boolean =
sym1.name == sym2.name && (sym1.isPackageClass || corresponds(sym1.owner, sym2.owner))
if (!corresponds(sym.owner, rebind0.owner)) {
- if (settings.debug.value) Console.println("ADAPT1 pre = "+pre+", sym = "+sym+sym.locationString+", rebind = "+rebind0+rebind0.locationString)
+ if (settings.debug.value)
+ log("ADAPT1 pre = "+pre+", sym = "+sym+sym.locationString+", rebind = "+rebind0+rebind0.locationString)
val bcs = pre.baseClasses.dropWhile(bc => !corresponds(bc, sym.owner));
if (bcs.isEmpty)
assert(pre.typeSymbol.isRefinementClass, pre) // if pre is a refinementclass it might be a structural type => OK to leave it in.
else
rebind0 = pre.baseType(bcs.head).member(sym.name)
- if (settings.debug.value) Console.println("ADAPT2 pre = "+pre+", bcs.head = "+bcs.head+", sym = "+sym+sym.locationString+", rebind = "+rebind0+(if (rebind0 == NoSymbol) "" else rebind0.locationString))
+ if (settings.debug.value) log(
+ "ADAPT2 pre = " + pre +
+ ", bcs.head = " + bcs.head +
+ ", sym = " + sym+sym.locationString +
+ ", rebind = " + rebind0 + (
+ if (rebind0 == NoSymbol) ""
+ else rebind0.locationString
+ )
+ )
}
val rebind = rebind0.suchThat(sym => sym.isType || sym.isStable)
if (rebind == NoSymbol) {
- if (settings.debug.value) Console.println("" + phase + " " +phase.flatClasses+sym.owner+sym.name+" "+sym.isType)
+ if (settings.debug.value) log("" + phase + " " +phase.flatClasses+sym.owner+sym.name+" "+sym.isType)
throw new MalformedType(pre, sym.nameString)
}
rebind
}
}
def apply(tp: Type): Type = tp match {
- case ThisType(sym) if (sym.isModuleClass) =>
- val sym1 = adaptToNewRun(sym.owner.thisType, sym)
- if (sym1 == sym) tp else ThisType(sym1)
+ case ThisType(sym) =>
+ try {
+ val sym1 = adaptToNewRun(sym.owner.thisType, sym)
+ if (sym1 == sym) tp else ThisType(sym1)
+ } catch {
+ case ex: MissingTypeControl =>
+ tp
+ }
case SingleType(pre, sym) =>
if (sym.isPackage) tp
else {
@@ -3760,7 +3810,7 @@ A type's typeSymbol should never be inspected directly.
case ex: MissingAliasControl =>
apply(tp.dealias)
case _: MissingTypeControl =>
- NoType
+ tp
}
}
case MethodType(params, restp) =>
diff --git a/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala b/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala
index 80f833f03d..23553e6879 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2010 LAMP/EPFL
+ * Copyright 2005-2011 LAMP/EPFL
* @author Martin Odersky
*/
@@ -48,6 +48,7 @@ trait Analyzer extends AnyRef
val runsRightAfter= Some("namer")
def newPhase(_prev: Phase): StdPhase = new StdPhase(_prev) {
+ override val checkable = false
import global._
val openPackageObjectsTraverser = new Traverser {
@@ -74,10 +75,17 @@ trait Analyzer extends AnyRef
val runsRightAfter = Some("packageobjects")
def newPhase(_prev: Phase): StdPhase = new StdPhase(_prev) {
override def keepsTypeParams = false
- resetTyper() // this does not in fact to the reset for each compilation run!
+ resetTyper()
+ // the log accumulates entries over time, even though it should not (Adriaan, Martin said so).
+ // Lacking a better fix, we clear it here (before the phase is created, meaning for each
+ // compiler run). This is good enough for the resident compiler, which was the most affected.
+ undoLog.clear()
override def run {
val start = startTimer(typerNanos)
currentRun.units foreach applyPhase
+ undoLog.clear()
+ // need to clear it after as well or 10K+ accumulated entries are
+ // uncollectable the rest of the way.
stopTimer(typerNanos, start)
}
def apply(unit: CompilationUnit) {
diff --git a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
index 900f7e471b..16ac0254b7 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2010 LAMP/EPFL
+ * Copyright 2005-2011 LAMP/EPFL
* @author Martin Odersky
*/
@@ -90,8 +90,8 @@ trait Contexts { self: Analyzer =>
}
class Context private[typechecker] {
var unit: CompilationUnit = _
- var tree: Tree = _ // Tree associated with this context
- var owner: Symbol = NoSymbol// The current owner
+ var tree: Tree = _ // Tree associated with this context
+ var owner: Symbol = NoSymbol // The current owner
var scope: Scope = _ // The current scope
var outer: Context = _ // The next outer context
var enclClass: Context = _ // The next outer context whose tree is a
@@ -110,7 +110,10 @@ trait Contexts { self: Analyzer =>
var inConstructorSuffix = false // are we in a secondary constructor
// after the this constructor call?
var returnsSeen = false // for method context: were returns encountered?
- var inSelfSuperCall = false // is this a context for a constructor self or super call?
+ var inSelfSuperCall = false // is this context (enclosed in) a constructor call?
+ // (the call to the super or self constructor in the first line of a constructor)
+ // in this context the object's fields should not be in scope
+
var reportAmbiguousErrors = false
var reportGeneralErrors = false
var diagnostic: List[String] = Nil // these messages are printed when issuing an error
@@ -121,6 +124,8 @@ trait Contexts { self: Analyzer =>
var savedTypeBounds: List[(Symbol, Type)] = List() // saved type bounds
// for type parameters which are narrowed in a GADT
+ var typingIndent: String = ""
+
def undetparams = _undetparams
def undetparams_=(ps: List[Symbol]) = {
//System.out.println("undetparams = " + ps);//debug
@@ -133,6 +138,13 @@ trait Contexts { self: Analyzer =>
tparams
}
+ def withImplicitsDisabled[T](op: => T): T = {
+ val saved = implicitsEnabled
+ implicitsEnabled = false
+ try op
+ finally implicitsEnabled = saved
+ }
+
/**
* @param unit ...
* @param tree ...
@@ -145,7 +157,7 @@ trait Contexts { self: Analyzer =>
scope: Scope, imports: List[ImportInfo]): Context = {
val c = new Context
c.unit = unit
- c.tree = /*sanitize*/(tree) // used to be for IDE
+ c.tree = tree
c.owner = owner
c.scope = scope
@@ -172,9 +184,11 @@ trait Contexts { self: Analyzer =>
c.variance = this.variance
c.depth = if (scope == this.scope) this.depth else this.depth + 1
c.imports = imports
+ c.inSelfSuperCall = inSelfSuperCall
c.reportAmbiguousErrors = this.reportAmbiguousErrors
c.reportGeneralErrors = this.reportGeneralErrors
c.diagnostic = this.diagnostic
+ c.typingIndent = typingIndent
c.implicitsEnabled = this.implicitsEnabled
c.checking = this.checking
c.retyping = this.retyping
@@ -262,7 +276,7 @@ trait Contexts { self: Analyzer =>
}
private def unitError(pos: Position, msg: String) =
- unit.error(pos, if (checking) "**** ERROR DURING INTERNAL CHECKING ****\n" + msg else msg)
+ unit.error(pos, if (checking) "\n**** ERROR DURING INTERNAL CHECKING ****\n" + msg else msg)
def error(pos: Position, err: Throwable) =
if (reportGeneralErrors) unitError(pos, addDiagString(err.getMessage()))
@@ -321,6 +335,12 @@ trait Contexts { self: Analyzer =>
" " + scope.toList + "\n:: " + outer.toString()
}
+ /** Is `sub' a subclass of `base' or a companion object of such a subclass?
+ */
+ def isSubClassOrCompanion(sub: Symbol, base: Symbol) =
+ sub.isNonBottomSubClass(base) ||
+ sub.isModuleClass && sub.linkedClassOfClass.isNonBottomSubClass(base)
+
/** Return closest enclosing context that defines a superclass of `clazz', or a
* companion module of a superclass of `clazz', or NoContext if none exists */
def enclosingSuperClassContext(clazz: Symbol): Context = {
@@ -332,11 +352,12 @@ trait Contexts { self: Analyzer =>
c
}
- /** Return closest enclosing context that defines a subclass of `clazz', or NoContext
- * if none exists */
+ /** Return closest enclosing context that defines a subclass of `clazz' or a companion
+ * object thereof, or NoContext if no such context exists
+ */
def enclosingSubClassContext(clazz: Symbol): Context = {
var c = this.enclClass
- while (c != NoContext && !c.owner.isNonBottomSubClass(clazz))
+ while (c != NoContext && !isSubClassOrCompanion(c.owner, clazz))
c = c.outer.enclClass
c
}
diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
index 4d0cc0c041..6f54c96952 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
@@ -975,6 +975,6 @@ self: Analyzer =>
}
}
}
-
- private val DivergentImplicit = new Exception()
}
+class DivergentImplicit extends Exception
+object DivergentImplicit extends DivergentImplicit
diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala
index 262a760958..27f8df18b3 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala
@@ -6,7 +6,8 @@
package scala.tools.nsc
package typechecker
-import scala.collection.mutable.HashMap
+import scala.collection.mutable.{HashMap, WeakHashMap}
+import scala.ref.WeakReference
import symtab.Flags
import symtab.Flags._
@@ -47,8 +48,10 @@ trait Namers { self: Analyzer =>
// synthetic `copy' (reps `apply', `unapply') methods are added. To compute
// their signatures, the corresponding ClassDef is needed.
// During naming, for each case class module symbol, the corresponding ClassDef
- // is stored in this map.
- private[typechecker] val caseClassOfModuleClass = new HashMap[Symbol, ClassDef]
+ // is stored in this map. The map is cleared lazily, i.e. when the new symbol
+ // is created with the same name, the old one (if present) is wiped out, or the
+ // entry is deleted when it is used and no longer needed.
+ private val caseClassOfModuleClass = new WeakHashMap[Symbol, WeakReference[ClassDef]]
// Default getters of constructors are added to the companion object in the
// typeCompleter of the constructor (methodSig). To compute the signature,
@@ -59,7 +62,6 @@ trait Namers { self: Analyzer =>
private[typechecker] val classAndNamerOfModule = new HashMap[Symbol, (ClassDef, Namer)]
def resetNamer() {
- caseClassOfModuleClass.clear
classAndNamerOfModule.clear
}
@@ -332,13 +334,32 @@ trait Namers { self: Analyzer =>
} else setInfo(sym)(ltype)
}
+ def enterIfNotThere(sym: Symbol) {
+ val scope = context.scope
+ var e = scope.lookupEntry(sym.name)
+ while ((e ne null) && (e.owner eq scope) && (e.sym ne sym)) e = e.tail
+ if (!((e ne null) && (e.owner eq scope))) context.scope.enter(sym)
+ }
+
def enterSym(tree: Tree): Context = {
def finishWith(tparams: List[TypeDef]) { enterSymFinishWith(tree, tparams) }
def finish = finishWith(Nil)
def sym = tree.symbol
- if (sym != NoSymbol)
+ if (sym != NoSymbol) {
+ if (forInteractive && sym != null && sym.owner.isTerm) {
+ // this logic is needed in case typer was interrupted half way through and then comes
+ // back to do the tree again. In that case the definitions that were already
+ // attributed as well as any default parameters of such methods need to be
+ // re-entered in the current scope.
+ enterIfNotThere(sym)
+ if (sym.isLazy) {
+ val acc = sym.lazyAccessor
+ if (acc != NoSymbol) enterIfNotThere(acc)
+ }
+ defaultParametersOfMethod(sym) foreach enterIfNotThere
+ }
return this.context
-
+ }
try {
val owner = context.owner
tree match {
@@ -356,7 +377,7 @@ trait Namers { self: Analyzer =>
context.error(tree.pos, "Implementation restriction: case classes cannot have more than " + MaxFunctionArity + " parameters.")
val m = ensureCompanionObject(tree, caseModuleDef(tree))
- caseClassOfModuleClass(m.moduleClass) = tree
+ caseClassOfModuleClass(m.moduleClass) = new WeakReference(tree)
}
val hasDefault = impl.body flatMap {
case DefDef(_, nme.CONSTRUCTOR, _, vparamss, _, _) => vparamss.flatten
@@ -406,7 +427,7 @@ trait Namers { self: Analyzer =>
val vsym =
if (!context.owner.isClass) {
assert(mods1.isLazy) // if not a field, it has to be a lazy val
- owner.newValue(tree.pos, name + "$lzy" ).setFlag(mods1.flags | MUTABLE)
+ owner.newValue(tree.pos, name + "$lzy" ).setFlag((mods1.flags | MUTABLE) & ~IMPLICIT)
} else {
val mFlag = if (mods1.isLazy) MUTABLE else 0
val lFlag = if (mods.hasFlag(PRIVATE) && mods.hasFlag(LOCAL)) 0 else LOCAL
@@ -442,7 +463,7 @@ trait Namers { self: Analyzer =>
case imp @ Import(_, _) =>
tree.symbol = NoSymbol.newImport(tree.pos)
setInfo(sym)(namerOf(sym).typeCompleter(tree))
- return (context.makeNewImport(imp))
+ return context.makeNewImport(imp)
case _ =>
}
}
@@ -719,7 +740,8 @@ trait Namers { self: Analyzer =>
// unless they exist already; here, "clazz" is the module class
if (clazz.isModuleClass) {
Namers.this.caseClassOfModuleClass get clazz match {
- case Some(cdef) =>
+ case Some(cdefRef) =>
+ val cdef = cdefRef()
addApplyUnapply(cdef, templateNamer)
caseClassOfModuleClass -= clazz
case None =>
@@ -733,7 +755,8 @@ trait Namers { self: Analyzer =>
// module class: the one from the module class removes the entry form caseClassOfModuleClass (see above).
if (clazz.isClass && !clazz.hasFlag(MODULE)) {
Namers.this.caseClassOfModuleClass get companionModuleOf(clazz, context).moduleClass match {
- case Some(cdef) =>
+ case Some(cdefRef) =>
+ val cdef = cdefRef()
def hasCopy(decls: Scope) = {
decls.iterator exists (_.name == nme.copy)
}
@@ -833,7 +856,7 @@ trait Namers { self: Analyzer =>
var resultPt = if (tpt.isEmpty) WildcardType else typer.typedType(tpt).tpe
val site = meth.owner.thisType
- def overriddenSymbol = intersectionType(meth.owner.info.parents).member(meth.name).filter(sym => {
+ def overriddenSymbol = intersectionType(meth.owner.info.parents).nonPrivateMember(meth.name).filter(sym => {
// luc: added .substSym from skolemized to deSkolemized
// site.memberType(sym): PolyType(tparams, MethodType(..., ...)) ==> all references to tparams are deSkolemized
// thisMethodType: tparams in PolyType are deSkolemized, the references in the MethodTypes are skolemized. ==> the two didn't match
@@ -1017,6 +1040,13 @@ trait Namers { self: Analyzer =>
if (!isConstr)
meth.owner.resetFlag(INTERFACE) // there's a concrete member now
val default = parentNamer.enterSyntheticSym(defaultTree)
+ if (forInteractive && default.owner.isTerm) {
+ // enter into map from method symbols to default arguments.
+ // if compiling the same local block several times (which can happen in interactive mode)
+ // we might otherwise not find the default symbol, because the second time the
+ // method symbol will be re-entered in the scope but the default parameter will not.
+ defaultParametersOfMethod(meth) += default
+ }
} else if (baseHasDefault) {
// the parameter does not have a default itself, but the corresponding parameter
// in the base class does.
@@ -1209,6 +1239,7 @@ trait Namers { self: Analyzer =>
}
checkSelectors(selectors)
+ transformed(tree) = treeCopy.Import(tree, expr1, selectors)
ImportType(expr1)
}
} catch {
@@ -1304,9 +1335,16 @@ trait Namers { self: Analyzer =>
val tree: Tree
}
+ var lockedCount = 0
+
def mkTypeCompleter(t: Tree)(c: Symbol => Unit) = new TypeCompleter {
val tree = t
- override def complete(sym: Symbol) = c(sym)
+ override def complete(sym: Symbol) = try {
+ lockedCount += 1
+ c(sym)
+ } finally {
+ lockedCount -= 1
+ }
}
/** A class representing a lazy type with known type parameters.
@@ -1314,10 +1352,13 @@ trait Namers { self: Analyzer =>
class PolyTypeCompleter(tparams: List[Tree], restp: TypeCompleter, owner: Tree, ownerSym: Symbol, ctx: Context) extends TypeCompleter {
override val typeParams: List[Symbol]= tparams map (_.symbol) //@M
override val tree = restp.tree
- override def complete(sym: Symbol) {
+ override def complete(sym: Symbol) = try {
+ lockedCount += 1
if(ownerSym.isAbstractType) //@M an abstract type's type parameters are entered -- TODO: change to isTypeMember ?
newNamer(ctx.makeNewScope(owner, ownerSym)).enterSyms(tparams) //@M
restp.complete(sym)
+ } finally {
+ lockedCount -= 1
}
}
@@ -1341,21 +1382,38 @@ trait Namers { self: Analyzer =>
* Finds the companion module of a class symbol. Calling .companionModule
* does not work for classes defined inside methods.
*/
- def companionModuleOf(clazz: Symbol, context: Context) = {
- var res = clazz.companionModule
- if (res == NoSymbol)
- res = context.lookup(clazz.name.toTermName, clazz.owner).suchThat(sym =>
- sym.hasFlag(MODULE) && sym.isCoDefinedWith(clazz))
- res
+ def companionModuleOf(clazz: Symbol, context: Context): Symbol = {
+ try {
+ var res = clazz.companionModule
+ if (res == NoSymbol)
+ res = context.lookup(clazz.name.toTermName, clazz.owner).suchThat(sym =>
+ sym.hasFlag(MODULE) && sym.isCoDefinedWith(clazz))
+ res
+ } catch {
+ case e: InvalidCompanions =>
+ context.error(clazz.pos, e.getMessage)
+ NoSymbol
+ }
}
- def companionClassOf(module: Symbol, context: Context) = {
- var res = module.companionClass
- if (res == NoSymbol)
- res = context.lookup(module.name.toTypeName, module.owner).suchThat(_.isCoDefinedWith(module))
- res
+ def companionClassOf(module: Symbol, context: Context): Symbol = {
+ try {
+ var res = module.companionClass
+ if (res == NoSymbol)
+ res = context.lookup(module.name.toTypeName, module.owner).suchThat(_.isCoDefinedWith(module))
+ res
+ } catch {
+ case e: InvalidCompanions =>
+ context.error(module.pos, e.getMessage)
+ NoSymbol
+ }
}
+ def companionSymbolOf(sym: Symbol, context: Context) =
+ if (sym.isTerm) companionClassOf(sym, context)
+ else if (sym.isClass) companionModuleOf(sym, context)
+ else NoSymbol
+
/** An explanatory note to be added to error messages
* when there's a problem with abstract var defs */
def varNotice(sym: Symbol): String =
diff --git a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala
index fdad0f7bfb..c08c614e1e 100644
--- a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2010 LAMP/EPFL
+ * Copyright 2005-2011 LAMP/EPFL
* @author Martin Odersky
*/
@@ -8,7 +8,8 @@ package typechecker
import symtab.Flags._
-import scala.collection.mutable.ListBuffer
+import scala.collection.mutable.{ListBuffer, WeakHashMap}
+import scala.collection.immutable.Set
/**
* @author Lukas Rytz
@@ -19,6 +20,10 @@ trait NamesDefaults { self: Analyzer =>
import global._
import definitions._
+ val defaultParametersOfMethod = new WeakHashMap[Symbol, Set[Symbol]] {
+ override def default(key: Symbol) = Set()
+ }
+
case class NamedApplyInfo(qual: Option[Tree], targs: List[Tree],
vargss: List[List[Tree]], blockTyper: Typer)
val noApplyInfo = NamedApplyInfo(None, Nil, Nil, null)
diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
index d58f881200..745809ccbb 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
@@ -32,7 +32,7 @@ trait Typers { self: Analyzer =>
// namer calls typer.computeType(rhs) on DefDef / ValDef when tpt is empty. the result
// is cached here and re-used in typedDefDef / typedValDef
- private val transformed = new HashMap[Tree, Tree]
+ val transformed = new HashMap[Tree, Tree]
// currently not used at all (March 09)
private val superDefs = new HashMap[Symbol, ListBuffer[Tree]]
@@ -358,13 +358,13 @@ trait Typers { self: Analyzer =>
}
}
- def checkNonCyclic(pos: Position, tp: Type, lockedSym: Symbol): Boolean = {
+ def checkNonCyclic(pos: Position, tp: Type, lockedSym: Symbol): Boolean = try {
lockedSym.lock {
throw new TypeError("illegal cyclic reference involving " + lockedSym)
}
- val result = checkNonCyclic(pos, tp)
+ checkNonCyclic(pos, tp)
+ } finally {
lockedSym.unlock()
- result
}
def checkNonCyclic(sym: Symbol) {
@@ -460,7 +460,8 @@ trait Typers { self: Analyzer =>
!o.isLocal && !o.hasFlag(PRIVATE) &&
!o.privateWithin.hasTransOwner(sym.owner))
o = o.owner
- if (o == sym.owner) addHidden(sym)
+ if (o == sym.owner || o == sym.owner.linkedClassOfClass)
+ addHidden(sym)
} else if (sym.owner.isTerm && !sym.isTypeParameterOrSkolem) {
var e = scope.lookupEntry(sym.name)
var found = false
@@ -632,7 +633,6 @@ trait Typers { self: Analyzer =>
* 1. Check that non-function pattern expressions are stable
* 2. Check that packages and static modules are not used as values
* 3. Turn tree type into stable type if possible and required by context.
- * </ol>
*/
private def stabilize(tree: Tree, pre: Type, mode: Int, pt: Type): Tree = {
if (tree.symbol.hasFlag(OVERLOADED) && (mode & FUNmode) == 0)
@@ -695,12 +695,15 @@ trait Typers { self: Analyzer =>
}
/** The member with given name of given qualifier tree */
- def member(qual: Tree, name: Name) = qual.tpe match {
- case ThisType(clazz) if (context.enclClass.owner.hasTransOwner(clazz)) =>
- qual.tpe.member(name)
- case _ =>
- if (phase.next.erasedTypes) qual.tpe.member(name)
- else qual.tpe.nonLocalMember(name)
+ def member(qual: Tree, name: Name) = {
+ def callSiteWithinClass(clazz: Symbol) = context.enclClass.owner hasTransOwner clazz
+ val includeLocals = qual.tpe match {
+ case ThisType(clazz) if callSiteWithinClass(clazz) => true
+ case SuperType(clazz, _) if callSiteWithinClass(clazz.typeSymbol) => true
+ case _ => phase.next.erasedTypes
+ }
+ if (includeLocals) qual.tpe member name
+ else qual.tpe nonLocalMember name
}
def silent[T](op: Typer => T,
@@ -758,7 +761,7 @@ trait Typers { self: Analyzer =>
/** Perform the following adaptations of expression, pattern or type `tree' wrt to
* given mode `mode' and given prototype `pt':
* (-1) For expressions with annotated types, let AnnotationCheckers decide what to do
- * (0) Convert expressions with constant types to literals
+ * (0) Convert expressions with constant types to literals (unless in interactive/scaladoc mode)
* (1) Resolve overloading, unless mode contains FUNmode
* (2) Apply parameterless functions
* (3) Apply polymorphic types to fresh instances of their type parameters and
@@ -794,7 +797,7 @@ trait Typers { self: Analyzer =>
protected def adapt(tree: Tree, mode: Int, pt: Type, original: Tree = EmptyTree): Tree = tree.tpe match {
case atp @ AnnotatedType(_, _, _) if canAdaptAnnotations(tree, mode, pt) => // (-1)
adaptAnnotations(tree, mode, pt)
- case ct @ ConstantType(value) if ((mode & (TYPEmode | FUNmode)) == 0 && (ct <:< pt) && !onlyPresentation) => // (0)
+ case ct @ ConstantType(value) if ((mode & (TYPEmode | FUNmode)) == 0 && (ct <:< pt) && !forScaladoc && !forInteractive) => // (0)
treeCopy.Literal(tree, value)
case OverloadedType(pre, alts) if ((mode & FUNmode) == 0) => // (1)
inferExprAlternative(tree, pt)
@@ -1814,6 +1817,7 @@ trait Typers { self: Analyzer =>
// call typedTypeDef instead
// a TypeDef with type parameters must always be type checked in a new scope
private def typedTypeDef0(tdef: TypeDef): TypeDef = {
+ tdef.symbol.initialize
reenterTypeParams(tdef.tparams)
val tparams1 = tdef.tparams mapConserve {typedTypeDef(_)}
val typedMods = removeAnnotations(tdef.mods)
@@ -1875,8 +1879,6 @@ trait Typers { self: Analyzer =>
}
}
- protected def typedFunctionIDE(fun : Function, txt : Context) = {}
-
/**
* @param block ...
* @param mode ...
@@ -1887,14 +1889,7 @@ trait Typers { self: Analyzer =>
val syntheticPrivates = new ListBuffer[Symbol]
try {
namer.enterSyms(block.stats)
- for (stat <- block.stats) {
- if (onlyPresentation && stat.isDef) {
- var e = context.scope.lookupEntry(stat.symbol.name)
- while ((e ne null) && (e.sym ne stat.symbol)) e = e.tail
- if (e eq null) context.scope.enter(stat.symbol)
- }
- enterLabelDef(stat)
- }
+ for (stat <- block.stats) enterLabelDef(stat)
if (phaseId(currentPeriod) <= currentRun.typerPhase.id) {
// This is very tricky stuff, because we are navigating
@@ -1910,7 +1905,7 @@ trait Typers { self: Analyzer =>
// The cleanest way forward is if we would find a way to suppress
// structural type checking for these members and maybe defer
// type errors to the places where members are called. But that would
- // be a bug refactoring and also a big departure from existing code.
+ // be a big refactoring and also a big departure from existing code.
// The probably safest fix for 2.8 is to keep members of an anonymous
// class that are not mentioned in a parent type private (as before)
// but to disable escape checking for code that's in the same anonymous class.
@@ -1968,6 +1963,13 @@ trait Typers { self: Analyzer =>
error(x.pos, "_* may only come last")
val pat1: Tree = typedPattern(cdef.pat, pattpe)
+
+ if (forInteractive) {
+ for (bind @ Bind(name, _) <- cdef.pat)
+ if (name.toTermName != nme.WILDCARD && bind.symbol != null && bind.symbol != NoSymbol)
+ namer.enterIfNotThere(bind.symbol)
+ }
+
val guard1: Tree = if (cdef.guard == EmptyTree) EmptyTree
else typed(cdef.guard, BooleanClass.tpe)
var body1: Tree = typed(cdef.body, pt)
@@ -2086,7 +2088,10 @@ trait Typers { self: Analyzer =>
namer.enterSyms(stats)
// need to delay rest of typedRefinement to avoid cyclic reference errors
unit.toCheck += { () =>
- val stats1 = typedStats(stats, NoSymbol)
+ // go to next outer context which is not silent, see #3614
+ var c = context
+ while (!c.reportGeneralErrors) c = c.outer
+ val stats1 = newTyper(c).typedStats(stats, NoSymbol)
for (stat <- stats1 if stat.isDef) {
val member = stat.symbol
if (!(context.owner.ancestors forall
@@ -2097,7 +2102,10 @@ trait Typers { self: Analyzer =>
}
}
- def typedImport(imp : Import) : Import = imp
+ def typedImport(imp : Import) : Import = (transformed remove imp) match {
+ case Some(imp1: Import) => imp1
+ case None => log("unhandled import: "+imp+" in "+unit); imp
+ }
def typedStats(stats: List[Tree], exprOwner: Symbol): List[Tree] = {
val inBlock = exprOwner == context.owner
@@ -2110,13 +2118,9 @@ trait Typers { self: Analyzer =>
else
stat match {
case imp @ Import(_, _) =>
- val imp0 = typedImport(imp)
- if (imp0 ne null) {
- context = context.makeNewImport(imp0)
- imp0.symbol.initialize
- imp0
- } else
- EmptyTree
+ context = context.makeNewImport(imp)
+ imp.symbol.initialize
+ typedImport(imp)
case _ =>
if (localTarget && !includesTargetPos(stat)) {
// skip typechecking of statements in a sequence where some other statement includes
@@ -2453,7 +2457,7 @@ trait Typers { self: Analyzer =>
}
}
- if (fun.symbol == List_apply && args.isEmpty && !onlyPresentation) {
+ if (fun.symbol == List_apply && args.isEmpty && !forInteractive) {
atPos(tree.pos) { gen.mkNil setType restpe }
} else {
constfold(treeCopy.Apply(tree, fun, args1).setType(ifPatternSkipFormals(restpe)))
@@ -2844,10 +2848,12 @@ trait Typers { self: Analyzer =>
val name = if (sym.isType) sym.name else newTypeName(sym.name+".type")
val bound = sym.existentialBound
val sowner = if (isRawParameter(sym)) context.owner else sym.owner
- val quantified: Symbol = sowner.newAbstractType(sym.pos, name)
- trackSetInfo(quantified setFlag EXISTENTIAL)(bound.cloneInfo(quantified))
+ val quantified: Symbol = sowner.newAbstractType(sym.pos, name).setFlag(EXISTENTIAL)
+
+ quantified.setInfo(bound.cloneInfo(quantified))
+ quantified
}
- val typeParamTypes = typeParams map (_.tpe) // don't trackSetInfo here, since type already set!
+ val typeParamTypes = typeParams map (_.tpe)
//println("ex trans "+rawSyms+" . "+tp+" "+typeParamTypes+" "+(typeParams map (_.info)))//DEBUG
for (tparam <- typeParams) tparam.setInfo(tparam.info.subst(rawSyms, typeParamTypes))
(typeParams, tp.subst(rawSyms, typeParamTypes))
@@ -3128,7 +3134,7 @@ trait Typers { self: Analyzer =>
vble = namer.enterInScope(vble)
}
val body1 = typed(body, mode, pt)
- trackSetInfo(vble)(
+ vble.setInfo(
if (treeInfo.isSequenceValued(body)) seqType(body1.tpe)
else body1.tpe)
treeCopy.Bind(tree, name, body1) setSymbol vble setType body1.tpe // burak, was: pt
@@ -3592,13 +3598,13 @@ trait Typers { self: Analyzer =>
setError(tree1)
}
- if (name == nme.ERROR && onlyPresentation)
+ if (name == nme.ERROR && forInteractive)
return makeErrorTree
if (!qual.tpe.widen.isErroneous)
notAMember(tree, qual, name)
- if (onlyPresentation) makeErrorTree else setError(tree)
+ if (forInteractive) makeErrorTree else setError(tree)
} else {
val tree1 = tree match {
case Select(_, _) => treeCopy.Select(tree, qual, name)
@@ -3656,7 +3662,7 @@ trait Typers { self: Analyzer =>
// case x :: xs in class List would return the :: method).
def qualifies(sym: Symbol): Boolean = {
reallyExists(sym) &&
- ((mode & PATTERNmode | FUNmode) != (PATTERNmode | FUNmode) || !sym.isSourceMethod)
+ ((mode & PATTERNmode | FUNmode) != (PATTERNmode | FUNmode) || !sym.isSourceMethod || sym.hasFlag(ACCESSOR))
}
if (defSym == NoSymbol) {
@@ -3811,8 +3817,8 @@ trait Typers { self: Analyzer =>
glb(List(arg.symbol.info.bounds.hi, tparam.info.bounds.hi.subst(tparams, argtypes))))
case _ =>
}}
-
- val result = TypeTree(appliedType(tpt1.tpe, argtypes)) setOriginal(tree) // setPos tree.pos (done by setOriginal)
+ val original = treeCopy.AppliedTypeTree(tree, tpt1, args1)
+ val result = TypeTree(appliedType(tpt1.tpe, argtypes)) setOriginal original
if(tpt1.tpe.isInstanceOf[PolyType]) // did the type application (performed by appliedType) involve an unchecked beta-reduction?
(TypeTreeWithDeferredRefCheck(){ () =>
// wrap the tree and include the bounds check -- refchecks will perform this check (that the beta reduction was indeed allowed) and unwrap
@@ -3868,7 +3874,7 @@ trait Typers { self: Analyzer =>
labelTyper(ldef).typedLabelDef(ldef)
case ddef @ DocDef(comment, defn) =>
- if (onlyPresentation && (sym ne null) && (sym ne NoSymbol)) {
+ if (forScaladoc && (sym ne null) && (sym ne NoSymbol)) {
docComments(sym) = comment
comment.defineVariables(sym)
val typer1 = newTyper(context.makeNewScope(tree, context.owner))
@@ -3900,7 +3906,7 @@ trait Typers { self: Analyzer =>
case Star(elem) =>
checkStarPatOK(tree.pos, mode)
val elem1 = typed(elem, mode, pt)
- treeCopy.Star(tree, elem1) setType pt
+ treeCopy.Star(tree, elem1) setType makeFullyDefined(pt)
case Bind(name, body) =>
typedBind(name, body)
@@ -4159,7 +4165,7 @@ trait Typers { self: Analyzer =>
// whatever type to tree; we just have to survive until a real error message is issued.
tree setType AnyClass.tpe
case Import(expr, selectors) =>
- assert(onlyPresentation) // should not happen in normal circumstances.
+ assert(forInteractive) // should not happen in normal circumstances.
tree setType tree.symbol.tpe
case _ =>
abort("unexpected tree: " + tree.getClass + "\n" + tree)//debug
@@ -4175,7 +4181,8 @@ trait Typers { self: Analyzer =>
def typed(tree: Tree, mode: Int, pt: Type): Tree = { indentTyping()
def dropExistential(tp: Type): Type = tp match {
case ExistentialType(tparams, tpe) =>
- if (settings.debug.value) println("drop ex "+tree+" "+tp)
+ if (settings.debug.value)
+ log("Dropping existential: " + tree + " " + tp)
new SubstWildcardMap(tparams).apply(tp)
case TypeRef(_, sym, _) if sym.isAliasType =>
val tp0 = tp.normalize
diff --git a/src/compiler/scala/tools/nsc/util/MultiHashMap.scala b/src/compiler/scala/tools/nsc/util/MultiHashMap.scala
new file mode 100644
index 0000000000..719d18cd2e
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/util/MultiHashMap.scala
@@ -0,0 +1,10 @@
+package scala.tools.nsc.util
+
+import collection.mutable.HashMap
+import collection.immutable
+
+/** A hash map whose values are immutable sets, with the empty set as the default value.
+ */
+class MultiHashMap[K, V] extends HashMap[K, immutable.Set[V]] {
+ override def default(key: K): immutable.Set[V] = Set()
+}
diff --git a/src/compiler/scala/tools/nsc/util/Position.scala b/src/compiler/scala/tools/nsc/util/Position.scala
index 31202eec8f..c97dc930ab 100644
--- a/src/compiler/scala/tools/nsc/util/Position.scala
+++ b/src/compiler/scala/tools/nsc/util/Position.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2010 LAMP/EPFL
+ * Copyright 2005-2011 LAMP/EPFL
* @author Martin Odersky
*
*/
@@ -132,7 +132,7 @@ trait Position {
def focusEnd = this
/** Does this position include the given position `pos`.
- * This holds this is a range position and its range [start..end]
+ * This holds if this is a range position and its range [start..end]
* is the same or covers the range of the given position.
*/
def includes(pos: Position) = false
@@ -174,6 +174,9 @@ trait Position {
def column: Int = throw new UnsupportedOperationException("Position.column")
+ /** Convert this to a position around `point` that spans a single source line */
+ def toSingleLine: Position = this
+
def lineContent: String =
if (isDefined) source.lineToString(line - 1)
else "NO_LINE"
@@ -254,6 +257,15 @@ extends OffsetPosition(source, point) {
override def includes(pos: Position) = pos.isDefined && start <= pos.startOrPoint && pos.endOrPoint <= end
override def union(pos: Position) =
if (pos.isRange) new RangePosition(source, start min pos.start, point, end max pos.end) else this
+
+ override def toSingleLine: Position = source match {
+ case bs: BatchSourceFile
+ if end > 0 && bs.offsetToLine(start) < bs.offsetToLine(end - 1) =>
+ val pointLine = bs.offsetToLine(point)
+ new RangePosition(source, bs.lineToOffset(pointLine), point, bs.lineToOffset(pointLine + 1))
+ case _ => this
+ }
+
override def toString = "RangePosition("+source+", "+start+", "+point+", "+end+")"
override def show = "["+start+":"+end+"]"
private var focusCache: Position = NoPosition
diff --git a/src/compiler/scala/tools/nsc/util/SourceFile.scala b/src/compiler/scala/tools/nsc/util/SourceFile.scala
index 8c1d308209..90a9057f01 100644
--- a/src/compiler/scala/tools/nsc/util/SourceFile.scala
+++ b/src/compiler/scala/tools/nsc/util/SourceFile.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2010 LAMP/EPFL
+ * Copyright 2005-2011 LAMP/EPFL
* @author Martin Odersky
*/
@@ -46,7 +46,7 @@ abstract class SourceFile {
final def skipWhitespace(offset: Int): Int =
if (content(offset).isWhitespace) skipWhitespace(offset + 1) else offset
- def identifier(pos: Position, compiler: Global): Option[String] = None
+ def identifier(pos: Position): Option[String] = None
}
object ScriptSourceFile {
@@ -101,12 +101,12 @@ class BatchSourceFile(val file : AbstractFile, val content: Array[Char]) extends
def start = 0
def isSelfContained = true
- override def identifier(pos: Position, compiler: Global) =
+ override def identifier(pos: Position) =
if (pos.isDefined && pos.source == this && pos.point != -1) {
def isOK(c: Char) = isIdentifierPart(c) || isOperatorPart(c)
Some(new String(content drop pos.point takeWhile isOK))
} else {
- super.identifier(pos, compiler)
+ super.identifier(pos)
}
def isLineBreak(idx: Int) =
diff --git a/src/compiler/scala/tools/nsc/util/WorkScheduler.scala b/src/compiler/scala/tools/nsc/util/WorkScheduler.scala
index f520aec8bc..aa1bb734ea 100644
--- a/src/compiler/scala/tools/nsc/util/WorkScheduler.scala
+++ b/src/compiler/scala/tools/nsc/util/WorkScheduler.scala
@@ -17,7 +17,7 @@ class WorkScheduler {
}
/** called from Server: test whether one of todo list, throwables, or InterruptReqs is nonempty */
- def moreWork(): Boolean = synchronized {
+ def moreWork: Boolean = synchronized {
todo.nonEmpty || throwables.nonEmpty || interruptReqs.nonEmpty
}
@@ -26,6 +26,10 @@ class WorkScheduler {
if (todo.isEmpty) None else Some(todo.dequeue())
}
+ def dequeueAll[T](f: Action => Option[T]): Seq[T] = synchronized {
+ todo.dequeueAll(a => f(a).isDefined).map(a => f(a).get)
+ }
+
/** Called from server: return optional exception posted by client
* Reset to no exception.
*/
@@ -73,7 +77,11 @@ class WorkScheduler {
*/
def raise(exc: Throwable) = synchronized {
throwables enqueue exc
- postWorkItem { () => }
+ postWorkItem { new EmptyAction }
}
}
+class EmptyAction extends (() => Unit) {
+ def apply() {}
+}
+
diff --git a/test/files/neg/bug1286.check b/test/files/neg/bug1286.check
index 9bf63252cc..d45d8094fd 100644
--- a/test/files/neg/bug1286.check
+++ b/test/files/neg/bug1286.check
@@ -1,2 +1,3 @@
-error: fatal error: Companions 'object Foo' and 'trait Foo' must be defined in same file.
-one error found
+a.scala:1: error: Companions 'object Foo' and 'trait Foo' must be defined in same file
+trait Foo {
+ ^
diff --git a/test/files/neg/names-defaults-neg.check b/test/files/neg/names-defaults-neg.check
index 5d8f2e2e5d..af1e4babf7 100644
--- a/test/files/neg/names-defaults-neg.check
+++ b/test/files/neg/names-defaults-neg.check
@@ -36,11 +36,7 @@ names-defaults-neg.scala:22: error: parameter specified twice: a
names-defaults-neg.scala:23: error: parameter specified twice: b
test1(b = 1, b = "2")
^
-names-defaults-neg.scala:26: error: {
- val x$5: Int(3) = 3;
- val x$6: Int(1) = 1;
- Test.this.test3(1, 3)
-} of type Int does not take parameters
+names-defaults-neg.scala:26: error: Int does not take parameters
test3(b = 3, a = 1)(3)
^
names-defaults-neg.scala:35: error: ambiguous reference to overloaded definition,
@@ -89,13 +85,13 @@ names-defaults-neg.scala:76: error: no type parameters for method test4: (x: T[T
--- because ---
argument expression's type is not compatible with formal parameter type;
found : List[Int]
- required: ?T[ ?T[ scala.List[?T[ X forSome { type X } ]] ] ]
+ required: ?T[ ?T[ List[?T[ X forSome { type X } ]] ] ]
Error occurred in an application involving default arguments.
test4()
^
names-defaults-neg.scala:79: error: type mismatch;
found : List[Int]
- required: scala.List[scala.List[?]]
+ required: List[List[?]]
def test6[T](x: List[List[T]] = List(1,2)) = x
^
names-defaults-neg.scala:82: error: type mismatch;
@@ -120,26 +116,26 @@ names-defaults-neg.scala:124: error: parameter specified twice: a
names-defaults-neg.scala:125: error: wrong number of parameters; expected = 2
val taf4: (Int, String) => Unit = testAnnFun(_, b = _)
^
-names-defaults-neg.scala:133: error: variable definition needs type because the name is used as named argument the definition.
+names-defaults-neg.scala:133: error: variable definition needs type because 'x' is used as a named argument in its body.
def t3 { var x = t.f(x = 1) }
^
-names-defaults-neg.scala:136: error: variable definition needs type because the name is used as named argument the definition.
+names-defaults-neg.scala:136: error: variable definition needs type because 'x' is used as a named argument in its body.
object t6 { var x = t.f(x = 1) }
^
-names-defaults-neg.scala:139: error: variable definition needs type because the name is used as named argument the definition.
+names-defaults-neg.scala:139: error: variable definition needs type because 'x' is used as a named argument in its body.
class t9 { var x = t.f(x = 1) }
^
-names-defaults-neg.scala:153: error: variable definition needs type because the name is used as named argument the definition.
+names-defaults-neg.scala:153: error: variable definition needs type because 'x' is used as a named argument in its body.
def u3 { var x = u.f(x = 1) }
^
-names-defaults-neg.scala:156: error: variable definition needs type because the name is used as named argument the definition.
+names-defaults-neg.scala:156: error: variable definition needs type because 'x' is used as a named argument in its body.
def u6 { var x = u.f(x = "32") }
^
names-defaults-neg.scala:159: error: reference to x is ambiguous; it is both, a parameter
name of the method and the name of a variable currently in scope.
def u9 { var x: Int = u.f(x = 1) }
^
-names-defaults-neg.scala:166: error: variable definition needs type because the name is used as named argument the definition.
+names-defaults-neg.scala:166: error: variable definition needs type because 'x' is used as a named argument in its body.
class u15 { var x = u.f(x = 1) }
^
names-defaults-neg.scala:169: error: reference to x is ambiguous; it is both, a parameter