Diffstat (limited to 'src/interactive')
-rw-r--r--  src/interactive/scala/tools/nsc/interactive/CompilerControl.scala | 439
-rw-r--r--  src/interactive/scala/tools/nsc/interactive/ContextTrees.scala | 165
-rw-r--r--  src/interactive/scala/tools/nsc/interactive/Global.scala | 1241
-rw-r--r--  src/interactive/scala/tools/nsc/interactive/InteractiveReporter.scala | 47
-rw-r--r--  src/interactive/scala/tools/nsc/interactive/Lexer.scala | 299
-rw-r--r--  src/interactive/scala/tools/nsc/interactive/Main.scala | 34
-rw-r--r--  src/interactive/scala/tools/nsc/interactive/Pickler.scala | 377
-rw-r--r--  src/interactive/scala/tools/nsc/interactive/Picklers.scala | 189
-rw-r--r--  src/interactive/scala/tools/nsc/interactive/PresentationCompilerThread.scala | 51
-rw-r--r--  src/interactive/scala/tools/nsc/interactive/PrettyWriter.scala | 41
-rw-r--r--  src/interactive/scala/tools/nsc/interactive/REPL.scala | 164
-rw-r--r--  src/interactive/scala/tools/nsc/interactive/RangePositions.scala | 14
-rw-r--r--  src/interactive/scala/tools/nsc/interactive/Replayer.scala | 74
-rw-r--r--  src/interactive/scala/tools/nsc/interactive/Response.scala | 107
-rw-r--r--  src/interactive/scala/tools/nsc/interactive/RichCompilationUnits.scala | 58
-rw-r--r--  src/interactive/scala/tools/nsc/interactive/tests/InteractiveTest.scala | 113
-rw-r--r--  src/interactive/scala/tools/nsc/interactive/tests/InteractiveTestSettings.scala | 69
-rw-r--r--  src/interactive/scala/tools/nsc/interactive/tests/Tester.scala | 209
-rw-r--r--  src/interactive/scala/tools/nsc/interactive/tests/core/AskCommand.scala | 122
-rw-r--r--  src/interactive/scala/tools/nsc/interactive/tests/core/CoreTestDefs.scala | 128
-rw-r--r--  src/interactive/scala/tools/nsc/interactive/tests/core/PresentationCompilerInstance.scala | 33
-rw-r--r--  src/interactive/scala/tools/nsc/interactive/tests/core/PresentationCompilerRequestsWorkingMode.scala | 62
-rw-r--r--  src/interactive/scala/tools/nsc/interactive/tests/core/PresentationCompilerTestDef.scala | 18
-rw-r--r--  src/interactive/scala/tools/nsc/interactive/tests/core/Reporter.scala | 15
-rw-r--r--  src/interactive/scala/tools/nsc/interactive/tests/core/SourcesCollector.scala | 20
-rw-r--r--  src/interactive/scala/tools/nsc/interactive/tests/core/TestMarker.scala | 29
-rw-r--r--  src/interactive/scala/tools/nsc/interactive/tests/core/TestResources.scala | 12
-rw-r--r--  src/interactive/scala/tools/nsc/interactive/tests/core/TestSettings.scala | 19
28 files changed, 4149 insertions, 0 deletions
diff --git a/src/interactive/scala/tools/nsc/interactive/CompilerControl.scala b/src/interactive/scala/tools/nsc/interactive/CompilerControl.scala
new file mode 100644
index 0000000000..69cae24808
--- /dev/null
+++ b/src/interactive/scala/tools/nsc/interactive/CompilerControl.scala
@@ -0,0 +1,439 @@
+/* NSC -- new Scala compiler
+ * Copyright 2009-2013 Typesafe/Scala Solutions and LAMP/EPFL
+ * @author Martin Odersky
+ */
+package scala.tools.nsc
+package interactive
+
+import scala.util.control.ControlThrowable
+import scala.tools.nsc.io.AbstractFile
+import scala.tools.nsc.util.FailedInterrupt
+import scala.tools.nsc.util.EmptyAction
+import scala.tools.nsc.util.WorkScheduler
+import scala.reflect.internal.util.{SourceFile, Position}
+import scala.tools.nsc.util.InterruptReq
+
+/** Interface of the interactive compiler to a client such as an IDE.
+ * The model of the presentation compiler consists of the following parts:
+ *
+ * unitOfFile: The map from sourcefiles to loaded units. A sourcefile/unit is loaded if it occurs in that map.
+ *
+ * manipulated by: removeUnitOf, reloadSources.
+ *
+ * A call to reloadSources will add the given sources to the loaded units, and
+ * start a new background compiler pass to compile all loaded units (with the indicated sources first).
+ * Each background compiler pass has its own typer run.
+ * The background compiler thread can be interrupted each time an AST node is
+ * completely typechecked in the following ways:
+ *
+ * 1. by a new call to reloadSources. This starts a new background compiler pass with a new typer run.
+ * 2. by a call to askType. This starts a new typer run if the forceReload parameter is true.
+ * 3. by a call to askTypeAt, askTypeCompletion, askScopeCompletion, askToDoFirst, askLinkPos, askLastType.
+ * 4. by raising an exception in the scheduler.
+ * 5. by passing a high-priority action wrapped in ask { ... }.
+ *
+ * Actions under 1-3 can themselves be interrupted if they involve typechecking
+ * AST nodes. High-priority actions under 5 cannot; they always run to completion.
+ * So these high-priority actions should be short.
+ *
+ * Normally, an interrupted action continues after the interrupting action is finished.
+ * However, if the interrupting action created a new typer run, the interrupted
+ * action is aborted. If there's an outstanding response, it will be set to
+ * a Right value with a FreshRunReq exception.
+ */
+trait CompilerControl { self: Global =>
+
+ type Response[T] = scala.tools.nsc.interactive.Response[T]
+
+ /** The scheduler by which client and compiler communicate.
+ * Must be initialized before starting compileRunner.
+ */
+ @volatile protected[interactive] var scheduler = new WorkScheduler
+
+ /** Return the compilation unit attached to a source file, or None
+ * if source is not loaded.
+ */
+ def getUnitOf(s: SourceFile): Option[RichCompilationUnit] = getUnit(s)
+
+ /** Run operation `op` on a compilation unit associated with given `source`.
+ * If source has a loaded compilation unit, this one is passed to `op`.
+ * Otherwise a new compilation unit is created, but not added to the set of loaded units.
+ */
+ def onUnitOf[T](source: SourceFile)(op: RichCompilationUnit => T): T =
+ op(unitOfFile.getOrElse(source.file, new RichCompilationUnit(source)))
+
+ /** Removes the CompilationUnit corresponding to the given SourceFile
+ * from consideration for recompilation.
+ */
+ def removeUnitOf(s: SourceFile): Option[RichCompilationUnit] = { toBeRemoved += s.file; unitOfFile get s.file }
+
+ /** Returns the top level classes and objects that were deleted
+ * in the editor since last time recentlyDeleted() was called.
+ */
+ def recentlyDeleted(): List[Symbol] = deletedTopLevelSyms.synchronized {
+ val result = deletedTopLevelSyms
+ deletedTopLevelSyms.clear()
+ result.toList
+ }
+
+ /** Locate smallest tree that encloses position
+ * @pre Position must be loaded
+ */
+ def locateTree(pos: Position): Tree = onUnitOf(pos.source) { unit => new Locator(pos) locateIn unit.body }
+
+ /** Locates smallest context that encloses position as an optional value.
+ */
+ def locateContext(pos: Position): Option[Context] =
+ for (unit <- getUnit(pos.source); cx <- locateContext(unit.contexts, pos)) yield cx
+
+ /** Returns the smallest context that contains given `pos`, throws FatalError if none exists.
+ */
+ def doLocateContext(pos: Position): Context = locateContext(pos) getOrElse {
+ throw new FatalError("no context found for "+pos)
+ }
+
+ private def postWorkItem(item: WorkItem) =
+ if (item.onCompilerThread) item() else scheduler.postWorkItem(item)
+
+ /** Makes sure a set of compilation units is loaded and parsed.
+ * Returns () to syncvar `response` on completion.
+ * Afterwards a new background compiler run is started with
+ * the given sources at the head of the list of to-be-compiled sources.
+ */
+ def askReload(sources: List[SourceFile], response: Response[Unit]) = {
+ val superseded = scheduler.dequeueAll {
+ case ri: ReloadItem if ri.sources == sources => Some(ri)
+ case _ => None
+ }
+ superseded.foreach(_.response.set(()))
+ postWorkItem(new ReloadItem(sources, response))
+ }
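+
+  // A minimal client-side usage sketch (hypothetical `source: SourceFile`; Response.get blocks
+  // until the work item has been processed, yielding Left(result) or Right(exception)):
+  //
+  //   val reloaded = new Response[Unit]
+  //   askReload(List(source), reloaded)
+  //   reloaded.get                      // Left(()) once the reload has been processed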
+
+ /** Removes source files and toplevel symbols, and issues a new typer run.
+ * Returns () to syncvar `response` on completion.
+ */
+ def askFilesDeleted(sources: List[SourceFile], response: Response[Unit]) = {
+ postWorkItem(new FilesDeletedItem(sources, response))
+ }
+
+ /** Sets sync var `response` to the smallest fully attributed tree that encloses position `pos`.
+ * Note: Unlike for most other ask... operations, the source file belonging to `pos` need not be loaded.
+ */
+ def askTypeAt(pos: Position, response: Response[Tree]) =
+ postWorkItem(new AskTypeAtItem(pos, response))
+
+ /** Sets sync var `response` to the fully attributed & typechecked tree contained in `source`.
+ * @pre `source` needs to be loaded.
+ * @note Deprecated because of race conditions in the typechecker when the background compiler
+ * is interrupted while typing the same `source`.
+ * @see SI-6578
+ */
+ @deprecated("Use `askLoadedTyped` instead to avoid race conditions in the typechecker", "2.10.1")
+ def askType(source: SourceFile, forceReload: Boolean, response: Response[Tree]) =
+ postWorkItem(new AskTypeItem(source, forceReload, response))
+
+ /** Sets sync var `response` to the position of the definition of the given link in
+ * the given sourcefile.
+ *
+ * @param sym The symbol referenced by the link (might come from a classfile)
+ * @param source The source file that's supposed to contain the definition
+ * @param response A response that will be set to the following:
+ * If `source` contains a definition that is referenced by the given link
+ * the position of that definition, otherwise NoPosition.
+ * Note: This operation does not automatically load `source`. If `source`
+ * is unloaded, it stays that way.
+ */
+ def askLinkPos(sym: Symbol, source: SourceFile, response: Response[Position]) =
+ postWorkItem(new AskLinkPosItem(sym, source, response))
+
+ /** Sets sync var `response` to doc comment information for a given symbol.
+ *
+ * @param sym The symbol whose doc comment should be retrieved (might come from a classfile)
+ * @param source The source file that's supposed to contain the definition
+ * @param site The symbol where 'sym' is observed
+ * @param fragments All symbols that can contribute to the generated documentation
+ * together with their source files.
+ * @param response A response that will be set to the following:
+ * If `source` contains a definition of a given symbol that has a doc comment,
+ * the (expanded, raw, position) triplet for a comment, otherwise ("", "", NoPosition).
+ * Note: This operation does not automatically load sources that are not yet loaded.
+ */
+ def askDocComment(sym: Symbol, source: SourceFile, site: Symbol, fragments: List[(Symbol,SourceFile)], response: Response[(String, String, Position)]): Unit =
+ postWorkItem(new AskDocCommentItem(sym, source, site, fragments, response))
+
+ @deprecated("Use method that accepts fragments", "2.10.2")
+ def askDocComment(sym: Symbol, site: Symbol, source: SourceFile, response: Response[(String, String, Position)]): Unit =
+ askDocComment(sym, source, site, (sym,source)::Nil, response)
+
+ /** Sets sync var `response` to list of members that are visible
+ * as members of the tree enclosing `pos`, possibly reachable by an implicit.
+ * @pre source is loaded
+ */
+ def askTypeCompletion(pos: Position, response: Response[List[Member]]) =
+ postWorkItem(new AskTypeCompletionItem(pos, response))
+
+ /** Sets sync var `response` to list of members that are visible
+ * as members of the scope enclosing `pos`.
+ * @pre source is loaded
+ */
+ def askScopeCompletion(pos: Position, response: Response[List[Member]]) =
+ postWorkItem(new AskScopeCompletionItem(pos, response))
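+
+  // Usage sketch for the completion queries, assuming a position `pos` inside a loaded source
+  // (member strings are rendered with Member.infoString):
+  //
+  //   val completed = new Response[List[Member]]
+  //   askScopeCompletion(pos, completed)        // or askTypeCompletion(pos, completed)
+  //   completed.get match {
+  //     case Left(members) => members foreach (m => println(m.infoString))
+  //     case Right(exc)    => exc.printStackTrace()
+  //   }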
+
+ /** Asks to typecheck the unit corresponding to the given source file first in the present and subsequent type checking passes.
+ * If the file is in the 'crashedFiles' ignore list, it is removed from that list and typechecked normally.
+ */
+ def askToDoFirst(source: SourceFile) =
+ postWorkItem(new AskToDoFirstItem(source))
+
+ /** If source is not yet loaded, loads it, and starts a new run, otherwise
+ * continues with current pass.
+ * Waits until source is fully type checked and returns body in response.
+ * @param source The source file that needs to be fully typed.
+ * @param response The response, which is set to the fully attributed tree of `source`.
+ * If the unit corresponding to `source` has been removed in the meantime
+ * then a NoSuchUnitError is raised in the response.
+ */
+ def askLoadedTyped(source: SourceFile, response: Response[Tree]) =
+ postWorkItem(new AskLoadedTypedItem(source, response))
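+
+  // Sketch: wait for a fully typechecked tree of a source that was previously passed to askReload
+  // (hypothetical `source: SourceFile`):
+  //
+  //   val typed = new Response[Tree]
+  //   askLoadedTyped(source, typed)
+  //   typed.get                         // Left(tree) once the background pass has typechecked `source`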
+
+ /** If source is not yet loaded, get an outline view with askParsedEntered.
+ * If source is loaded, wait for it to be typechecked.
+ * In both cases, set response to parsed (and possibly typechecked) tree.
+ * @param keepSrcLoaded If set to `true`, source file will be kept as a loaded unit afterwards.
+ */
+ def askStructure(keepSrcLoaded: Boolean)(source: SourceFile, response: Response[Tree]) = {
+ getUnit(source) match {
+ case Some(_) => askLoadedTyped(source, response)
+ case None => askParsedEntered(source, keepSrcLoaded, response)
+ }
+ }
+
+ /** Set sync var `response` to the parse tree of `source` with all top-level symbols entered.
+ * @param source The source file to be analyzed
+ * @param keepLoaded If set to `true`, source file will be kept as a loaded unit afterwards.
+ * If keepLoaded is `false` the operation is run at low priority, only after
+ * everything is brought up to date in a regular type checker run.
+ * @param response The response.
+ */
+ def askParsedEntered(source: SourceFile, keepLoaded: Boolean, response: Response[Tree]) =
+ postWorkItem(new AskParsedEnteredItem(source, keepLoaded, response))
+
+
+ /** Cancels the current compiler run and starts a fresh one where everything will be re-typechecked
+ * (but not re-loaded).
+ */
+ def askReset() = scheduler raise (new FreshRunReq)
+
+ /** Tells the compile server to shut down, and not to restart again */
+ def askShutdown() = scheduler raise ShutdownReq
+
+ /** Returns parse tree for source `source`. No symbols are entered. Syntax errors are reported.
+ *
+ * This method is thread-safe and as such can safely run outside of the presentation
+ * compiler thread.
+ */
+ def parseTree(source: SourceFile): Tree = {
+ newUnitParser(new CompilationUnit(source)).parse()
+ }
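+
+  // Because parseTree is thread-safe, a client may call it directly; a sketch with a hypothetical
+  // in-memory source (BatchSourceFile from scala.reflect.internal.util):
+  //
+  //   import scala.reflect.internal.util.BatchSourceFile
+  //   val tree = parseTree(new BatchSourceFile("<example>", "class C { def f = 1 }"))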
+
+ /** Asks for a computation to be done quickly on the presentation compiler thread */
+ def ask[A](op: () => A): A = if (self.onCompilerThread) op() else scheduler doQuickly op
+
+ /** Asks for a computation to be done on presentation compiler thread, returning
+ * a response with the result or an exception
+ */
+ def askForResponse[A](op: () => A): Response[A] = {
+ val r = new Response[A]
+ if (self.onCompilerThread) {
+ try { r set op() }
+ catch { case exc: Throwable => r raise exc }
+ r
+ } else {
+ val ir = scheduler askDoQuickly op
+ ir onComplete {
+ case Left(result) => r set result
+ case Right(exc) => r raise exc
+ }
+ r
+ }
+ }
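+
+  // Sketch: run a short computation on the presentation compiler thread and wait for its result
+  // (unitOfFile is the map of loaded units defined in interactive.Global):
+  //
+  //   val loadedCount = askForResponse(() => unitOfFile.size)
+  //   loadedCount.get match {
+  //     case Left(n)    => println(n + " loaded units")
+  //     case Right(exc) => exc.printStackTrace()
+  //   }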
+
+ def onCompilerThread = Thread.currentThread == compileRunner
+
+ /** Info given for every member found by completion
+ */
+ abstract class Member {
+ val sym: Symbol
+ val tpe: Type
+ val accessible: Boolean
+ def implicitlyAdded = false
+
+ private def accessible_s = if (accessible) "" else "[inaccessible] "
+ def forceInfoString = {
+ definitions.fullyInitializeSymbol(sym)
+ definitions.fullyInitializeType(tpe)
+ infoString
+ }
+ def infoString = s"$accessible_s${sym.defStringSeenAs(tpe)}"
+ }
+
+ case class TypeMember(
+ sym: Symbol,
+ tpe: Type,
+ accessible: Boolean,
+ inherited: Boolean,
+ viaView: Symbol) extends Member {
+ override def implicitlyAdded = viaView != NoSymbol
+ }
+
+ case class ScopeMember(
+ sym: Symbol,
+ tpe: Type,
+ accessible: Boolean,
+ viaImport: Tree) extends Member
+
+ // items that get sent to scheduler
+
+ abstract class WorkItem extends (() => Unit) {
+ val onCompilerThread = self.onCompilerThread
+
+ /** Raise a MissingResponse if the work item carries a response. */
+ def raiseMissing(): Unit
+ }
+
+ case class ReloadItem(sources: List[SourceFile], response: Response[Unit]) extends WorkItem {
+ def apply() = reload(sources, response)
+ override def toString = "reload "+sources
+
+ def raiseMissing() =
+ response raise new MissingResponse
+ }
+
+ case class FilesDeletedItem(sources: List[SourceFile], response: Response[Unit]) extends WorkItem {
+ def apply() = filesDeleted(sources, response)
+ override def toString = "files deleted "+sources
+
+ def raiseMissing() =
+ response raise new MissingResponse
+ }
+
+ case class AskTypeAtItem(pos: Position, response: Response[Tree]) extends WorkItem {
+ def apply() = self.getTypedTreeAt(pos, response)
+ override def toString = "typeat "+pos.source+" "+pos.show
+
+ def raiseMissing() =
+ response raise new MissingResponse
+ }
+
+ case class AskTypeItem(source: SourceFile, forceReload: Boolean, response: Response[Tree]) extends WorkItem {
+ def apply() = self.getTypedTree(source, forceReload, response)
+ override def toString = "typecheck"
+
+ def raiseMissing() =
+ response raise new MissingResponse
+ }
+
+ case class AskTypeCompletionItem(pos: Position, response: Response[List[Member]]) extends WorkItem {
+ def apply() = self.getTypeCompletion(pos, response)
+ override def toString = "type completion "+pos.source+" "+pos.show
+
+ def raiseMissing() =
+ response raise new MissingResponse
+ }
+
+ case class AskScopeCompletionItem(pos: Position, response: Response[List[Member]]) extends WorkItem {
+ def apply() = self.getScopeCompletion(pos, response)
+ override def toString = "scope completion "+pos.source+" "+pos.show
+
+ def raiseMissing() =
+ response raise new MissingResponse
+ }
+
+ class AskToDoFirstItem(val source: SourceFile) extends WorkItem {
+ def apply() = {
+ moveToFront(List(source))
+ enableIgnoredFile(source.file)
+ }
+ override def toString = "dofirst "+source
+
+ def raiseMissing() = ()
+ }
+
+ case class AskLinkPosItem(sym: Symbol, source: SourceFile, response: Response[Position]) extends WorkItem {
+ def apply() = self.getLinkPos(sym, source, response)
+ override def toString = "linkpos "+sym+" in "+source
+
+ def raiseMissing() =
+ response raise new MissingResponse
+ }
+
+ case class AskDocCommentItem(sym: Symbol, source: SourceFile, site: Symbol, fragments: List[(Symbol,SourceFile)], response: Response[(String, String, Position)]) extends WorkItem {
+ def apply() = self.getDocComment(sym, source, site, fragments, response)
+ override def toString = "doc comment "+sym+" in "+source+" with fragments:"+fragments.mkString("(", ",", ")")
+
+ def raiseMissing() =
+ response raise new MissingResponse
+ }
+
+ case class AskLoadedTypedItem(source: SourceFile, response: Response[Tree]) extends WorkItem {
+ def apply() = self.waitLoadedTyped(source, response, this.onCompilerThread)
+ override def toString = "wait loaded & typed "+source
+
+ def raiseMissing() =
+ response raise new MissingResponse
+ }
+
+ case class AskParsedEnteredItem(source: SourceFile, keepLoaded: Boolean, response: Response[Tree]) extends WorkItem {
+ def apply() = self.getParsedEntered(source, keepLoaded, response, this.onCompilerThread)
+ override def toString = "getParsedEntered "+source+", keepLoaded = "+keepLoaded
+
+ def raiseMissing() =
+ response raise new MissingResponse
+ }
+
+ /** A do-nothing work scheduler that responds immediately with MissingResponse.
+ *
+ * Used during compiler shutdown.
+ */
+ class NoWorkScheduler extends WorkScheduler {
+
+ override def postWorkItem(action: Action) = synchronized {
+ action match {
+ case w: WorkItem => w.raiseMissing()
+ case e: EmptyAction => // do nothing
+ case _ => println("don't know what to do with this " + action.getClass)
+ }
+ }
+
+ override def doQuickly[A](op: () => A): A = {
+ throw new FailedInterrupt(new Exception("Posted a work item to a compiler that's shutting down"))
+ }
+
+ override def askDoQuickly[A](op: () => A): InterruptReq { type R = A } = {
+ val ir = new InterruptReq {
+ type R = A
+ val todo = () => throw new MissingResponse
+ }
+ ir.execute()
+ ir
+ }
+
+ }
+
+}
+
+ // ---------------- Interpreted exceptions -------------------
+
+/** Signals a request for a fresh background compiler run.
+ * Note: The object has to stay top-level so that the PresentationCompilerThread may access it.
+ */
+class FreshRunReq extends ControlThrowable
+
+/** Signals a request for a shutdown of the presentation compiler.
+ * Note: The object has to stay top-level so that the PresentationCompilerThread may access it.
+ */
+object ShutdownReq extends ControlThrowable
+
+class NoSuchUnitError(file: AbstractFile) extends Exception("no unit found for file "+file)
+
+class MissingResponse extends Exception("response missing")
diff --git a/src/interactive/scala/tools/nsc/interactive/ContextTrees.scala b/src/interactive/scala/tools/nsc/interactive/ContextTrees.scala
new file mode 100644
index 0000000000..4f67a22b8f
--- /dev/null
+++ b/src/interactive/scala/tools/nsc/interactive/ContextTrees.scala
@@ -0,0 +1,165 @@
+/* NSC -- new Scala compiler
+ * Copyright 2009-2013 Typesafe/Scala Solutions and LAMP/EPFL
+ * @author Martin Odersky
+ */
+package scala.tools.nsc
+package interactive
+
+import scala.collection.mutable.ArrayBuffer
+import scala.annotation.tailrec
+
+trait ContextTrees { self: Global =>
+
+ type Context = analyzer.Context
+ lazy val NoContext = analyzer.NoContext
+ type Contexts = ArrayBuffer[ContextTree]
+
+ /** A context tree contains contexts that are indexed by positions.
+ * It satisfies the following properties:
+ * 1. All contexts come from compiling the same unit.
+ * 2. Child contexts have parent contexts in their outer chain.
+ * 3. The `pos` field of a context is the same as `context.tree.pos`, unless that
+ * position is transparent. In that case, `pos` equals the position of
+ * one of the solid descendants of `context.tree`.
+ * 4. Children of a context have non-overlapping increasing positions.
+ * 5. No context in the tree has a transparent position.
+ */
+ class ContextTree(val pos: Position, val context: Context, val children: ArrayBuffer[ContextTree]) {
+ def this(pos: Position, context: Context) = this(pos, context, new ArrayBuffer[ContextTree])
+ override def toString = "ContextTree("+pos+", "+children+")"
+ }
+
+ /** Returns the most precise context possible for the given `pos`.
+ *
+ * It looks for the finest ContextTree containing `pos`, and then looks inside
+ * this ContextTree for a child ContextTree located immediately before `pos`.
+ * If such a child exists, returns its context, otherwise returns the context of
+ * the parent ContextTree.
+ *
+ * This is required to always return a context which contains all the imports
+ * declared up to `pos` (see SI-7280 for a test case).
+ *
+ * Can return None if `pos` is before any valid Scala code.
+ */
+ def locateContext(contexts: Contexts, pos: Position): Option[Context] = synchronized {
+ @tailrec
+ def locateFinestContextTree(context: ContextTree): ContextTree = {
+ if (context.pos includes pos) {
+ locateContextTree(context.children, pos) match {
+ case Some(x) =>
+ locateFinestContextTree(x)
+ case None =>
+ context
+ }
+ } else {
+ context
+ }
+ }
+ locateContextTree(contexts, pos) map locateFinestContextTree map (_.context)
+ }
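+
+  // Typical driver (mirrors CompilerControl.locateContext): unit.contexts is populated by
+  // Global.registerContext during typechecking and then queried with a position:
+  //
+  //   for (unit <- getUnit(pos.source); cx <- locateContext(unit.contexts, pos)) yield cx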
+
+ /** Returns the ContextTree containing `pos`, or the ContextTree positioned just before `pos`,
+ * or None if `pos` is located before all ContextTrees.
+ */
+ def locateContextTree(contexts: Contexts, pos: Position): Option[ContextTree] = {
+ if (contexts.isEmpty) None
+ else {
+ @tailrec
+ def loop(lo: Int, hi: Int, previousSibling: Option[ContextTree]): Option[ContextTree] = {
+ if (pos properlyPrecedes contexts(lo).pos)
+ previousSibling
+ else if (contexts(hi).pos properlyPrecedes pos)
+ Some(contexts(hi))
+ else {
+ val mid = (lo + hi) / 2
+ val midpos = contexts(mid).pos
+ if (midpos includes pos)
+ Some(contexts(mid))
+ else if (midpos properlyPrecedes pos)
+ loop(mid + 1, hi, Some(contexts(mid)))
+ else
+ loop(lo, mid, previousSibling)
+ }
+ }
+ loop(0, contexts.length - 1, None)
+ }
+ }
+
+ /** Insert a context at correct position into a buffer of context trees.
+ * If the `context` has a transparent position, add it multiple times
+ * at the positions of all its solid descendant trees.
+ */
+ def addContext(contexts: Contexts, context: Context): Unit = {
+ val cpos = context.tree.pos
+ if (cpos.isTransparent)
+ for (t <- context.tree.children flatMap solidDescendants)
+ addContext(contexts, context, t.pos)
+ else
+ addContext(contexts, context, cpos)
+ }
+
+ /** Insert a context with non-transparent position `cpos`
+ * at correct position into a buffer of context trees.
+ */
+ def addContext(contexts: Contexts, context: Context, cpos: Position): Unit = synchronized {
+ try {
+ if (!cpos.isRange) {}
+ else if (contexts.isEmpty) contexts += new ContextTree(cpos, context)
+ else {
+ val hi = contexts.length - 1
+ if (contexts(hi).pos precedes cpos)
+ contexts += new ContextTree(cpos, context)
+ else if (contexts(hi).pos properlyIncludes cpos) // fast path w/o search
+ addContext(contexts(hi).children, context, cpos)
+ else if (cpos precedes contexts(0).pos)
+ new ContextTree(cpos, context) +=: contexts
+ else {
+ def insertAt(idx: Int): Boolean = {
+ val oldpos = contexts(idx).pos
+ if (oldpos sameRange cpos) {
+ contexts(idx) = new ContextTree(cpos, context, contexts(idx).children)
+ true
+ } else if (oldpos includes cpos) {
+ addContext(contexts(idx).children, context, cpos)
+ true
+ } else if (cpos includes oldpos) {
+ val start = contexts.indexWhere(cpos includes _.pos)
+ val last = contexts.lastIndexWhere(cpos includes _.pos)
+ contexts(start) = new ContextTree(cpos, context, contexts.slice(start, last + 1))
+ contexts.remove(start + 1, last - start)
+ true
+ } else false
+ }
+ def loop(lo: Int, hi: Int) {
+ if (hi - lo > 1) {
+ val mid = (lo + hi) / 2
+ val midpos = contexts(mid).pos
+ if (cpos precedes midpos)
+ loop(lo, mid)
+ else if (midpos precedes cpos)
+ loop(mid, hi)
+ else
+ addContext(contexts(mid).children, context, cpos)
+ } else if (!insertAt(lo) && !insertAt(hi)) {
+ val lopos = contexts(lo).pos
+ val hipos = contexts(hi).pos
+ if ((lopos precedes cpos) && (cpos precedes hipos))
+ contexts.insert(hi, new ContextTree(cpos, context))
+ else
+ inform("internal error? skewed positions: "+lopos+" !< "+cpos+" !< "+hipos)
+ }
+ }
+ loop(0, hi)
+ }
+ }
+ } catch {
+ case ex: Throwable =>
+ println(ex)
+ ex.printStackTrace()
+ println("failure inserting "+cpos+" into "+contexts+"/"+contexts(contexts.length - 1).pos+"/"+
+ (contexts(contexts.length - 1).pos includes cpos))
+ throw ex
+ }
+ }
+}
+
diff --git a/src/interactive/scala/tools/nsc/interactive/Global.scala b/src/interactive/scala/tools/nsc/interactive/Global.scala
new file mode 100644
index 0000000000..6b46a3b6dc
--- /dev/null
+++ b/src/interactive/scala/tools/nsc/interactive/Global.scala
@@ -0,0 +1,1241 @@
+/* NSC -- new Scala compiler
+ * Copyright 2009-2013 Typesafe/Scala Solutions and LAMP/EPFL
+ * @author Martin Odersky
+ */
+package scala.tools.nsc
+package interactive
+
+import java.io.{ PrintWriter, StringWriter, FileReader, FileWriter }
+import scala.collection.mutable
+import mutable.{LinkedHashMap, SynchronizedMap, HashSet, SynchronizedSet}
+import scala.util.control.ControlThrowable
+import scala.tools.nsc.io.AbstractFile
+import scala.reflect.internal.util.{ SourceFile, BatchSourceFile, Position, NoPosition }
+import scala.tools.nsc.reporters._
+import scala.tools.nsc.symtab._
+import scala.tools.nsc.typechecker.Analyzer
+import symtab.Flags.{ACCESSOR, PARAMACCESSOR}
+import scala.annotation.{ elidable, tailrec }
+import scala.language.implicitConversions
+import scala.tools.nsc.typechecker.Typers
+import scala.util.control.Breaks._
+
+/**
+ * This trait allows the IDE to have an instance of the PC that
+ * does not clear the comments table at every new typer run (those
+ * being many and close between in this context).
+ */
+
+trait CommentPreservingTypers extends Typers {
+ self: Analyzer =>
+
+ override def resetDocComments() = {}
+}
+
+trait InteractiveAnalyzer extends Analyzer {
+ val global : Global
+ import global._
+
+ override def newTyper(context: Context): InteractiveTyper = new Typer(context) with InteractiveTyper
+ override def newNamer(context: Context): InteractiveNamer = new Namer(context) with InteractiveNamer
+
+ trait InteractiveTyper extends Typer {
+ override def canAdaptConstantTypeToLiteral = false
+ override def canTranslateEmptyListToNil = false
+ override def missingSelectErrorTree(tree: Tree, qual: Tree, name: Name): Tree = tree match {
+ case Select(_, _) => treeCopy.Select(tree, qual, name)
+ case SelectFromTypeTree(_, _) => treeCopy.SelectFromTypeTree(tree, qual, name)
+ }
+ }
+
+ trait InteractiveNamer extends Namer {
+ override def saveDefaultGetter(meth: Symbol, default: Symbol) {
+ // save the default getters as attachments in the method symbol. if compiling the
+ // same local block several times (which can happen in interactive mode) we might
+ // otherwise not find the default symbol, because the second time the method
+ // symbol will be re-entered in the scope but the default parameter will not.
+ meth.attachments.get[DefaultsOfLocalMethodAttachment] match {
+ case Some(att) => att.defaultGetters += default
+ case None => meth.updateAttachment(new DefaultsOfLocalMethodAttachment(default))
+ }
+ }
+ // this logic is needed in case typer was interrupted half
+ // way through and then comes back to do the tree again. In
+ // that case the definitions that were already attributed as
+ // well as any default parameters of such methods need to be
+ // re-entered in the current scope.
+ override def enterExistingSym(sym: Symbol): Context = {
+ if (sym != null && sym.owner.isTerm) {
+ enterIfNotThere(sym)
+ if (sym.isLazy)
+ sym.lazyAccessor andAlso enterIfNotThere
+
+ for (defAtt <- sym.attachments.get[DefaultsOfLocalMethodAttachment])
+ defAtt.defaultGetters foreach enterIfNotThere
+ }
+ super.enterExistingSym(sym)
+ }
+ override def enterIfNotThere(sym: Symbol) {
+ val scope = context.scope
+ @tailrec def search(e: ScopeEntry) {
+ if ((e eq null) || (e.owner ne scope))
+ scope enter sym
+ else if (e.sym ne sym) // otherwise, aborts since we found sym
+ search(e.tail)
+ }
+ search(scope lookupEntry sym.name)
+ }
+ }
+}
+
+
+/** The main class of the presentation compiler in an interactive environment such as an IDE
+ */
+class Global(settings: Settings, _reporter: Reporter, projectName: String = "") extends {
+ /* Is the compiler initializing? Early def, so that the field is true during the
+ * execution of the super constructor.
+ */
+ private var initializing = true
+ override val useOffsetPositions = false
+} with scala.tools.nsc.Global(settings, _reporter)
+ with CompilerControl
+ with ContextTrees
+ with RichCompilationUnits
+ with Picklers {
+
+ import definitions._
+
+ val debugIDE: Boolean = settings.YpresentationDebug.value
+ val verboseIDE: Boolean = settings.YpresentationVerbose.value
+
+ private def replayName = settings.YpresentationReplay.value
+ private def logName = settings.YpresentationLog.value
+ private def afterTypeDelay = settings.YpresentationDelay.value
+ private final val SleepTime = 10
+
+ val log =
+ if (replayName != "") new Replayer(new FileReader(replayName))
+ else if (logName != "") new Logger(new FileWriter(logName))
+ else NullLogger
+
+ import log.logreplay
+ debugLog("logger: " + log.getClass + " writing to " + (new java.io.File(logName)).getAbsolutePath)
+ debugLog("classpath: "+classPath)
+
+ private var curTime = System.nanoTime
+ private def timeStep = {
+ val last = curTime
+ curTime = System.nanoTime
+ ", delay = " + (curTime - last) / 1000000 + "ms"
+ }
+
+ /** Print msg only when debugIDE is true. */
+ @inline final def debugLog(msg: => String) =
+ if (debugIDE) println("[%s] %s".format(projectName, msg))
+
+ /** Inform with msg only when verboseIDE is true. */
+ @inline final def informIDE(msg: => String) =
+ if (verboseIDE) println("[%s][%s]".format(projectName, msg))
+
+ // don't keep the original owner in presentation compiler runs
+ // (the map will grow indefinitely, and the only use case is the backend)
+ override protected def saveOriginalOwner(sym: Symbol) { }
+ override protected def originalEnclosingMethod(sym: Symbol) =
+ abort("originalOwner is not kept in presentation compiler runs.")
+
+ override def forInteractive = true
+ override protected def synchronizeNames = true
+
+ override def newAsSeenFromMap(pre: Type, clazz: Symbol): AsSeenFromMap =
+ new InteractiveAsSeenFromMap(pre, clazz)
+
+ class InteractiveAsSeenFromMap(pre: Type, clazz: Symbol) extends AsSeenFromMap(pre, clazz) {
+ /** The method formerly known as 'instParamsRelaxed' goes here if it's still necessary,
+ * which it is currently assumed not to be.
+ *
+ * If it is, change AsSeenFromMap method correspondingTypeArgument to call an overridable
+ * method rather than aborting in the failure case.
+ */
+ }
+
+ /** A map of all loaded files to the rich compilation units that correspond to them.
+ */
+ val unitOfFile = new LinkedHashMap[AbstractFile, RichCompilationUnit] with
+ SynchronizedMap[AbstractFile, RichCompilationUnit] {
+ override def put(key: AbstractFile, value: RichCompilationUnit) = {
+ val r = super.put(key, value)
+ if (r.isEmpty) debugLog("added unit for "+key)
+ r
+ }
+ override def remove(key: AbstractFile) = {
+ val r = super.remove(key)
+ if (r.nonEmpty) debugLog("removed unit for "+key)
+ r
+ }
+ }
+
+ /** A set containing all those files that need to be removed.
+ * Units are removed by getUnit, typically once a unit has finished compiling.
+ */
+ protected val toBeRemoved: mutable.Set[AbstractFile] =
+ new HashSet[AbstractFile] with SynchronizedSet[AbstractFile]
+
+ /** A set containing all those files that need to be removed after a full background compiler run
+ */
+ protected val toBeRemovedAfterRun: mutable.Set[AbstractFile] =
+ new HashSet[AbstractFile] with SynchronizedSet[AbstractFile]
+
+ class ResponseMap extends mutable.HashMap[SourceFile, Set[Response[Tree]]] {
+ override def default(key: SourceFile): Set[Response[Tree]] = Set()
+ override def += (binding: (SourceFile, Set[Response[Tree]])) = {
+ assert(interruptsEnabled, "delayed operation within an ask")
+ super.+=(binding)
+ }
+ }
+
+ /** A map that associates with each abstract file the set of responses that are waiting
+ * (via waitLoadedTyped) for the unit associated with the abstract file to be loaded and completely typechecked.
+ */
+ protected val waitLoadedTypeResponses = new ResponseMap
+
+ /** A map that associates with each abstract file the set of responses that are waiting
+ * (via build) for the unit associated with the abstract file to be parsed and entered
+ */
+ protected var getParsedEnteredResponses = new ResponseMap
+
+ private def cleanResponses(rmap: ResponseMap): Unit = {
+ for ((source, rs) <- rmap.toList) {
+ for (r <- rs) {
+ if (getUnit(source).isEmpty)
+ r raise new NoSuchUnitError(source.file)
+ if (r.isComplete)
+ rmap(source) -= r
+ }
+ if (rmap(source).isEmpty)
+ rmap -= source
+ }
+ }
+
+ override lazy val analyzer = new {
+ val global: Global.this.type = Global.this
+ } with InteractiveAnalyzer
+
+ private def cleanAllResponses() {
+ cleanResponses(waitLoadedTypeResponses)
+ cleanResponses(getParsedEnteredResponses)
+ }
+
+ private def checkNoOutstanding(rmap: ResponseMap): Unit =
+ for ((_, rs) <- rmap.toList; r <- rs) {
+ debugLog("ERROR: missing response, request will be discarded")
+ r raise new MissingResponse
+ }
+
+ def checkNoResponsesOutstanding() {
+ checkNoOutstanding(waitLoadedTypeResponses)
+ checkNoOutstanding(getParsedEnteredResponses)
+ }
+
+ /** The compilation unit corresponding to a source file.
+ * If it does not yet exist, create a new one atomically.
+ * Note: We want to remove this.
+ */
+ protected[interactive] def getOrCreateUnitOf(source: SourceFile): RichCompilationUnit =
+ unitOfFile.getOrElse(source.file, { println("precondition violated: "+source+" is not loaded"); new Exception().printStackTrace(); new RichCompilationUnit(source) })
+
+ /** Work through toBeRemoved list to remove any units.
+ * Then optionally return the unit associated with the given source.
+ */
+ protected[interactive] def getUnit(s: SourceFile): Option[RichCompilationUnit] = {
+ toBeRemoved.synchronized {
+ for (f <- toBeRemoved) {
+ informIDE("removed: "+s)
+ unitOfFile -= f
+ allSources = allSources filter (_.file != f)
+ }
+ toBeRemoved.clear()
+ }
+ unitOfFile get s.file
+ }
+
+ /** A list giving all files to be typechecked in the order they should be checked.
+ */
+ protected var allSources: List[SourceFile] = List()
+
+ private var lastException: Option[Throwable] = None
+
+ /** A list of files that crashed the compiler. They will be ignored during background
+ * compilation until they are removed from this list.
+ */
+ private var ignoredFiles: Set[AbstractFile] = Set()
+
+ /** Flush the buffer of sources that are ignored during background compilation. */
+ def clearIgnoredFiles() {
+ ignoredFiles = Set()
+ }
+
+ /** Remove a crashed file from the ignore buffer. Background compilation will take it into account
+ * and errors will be reported against it. */
+ def enableIgnoredFile(file: AbstractFile) {
+ ignoredFiles -= file
+ debugLog("Removed crashed file %s. Still in the ignored buffer: %s".format(file, ignoredFiles))
+ }
+
+ /** The currently active typer run */
+ private var currentTyperRun: TyperRun = _
+ newTyperRun()
+
+ /** Is a background compiler run needed?
+ * Note: outOfDate is true as long as there is a background compile scheduled or going on.
+ */
+ private var outOfDate = false
+
+ def isOutOfDate: Boolean = outOfDate
+
+ def demandNewCompilerRun() = {
+ if (outOfDate) throw new FreshRunReq // cancel background compile
+ else outOfDate = true // proceed normally and enable new background compile
+ }
+
+ protected[interactive] var minRunId = 1
+
+ private[interactive] var interruptsEnabled = true
+
+ private val NoResponse: Response[_] = new Response[Any]
+
+ /** The response that is currently pending, i.e. the compiler
+ * is working on providing an answer for it.
+ */
+ private var pendingResponse: Response[_] = NoResponse
+
+ // ----------- Overriding hooks in nsc.Global -----------------------
+
+ /** Called from parser, which signals hereby that a method definition has been parsed.
+ */
+ override def signalParseProgress(pos: Position) {
+ // We only want to be interruptible when running on the PC thread.
+ if(onCompilerThread) {
+ checkForMoreWork(pos)
+ }
+ }
+
+ /** Called from typechecker, which signals hereby that a node has been completely typechecked.
+ * If the node includes unit.targetPos, abandons run and returns newly attributed tree.
+ * Otherwise, if there's some higher priority work to be done, also abandons run with a FreshRunReq.
+ * @param context The context that typechecked the node
+ * @param old The original node
+ * @param result The transformed node
+ */
+ override def signalDone(context: Context, old: Tree, result: Tree) {
+ val canObserveTree = (
+ interruptsEnabled
+ && analyzer.lockedCount == 0
+ && !context.bufferErrors // SI-7558 look away during exploratory typing in "silent mode"
+ )
+ if (canObserveTree) {
+ if (context.unit.exists &&
+ result.pos.isOpaqueRange &&
+ (result.pos includes context.unit.targetPos)) {
+ var located = new TypedLocator(context.unit.targetPos) locateIn result
+ if (located == EmptyTree) {
+ println("something's wrong: no "+context.unit+" in "+result+result.pos)
+ located = result
+ }
+ throw new TyperResult(located)
+ }
+ else {
+ try {
+ checkForMoreWork(old.pos)
+ } catch {
+ case ex: ValidateException => // Ignore, this will have been reported elsewhere
+ debugLog("validate exception caught: "+ex)
+ case ex: Throwable =>
+ log.flush()
+ throw ex
+ }
+ }
+ }
+ }
+
+ /** Called from typechecker every time a context is created.
+ * Registers the context in a context tree
+ */
+ override def registerContext(c: Context) = c.unit match {
+ case u: RichCompilationUnit => addContext(u.contexts, c)
+ case _ =>
+ }
+
+ /** The top level classes and objects currently seen in the presentation compiler
+ */
+ private val currentTopLevelSyms = new mutable.LinkedHashSet[Symbol]
+
+ /** The top level classes and objects no longer seen in the presentation compiler
+ */
+ val deletedTopLevelSyms = new mutable.LinkedHashSet[Symbol] with mutable.SynchronizedSet[Symbol]
+
+ /** Called from typechecker every time a top-level class or object is entered.
+ */
+ override def registerTopLevelSym(sym: Symbol) { currentTopLevelSyms += sym }
+
+ protected type SymbolLoadersInInteractive = GlobalSymbolLoaders {
+ val global: Global.this.type
+ val platform: Global.this.platform.type
+ }
+ /** Symbol loaders in the IDE parse all source files loaded from a package for
+ * top-level idents. Therefore, we can detect top-level symbols that have a name
+ * different from their source file
+ */
+ override lazy val loaders: SymbolLoadersInInteractive = new {
+ val global: Global.this.type = Global.this
+ val platform: Global.this.platform.type = Global.this.platform
+ } with BrowsingLoaders
+
+ // ----------------- Polling ---------------------------------------
+
+ case class WorkEvent(atNode: Int, atMillis: Long)
+
+ private var moreWorkAtNode: Int = -1
+ private var nodesSeen = 0
+ private var lastWasReload = false
+
+ /** The number of pollForWorks after which the presentation compiler yields.
+ * Yielding improves responsiveness on systems with few cores because it
+ * gives the UI thread a chance to get new tasks and interrupt the presentation
+ * compiler with them.
+ */
+ private final val yieldPeriod = 10
+
+ /** Called from runner thread and signalDone:
+ * Poll for interrupts and execute them immediately.
+ * Then, poll for exceptions and execute them.
+ * Then, poll for work reload/typedTreeAt/doFirst commands during background checking.
+ * @param pos The position of the tree if polling while typechecking, NoPosition otherwise
+ *
+ */
+ private[interactive] def pollForWork(pos: Position) {
+ var loop: Boolean = true
+ while (loop) {
+ breakable{
+ loop = false
+ if (!interruptsEnabled) return
+ if (pos == NoPosition || nodesSeen % yieldPeriod == 0)
+ Thread.`yield`()
+
+ def nodeWithWork(): Option[WorkEvent] =
+ if (scheduler.moreWork || pendingResponse.isCancelled) Some(new WorkEvent(nodesSeen, System.currentTimeMillis))
+ else None
+
+ nodesSeen += 1
+ logreplay("atnode", nodeWithWork()) match {
+ case Some(WorkEvent(id, _)) =>
+ debugLog("some work at node "+id+" current = "+nodesSeen)
+ // assert(id >= nodesSeen)
+ moreWorkAtNode = id
+ case None =>
+ }
+
+ if (nodesSeen >= moreWorkAtNode) {
+
+ logreplay("asked", scheduler.pollInterrupt()) match {
+ case Some(ir) =>
+ try {
+ interruptsEnabled = false
+ debugLog("ask started"+timeStep)
+ ir.execute()
+ } finally {
+ debugLog("ask finished"+timeStep)
+ interruptsEnabled = true
+ }
+ loop = true; break
+ case _ =>
+ }
+
+ if (logreplay("cancelled", pendingResponse.isCancelled)) {
+ throw CancelException
+ }
+
+ logreplay("exception thrown", scheduler.pollThrowable()) match {
+ case Some(ex: FreshRunReq) =>
+ newTyperRun()
+ minRunId = currentRunId
+ demandNewCompilerRun()
+
+ case Some(ShutdownReq) =>
+ scheduler.synchronized { // lock the work queue so no more items are posted while we clean it up
+ val units = scheduler.dequeueAll {
+ case item: WorkItem => Some(item.raiseMissing())
+ case _ => Some(())
+ }
+
+ // don't forget to service interrupt requests
+ scheduler.dequeueAllInterrupts(_.execute())
+
+ debugLog("ShutdownReq: cleaning work queue (%d items)".format(units.size))
+ debugLog("Cleanup up responses (%d loadedType pending, %d parsedEntered pending)"
+ .format(waitLoadedTypeResponses.size, getParsedEnteredResponses.size))
+ checkNoResponsesOutstanding()
+
+ log.flush()
+ scheduler = new NoWorkScheduler
+ throw ShutdownReq
+ }
+
+ case Some(ex: Throwable) => log.flush(); throw ex
+ case _ =>
+ }
+
+ lastWasReload = false
+
+ logreplay("workitem", scheduler.nextWorkItem()) match {
+ case Some(action) =>
+ try {
+ debugLog("picked up work item at "+pos+": "+action+timeStep)
+ action()
+ debugLog("done with work item: "+action)
+ } finally {
+ debugLog("quitting work item: "+action+timeStep)
+ }
+ case None =>
+ }
+ }
+ }
+ }
+ }
+
+ protected def checkForMoreWork(pos: Position) {
+ val typerRun = currentTyperRun
+ pollForWork(pos)
+ if (typerRun != currentTyperRun) demandNewCompilerRun()
+ }
+
+ // ----------------- The Background Runner Thread -----------------------
+
+ private var threadId = 0
+
+ /** The current presentation compiler runner */
+ @volatile private[interactive] var compileRunner: Thread = newRunnerThread()
+
+ /** Check that the currently executing thread is the presentation compiler thread.
+ *
+ * Compiler initialization may happen on a different thread (signalled by globalPhase being NoPhase)
+ */
+ @elidable(elidable.WARNING)
+ override def assertCorrectThread() {
+ assert(initializing || onCompilerThread,
+ "Race condition detected: You are running a presentation compiler method outside the PC thread.[phase: %s]".format(globalPhase) +
+ " Please file a ticket with the current stack trace at https://www.assembla.com/spaces/scala-ide/support/tickets")
+ }
+
+ /** Create a new presentation compiler runner.
+ */
+ private def newRunnerThread(): Thread = {
+ threadId += 1
+ compileRunner = new PresentationCompilerThread(this, projectName)
+ compileRunner.setDaemon(true)
+ compileRunner.start()
+ compileRunner
+ }
+
+ private def ensureUpToDate(unit: RichCompilationUnit) =
+ if (!unit.isUpToDate && unit.status != JustParsed) reset(unit) // reparse previously typechecked units.
+
+ /** Compile all loaded source files in the order given by `allSources`.
+ */
+ private[interactive] final def backgroundCompile() {
+ informIDE("Starting new presentation compiler type checking pass")
+ reporter.reset()
+
+ // remove any files in first that are no longer maintained by presentation compiler (i.e. closed)
+ allSources = allSources filter (s => unitOfFile contains (s.file))
+
+ // ensure all loaded units are parsed
+ for (s <- allSources; unit <- getUnit(s)) {
+ // checkForMoreWork(NoPosition) // disabled, as any work done here would be in an inconsistent state
+ ensureUpToDate(unit)
+ parseAndEnter(unit)
+ serviceParsedEntered()
+ }
+
+ // sleep window
+ if (afterTypeDelay > 0 && lastWasReload) {
+ val limit = System.currentTimeMillis() + afterTypeDelay
+ while (System.currentTimeMillis() < limit) {
+ Thread.sleep(SleepTime)
+ checkForMoreWork(NoPosition)
+ }
+ }
+
+ // ensure all loaded units are typechecked
+ for (s <- allSources; if !ignoredFiles(s.file); unit <- getUnit(s)) {
+ try {
+ if (!unit.isUpToDate)
+ if (unit.problems.isEmpty || !settings.YpresentationStrict)
+ typeCheck(unit)
+ else debugLog("%s has syntax errors. Skipped typechecking".format(unit))
+ else debugLog("already up to date: "+unit)
+ for (r <- waitLoadedTypeResponses(unit.source))
+ r set unit.body
+ serviceParsedEntered()
+ } catch {
+ case ex: FreshRunReq => throw ex // propagate a new run request
+ case ShutdownReq => throw ShutdownReq // propagate a shutdown request
+ case ex: ControlThrowable => throw ex
+ case ex: Throwable =>
+ println("[%s]: exception during background compile: ".format(unit.source) + ex)
+ ex.printStackTrace()
+ for (r <- waitLoadedTypeResponses(unit.source)) {
+ r.raise(ex)
+ }
+ serviceParsedEntered()
+
+ lastException = Some(ex)
+ ignoredFiles += unit.source.file
+ println("[%s] marking unit as crashed (crashedFiles: %s)".format(unit, ignoredFiles))
+
+ reporter.error(unit.body.pos, "Presentation compiler crashed while type checking this file: %s".format(ex.toString()))
+ }
+ }
+
+ // move units removable after this run to the "to-be-removed" buffer
+ toBeRemoved ++= toBeRemovedAfterRun
+
+ // clean out stale waiting responses
+ cleanAllResponses()
+
+ // wind down
+ if (waitLoadedTypeResponses.nonEmpty || getParsedEnteredResponses.nonEmpty) {
+ // need another cycle to treat those
+ newTyperRun()
+ backgroundCompile()
+ } else {
+ outOfDate = false
+ informIDE("Everything is now up to date")
+ }
+ }
+
+ /** Service all pending getParsedEntered requests
+ */
+ private def serviceParsedEntered() {
+ var atOldRun = true
+ for ((source, rs) <- getParsedEnteredResponses; r <- rs) {
+ if (atOldRun) { newTyperRun(); atOldRun = false }
+ getParsedEnteredNow(source, r)
+ }
+ getParsedEnteredResponses.clear()
+ }
+
+ /** Reset unit to unloaded state */
+ private def reset(unit: RichCompilationUnit): Unit = {
+ unit.depends.clear()
+ unit.defined.clear()
+ unit.synthetics.clear()
+ unit.toCheck.clear()
+ unit.checkedFeatures = Set()
+ unit.targetPos = NoPosition
+ unit.contexts.clear()
+ unit.problems.clear()
+ unit.body = EmptyTree
+ unit.status = NotLoaded
+ }
+
+ /** Parse unit and create a name index, unless this has already been done before */
+ private def parseAndEnter(unit: RichCompilationUnit): Unit =
+ if (unit.status == NotLoaded) {
+ debugLog("parsing: "+unit)
+ currentTyperRun.compileLate(unit)
+ if (debugIDE && !reporter.hasErrors) validatePositions(unit.body)
+ if (!unit.isJava) syncTopLevelSyms(unit)
+ unit.status = JustParsed
+ }
+
+ /** Make sure unit is typechecked
+ */
+ private def typeCheck(unit: RichCompilationUnit) {
+ debugLog("type checking: "+unit)
+ parseAndEnter(unit)
+ unit.status = PartiallyChecked
+ currentTyperRun.typeCheck(unit)
+ unit.lastBody = unit.body
+ unit.status = currentRunId
+ }
+
+ /** Update deleted and current top-level symbols sets */
+ def syncTopLevelSyms(unit: RichCompilationUnit) {
+ val deleted = currentTopLevelSyms filter { sym =>
+ /* We sync after the namer phase, which resets to NoPeriod all the
+ * top-level symbols that survive the new parsing round.
+ */
+ sym.sourceFile == unit.source.file &&
+ sym.validTo != NoPeriod &&
+ runId(sym.validTo) < currentRunId
+ }
+ for (d <- deleted) {
+ d.owner.info.decls unlink d
+ deletedTopLevelSyms += d
+ currentTopLevelSyms -= d
+ }
+ }
+
+ /** Move list of files to front of allSources */
+ def moveToFront(fs: List[SourceFile]) {
+ allSources = fs ::: (allSources diff fs)
+ }
+
+ // ----------------- Implementations of client commands -----------------------
+
+ def respond[T](result: Response[T])(op: => T): Unit =
+ respondGradually(result)(Stream(op))
+
+ def respondGradually[T](response: Response[T])(op: => Stream[T]): Unit = {
+ val prevResponse = pendingResponse
+ try {
+ pendingResponse = response
+ if (!response.isCancelled) {
+ var results = op
+ while (!response.isCancelled && results.nonEmpty) {
+ val result = results.head
+ results = results.tail
+ if (results.isEmpty) {
+ response set result
+ debugLog("responded"+timeStep)
+ } else response setProvisionally result
+ }
+ }
+ } catch {
+ case CancelException =>
+ debugLog("cancelled")
+ case ex: FreshRunReq =>
+ if (debugIDE) {
+ println("FreshRunReq thrown during response")
+ ex.printStackTrace()
+ }
+ response raise ex
+ throw ex
+
+ case ex @ ShutdownReq =>
+ if (debugIDE) {
+ println("ShutdownReq thrown during response")
+ ex.printStackTrace()
+ }
+ response raise ex
+ throw ex
+
+ case ex: Throwable =>
+ if (debugIDE) {
+ println("exception thrown during response: "+ex)
+ ex.printStackTrace()
+ }
+ response raise ex
+ } finally {
+ pendingResponse = prevResponse
+ }
+ }
+
+ private def reloadSource(source: SourceFile) {
+ val unit = new RichCompilationUnit(source)
+ unitOfFile(source.file) = unit
+ toBeRemoved -= source.file
+ toBeRemovedAfterRun -= source.file
+ reset(unit)
+ //parseAndEnter(unit)
+ }
+
+ /** Make sure a set of compilation units is loaded and parsed */
+ private def reloadSources(sources: List[SourceFile]) {
+ newTyperRun()
+ minRunId = currentRunId
+ sources foreach reloadSource
+ moveToFront(sources)
+ }
+
+ /** Make sure a set of compilation units is loaded and parsed */
+ private[interactive] def reload(sources: List[SourceFile], response: Response[Unit]) {
+ informIDE("reload: " + sources)
+ lastWasReload = true
+ respond(response)(reloadSources(sources))
+ demandNewCompilerRun()
+ }
+
+ private[interactive] def filesDeleted(sources: List[SourceFile], response: Response[Unit]) {
+ informIDE("files deleted: " + sources)
+ val deletedFiles = sources.map(_.file).toSet
+ val deletedSyms = currentTopLevelSyms filter {sym => deletedFiles contains sym.sourceFile}
+ for (d <- deletedSyms) {
+ d.owner.info.decls unlink d
+ deletedTopLevelSyms += d
+ currentTopLevelSyms -= d
+ }
+ sources foreach (removeUnitOf(_))
+ minRunId = currentRunId
+ respond(response)(())
+ demandNewCompilerRun()
+ }
+
+ /** Arrange for unit to be removed after run, to give a chance to typecheck the unit fully.
+ * If we do just removeUnit, some problems with default parameters can ensue.
+ * Calls to this method could probably be replaced by removeUnit once default parameters are handled more robustly.
+ */
+ private def afterRunRemoveUnitsOf(sources: List[SourceFile]) {
+ toBeRemovedAfterRun ++= sources map (_.file)
+ }
+
+ /** A fully attributed tree located at position `pos` */
+ private def typedTreeAt(pos: Position): Tree = getUnit(pos.source) match {
+ case None =>
+ reloadSources(List(pos.source))
+ try typedTreeAt(pos)
+ finally afterRunRemoveUnitsOf(List(pos.source))
+ case Some(unit) =>
+ informIDE("typedTreeAt " + pos)
+ parseAndEnter(unit)
+ val tree = locateTree(pos)
+ debugLog("at pos "+pos+" was found: "+tree.getClass+" "+tree.pos.show)
+ tree match {
+ case Import(expr, _) =>
+ debugLog("import found"+expr.tpe+(if (expr.tpe == null) "" else " "+expr.tpe.members))
+ case _ =>
+ }
+ if (stabilizedType(tree) ne null) {
+ debugLog("already attributed: "+tree.symbol+" "+tree.tpe)
+ tree
+ } else {
+ unit.targetPos = pos
+ try {
+ debugLog("starting targeted type check")
+ typeCheck(unit)
+// println("tree not found at "+pos)
+ EmptyTree
+ } catch {
+ case ex: TyperResult => new Locator(pos) locateIn ex.tree
+ } finally {
+ unit.targetPos = NoPosition
+ }
+ }
+ }
+
+ /** A fully attributed tree corresponding to the entire compilation unit */
+ private[interactive] def typedTree(source: SourceFile, forceReload: Boolean): Tree = {
+ informIDE("typedTree " + source + " forceReload: " + forceReload)
+ val unit = getOrCreateUnitOf(source)
+ if (forceReload) reset(unit)
+ parseAndEnter(unit)
+ if (unit.status <= PartiallyChecked) typeCheck(unit)
+ unit.body
+ }
+
+ /** Set sync var `response` to a fully attributed tree located at position `pos` */
+ private[interactive] def getTypedTreeAt(pos: Position, response: Response[Tree]) {
+ respond(response)(typedTreeAt(pos))
+ }
+
+ /** Set sync var `response` to a fully attributed tree corresponding to the
+ * entire compilation unit */
+ private[interactive] def getTypedTree(source: SourceFile, forceReload: Boolean, response: Response[Tree]) {
+ respond(response)(typedTree(source, forceReload))
+ }
+
+ private def withTempUnits[T](sources: List[SourceFile])(f: (SourceFile => RichCompilationUnit) => T): T = {
+ val unitOfSrc: SourceFile => RichCompilationUnit = src => unitOfFile(src.file)
+ sources filterNot (getUnit(_).isDefined) match {
+ case Nil =>
+ f(unitOfSrc)
+ case unknown =>
+ reloadSources(unknown)
+ try {
+ f(unitOfSrc)
+ } finally
+ afterRunRemoveUnitsOf(unknown)
+ }
+ }
+
+ private def withTempUnit[T](source: SourceFile)(f: RichCompilationUnit => T): T =
+ withTempUnits(List(source)){ srcToUnit =>
+ f(srcToUnit(source))
+ }
+
+ /** Find a 'mirror' of symbol `sym` in unit `unit`. Pre: `unit` is loaded. */
+ private def findMirrorSymbol(sym: Symbol, unit: RichCompilationUnit): Symbol = {
+ val originalTypeParams = sym.owner.typeParams
+ ensureUpToDate(unit)
+ parseAndEnter(unit)
+ val pre = adaptToNewRunMap(ThisType(sym.owner))
+ val rawsym = pre.typeSymbol.info.decl(sym.name)
+ val newsym = rawsym filter { alt =>
+ sym.isType || {
+ try {
+ val tp1 = pre.memberType(alt) onTypeError NoType
+ val tp2 = adaptToNewRunMap(sym.tpe) substSym (originalTypeParams, sym.owner.typeParams)
+ matchesType(tp1, tp2, alwaysMatchSimple = false) || {
+ debugLog(s"findMirrorSymbol matchesType($tp1, $tp2) failed")
+ val tp3 = adaptToNewRunMap(sym.tpe) substSym (originalTypeParams, alt.owner.typeParams)
+ matchesType(tp1, tp3, alwaysMatchSimple = false) || {
+ debugLog(s"findMirrorSymbol fallback matchesType($tp1, $tp3) failed")
+ false
+ }
+ }
+ }
+ catch {
+ case ex: ControlThrowable => throw ex
+ case ex: Throwable =>
+ debugLog("error in findMirrorSymbol: " + ex)
+ ex.printStackTrace()
+ false
+ }
+ }
+ }
+ if (newsym == NoSymbol) {
+ if (rawsym.exists && !rawsym.isOverloaded) rawsym
+ else {
+ debugLog("mirror not found " + sym + " " + unit.source + " " + pre)
+ NoSymbol
+ }
+ } else if (newsym.isOverloaded) {
+ settings.uniqid.value = true
+ debugLog("mirror ambiguous " + sym + " " + unit.source + " " + pre + " " + newsym.alternatives)
+ NoSymbol
+ } else {
+ debugLog("mirror found for " + newsym + ": " + newsym.pos)
+ newsym
+ }
+ }
+
+ /** Implements CompilerControl.askLinkPos */
+ private[interactive] def getLinkPos(sym: Symbol, source: SourceFile, response: Response[Position]) {
+ informIDE("getLinkPos "+sym+" "+source)
+ respond(response) {
+ if (sym.owner.isClass) {
+ withTempUnit(source){ u =>
+ findMirrorSymbol(sym, u).pos
+ }
+ } else {
+ debugLog("link not in class "+sym+" "+source+" "+sym.owner)
+ NoPosition
+ }
+ }
+ }
+
+ private def forceDocComment(sym: Symbol, unit: RichCompilationUnit) {
+ unit.body foreachPartial {
+ case DocDef(comment, defn) if defn.symbol == sym =>
+ fillDocComment(defn.symbol, comment)
+ EmptyTree
+ case _: ValOrDefDef =>
+ EmptyTree
+ }
+ }
+
+ /** Implements CompilerControl.askDocComment */
+ private[interactive] def getDocComment(sym: Symbol, source: SourceFile, site: Symbol, fragments: List[(Symbol,SourceFile)],
+ response: Response[(String, String, Position)]) {
+ informIDE(s"getDocComment $sym at $source, site $site")
+ respond(response) {
+ withTempUnits(fragments.unzip._2){ units =>
+ for((sym, src) <- fragments) {
+ val mirror = findMirrorSymbol(sym, units(src))
+ if (mirror ne NoSymbol) forceDocComment(mirror, units(src))
+ }
+ val mirror = findMirrorSymbol(sym, units(source))
+ if (mirror eq NoSymbol)
+ ("", "", NoPosition)
+ else {
+ (expandedDocComment(mirror, site), rawDocComment(mirror), docCommentPos(mirror))
+ }
+ }
+ }
+ // New typer run to remove temp units and drop per-run caches that might refer to symbols entered from temp units.
+ newTyperRun()
+ }
+
+ def stabilizedType(tree: Tree): Type = tree match {
+ case Ident(_) if treeInfo.admitsTypeSelection(tree) =>
+ singleType(NoPrefix, tree.symbol)
+ case Select(qual, _) if treeInfo.admitsTypeSelection(tree) =>
+ singleType(qual.tpe, tree.symbol)
+ case Import(expr, selectors) =>
+ tree.symbol.info match {
+ case analyzer.ImportType(expr) => expr match {
+ case s@Select(qual, name) if treeInfo.admitsTypeSelection(expr) => singleType(qual.tpe, s.symbol)
+ case i : Ident => i.tpe
+ case _ => tree.tpe
+ }
+ case _ => tree.tpe
+ }
+
+ case _ => tree.tpe
+ }
+
+ import analyzer.{SearchResult, ImplicitSearch}
+
+ private[interactive] def getScopeCompletion(pos: Position, response: Response[List[Member]]) {
+ informIDE("getScopeCompletion" + pos)
+ respond(response) { scopeMembers(pos) }
+ }
+
+ private class Members[M <: Member] extends LinkedHashMap[Name, Set[M]] {
+ override def default(key: Name) = Set()
+
+ private def matching(sym: Symbol, symtpe: Type, ms: Set[M]): Option[M] = ms.find { m =>
+ (m.sym.name == sym.name) && (m.sym.isType || (m.tpe matches symtpe))
+ }
+
+ private def keepSecond(m: M, sym: Symbol, implicitlyAdded: Boolean): Boolean =
+ m.sym.hasFlag(ACCESSOR | PARAMACCESSOR) &&
+ !sym.hasFlag(ACCESSOR | PARAMACCESSOR) &&
+ (!implicitlyAdded || m.implicitlyAdded)
+
+ def add(sym: Symbol, pre: Type, implicitlyAdded: Boolean)(toMember: (Symbol, Type) => M) {
+ if ((sym.isGetter || sym.isSetter) && sym.accessed != NoSymbol) {
+ add(sym.accessed, pre, implicitlyAdded)(toMember)
+ } else if (!sym.name.decodedName.containsName("$") && !sym.isSynthetic && sym.hasRawInfo) {
+ val symtpe = pre.memberType(sym) onTypeError ErrorType
+ matching(sym, symtpe, this(sym.name)) match {
+ case Some(m) =>
+ if (keepSecond(m, sym, implicitlyAdded)) {
+ //print(" -+ "+sym.name)
+ this(sym.name) = this(sym.name) - m + toMember(sym, symtpe)
+ }
+ case None =>
+ //print(" + "+sym.name)
+ this(sym.name) = this(sym.name) + toMember(sym, symtpe)
+ }
+ }
+ }
+
+ def addNonShadowed(other: Members[M]) = {
+ for ((name, ms) <- other)
+ if (ms.nonEmpty && this(name).isEmpty) this(name) = ms
+ }
+
+ def allMembers: List[M] = values.toList.flatten
+ }
+
+ /** Return all members visible without prefix in context enclosing `pos`. */
+ private def scopeMembers(pos: Position): List[ScopeMember] = {
+ typedTreeAt(pos) // to make sure context is entered
+ val context = doLocateContext(pos)
+ val locals = new Members[ScopeMember]
+ val enclosing = new Members[ScopeMember]
+ def addScopeMember(sym: Symbol, pre: Type, viaImport: Tree) =
+ locals.add(sym, pre, implicitlyAdded = false) { (s, st) =>
+ new ScopeMember(s, st, context.isAccessible(s, pre, superAccess = false), viaImport)
+ }
+ def localsToEnclosing() = {
+ enclosing.addNonShadowed(locals)
+ locals.clear()
+ }
+ //print("add scope members")
+ var cx = context
+ while (cx != NoContext) {
+ for (sym <- cx.scope)
+ addScopeMember(sym, NoPrefix, EmptyTree)
+ localsToEnclosing()
+ if (cx == cx.enclClass) {
+ val pre = cx.prefix
+ for (sym <- pre.members)
+ addScopeMember(sym, pre, EmptyTree)
+ localsToEnclosing()
+ }
+ cx = cx.outer
+ }
+ //print("\nadd imported members")
+ for (imp <- context.imports) {
+ val pre = imp.qual.tpe
+ for (sym <- imp.allImportedSymbols)
+ addScopeMember(sym, pre, imp.qual)
+ localsToEnclosing()
+ }
+ // println()
+ val result = enclosing.allMembers
+// if (debugIDE) for (m <- result) println(m)
+ result
+ }
+
+ private[interactive] def getTypeCompletion(pos: Position, response: Response[List[Member]]) {
+ informIDE("getTypeCompletion " + pos)
+ respondGradually(response) { typeMembers(pos) }
+ //if (debugIDE) typeMembers(pos)
+ }
+
+ private def typeMembers(pos: Position): Stream[List[TypeMember]] = {
+ // Choosing which tree will tell us the type members at the given position:
+ // If pos leads to an Import, type the expr
+ // If pos leads to a Select, type the qualifier as long as it is not erroneous
+ // (this implies discarding the possibly incomplete name in the Select node)
+ // Otherwise, type the tree found at 'pos' directly.
+ val tree0 = typedTreeAt(pos) match {
+ case sel @ Select(qual, _) if sel.tpe == ErrorType => qual
+ case Import(expr, _) => expr
+ case t => t
+ }
+ val context = doLocateContext(pos)
+
+ val shouldTypeQualifier = tree0.tpe match {
+ case null => true
+ case mt: MethodType => mt.isImplicit
+ case _ => false
+ }
+
+ // TODO: guard with try/catch to deal with ill-typed qualifiers.
+ val tree = if (shouldTypeQualifier) analyzer newTyper context typedQualifier tree0 else tree0
+
+ debugLog("typeMembers at "+tree+" "+tree.tpe)
+ val superAccess = tree.isInstanceOf[Super]
+ val members = new Members[TypeMember]
+
+ def addTypeMember(sym: Symbol, pre: Type, inherited: Boolean, viaView: Symbol) = {
+ val implicitlyAdded = viaView != NoSymbol
+ members.add(sym, pre, implicitlyAdded) { (s, st) =>
+ new TypeMember(s, st,
+ context.isAccessible(if (s.hasGetter) s.getter(s.owner) else s, pre, superAccess && !implicitlyAdded),
+ inherited,
+ viaView)
+ }
+ }
+
+    /** Create a function application of a given view function to `tree` and typecheck it.
+ */
+ def viewApply(view: SearchResult): Tree = {
+ assert(view.tree != EmptyTree)
+ analyzer.newTyper(context.makeImplicit(reportAmbiguousErrors = false))
+ .typed(Apply(view.tree, List(tree)) setPos tree.pos)
+ .onTypeError(EmptyTree)
+ }
+
+ val pre = stabilizedType(tree)
+
+ val ownerTpe = tree.tpe match {
+ case analyzer.ImportType(expr) => expr.tpe
+ case null => pre
+ case MethodType(List(), rtpe) => rtpe
+ case _ => tree.tpe
+ }
+
+ //print("add members")
+ for (sym <- ownerTpe.members)
+ addTypeMember(sym, pre, sym.owner != ownerTpe.typeSymbol, NoSymbol)
+ members.allMembers #:: {
+ //print("\nadd enrichment")
+ val applicableViews: List[SearchResult] =
+ if (ownerTpe.isErroneous) List()
+ else new ImplicitSearch(
+ tree, functionType(List(ownerTpe), AnyTpe), isView = true,
+ context0 = context.makeImplicit(reportAmbiguousErrors = false)).allImplicits
+ for (view <- applicableViews) {
+ val vtree = viewApply(view)
+ val vpre = stabilizedType(vtree)
+ for (sym <- vtree.tpe.members) {
+ addTypeMember(sym, vpre, inherited = false, view.tree.symbol)
+ }
+ }
+ //println()
+ Stream(members.allMembers)
+ }
+ }
+
+ /** Implements CompilerControl.askLoadedTyped */
+ private[interactive] def waitLoadedTyped(source: SourceFile, response: Response[Tree], onSameThread: Boolean = true) {
+ getUnit(source) match {
+ case Some(unit) =>
+ if (unit.isUpToDate) {
+ debugLog("already typed")
+ response set unit.body
+ } else if (ignoredFiles(source.file)) {
+ response.raise(lastException.getOrElse(CancelException))
+ } else if (onSameThread) {
+ getTypedTree(source, forceReload = false, response)
+ } else {
+ debugLog("wait for later")
+ outOfDate = true
+ waitLoadedTypeResponses(source) += response
+ }
+ case None =>
+ debugLog("load unit and type")
+ try reloadSources(List(source))
+ finally waitLoadedTyped(source, response, onSameThread)
+ }
+ }
+
+ /** Implements CompilerControl.askParsedEntered */
+ private[interactive] def getParsedEntered(source: SourceFile, keepLoaded: Boolean, response: Response[Tree], onSameThread: Boolean = true) {
+ getUnit(source) match {
+ case Some(unit) =>
+ getParsedEnteredNow(source, response)
+ case None =>
+ try {
+ if (keepLoaded || outOfDate && onSameThread)
+ reloadSources(List(source))
+ } finally {
+ if (keepLoaded || !outOfDate || onSameThread)
+ getParsedEnteredNow(source, response)
+ else
+ getParsedEnteredResponses(source) += response
+ }
+ }
+ }
+
+  /** Parses and enters the given source file, storing the parse tree in `response` */
+ private def getParsedEnteredNow(source: SourceFile, response: Response[Tree]) {
+ respond(response) {
+ onUnitOf(source) { unit =>
+ parseAndEnter(unit)
+ unit.body
+ }
+ }
+ }
+
+ // ---------------- Helper classes ---------------------------
+
+ /** The typer run */
+ class TyperRun extends Run {
+ // units is always empty
+
+ /** canRedefine is used to detect double declarations of classes and objects
+ * in multiple source files.
+ * Since the IDE rechecks units several times in the same run, these tests
+ * are disabled by always returning true here.
+ */
+ override def canRedefine(sym: Symbol) = true
+
+ def typeCheck(unit: CompilationUnit): Unit = {
+ applyPhase(typerPhase, unit)
+ }
+
+    /** Apply a phase to a compilation unit. */
+ private def applyPhase(phase: Phase, unit: CompilationUnit) {
+ enteringPhase(phase) { phase.asInstanceOf[GlobalPhase] applyPhase unit }
+ }
+ }
+
+ def newTyperRun() {
+ currentTyperRun = new TyperRun
+ }
+
+ class TyperResult(val tree: Tree) extends ControlThrowable
+
+ assert(globalPhase.id == 0)
+
+ implicit def addOnTypeError[T](x: => T): OnTypeError[T] = new OnTypeError(x)
+
+ // OnTypeError should still catch TypeError because of cyclic references,
+ // but DivergentImplicit shouldn't leak anymore here
+ class OnTypeError[T](op: => T) {
+ def onTypeError(alt: => T) = try {
+ op
+ } catch {
+ case ex: TypeError =>
+ debugLog("type error caught: "+ex)
+ alt
+ }
+ }
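+
+  // For example, `pre.memberType(alt) onTypeError NoType` (as used in
+  // `findMirrorSymbol` above) computes the member type and falls back to
+  // NoType when the computation throws a TypeError.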
+
+ /** The compiler has been initialized. Constructors are evaluated in textual order,
+ * so this is set to true only after all super constructors and the primary constructor
+ * have been executed.
+ */
+ initializing = false
+}
+
+object CancelException extends Exception
diff --git a/src/interactive/scala/tools/nsc/interactive/InteractiveReporter.scala b/src/interactive/scala/tools/nsc/interactive/InteractiveReporter.scala
new file mode 100644
index 0000000000..013b152e96
--- /dev/null
+++ b/src/interactive/scala/tools/nsc/interactive/InteractiveReporter.scala
@@ -0,0 +1,47 @@
+/* NSC -- new Scala compiler
+ * Copyright 2009-2013 Typesafe/Scala Solutions and LAMP/EPFL
+ * @author Martin Odersky
+ */
+package scala.tools.nsc
+package interactive
+
+import scala.collection.mutable.ArrayBuffer
+import scala.reflect.internal.util.Position
+import reporters.Reporter
+
+case class Problem(pos: Position, msg: String, severityLevel: Int)
+
+abstract class InteractiveReporter extends Reporter {
+
+ def compiler: Global
+
+ val otherProblems = new ArrayBuffer[Problem]
+
+ override def info0(pos: Position, msg: String, severity: Severity, force: Boolean): Unit = try {
+ severity.count += 1
+ val problems =
+ if (compiler eq null) {
+ otherProblems
+ } else if (pos.isDefined) {
+ compiler.getUnit(pos.source) match {
+ case Some(unit) =>
+ compiler.debugLog(pos.source.file.name + ":" + pos.line + ": " + msg)
+ unit.problems
+ case None =>
+ compiler.debugLog(pos.source.file.name + "[not loaded] :" + pos.line + ": " + msg)
+ otherProblems
+ }
+ } else {
+ compiler.debugLog("[no position] :" + msg)
+ otherProblems
+ }
+ problems += Problem(pos, msg, severity.id)
+ } catch {
+ case ex: UnsupportedOperationException =>
+ }
+
+ override def reset() {
+ super.reset()
+ otherProblems.clear()
+ }
+}
diff --git a/src/interactive/scala/tools/nsc/interactive/Lexer.scala b/src/interactive/scala/tools/nsc/interactive/Lexer.scala
new file mode 100644
index 0000000000..82e8de3f3d
--- /dev/null
+++ b/src/interactive/scala/tools/nsc/interactive/Lexer.scala
@@ -0,0 +1,299 @@
+package scala.tools.nsc.interactive
+
+import java.io.Reader
+
+/** Companion object of class `Lexer` which defines tokens and some utility concepts
+ * used for tokens and lexers
+ */
+object Lexer {
+
+ /** An exception raised if an input does not correspond to what's expected
+ * @param rdr the lexer from which the bad input is read
+ * @param msg the error message
+ */
+ class MalformedInput(val rdr: Lexer, val msg: String) extends Exception("Malformed JSON input at "+rdr.tokenPos+": "+msg)
+
+ /** The class of tokens, i.e. descriptions of input words (or: lexemes).
+ * @param str the characters making up this token
+ */
+ class Token(val str: String) {
+ override def toString = str
+ }
+
+ /** A subclass of `Token` representing single-character delimiters
+ * @param char the delimiter character making up this token
+ */
+ case class Delim(char: Char) extends Token(s"'$char'")
+
+ /** A subclass of token representing integer literals */
+ case class IntLit(override val str: String) extends Token(str)
+
+ /** A subclass of token representing floating point literals */
+ case class FloatLit(override val str: String) extends Token(str)
+
+ /** A subclass of token representing string literals */
+ case class StringLit(override val str: String) extends Token(str) {
+ override def toString = quoted(str)
+ }
+
+ /** The `true` token */
+ val TrueLit = new Token("true")
+
+ /** The `false` token */
+ val FalseLit = new Token("false")
+
+ /** The `null` token */
+ val NullLit = new Token("null")
+
+ /** The '`(`' token */
+ val LParen = new Delim('(')
+
+ /** The '`)`' token */
+ val RParen = new Delim(')')
+
+ /** The '`{`' token */
+ val LBrace = new Delim('{')
+
+ /** The '`}`' token */
+ val RBrace = new Delim('}')
+
+ /** The '`[`' token */
+ val LBracket = new Delim('[')
+
+ /** The '`]`' token */
+ val RBracket = new Delim(']')
+
+ /** The '`,`' token */
+ val Comma = new Delim(',')
+
+ /** The '`:`' token */
+ val Colon = new Delim(':')
+
+ /** The token representing end of input */
+ val EOF = new Token("<end of input>")
+
+ private def toUDigit(ch: Int): Char = {
+ val d = ch & 0xF
+ (if (d < 10) d + '0' else d - 10 + 'A').toChar
+ }
+
+ private def addToStr(buf: StringBuilder, ch: Char) {
+ ch match {
+ case '"' => buf ++= "\\\""
+ case '\b' => buf ++= "\\b"
+ case '\f' => buf ++= "\\f"
+ case '\n' => buf ++= "\\n"
+ case '\r' => buf ++= "\\r"
+ case '\t' => buf ++= "\\t"
+ case '\\' => buf ++= "\\\\"
+ case _ =>
+ if (' ' <= ch && ch < 128) buf += ch
+ else buf ++= "\\u" += toUDigit(ch >>> 12) += toUDigit(ch >>> 8) += toUDigit(ch >>> 4) += toUDigit(ch.toInt)
+ }
+ }
+
+ /** Returns given string enclosed in `"`-quotes with all string characters escaped
+ * so that they correspond to the JSON standard.
+   *  Characters that are escaped are: `"`, `\b`, `\f`, `\n`, `\r`, `\t`, `\`.
+ * Furthermore, every other character which is not in the ASCII range 32-127 is
+ * escaped as a four hex-digit unicode character of the form `\ u x x x x`.
+ * @param str the string to be quoted
+ */
+ def quoted(str: String): String = {
+ val buf = new StringBuilder += '\"'
+ str foreach (addToStr(buf, _))
+ buf += '\"'
+ buf.toString
+ }
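+
+  // For example, quoted("say \"hi\"\n") == "\"say \\\"hi\\\"\\n\"": the inner
+  // quotes and the newline are escaped and the result is wrapped in `"`-quotes.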
+
+ private val BUF_SIZE = 2 << 16
+}
+
+import Lexer._
+
+/** A simple lexer for tokens as they are used in JSON, plus parens `(`, `)`
+ * Tokens understood are:
+ *
+ * `(`, `)`, `[`, `]`, `{`, `}`, `:`, `,`, `true`, `false`, `null`,
+ * strings (syntax as in JSON),
+ *  integer numbers (syntax as in JSON: -?(0|\d+)),
+ * floating point numbers (syntax as in JSON: -?(0|\d+)(\.\d+)?((e|E)(+|-)?\d+)?)
+ * The end of input is represented as its own token, EOF.
+ * Lexers can keep one token lookahead
+ *
+ * @param rd the reader from which characters are read.
+ */
+class Lexer(rd: Reader) {
+
+ /** The last-read character */
+ var ch: Char = 0
+
+ /** The number of characters read so far */
+ var pos: Long = 0
+
+ /** The last-read token */
+ var token: Token = _
+
+ /** The number of characters read before the start of the last-read token */
+ var tokenPos: Long = 0
+
+ private var atEOF: Boolean = false
+ private val buf = new Array[Char](BUF_SIZE)
+ private var nread: Int = 0
+ private var bp = 0
+
+ /** Reads next character into `ch` */
+ def nextChar() {
+ assert(!atEOF)
+ if (bp == nread) {
+ nread = rd.read(buf)
+ bp = 0
+ if (nread <= 0) { ch = 0; atEOF = true; return }
+ }
+ ch = buf(bp)
+ bp += 1
+ pos += 1
+ }
+
+ /** If last-read character equals given character, reads next character,
+ * otherwise raises an error
+ * @param c the given character to compare with last-read character
+ * @throws MalformedInput if character does not match
+ */
+ def acceptChar(c: Char) = if (ch == c) nextChar() else error("'"+c+"' expected")
+
+ private val sb = new StringBuilder
+
+ private def putChar() {
+ sb += ch; nextChar()
+ }
+
+ private def putAcceptString(str: String) {
+ str foreach acceptChar
+ sb ++= str
+ }
+
+ /** Skips whitespace and reads next lexeme into `token`
+ * @throws MalformedInput if lexeme not recognized as a valid token
+ */
+ def nextToken() {
+ sb.clear()
+ while (!atEOF && ch <= ' ') nextChar()
+ tokenPos = pos - 1
+ if (atEOF) token = EOF
+ else ch match {
+ case '(' => putChar(); token = LParen
+ case ')' => putChar(); token = RParen
+ case '{' => putChar(); token = LBrace
+ case '}' => putChar(); token = RBrace
+ case '[' => putChar(); token = LBracket
+ case ']' => putChar(); token = RBracket
+ case ',' => putChar(); token = Comma
+ case ':' => putChar(); token = Colon
+ case 't' => putAcceptString("true"); token = TrueLit
+ case 'f' => putAcceptString("false"); token = FalseLit
+ case 'n' => putAcceptString("null"); token = NullLit
+ case '"' => getString()
+ case '-' | '0' | '1' | '2' | '3' | '4' | '5' | '6' | '7' | '8' | '9' => getNumber()
+ case _ => error("unrecoginezed start of token: '"+ch+"'")
+ }
+ //println("["+token+"]")
+ }
+
+ /** Reads a string literal, and forms a `StringLit` token from it.
+ * Last-read input character `ch` must be opening `"`-quote.
+ * @throws MalformedInput if lexeme not recognized as a string literal.
+ */
+ def getString() {
+ def udigit() = {
+ nextChar()
+      if ('0' <= ch && ch <= '9') ch - '0'
+ else if ('A' <= ch && ch <= 'F') ch - 'A' + 10
+ else if ('a' <= ch && ch <= 'f') ch - 'a' + 10
+ else error("illegal unicode escape character: '"+ch+"'")
+ }
+ val delim = ch
+ nextChar()
+ while (ch != delim && ch >= ' ') {
+ if (ch == '\\') {
+ nextChar()
+ ch match {
+ case '\'' => sb += '\''
+ case '"' => sb += '"'
+ case '\\' => sb += '\\'
+ case '/' => sb += '/'
+ case 'b' => sb += '\b'
+ case 'f' => sb += '\f'
+ case 'n' => sb += '\n'
+ case 'r' => sb += '\r'
+ case 't' => sb += '\t'
+ case 'u' => sb += (udigit() << 12 | udigit() << 8 | udigit() << 4 | udigit()).toChar
+ case _ => error("illegal escape character: '"+ch+"'")
+ }
+ nextChar()
+ } else {
+ putChar()
+ }
+ }
+ acceptChar(delim)
+ token = StringLit(sb.toString)
+ }
+
+ /** Reads a numeric literal, and forms an `IntLit` or `FloatLit` token from it.
+ * Last-read input character `ch` must be either `-` or a digit.
+ * @throws MalformedInput if lexeme not recognized as a numeric literal.
+ */
+ def getNumber() {
+ def digit() =
+ if ('0' <= ch && ch <= '9') putChar()
+ else error("<digit> expected")
+ def digits() =
+ do { digit() } while ('0' <= ch && ch <= '9')
+ var isFloating = false
+ if (ch == '-') putChar()
+ if (ch == '0') digit()
+ else digits()
+ if (ch == '.') {
+ isFloating = true
+ putChar()
+ digits()
+ }
+ if (ch == 'e' || ch == 'E') {
+ isFloating = true
+ putChar()
+ if (ch == '+' || ch == '-') putChar()
+ digits()
+ }
+ token = if (isFloating) FloatLit(sb.toString) else IntLit(sb.toString)
+ }
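+
+  // For instance, the input "42" is read as IntLit("42"), while "-12.5e3" is
+  // read as FloatLit("-12.5e3").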
+
+ /** If current token equals given token, reads next token, otherwise raises an error.
+ * @param t the given token to compare current token with
+ * @throws MalformedInput if the two tokens do not match.
+ */
+ def accept(t: Token) {
+ if (token == t) nextToken()
+ else error(t+" expected, but "+token+" found")
+ }
+
+  /** If the current token is a delimiter consisting of the given character, reads the next token;
+   *  otherwise raises an error.
+ * @param ch the given delimiter character to compare current token with
+ * @throws MalformedInput if the current token `token` is not a delimiter, or
+ * consists of a character different from `c`.
+ */
+ def accept(ch: Char) {
+ token match {
+ case Delim(`ch`) => nextToken()
+ case _ => accept(Delim(ch))
+ }
+ }
+
+ /** Always throws a `MalformedInput` exception with given error message.
+ * @param msg the error message
+ */
+ def error(msg: String) = throw new MalformedInput(this, msg)
+
+ nextChar()
+ nextToken()
+}
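+
+/* Usage sketch (illustrative only): tokenizing a small JSON fragment with this
+ * lexer. Note that the constructor already reads the first character and token.
+ *
+ *   import java.io.StringReader
+ *   val lexer = new Lexer(new StringReader("""{"x": 1}"""))
+ *   assert(lexer.token == LBrace)
+ *   lexer.accept('{')                      // consume '{' and read the next token
+ *   assert(lexer.token == StringLit("x"))
+ *   lexer.nextToken()
+ *   lexer.accept(':')
+ *   assert(lexer.token == IntLit("1"))
+ */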
diff --git a/src/interactive/scala/tools/nsc/interactive/Main.scala b/src/interactive/scala/tools/nsc/interactive/Main.scala
new file mode 100644
index 0000000000..c838606f02
--- /dev/null
+++ b/src/interactive/scala/tools/nsc/interactive/Main.scala
@@ -0,0 +1,34 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+package scala.tools
+package nsc
+package interactive
+
+/** The main class for NSC, a compiler for the programming
+ * language Scala.
+ */
+object Main extends nsc.MainClass {
+ override def processSettingsHook(): Boolean = {
+ if (this.settings.Yidedebug) {
+ this.settings.Xprintpos.value = true
+ this.settings.Yrangepos.value = true
+ val compiler = new interactive.Global(this.settings, this.reporter)
+ import compiler.{ reporter => _, _ }
+
+ val sfs = command.files map getSourceFile
+ val reloaded = new interactive.Response[Unit]
+ askReload(sfs, reloaded)
+
+ reloaded.get.right.toOption match {
+ case Some(ex) => reporter.cancelled = true // Causes exit code to be non-0
+ case None => reporter.reset() // Causes other compiler errors to be ignored
+ }
+ askShutdown
+ false
+ }
+ else true
+ }
+}
diff --git a/src/interactive/scala/tools/nsc/interactive/Pickler.scala b/src/interactive/scala/tools/nsc/interactive/Pickler.scala
new file mode 100644
index 0000000000..83f3fab925
--- /dev/null
+++ b/src/interactive/scala/tools/nsc/interactive/Pickler.scala
@@ -0,0 +1,377 @@
+package scala.tools.nsc.interactive
+
+import Lexer._
+import java.io.Writer
+import scala.language.implicitConversions
+import scala.reflect.ClassTag
+
+/** An abstract class for writing and reading Scala objects to and
+ *  from a legible representation. The representation follows this grammar:
+ * {{{
+ * Pickled = `true` | `false` | `null` | NumericLit | StringLit |
+ * Labelled | Pickled `,` Pickled
+ * Labelled = StringLit `(` Pickled? `)`
+ * }}}
+ *
+ *  All ...Lit classes are as in JSON. @see scala.tools.nsc.interactive.Lexer
+ *
+ * Subclasses of `Pickler` each can write and read individual classes
+ * of values.
+ *
+ * @tparam T the type of values handled by this pickler.
+ *
+ * These Picklers build on the work of Andrew Kennedy. They are most closely inspired by
+ * Iulian Dragos' picklers for Scala to XML. See:
+ *
+ * <a href="http://code.google.com/p/gdata-scala-client/wiki/DevelopersGuide">
+ * http://code.google.com/p/gdata-scala-client/wiki/DevelopersGuide
+ * </a>
+ */
+abstract class Pickler[T] {
+
+ import Pickler._
+
+ /** Writes value in pickled form
+ * @param wr the writer to which pickled form is written
+ * @param x the value to write
+ */
+ def pickle(wr: Writer, x: T)
+
+ /** Reads value from pickled form.
+ *
+ * @param rd the lexer from which lexemes are read
+   *  @return An `UnpickleSuccess` value if the current input corresponds to the
+ * kind of value that is unpickled by the current subclass of `Pickler`,
+ * an `UnpickleFailure` value otherwise.
+   *  @throws  `Lexer.MalformedInput` if input is invalid, or if
+   *           an `UnpickleFailure` arises where success is required (see `requireSuccess`).
+ */
+ def unpickle(rd: Lexer): Unpickled[T]
+
+ /** A pickler representing a `~`-pair of values as two consecutive pickled
+ * strings, separated by a comma.
+ * @param that the second pickler which together with the current pickler makes
+ * up the pair `this ~ that` to be pickled.
+ */
+ def ~ [U] (that: => Pickler[U]): Pickler[T ~ U] = seqPickler(this, that)
+
+ /** A pickler that adds a label to the current pickler, using the representation
+ * `label ( <current pickler> )`
+ *
+   *  @param label the string to be added as a label.
+ */
+ def labelled(label: String): Pickler[T] = labelledPickler(label, this)
+
+ /** A pickler obtained from the current pickler by a pair of transformer functions
+ * @param in the function that maps values handled by the current pickler to
+ * values handled by the wrapped pickler.
+ * @param out the function that maps values handled by the wrapped pickler to
+ * values handled by the current pickler.
+ */
+ def wrapped [U] (in: T => U)(out: U => T): Pickler[U] = wrappedPickler(this)(in)(out)
+
+ /** A conditional pickler obtained from the current pickler.
+ * @param p the condition to test to find out whether pickler can handle
+ * some Scala value.
+ */
+ def cond(p: Any => Boolean): CondPickler[T] = conditionalPickler(this, p)
+
+ /** A conditional pickler handling values of some Scala class. It adds the
+   *  class name as a label to the representation of the current pickler.
+ * @param c the class of values handled by this pickler.
+ */
+ def asClass[U <: T](c: Class[U]): CondPickler[T] = this.labelled(c.getName).cond(c isInstance _)
+}
+
+object Pickler {
+ /** A base class representing unpickler result. It has two subclasses:
+   *  `UnpickleSuccess` for successful unpicklings and `UnpickleFailure` for failures,
+ * where a value of the given type `T` could not be unpickled from input.
+ * @tparam T the type of unpickled values in case of success.
+ */
+ abstract class Unpickled[+T] {
+ /** Transforms success values to success values using given function,
+ * leaves failures alone
+ * @param f the function to apply.
+ */
+ def map[U](f: T => U): Unpickled[U] = this match {
+ case UnpickleSuccess(x) => UnpickleSuccess(f(x))
+ case f: UnpickleFailure => f
+ }
+ /** Transforms success values to successes or failures using given function,
+ * leaves failures alone.
+ * @param f the function to apply.
+ */
+ def flatMap[U](f: T => Unpickled[U]): Unpickled[U] = this match {
+ case UnpickleSuccess(x) => f(x)
+ case f: UnpickleFailure => f
+ }
+ /** Tries alternate expression if current result is a failure
+ * @param alt the alternate expression to be tried in case of failure
+ */
+ def orElse[U >: T](alt: => Unpickled[U]): Unpickled[U] = this match {
+ case UnpickleSuccess(x) => this
+ case f: UnpickleFailure => alt
+ }
+
+ /** Transforms failures into thrown `MalformedInput` exceptions.
+ * @throws MalformedInput if current result is a failure
+ */
+ def requireSuccess: UnpickleSuccess[T] = this match {
+ case s @ UnpickleSuccess(x) => s
+ case f: UnpickleFailure =>
+ throw new MalformedInput(f.rd, "Unrecoverable unpickle failure:\n"+f.errMsg)
+ }
+ }
+
+ /** A class representing successful unpicklings
+ * @tparam T the type of the unpickled value
+ * @param result the unpickled value
+ */
+ case class UnpickleSuccess[+T](result: T) extends Unpickled[T]
+
+ /** A class representing unpickle failures
+ * @param msg an error message describing what failed.
+ * @param rd the lexer unpickled values were read from (can be used to get
+ * error position, for instance).
+ */
+ class UnpickleFailure(msg: => String, val rd: Lexer) extends Unpickled[Nothing] {
+ def errMsg = msg
+ override def toString = "Failure at "+rd.tokenPos+":\n"+msg
+ }
+
+ private def errorExpected(rd: Lexer, msg: => String) =
+ new UnpickleFailure("expected: "+msg+"\n" +
+ "found : "+rd.token,
+ rd)
+
+ private def nextSuccess[T](rd: Lexer, result: T) = {
+ rd.nextToken()
+ UnpickleSuccess(result)
+ }
+
+ /** The implicit `Pickler` value for type `T`. Equivalent to `implicitly[Pickler[T]]`.
+ */
+ def pkl[T: Pickler] = implicitly[Pickler[T]]
+
+  /** A class representing `~`-pairs */
+ case class ~[+S, +T](fst: S, snd: T)
+
+  /** A wrapper class to be able to use `~` as an infix method */
+ implicit class TildeDecorator[S](x: S) {
+ /** Infix method that forms a `~`-pair. */
+ def ~ [T](y: T): S ~ T = new ~ (x, y)
+ }
+
+ /** Same as `p.labelled(label)`.
+ */
+ def labelledPickler[T](label: String, p: Pickler[T]): Pickler[T] = new Pickler[T] {
+ def pickle(wr: Writer, x: T) = {
+ wr.write(quoted(label))
+ wr.write("(")
+ p.pickle(wr, x)
+ wr.write(")")
+ }
+ def unpickle(rd: Lexer): Unpickled[T] =
+ rd.token match {
+ case StringLit(`label`) =>
+ rd.nextToken()
+ rd.accept('(')
+ val result = p.unpickle(rd).requireSuccess
+ rd.accept(')')
+ result
+ case _ =>
+ errorExpected(rd, quoted(label)+"(...)")
+ }
+ }
+
+  /** Same as `p.wrapped(in)(out)`
+ */
+ def wrappedPickler[S, T](p: Pickler[S])(in: S => T)(out: T => S) = new Pickler[T] {
+ def pickle(wr: Writer, x: T) = p.pickle(wr, out(x))
+ def unpickle(rd: Lexer) = p.unpickle(rd) map in
+ }
+
+ /** Same as `p.cond(condition)`
+ */
+ def conditionalPickler[T](p: Pickler[T], condition: Any => Boolean) = new CondPickler[T](condition) {
+ def pickle(wr: Writer, x: T) = p.pickle(wr, x)
+ def unpickle(rd: Lexer) = p.unpickle(rd)
+ }
+
+ /** Same as `p ~ q`
+ */
+ def seqPickler[T, U](p: Pickler[T], q: => Pickler[U]) = new Pickler[T ~ U] {
+ lazy val qq = q
+ def pickle(wr: Writer, x: T ~ U) = {
+ p.pickle(wr, x.fst)
+ wr.write(',')
+ q.pickle(wr, x.snd)
+ }
+ def unpickle(rd: Lexer) =
+ for (x <- p.unpickle(rd); y <- { rd.accept(','); qq.unpickle(rd).requireSuccess })
+ yield x ~ y
+ }
+
+ /** Same as `p | q`
+ */
+ def eitherPickler[T, U <: T, V <: T](p: CondPickler[U], q: => CondPickler[V]) =
+ new CondPickler[T](x => p.canPickle(x) || q.canPickle(x)) {
+ lazy val qq = q
+ override def tryPickle(wr: Writer, x: Any): Boolean =
+ p.tryPickle(wr, x) || qq.tryPickle(wr, x)
+ def pickle(wr: Writer, x: T) =
+ require(tryPickle(wr, x),
+ "no pickler found for "+x+" of class "+x.getClass.getName)
+ def unpickle(rd: Lexer) = p.unpickle(rd) orElse qq.unpickle(rd)
+ }
+
+ /** A conditional pickler for singleton objects. It represents these
+ * with the object's underlying class as a label.
+ * Example: Object scala.None would be represented as `scala.None$()`.
+ */
+ def singletonPickler[T <: AnyRef](x: T): CondPickler[T] =
+ unitPickler
+ .wrapped { _ => x } { x => () }
+ .labelled (x.getClass.getName)
+ .cond (x eq _.asInstanceOf[AnyRef])
+
+  /** A pickler that handles instances of classes that have an empty constructor.
+   *  It represents them as `$new ( <name of class> )`.
+ * When unpickling, a new instance of the class is created using the empty
+ * constructor of the class via `Class.forName(<name of class>).newInstance()`.
+ */
+ def javaInstancePickler[T <: AnyRef]: Pickler[T] =
+ (stringPickler labelled "$new")
+ .wrapped { name => Class.forName(name).newInstance().asInstanceOf[T] } { _.getClass.getName }
+
+  /** A pickler that handles iterators. It pickles all values
+ * returned by an iterator separated by commas.
+ * When unpickling, it always returns an `UnpickleSuccess` containing an iterator.
+ * This iterator returns 0 or more values that are obtained by unpickling
+ * until a closing parenthesis, bracket or brace or the end of input is encountered.
+ *
+ * This means that iterator picklers should not be directly followed by `~`
+ * because the pickler would also read any values belonging to the second
+ * part of the `~`-pair.
+ *
+ * What's usually done instead is that the iterator pickler is wrapped and labelled
+ * to handle other kinds of sequences.
+ */
+ implicit def iterPickler[T: Pickler]: Pickler[Iterator[T]] = new Pickler[Iterator[T]] {
+ lazy val p = pkl[T]
+ def pickle(wr: Writer, xs: Iterator[T]) {
+ var first = true
+ for (x <- xs) {
+ if (first) first = false else wr.write(',')
+ p.pickle(wr, x)
+ }
+ }
+ def unpickle(rd: Lexer): Unpickled[Iterator[T]] = UnpickleSuccess(new Iterator[T] {
+ var first = true
+ def hasNext = {
+ val t = rd.token
+ t != EOF && t != RParen && t != RBrace && t != RBracket
+ }
+ def next(): T = {
+ if (first) first = false else rd.accept(',')
+ p.unpickle(rd).requireSuccess.result
+ }
+ })
+ }
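+
+  // `listPickler` below follows this pattern: it wraps the iterator pickler and
+  // labels it "scala.List", so the comma-separated elements end up enclosed in
+  // the label's parentheses and cannot run into a following `~`-component.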
+
+ /** A pickler that handles values that can be represented as a single token.
+ * @param kind the kind of token representing the value, used in error messages
+ * for unpickling.
+ * @param matcher A partial function from tokens to handled values. Unpickling
+ * succeeds if the matcher function is defined on the current token.
+ */
+ private def tokenPickler[T](kind: String)(matcher: PartialFunction[Token, T]) = new Pickler[T] {
+ def pickle(wr: Writer, x: T) = wr.write(x.toString)
+ def unpickle(rd: Lexer) =
+ if (matcher isDefinedAt rd.token) nextSuccess(rd, matcher(rd.token))
+ else errorExpected(rd, kind)
+ }
+
+ /** A pickler for values of type `Long`, represented as integer literals */
+ implicit val longPickler: Pickler[Long] =
+ tokenPickler("integer literal") { case IntLit(s) => s.toLong }
+
+ /** A pickler for values of type `Int`, represented as integer literals */
+ implicit val intPickler: Pickler[Int] = longPickler.wrapped { _.toInt } { _.toLong }
+
+ /** A conditional pickler for the boolean value `true` */
+ private val truePickler =
+ tokenPickler("boolean literal") { case TrueLit => true } cond { _ == true }
+
+ /** A conditional pickler for the boolean value `false` */
+ private val falsePickler =
+ tokenPickler("boolean literal") { case FalseLit => false } cond { _ == false }
+
+ /** A pickler for values of type `Boolean`, represented as the literals `true` or `false`. */
+ implicit def booleanPickler: Pickler[Boolean] = truePickler | falsePickler
+
+ /** A pickler for values of type `Unit`, represented by the empty character string */
+ implicit val unitPickler: Pickler[Unit] = new Pickler[Unit] {
+ def pickle(wr: Writer, x: Unit) {}
+ def unpickle(rd: Lexer): Unpickled[Unit] = UnpickleSuccess(())
+ }
+
+ /** A pickler for values of type `String`, represented as string literals */
+ implicit val stringPickler: Pickler[String] = new Pickler[String] {
+ def pickle(wr: Writer, x: String) = wr.write(if (x == null) "null" else quoted(x))
+ def unpickle(rd: Lexer) = rd.token match {
+ case StringLit(s) => nextSuccess(rd, s)
+ case NullLit => nextSuccess(rd, null)
+ case _ => errorExpected(rd, "string literal")
+ }
+ }
+
+ /** A pickler for pairs, represented as `~`-pairs */
+ implicit def tuple2Pickler[T1: Pickler, T2: Pickler]: Pickler[(T1, T2)] =
+ (pkl[T1] ~ pkl[T2])
+ .wrapped { case x1 ~ x2 => (x1, x2) } { case (x1, x2) => x1 ~ x2 }
+ .labelled ("tuple2")
+
+ /** A pickler for 3-tuples, represented as `~`-tuples */
+ implicit def tuple3Pickler[T1, T2, T3](implicit p1: Pickler[T1], p2: Pickler[T2], p3: Pickler[T3]): Pickler[(T1, T2, T3)] =
+ (p1 ~ p2 ~ p3)
+ .wrapped { case x1 ~ x2 ~ x3 => (x1, x2, x3) } { case (x1, x2, x3) => x1 ~ x2 ~ x3 }
+ .labelled ("tuple3")
+
+ /** A pickler for list values */
+ implicit def listPickler[T: Pickler]: Pickler[List[T]] =
+ iterPickler[T] .wrapped { _.toList } { _.iterator } .labelled ("scala.List")
+}
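+
+/* Composition sketch (illustrative; `Point` is a hypothetical user-defined class):
+ *
+ *   import Pickler._
+ *   case class Point(x: Int, y: Int)
+ *   implicit val pointPickler: Pickler[Point] =
+ *     (pkl[Int] ~ pkl[Int])
+ *       .wrapped { case x ~ y => Point(x, y) } { p => p.x ~ p.y }
+ *       .labelled ("Point")
+ *
+ * Point(1, 2) is then written as "Point"(1,2) and read back the same way.
+ */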
+
+/** A subclass of Pickler can indicate whether a particular value can be pickled by instances
+ * of this class.
+ * @param canPickle The predicate that indicates whether a given value
+ * can be pickled by instances of this class.
+ */
+abstract class CondPickler[T](val canPickle: Any => Boolean) extends Pickler[T] {
+ import Pickler._
+
+ /** Pickles given value `x` if possible, as indicated by `canPickle(x)`.
+ */
+ def tryPickle(wr: Writer, x: Any): Boolean = {
+ val result = canPickle(x)
+ if (result) pickle(wr, x.asInstanceOf[T])
+ result
+ }
+
+ /** A pickler obtained from this pickler and an alternative pickler.
+ * To pickle a value, this pickler is tried first. If it cannot handle
+ * the object (as indicated by its `canPickle` test), then the
+ * alternative pickler is tried.
+ * To unpickle a value, this unpickler is tried first. If it cannot read
+   *  the input (as indicated by an `UnpickleFailure` result), then the
+ * alternative pickler is tried.
+ * @tparam V The handled type of the returned pickler.
+ * @tparam U The handled type of the alternative pickler.
+ * @param that The alternative pickler.
+ */
+ def | [V >: T, U <: V] (that: => CondPickler[U]): CondPickler[V] =
+ eitherPickler[V, T, U](this, that)
+}
+
diff --git a/src/interactive/scala/tools/nsc/interactive/Picklers.scala b/src/interactive/scala/tools/nsc/interactive/Picklers.scala
new file mode 100644
index 0000000000..30d3048aa0
--- /dev/null
+++ b/src/interactive/scala/tools/nsc/interactive/Picklers.scala
@@ -0,0 +1,189 @@
+/* NSC -- new Scala compiler
+ * Copyright 2009-2013 Typesafe/Scala Solutions and LAMP/EPFL
+ * @author Martin Odersky
+ */
+package scala.tools.nsc
+package interactive
+
+import util.InterruptReq
+import scala.reflect.internal.util.{ SourceFile, BatchSourceFile }
+import io.{ AbstractFile, PlainFile }
+import util.EmptyAction
+import scala.reflect.internal.util.Position
+import Pickler._
+import scala.collection.mutable
+import mutable.ListBuffer
+
+trait Picklers { self: Global =>
+
+ lazy val freshRunReq =
+ unitPickler
+ .wrapped { _ => new FreshRunReq } { x => () }
+ .labelled ("FreshRunReq")
+ .cond (_.isInstanceOf[FreshRunReq])
+
+ lazy val shutdownReq = singletonPickler(ShutdownReq)
+
+ def defaultThrowable[T <: Throwable]: CondPickler[T] = javaInstancePickler[T] cond { _ => true }
+
+ implicit lazy val throwable: Pickler[Throwable] =
+ freshRunReq | shutdownReq | defaultThrowable
+
+ implicit def abstractFile: Pickler[AbstractFile] =
+ pkl[String]
+ .wrapped[AbstractFile] { new PlainFile(_) } { _.path }
+ .asClass (classOf[PlainFile])
+
+ private val sourceFilesSeen = new mutable.HashMap[AbstractFile, Array[Char]] {
+ override def default(key: AbstractFile) = Array()
+ }
+
+ type Diff = (Int /*start*/, Int /*end*/, String /*replacement*/)
+
+ def delta(f: AbstractFile, cs: Array[Char]): Diff = {
+ val bs = sourceFilesSeen(f)
+ var start = 0
+ while (start < bs.length && start < cs.length && bs(start) == cs(start)) start += 1
+ var end = bs.length
+ var end2 = cs.length
+ while (end > start && end2 > start && bs(end - 1) == cs(end2 - 1)) { end -= 1; end2 -= 1 }
+ sourceFilesSeen(f) = cs
+ (start, end, cs.slice(start, end2).mkString(""))
+ }
+
+ def patch(f: AbstractFile, d: Diff): Array[Char] = {
+ val (start, end, replacement) = d
+ val patched = sourceFilesSeen(f).patch(start, replacement, end - start)
+ sourceFilesSeen(f) = patched
+ patched
+ }
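+
+  // Example (illustrative): if the contents of `f` last seen on this side were
+  // "abcdef" and the new contents are "abXYef", delta(f, "abXYef".toCharArray)
+  // yields (2, 4, "XY"): a common prefix of length 2, the changed region of the
+  // old text ending at index 4, and "XY" as the replacement. On the reading
+  // side, where "abcdef" was last seen, patch(f, (2, 4, "XY")) rebuilds "abXYef".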
+
+ implicit lazy val sourceFile: Pickler[SourceFile] =
+ (pkl[AbstractFile] ~ pkl[Diff]).wrapped[SourceFile] {
+ case f ~ d => new BatchSourceFile(f, patch(f, d))
+ } {
+ f => f.file ~ delta(f.file, f.content)
+ }.asClass (classOf[BatchSourceFile])
+
+ lazy val offsetPosition: CondPickler[Position] =
+ (pkl[SourceFile] ~ pkl[Int])
+ .wrapped { case x ~ y => Position.offset(x, y) } { p => p.source ~ p.point }
+ .asClass (classOf[Position])
+
+ lazy val rangePosition: CondPickler[Position] =
+ (pkl[SourceFile] ~ pkl[Int] ~ pkl[Int] ~ pkl[Int])
+ .wrapped { case source ~ start ~ point ~ end => Position.range(source, start, point, end) } { p => p.source ~ p.start ~ p.point ~ p.end }
+ .asClass (classOf[Position])
+
+ lazy val transparentPosition: CondPickler[Position] =
+ (pkl[SourceFile] ~ pkl[Int] ~ pkl[Int] ~ pkl[Int])
+ .wrapped { case source ~ start ~ point ~ end => Position.range(source, start, point, end).makeTransparent } { p => p.source ~ p.start ~ p.point ~ p.end }
+ .asClass (classOf[Position])
+
+ lazy val noPosition = singletonPickler(NoPosition)
+
+ implicit lazy val position: Pickler[Position] = transparentPosition | rangePosition | offsetPosition | noPosition
+
+ implicit lazy val namePickler: Pickler[Name] =
+ pkl[String] .wrapped[Name] {
+ str => if ((str.length > 1) && (str endsWith "!")) newTypeName(str.init) else newTermName(str)
+ } {
+ name => if (name.isTypeName) name.toString+"!" else name.toString
+ }
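+
+  // For instance, the term name `foo` is written as "foo" while the type name
+  // `Foo` is written as "Foo!"; the trailing '!' marks type names and is
+  // stripped again on unpickling.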
+
+ implicit lazy val symPickler: Pickler[Symbol] = {
+ def ownerNames(sym: Symbol, buf: ListBuffer[Name]): ListBuffer[Name] = {
+ if (!sym.isRoot) {
+ ownerNames(sym.owner, buf)
+ buf += (if (sym.isModuleClass) sym.sourceModule else sym).name
+ if (!sym.isType && !sym.isStable) { // TODO: what's the reasoning behind this condition!?
+ val sym1 = sym.owner.info.decl(sym.name)
+ if (sym1.isOverloaded) {
+ val index = sym1.alternatives.indexOf(sym)
+ assert(index >= 0, sym1+" not found in alternatives "+sym1.alternatives)
+ buf += newTermName(index.toString)
+ }
+ }
+ }
+ buf
+ }
+ def makeSymbol(root: Symbol, names: List[Name]): Symbol = names match {
+ case List() =>
+ root
+ case name :: rest =>
+ val sym = root.info.decl(name)
+ if (sym.isOverloaded) makeSymbol(sym.alternatives(rest.head.toString.toInt), rest.tail)
+ else makeSymbol(sym, rest)
+ }
+ pkl[List[Name]] .wrapped { makeSymbol(rootMirror.RootClass, _) } { ownerNames(_, new ListBuffer).toList }
+ }
+
+ implicit def workEvent: Pickler[WorkEvent] = {
+ (pkl[Int] ~ pkl[Long])
+ .wrapped { case id ~ ms => WorkEvent(id, ms) } { w => w.atNode ~ w.atMillis }
+ }
+
+ implicit def interruptReq: Pickler[InterruptReq] = {
+ val emptyIR: InterruptReq = new InterruptReq { type R = Unit; val todo = () => () }
+ pkl[Unit] .wrapped { _ => emptyIR } { _ => () }
+ }
+
+ implicit def reloadItem: CondPickler[ReloadItem] =
+ pkl[List[SourceFile]]
+ .wrapped { ReloadItem(_, new Response) } { _.sources }
+ .asClass (classOf[ReloadItem])
+
+ implicit def askTypeAtItem: CondPickler[AskTypeAtItem] =
+ pkl[Position]
+ .wrapped { new AskTypeAtItem(_, new Response) } { _.pos }
+ .asClass (classOf[AskTypeAtItem])
+
+ implicit def askTypeItem: CondPickler[AskTypeItem] =
+ (pkl[SourceFile] ~ pkl[Boolean])
+ .wrapped { case source ~ forceReload => new AskTypeItem(source, forceReload, new Response) } { w => w.source ~ w.forceReload }
+ .asClass (classOf[AskTypeItem])
+
+ implicit def askTypeCompletionItem: CondPickler[AskTypeCompletionItem] =
+ pkl[Position]
+ .wrapped { new AskTypeCompletionItem(_, new Response) } { _.pos }
+ .asClass (classOf[AskTypeCompletionItem])
+
+ implicit def askScopeCompletionItem: CondPickler[AskScopeCompletionItem] =
+ pkl[Position]
+ .wrapped { new AskScopeCompletionItem(_, new Response) } { _.pos }
+ .asClass (classOf[AskScopeCompletionItem])
+
+ implicit def askToDoFirstItem: CondPickler[AskToDoFirstItem] =
+ pkl[SourceFile]
+ .wrapped { new AskToDoFirstItem(_) } { _.source }
+ .asClass (classOf[AskToDoFirstItem])
+
+ implicit def askLinkPosItem: CondPickler[AskLinkPosItem] =
+ (pkl[Symbol] ~ pkl[SourceFile])
+ .wrapped { case sym ~ source => new AskLinkPosItem(sym, source, new Response) } { item => item.sym ~ item.source }
+ .asClass (classOf[AskLinkPosItem])
+
+ implicit def askDocCommentItem: CondPickler[AskDocCommentItem] =
+ (pkl[Symbol] ~ pkl[SourceFile] ~ pkl[Symbol] ~ pkl[List[(Symbol,SourceFile)]])
+ .wrapped { case sym ~ source ~ site ~ fragments => new AskDocCommentItem(sym, source, site, fragments, new Response) } { item => item.sym ~ item.source ~ item.site ~ item.fragments }
+ .asClass (classOf[AskDocCommentItem])
+
+ implicit def askLoadedTypedItem: CondPickler[AskLoadedTypedItem] =
+ pkl[SourceFile]
+ .wrapped { source => new AskLoadedTypedItem(source, new Response) } { _.source }
+ .asClass (classOf[AskLoadedTypedItem])
+
+ implicit def askParsedEnteredItem: CondPickler[AskParsedEnteredItem] =
+ (pkl[SourceFile] ~ pkl[Boolean])
+ .wrapped { case source ~ keepLoaded => new AskParsedEnteredItem(source, keepLoaded, new Response) } { w => w.source ~ w.keepLoaded }
+ .asClass (classOf[AskParsedEnteredItem])
+
+ implicit def emptyAction: CondPickler[EmptyAction] =
+ pkl[Unit]
+ .wrapped { _ => new EmptyAction } { _ => () }
+ .asClass (classOf[EmptyAction])
+
+ implicit def action: Pickler[() => Unit] =
+ reloadItem | askTypeAtItem | askTypeItem | askTypeCompletionItem | askScopeCompletionItem |
+ askToDoFirstItem | askLinkPosItem | askDocCommentItem | askLoadedTypedItem | askParsedEnteredItem | emptyAction
+}
diff --git a/src/interactive/scala/tools/nsc/interactive/PresentationCompilerThread.scala b/src/interactive/scala/tools/nsc/interactive/PresentationCompilerThread.scala
new file mode 100644
index 0000000000..a2d8e5d49a
--- /dev/null
+++ b/src/interactive/scala/tools/nsc/interactive/PresentationCompilerThread.scala
@@ -0,0 +1,51 @@
+/* NSC -- new Scala compiler
+ * Copyright 2009-2013 Typesafe/Scala Solutions and LAMP/EPFL
+ * @author Martin Odersky
+ * @author Iulian Dragos
+ */
+package scala.tools.nsc.interactive
+
+/** A presentation compiler thread. This is a lightweight class, delegating most
+ * of its functionality to the compiler instance.
+ *
+ */
+final class PresentationCompilerThread(var compiler: Global, name: String = "")
+ extends Thread("Scala Presentation Compiler [" + name + "]") {
+
+ /** The presentation compiler loop.
+ */
+ override def run() {
+ compiler.debugLog("starting new runner thread")
+ while (compiler ne null) try {
+ compiler.checkNoResponsesOutstanding()
+ compiler.log.logreplay("wait for more work", { compiler.scheduler.waitForMoreWork(); true })
+ compiler.pollForWork(compiler.NoPosition)
+ while (compiler.isOutOfDate) {
+ try {
+ compiler.backgroundCompile()
+ } catch {
+ case ex: FreshRunReq =>
+ compiler.debugLog("fresh run req caught, starting new pass")
+ }
+ compiler.log.flush()
+ }
+ } catch {
+ case ex @ ShutdownReq =>
+ compiler.debugLog("exiting presentation compiler")
+ compiler.log.close()
+
+ // make sure we don't keep around stale instances
+ compiler = null
+ case ex: Throwable =>
+ compiler.log.flush()
+
+ ex match {
+ case ex: FreshRunReq =>
+ compiler.debugLog("fresh run req caught outside presentation compiler loop; ignored") // This shouldn't be reported
+ case _ : Global#ValidateException => // This will have been reported elsewhere
+ compiler.debugLog("validate exception caught outside presentation compiler loop; ignored")
+ case _ => ex.printStackTrace(); compiler.informIDE("Fatal Error: "+ex)
+ }
+ }
+ }
+}
diff --git a/src/interactive/scala/tools/nsc/interactive/PrettyWriter.scala b/src/interactive/scala/tools/nsc/interactive/PrettyWriter.scala
new file mode 100644
index 0000000000..d7dadcc6a8
--- /dev/null
+++ b/src/interactive/scala/tools/nsc/interactive/PrettyWriter.scala
@@ -0,0 +1,41 @@
+package scala.tools.nsc.interactive
+
+import java.io.Writer
+
+class PrettyWriter(wr: Writer) extends Writer {
+ protected val indentStep = " "
+ private var indent = 0
+ private def newLine() {
+ wr.write('\n')
+ wr.write(indentStep * indent)
+ }
+ def close() = wr.close()
+ def flush() = wr.flush()
+ def write(str: Array[Char], off: Int, len: Int): Unit = {
+ if (off < str.length && off < len) {
+ str(off) match {
+ case '{' | '[' | '(' =>
+ indent += 1
+ wr.write(str(off).toInt)
+ newLine()
+ wr.write(str, off + 1, len - 1)
+ case '}' | ']' | ')' =>
+ wr.write(str, off, len)
+ indent -= 1
+ case ',' =>
+ wr.write(',')
+ newLine()
+ wr.write(str, off + 1, len - 1)
+ case ':' =>
+ wr.write(':')
+ wr.write(' ')
+ wr.write(str, off + 1, len - 1)
+ case _ =>
+ wr.write(str, off, len)
+ }
+ } else {
+ wr.write(str, off, len)
+ }
+ }
+ override def toString = wr.toString
+}
diff --git a/src/interactive/scala/tools/nsc/interactive/REPL.scala b/src/interactive/scala/tools/nsc/interactive/REPL.scala
new file mode 100644
index 0000000000..8e9b0ceee0
--- /dev/null
+++ b/src/interactive/scala/tools/nsc/interactive/REPL.scala
@@ -0,0 +1,164 @@
+/* NSC -- new Scala compiler
+ * Copyright 2009-2013 Typesafe/Scala Solutions and LAMP/EPFL
+ * @author Martin Odersky
+ */
+package scala
+package tools.nsc
+package interactive
+
+import scala.reflect.internal.util._
+import scala.tools.nsc.reporters._
+import scala.tools.nsc.io._
+import java.io.FileWriter
+
+/** Interface of interactive compiler to a client such as an IDE
+ */
+object REPL {
+
+ val versionMsg = "Scala compiler " +
+ Properties.versionString + " -- " +
+ Properties.copyrightString
+
+ val prompt = "> "
+
+ var reporter: ConsoleReporter = _
+
+ private def replError(msg: String) {
+ reporter.error(/*new Position */FakePos("scalac"),
+ msg + "\n scalac -help gives more information")
+ }
+
+ def process(args: Array[String]) {
+ val settings = new Settings(replError)
+ reporter = new ConsoleReporter(settings)
+ val command = new CompilerCommand(args.toList, settings)
+ if (command.settings.version)
+ reporter.echo(versionMsg)
+ else {
+ try {
+ object compiler extends Global(command.settings, reporter) {
+// printTypings = true
+ }
+ if (reporter.hasErrors) {
+ reporter.flush()
+ return
+ }
+ if (command.shouldStopWithInfo) {
+ reporter.echo(command.getInfoMessage(compiler))
+ } else {
+ run(compiler)
+ }
+ } catch {
+ case ex @ FatalError(msg) =>
+ if (true || command.settings.debug) // !!!
+ ex.printStackTrace()
+ reporter.error(null, "fatal error: " + msg)
+ }
+ }
+ }
+
+ def main(args: Array[String]) {
+ process(args)
+ sys.exit(if (reporter.hasErrors) 1 else 0)
+ }
+
+ def loop(action: (String) => Unit) {
+ Console.print(prompt)
+ try {
+ val line = Console.readLine()
+ if (line.length() > 0) {
+ action(line)
+ }
+ loop(action)
+ }
+ catch {
+ case _: java.io.EOFException => //nop
+ }
+ }
+
+ /** Commands:
+ *
+ * reload file1 ... fileN
+ * typeat file off1 off2?
+ * complete file off1 off2?
+ */
+ def run(comp: Global) {
+ val reloadResult = new Response[Unit]
+ val typeatResult = new Response[comp.Tree]
+ val completeResult = new Response[List[comp.Member]]
+ val typedResult = new Response[comp.Tree]
+ val structureResult = new Response[comp.Tree]
+
+ def makePos(file: String, off1: String, off2: String) = {
+ val source = toSourceFile(file)
+ comp.rangePos(source, off1.toInt, off1.toInt, off2.toInt)
+ }
+
+ def doTypeAt(pos: Position) {
+ comp.askTypeAt(pos, typeatResult)
+ show(typeatResult)
+ }
+
+ def doComplete(pos: Position) {
+ comp.askTypeCompletion(pos, completeResult)
+ show(completeResult)
+ }
+
+ def doStructure(file: String) {
+ comp.askParsedEntered(toSourceFile(file), keepLoaded = false, structureResult)
+ show(structureResult)
+ }
+
+ loop { line =>
+ (line split " ").toList match {
+ case "reload" :: args =>
+ comp.askReload(args map toSourceFile, reloadResult)
+ show(reloadResult)
+ case "reloadAndAskType" :: file :: millis :: Nil =>
+ comp.askReload(List(toSourceFile(file)), reloadResult)
+ Thread.sleep(millis.toLong)
+ println("ask type now")
+ comp.askLoadedTyped(toSourceFile(file), typedResult)
+ typedResult.get
+ case List("typeat", file, off1, off2) =>
+ doTypeAt(makePos(file, off1, off2))
+ case List("typeat", file, off1) =>
+ doTypeAt(makePos(file, off1, off1))
+ case List("complete", file, off1, off2) =>
+ doComplete(makePos(file, off1, off2))
+ case List("complete", file, off1) =>
+ doComplete(makePos(file, off1, off1))
+ case List("quit") =>
+ comp.askShutdown()
+ sys.exit(1)
+ case List("structure", file) =>
+ doStructure(file)
+ case _ =>
+ print("""Available commands:
+ | reload <file_1> ... <file_n>
+ | reloadAndAskType <file> <sleep-ms>
+ | typed <file>
+ | typeat <file> <start-pos> <end-pos>
+ | typeat <file> <pos>
+ | complete <file> <start-pos> <end-pos>
+ | compile <file> <pos>
+ | structure <file>
+ | quit
+ |""".stripMargin)
+ }
+ }
+ }
+
+ def toSourceFile(name: String) = new BatchSourceFile(new PlainFile(new java.io.File(name)))
+
+ def using[T, U](svar: Response[T])(op: T => U): Option[U] = {
+ val res = svar.get match {
+ case Left(result) => Some(op(result))
+ case Right(exc) => exc.printStackTrace; println("ERROR: "+exc); None
+ }
+ svar.clear()
+ res
+ }
+
+ def show[T](svar: Response[T]) = using(svar)(res => println("==> "+res))
+}
diff --git a/src/interactive/scala/tools/nsc/interactive/RangePositions.scala b/src/interactive/scala/tools/nsc/interactive/RangePositions.scala
new file mode 100644
index 0000000000..410f919daa
--- /dev/null
+++ b/src/interactive/scala/tools/nsc/interactive/RangePositions.scala
@@ -0,0 +1,14 @@
+/* NSC -- new Scala compiler
+ * Copyright 2009-2013 Typesafe/Scala Solutions and LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+package scala.tools.nsc
+package interactive
+
+@deprecated("Use scala.reflect.internal.Positions", "2.11.0")
+trait RangePositions extends scala.reflect.internal.Positions with ast.Trees with ast.Positions {
+ self: scala.tools.nsc.Global =>
+
+ override val useOffsetPositions = false
+}
diff --git a/src/interactive/scala/tools/nsc/interactive/Replayer.scala b/src/interactive/scala/tools/nsc/interactive/Replayer.scala
new file mode 100644
index 0000000000..0e3e2493fe
--- /dev/null
+++ b/src/interactive/scala/tools/nsc/interactive/Replayer.scala
@@ -0,0 +1,74 @@
+package scala.tools.nsc.interactive
+
+import java.io.{Reader, Writer}
+
+import Pickler._
+import Lexer.EOF
+
+abstract class LogReplay {
+ def logreplay(event: String, x: => Boolean): Boolean
+ def logreplay[T: Pickler](event: String, x: => Option[T]): Option[T]
+ def close()
+ def flush()
+}
+
+class Logger(wr0: Writer) extends LogReplay {
+ val wr = new PrettyWriter(wr0)
+ private var first = true
+ private def insertComma() = if (first) first = false else wr.write(",")
+
+ def logreplay(event: String, x: => Boolean) = {
+ val xx = x
+ if (xx) { insertComma(); pkl[Unit].labelled(event).pickle(wr, ()) }
+ xx
+ }
+ def logreplay[T: Pickler](event: String, x: => Option[T]) = {
+ val xx = x
+ xx match {
+ case Some(y) => insertComma(); pkl[T].labelled(event).pickle(wr, y)
+ case None =>
+ }
+ xx
+ }
+ def close() { wr.close() }
+ def flush() { wr.flush() }
+}
+
+object NullLogger extends LogReplay {
+ def logreplay(event: String, x: => Boolean) = x
+ def logreplay[T: Pickler](event: String, x: => Option[T]) = x
+ def close() {}
+ def flush() {}
+}
+
+class Replayer(raw: Reader) extends LogReplay {
+ private val rd = new Lexer(raw)
+ private var nextComma = false
+
+ private def eatComma() =
+ if (nextComma) { rd.accept(','); nextComma = false }
+
+ def logreplay(event: String, x: => Boolean) =
+ if (rd.token == EOF) NullLogger.logreplay(event, x)
+ else {
+ eatComma()
+ pkl[Unit].labelled(event).unpickle(rd) match {
+ case UnpickleSuccess(_) => nextComma = true; true
+ case _ => false
+ }
+ }
+
+ def logreplay[T: Pickler](event: String, x: => Option[T]) =
+ if (rd.token == EOF) NullLogger.logreplay(event, x)
+ else {
+ eatComma()
+ pkl[T].labelled(event).unpickle(rd) match {
+ case UnpickleSuccess(y) => nextComma = true; Some(y)
+ case _ => None
+ }
+ }
+
+ def close() { raw.close() }
+ def flush() {}
+}
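+
+/* Round-trip sketch (illustrative): a Logger writes each event as a labelled
+ * pickle, e.g. `"wait for more work"()`, separated by commas; a Replayer reading
+ * the same stream answers the corresponding logreplay calls from the log instead
+ * of evaluating their by-name arguments.
+ */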
+
diff --git a/src/interactive/scala/tools/nsc/interactive/Response.scala b/src/interactive/scala/tools/nsc/interactive/Response.scala
new file mode 100644
index 0000000000..3e84c83e55
--- /dev/null
+++ b/src/interactive/scala/tools/nsc/interactive/Response.scala
@@ -0,0 +1,107 @@
+/* NSC -- new Scala compiler
+ * Copyright 2009-2013 Typesafe/Scala Solutions and LAMP/EPFL
+ * @author Martin Odersky
+ */
+package scala.tools.nsc
+package interactive
+
+/** Typical interaction, given a predicate <user-input>, a function <display>,
+ * and an exception handler <handle>:
+ *
+ * val TIMEOUT = 100 // (milliseconds) or something like that
+ * val r = new Response()
+ * while (!r.isComplete && !r.isCancelled) {
+ * if (<user-input>) r.cancel()
+ * else r.get(TIMEOUT) match {
+ * case Some(Left(data)) => <display>(data)
+ * case Some(Right(exc)) => <handle>(exc)
+ * case None =>
+ * }
+ * }
+ */
+class Response[T] {
+
+ private var data: Option[Either[T, Throwable]] = None
+ private var complete = false
+ private var cancelled = false
+
+ /** Set provisional data, more to come
+ */
+ def setProvisionally(x: T) = synchronized {
+ data = Some(Left(x))
+ }
+
+ /** Set final data, and mark response as complete.
+ */
+ def set(x: T) = synchronized {
+ data = Some(Left(x))
+ complete = true
+ notifyAll()
+ }
+
+ /** Store raised exception in data, and mark response as complete.
+ */
+ def raise(exc: Throwable) = synchronized {
+ data = Some(Right(exc))
+ complete = true
+ notifyAll()
+ }
+
+ /** Get final data, wait as long as necessary.
+ * When interrupted will return with Right(InterruptedException)
+ */
+ def get: Either[T, Throwable] = synchronized {
+ while (!complete) {
+ try {
+ wait()
+ } catch {
+ case exc: InterruptedException => {
+ Thread.currentThread().interrupt()
+ raise(exc)
+ }
+ }
+ }
+ data.get
+ }
+
+ /** Optionally get data within `timeout` milliseconds.
+ * When interrupted will return with Some(Right(InterruptedException))
+ * When timeout ends, will return last stored provisional result,
+ * or else None if no provisional result was stored.
+ */
+ def get(timeout: Long): Option[Either[T, Throwable]] = synchronized {
+ val start = System.currentTimeMillis
+ var current = start
+ while (!complete && start + timeout > current) {
+ try {
+ wait(timeout - (current - start))
+ } catch {
+ case exc: InterruptedException => {
+ Thread.currentThread().interrupt()
+ raise(exc)
+ }
+ }
+ current = System.currentTimeMillis
+ }
+ data
+ }
+
+  /** Whether final data has been set and the response is complete.
+ */
+ def isComplete = synchronized { complete }
+
+  /** Cancel the action computing this response (only the
+   *  party that calls get on a response may cancel).
+ */
+ def cancel() = synchronized { cancelled = true }
+
+ /** A cancel request for this response has been issued
+ */
+ def isCancelled = synchronized { cancelled }
+
+ def clear() = synchronized {
+ data = None
+ complete = false
+ cancelled = false
+ }
+}
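A minimal usage sketch of `Response` (the worker computation and messages are illustrative, not part of this patch), mirroring the interaction outlined in the class comment: one thread completes the response with `set` or `raise`, while the caller polls `get(timeout)` and can finally block on `get`:

    val resp = new Response[String]
    new Thread(new Runnable {
      def run() {
        try resp.set("the answer")                   // final data; completes the response
        catch { case e: Throwable => resp.raise(e) } // failures complete it too
      }
    }).start()

    resp.get(100L) match {                           // poll with a 100 ms timeout
      case Some(Left(result)) => println("got: " + result)
      case Some(Right(exc))   => exc.printStackTrace()
      case None               => println("no result yet")
    }
    println(resp.get)                                // block until the final Either arrives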
diff --git a/src/interactive/scala/tools/nsc/interactive/RichCompilationUnits.scala b/src/interactive/scala/tools/nsc/interactive/RichCompilationUnits.scala
new file mode 100644
index 0000000000..b83c2cd095
--- /dev/null
+++ b/src/interactive/scala/tools/nsc/interactive/RichCompilationUnits.scala
@@ -0,0 +1,58 @@
+/* NSC -- new Scala compiler
+ * Copyright 2009-2013 Typesafe/Scala Solutions and LAMP/EPFL
+ * @author Martin Odersky
+ */
+package scala.tools.nsc
+package interactive
+
+import scala.reflect.internal.util.{SourceFile, Position, NoPosition}
+import scala.collection.mutable.ArrayBuffer
+
+trait RichCompilationUnits { self: Global =>
+
+ /** The status value of a unit that has not yet been loaded */
+ final val NotLoaded = -2
+
+ /** The status value of a unit that has not yet been typechecked */
+ final val JustParsed = -1
+
+ /** The status value of a unit that has been partially typechecked */
+ final val PartiallyChecked = 0
+
+ class RichCompilationUnit(source: SourceFile) extends CompilationUnit(source) {
+
+    /** The runId of the latest compiler run that typechecked this unit,
+     *  or one of the sentinel values NotLoaded, JustParsed, PartiallyChecked.
+ */
+ var status: Int = NotLoaded
+
+ /** Unit has been parsed */
+ def isParsed: Boolean = status >= JustParsed
+
+    /** Unit has been typechecked, but maybe not in the latest run */
+ def isTypeChecked: Boolean = status > JustParsed
+
+ /** Unit has been typechecked and is up to date */
+ def isUpToDate: Boolean = status >= minRunId
+
+ /** the current edit point offset */
+ var editPoint: Int = -1
+
+ /** The problems reported for this unit */
+ val problems = new ArrayBuffer[Problem]
+
+    /** The position of a targeted type check.
+ * If this is different from NoPosition, the type checking
+ * will stop once a tree that contains this position range
+ * is fully attributed.
+ */
+ var _targetPos: Position = NoPosition
+ override def targetPos: Position = _targetPos
+ def targetPos_=(p: Position) { _targetPos = p }
+
+ var contexts: Contexts = new Contexts
+
+ /** The last fully type-checked body of this unit */
+ var lastBody: Tree = EmptyTree
+ }
+}
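The status constants are ordered so that the checks above reduce to plain integer comparisons (NotLoaded < JustParsed < PartiallyChecked, all below the run ids of actual typechecking runs). An illustrative helper that could live inside this trait (the name `describe` is hypothetical, not part of this patch):

    def describe(unit: RichCompilationUnit): String =
      if (!unit.isParsed)           "not yet parsed"
      else if (!unit.isTypeChecked) "parsed, not yet typechecked"
      else if (!unit.isUpToDate)    "typechecked by an earlier run"
      else                          "typechecked and up to date"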
diff --git a/src/interactive/scala/tools/nsc/interactive/tests/InteractiveTest.scala b/src/interactive/scala/tools/nsc/interactive/tests/InteractiveTest.scala
new file mode 100644
index 0000000000..2cb4f5fd4a
--- /dev/null
+++ b/src/interactive/scala/tools/nsc/interactive/tests/InteractiveTest.scala
@@ -0,0 +1,113 @@
+/* NSC -- new Scala compiler
+ * Copyright 2009-2013 Typesafe/Scala Solutions and LAMP/EPFL
+ * @author Martin Odersky
+ */
+package scala.tools.nsc
+package interactive
+package tests
+
+import core._
+import scala.collection.mutable.ListBuffer
+
+/** A base class for writing interactive compiler tests.
+ *
+ * This class tries to cover common functionality needed when testing the presentation
+ * compiler: instantiating source files, reloading, creating positions, instantiating
+ * the presentation compiler, random stress testing.
+ *
+ * By default, this class loads all Scala and Java sources found under `src/`, going
+ * recursively into subfolders. Loaded sources are available through `sourceFiles`
+ * (see trait `TestResources`). The presentation compiler is available through `compiler`.
+ *
+ * It is easy to test member completion, type retrieval and hyperlinking at a given position.
+ * Source files are searched for `TestMarker`s. By default, the type completion marker is
+ * `/*!*/`, the typedAt marker is `/*?*/` and the hyperlinking marker is `/*#*/`. Place these
+ * markers in your source files, and the test framework will automatically pick them up and
+ * run the corresponding actions. Sources are reloaded by `askReload(sourceFiles)` (a blocking
+ * call). Ask operations are then issued either one at a time, waiting for each response
+ * (the default, see `synchronousRequests`), or all at once and awaited in turn; in both
+ * cases each result is printed. The timeout per response is `TIMEOUT` milliseconds
+ * (10 seconds, see `TestSettings`).
+ *
+ * To define a custom operation you have to:
+ *
+ * (1) Define a new marker by extending `TestMarker`
+ * (2) Provide an implementation for the operation you want to check by extending `PresentationCompilerTestDef`
+ * (3) Add the class defined in (2) to the set of executed test actions by calling `++` on `InteractiveTest`.
+ *
+ * Then you can simply use the newly defined marker in your test sources and the testing
+ * framework will automatically pick it up (a sketch of these three steps follows this listing).
+ *
+ * @see Check existing tests under test/files/presentation
+ *
+ * @author Iulian Dragos
+ * @author Mirco Dotta
+ */
+abstract class InteractiveTest
+ extends AskParse
+ with AskShutdown
+ with AskReload
+ with AskLoadedTyped
+ with PresentationCompilerInstance
+ with CoreTestDefs
+ with InteractiveTestSettings { self =>
+
+ protected val runRandomTests = false
+
+ /** Should askAllSources wait for each ask to finish before issuing the next? */
+ override protected val synchronousRequests = true
+
+ /** The core set of test actions that are executed during each test run are
+ * `CompletionAction`, `TypeAction` and `HyperlinkAction`.
+ * Override this member if you need to change the default set of executed test actions.
+ */
+ protected lazy val testActions: ListBuffer[PresentationCompilerTestDef] = {
+ ListBuffer(new TypeCompletionAction(compiler), new ScopeCompletionAction(compiler), new TypeAction(compiler), new HyperlinkAction(compiler))
+ }
+
+  /** Add new presentation compiler actions to test. Each test action
+   *  needs to extend trait `PresentationCompilerTestDef`.
+ */
+ protected def ++(tests: PresentationCompilerTestDef*) {
+ testActions ++= tests
+ }
+
+ /** Test's entry point */
+ def main(args: Array[String]) {
+ try execute()
+ finally askShutdown()
+ }
+
+ protected def execute(): Unit = {
+ loadSources()
+ runDefaultTests()
+ }
+
+ /** Load all sources before executing the test. */
+ protected def loadSources() {
+ // ask the presentation compiler to track all sources. We do
+ // not wait for the file to be entirely typed because we do want
+ // to exercise the presentation compiler on scoped type requests.
+ askReload(sourceFiles)
+ // make sure all sources are parsed before running the test. This
+    // is because tests may depend on the sources having been parsed at
+ // least once
+ askParse(sourceFiles)
+ }
+
+ /** Run all defined `PresentationCompilerTestDef` */
+ protected def runDefaultTests() {
+ //TODO: integrate random tests!, i.e.: if (runRandomTests) randomTests(20, sourceFiles)
+ testActions.foreach(_.runTest())
+ }
+
+ /** Perform n random tests with random changes. */
+ /****
+ private def randomTests(n: Int, files: Array[SourceFile]) {
+ val tester = new Tester(n, files, settings) {
+ override val compiler = self.compiler
+ override val reporter = new reporters.StoreReporter
+ }
+ tester.run()
+ }
+ ****/
+}
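A hypothetical end-to-end example of steps (1)-(3) above; every name here (`MyMarker`, `MyTypeAction`, `MyTest`, the `/*mine*/` marker string) is illustrative and not part of this patch. The action builds its request with the protected `ask` helper so that it does not depend on the package-private helpers of `tests.core`:

    import scala.tools.nsc.interactive.Global
    import scala.tools.nsc.interactive.tests.InteractiveTest
    import scala.tools.nsc.interactive.tests.core.{AskCommand, PresentationCompilerTestDef, TestMarker}

    object MyTest extends InteractiveTest {

      // (1) a new marker to place in the test's source files
      object MyMarker extends TestMarker("/*mine*/")

      // (2) the operation to run at every occurrence of the marker
      class MyTypeAction(override val compiler: Global)
        extends PresentationCompilerTestDef with AskCommand {
        import compiler.Tree

        override def runTest() {
          askAllSources(MyMarker) { pos =>
            ask[Tree] { compiler.askTypeAt(pos, _) }        // wrap the callback into a Response
          } { (pos, tree) =>
            compiler.ask(() => println("custom askType at " + format(pos) + ": " + tree))
          }
        }
      }

      // (3) register the new action alongside the default ones
      this ++ new MyTypeAction(compiler)
    }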
diff --git a/src/interactive/scala/tools/nsc/interactive/tests/InteractiveTestSettings.scala b/src/interactive/scala/tools/nsc/interactive/tests/InteractiveTestSettings.scala
new file mode 100644
index 0000000000..ad5c61b2b0
--- /dev/null
+++ b/src/interactive/scala/tools/nsc/interactive/tests/InteractiveTestSettings.scala
@@ -0,0 +1,69 @@
+package scala.tools.nsc
+package interactive
+package tests
+
+import java.io.File.pathSeparatorChar
+import java.io.File.separatorChar
+import scala.tools.nsc.interactive.tests.core.PresentationCompilerInstance
+import scala.tools.nsc.io.{File,Path}
+import core.Reporter
+import core.TestSettings
+
+trait InteractiveTestSettings extends TestSettings with PresentationCompilerInstance {
+  /** Prefix that marks a comment line in the .flags file */
+ private final val CommentStartDelimiter = "#"
+
+ private final val TestOptionsFileExtension = "flags"
+
+  /** Prepare the settings object. Load the .flags file and adjust all paths from the
+ * Unix-like syntax to the platform specific syntax. This is necessary so that a
+ * single .opts file can be used on all platforms.
+ *
+ * @note Bootclasspath is treated specially. If there is a -bootclasspath option in
+ * the file, the 'usejavacp' setting is set to false. This ensures that the
+ * bootclasspath takes precedence over the scala-library used to run the current
+ * test.
+ */
+ override protected def prepareSettings(settings: Settings) {
+ def adjustPaths(paths: settings.PathSetting*) {
+ for (p <- paths if argsString.contains(p.name)) p.value = p.value.map {
+ case '/' => separatorChar
+ case ':' => pathSeparatorChar
+ case c => c
+ }
+ }
+
+    // needed so that the classpath comes from what partest sets up,
+    // instead of scala.home
+ settings.usejavacp.value = !argsString.contains("-bootclasspath")
+
+ // pass any options coming from outside
+ settings.processArgumentString(argsString) match {
+ case (false, rest) =>
+ println("error processing arguments (unprocessed: %s)".format(rest))
+ case _ => ()
+ }
+
+    // Resolve the -sourcepath provided in the .flags file (if any) against the test's base directory
+ if(settings.sourcepath.isSetByUser)
+ settings.sourcepath.value = (baseDir / Path(settings.sourcepath.value)).path
+
+ adjustPaths(settings.bootclasspath, settings.classpath, settings.javabootclasspath, settings.sourcepath)
+ }
+
+  /** If there's a <testname>.flags file in the output directory, read and parse it for command-line arguments. */
+ protected val argsString = {
+ val optsFile = outDir / "%s.%s".format(System.getProperty("partest.testname"), TestOptionsFileExtension)
+ val str = try File(optsFile).slurp() catch {
+ case e: java.io.IOException => ""
+ }
+ str.lines.filter(!_.startsWith(CommentStartDelimiter)).mkString(" ")
+ }
+
+ override protected def printClassPath(implicit reporter: Reporter) {
+ reporter.println("\toutDir: %s".format(outDir.path))
+ reporter.println("\tbaseDir: %s".format(baseDir.path))
+ reporter.println("\targsString: %s".format(argsString))
+ super.printClassPath(reporter)
+ }
+}
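For reference, a hypothetical `<partest.testname>.flags` file read by `argsString` could look like the snippet below (the options are only an example; lines starting with `#` are dropped and the remaining lines are joined into a single argument string):

    # extra presentation compiler options for this test
    -sourcepath src
    -deprecation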
diff --git a/src/interactive/scala/tools/nsc/interactive/tests/Tester.scala b/src/interactive/scala/tools/nsc/interactive/tests/Tester.scala
new file mode 100644
index 0000000000..f1ada32808
--- /dev/null
+++ b/src/interactive/scala/tools/nsc/interactive/tests/Tester.scala
@@ -0,0 +1,209 @@
+/* NSC -- new Scala compiler
+ * Copyright 2009-2013 Typesafe/Scala Solutions and LAMP/EPFL
+ * @author Martin Odersky
+ */
+package scala
+package tools.nsc
+package interactive
+package tests
+
+import scala.reflect.internal.util._
+import reporters._
+import io.AbstractFile
+import scala.collection.mutable.ArrayBuffer
+
+class Tester(ntests: Int, inputs: Array[SourceFile], settings: Settings) {
+
+ val reporter = new StoreReporter
+ val compiler = new Global(settings, reporter)
+
+ def askAndListen[T, U](msg: String, arg: T, op: (T, Response[U]) => Unit) {
+ if (settings.verbose) print(msg+" "+arg+": ")
+ val TIMEOUT = 10 // ms
+ val limit = System.currentTimeMillis() + randomDelayMillis
+ val res = new Response[U]
+ op(arg, res)
+ while (!res.isComplete && !res.isCancelled) {
+ if (System.currentTimeMillis() > limit) {
+ print("c"); res.cancel()
+ } else res.get(TIMEOUT.toLong) match {
+ case Some(Left(t)) =>
+ /**/
+ if (settings.verbose) println(t)
+ case Some(Right(ex)) =>
+ ex.printStackTrace()
+ println(ex)
+ case None =>
+ }
+ }
+ }
+
+ def askReload(sfs: SourceFile*) = askAndListen("reload", sfs.toList, compiler.askReload)
+ def askTypeAt(pos: Position) = askAndListen("type at", pos, compiler.askTypeAt)
+  def askTypeCompletion(pos: Position) = askAndListen("type completion at", pos, compiler.askTypeCompletion)
+  def askScopeCompletion(pos: Position) = askAndListen("scope completion at", pos, compiler.askScopeCompletion)
+
+ val rand = new java.util.Random()
+
+ private def randomInverse(n: Int) = n / (rand.nextInt(n) + 1)
+
+ private def randomDecreasing(n: Int) = {
+ var r = rand.nextInt((1 to n).sum)
+ var limit = n
+ var result = 0
+ while (r > limit) {
+ result += 1
+ r -= limit
+ limit -= 1
+ }
+ result
+ }
+
+ def randomSourceFileIdx() = rand.nextInt(inputs.length)
+
+ def randomBatchesPerSourceFile(): Int = randomDecreasing(100)
+
+ def randomChangesPerBatch(): Int = randomInverse(50)
+
+ def randomPositionIn(sf: SourceFile) = rand.nextInt(sf.content.length)
+
+ def randomNumChars() = randomInverse(100)
+
+ def randomDelayMillis = randomInverse(10000)
+
+ class Change(sfidx: Int, start: Int, nchars: Int, toLeft: Boolean) {
+
+ private var pos = start
+ private var deleted: List[Char] = List()
+
+ override def toString =
+ "In "+inputs(sfidx)+" at "+start+" take "+nchars+" to "+
+ (if (toLeft) "left" else "right")
+
+ def deleteOne() {
+ val sf = inputs(sfidx)
+ deleted = sf.content(pos) :: deleted
+ val sf1 = new BatchSourceFile(sf.file, sf.content.take(pos) ++ sf.content.drop(pos + 1))
+ inputs(sfidx) = sf1
+ askReload(sf1)
+ }
+
+ def deleteAll() {
+ print("/"+nchars)
+ for (i <- 0 until nchars) {
+ if (toLeft) {
+ if (pos > 0 && pos <= inputs(sfidx).length) {
+ pos -= 1
+ deleteOne()
+ }
+ } else {
+ if (pos < inputs(sfidx).length) {
+ deleteOne()
+ }
+ }
+ }
+ }
+
+ def insertAll() {
+ for (chr <- if (toLeft) deleted else deleted.reverse) {
+ val sf = inputs(sfidx)
+ val (pre, post) = sf./**/content splitAt pos
+ pos += 1
+ val sf1 = new BatchSourceFile(sf.file, pre ++ (chr +: post))
+ inputs(sfidx) = sf1
+ askReload(sf1)
+ }
+ }
+ }
+
+ val testComment = "/**/"
+
+ def testFileChanges(sfidx: Int) = {
+ lazy val testPositions: Seq[Int] = {
+ val sf = inputs(sfidx)
+ val buf = new ArrayBuffer[Int]
+ var pos = sf.content.indexOfSlice(testComment)
+ while (pos > 0) {
+ buf += pos
+ pos = sf.content.indexOfSlice(testComment, pos + 1)
+ }
+ buf
+ }
+ def otherTest() {
+ if (testPositions.nonEmpty) {
+        val pos = Position.offset(inputs(sfidx), testPositions(rand.nextInt(testPositions.length)))
+ rand.nextInt(3) match {
+ case 0 => askTypeAt(pos)
+ case 1 => askTypeCompletion(pos)
+ case 2 => askScopeCompletion(pos)
+ }
+ }
+ }
+ for (i <- 0 until randomBatchesPerSourceFile()) {
+ val changes = Vector.fill(/**/randomChangesPerBatch()) {
+ /**/
+ new Change(sfidx, randomPositionIn(inputs(sfidx)), randomNumChars(), rand.nextBoolean())
+ }
+ doTest(sfidx, changes, testPositions, otherTest) match {
+ case Some(errortrace) =>
+ println(errortrace)
+ minimize(errortrace)
+ case None =>
+ }
+ }
+ }
+
+ def doTest(sfidx: Int, changes: Seq[Change], testPositions: Seq[Int], otherTest: () => Unit): Option[ErrorTrace] = {
+ print("new round with "+changes.length+" changes:")
+ changes foreach (_.deleteAll())
+ otherTest()
+ def errorCount() = compiler.ask(() => reporter.ERROR.count)
+// println("\nhalf test round: "+errorCount())
+ changes.view.reverse foreach (_.insertAll())
+ otherTest()
+ println("done test round: "+errorCount())
+ if (errorCount() != 0)
+ Some(ErrorTrace(sfidx, changes, reporter.infos, inputs(sfidx).content))
+ else
+ None
+ }
+
+ case class ErrorTrace(
+ sfidx: Int, changes: Seq[Change], infos: scala.collection.Set[reporter.Info], content: Array[Char]) {
+ override def toString =
+ "Sourcefile: "+inputs(sfidx)+
+ "\nChanges:\n "+changes.mkString("\n ")+
+ "\nErrors:\n "+infos.mkString("\n ")+
+ "\nContents:\n"+content.mkString
+ }
+
+ def minimize(etrace: ErrorTrace) {}
+
+ /**/
+ def run() {
+ askReload(inputs: _*)
+ for (i <- 0 until ntests)
+ testFileChanges(randomSourceFileIdx())
+ }
+}
+
+/* A program to do presentation compiler stress tests.
+ * Usage:
+ *
+ *   scala scala.tools.nsc.interactive.tests.Tester <n> <files>
+ *
+ * where <n> is the number of tests to be run and <files> is the set of files to test.
+ * This will do random deletions and re-insertions in any of the files.
+ * At places where an empty comment /**/ appears it will in addition randomly
+ * do ask-types, type-completions, or scope-completions.
+ */
+object Tester {
+ def main(args: Array[String]) {
+ val settings = new Settings()
+ val (_, filenames) = settings.processArguments(args.toList.tail, processAll = true)
+ println("filenames = "+filenames)
+ val files = filenames.toArray map (str => new BatchSourceFile(AbstractFile.getFile(str)): SourceFile)
+ new Tester(args(0).toInt, files, settings).run()
+ sys.exit(0)
+ }
+}
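Besides the command-line entry point above, a `Tester` can also be driven programmatically; a minimal sketch (the file name, test count and the `usejavacp` choice are illustrative assumptions):

    import scala.reflect.internal.util.{BatchSourceFile, SourceFile}
    import scala.tools.nsc.Settings
    import scala.tools.nsc.io.AbstractFile

    val settings = new Settings()
    settings.usejavacp.value = true            // assume scala-library is on the JVM classpath
    val src: SourceFile = new BatchSourceFile(AbstractFile.getFile("A.scala"))
    new Tester(ntests = 20, inputs = Array(src), settings = settings).run()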
diff --git a/src/interactive/scala/tools/nsc/interactive/tests/core/AskCommand.scala b/src/interactive/scala/tools/nsc/interactive/tests/core/AskCommand.scala
new file mode 100644
index 0000000000..4f9df6808f
--- /dev/null
+++ b/src/interactive/scala/tools/nsc/interactive/tests/core/AskCommand.scala
@@ -0,0 +1,122 @@
+/* NSC -- new Scala compiler
+ * Copyright 2009-2013 Typesafe/Scala Solutions and LAMP/EPFL
+ * @author Martin Odersky
+ */
+package scala.tools.nsc
+package interactive
+package tests.core
+
+import scala.tools.nsc.interactive.Response
+import scala.reflect.internal.util.Position
+import scala.reflect.internal.util.SourceFile
+
+/**
+ * A trait for defining commands that can be sent to the
+ * presentation compiler.
+ * */
+trait AskCommand {
+
+ /** presentation compiler's instance. */
+ protected val compiler: Global
+
+ /**
+   * The presentation compiler's `askXXX` actions work by side-effecting
+   * a `Response` instance that is passed as an argument to the call.
+   * The `ask` method below encapsulates this pattern.
+ * */
+ protected def ask[T](op: Response[T] => Unit): Response[T] = {
+ val r = new Response[T]
+ op(r)
+ r
+ }
+}
+
+/** Ask the presentation compiler to shut down. */
+trait AskShutdown extends AskCommand {
+ def askShutdown() = compiler.askShutdown()
+}
+
+/** Ask the presentation compiler to parse a sequence of `sources` */
+trait AskParse extends AskCommand {
+ import compiler.Tree
+
+ /** `sources` need to be entirely parsed before running the test
+ * (else commands such as `AskTypeCompletionAt` may fail simply because
+ * the source's AST is not yet loaded).
+ */
+ def askParse(sources: Seq[SourceFile]) {
+ val responses = sources map (askParse(_))
+ responses.foreach(_.get) // force source files parsing
+ }
+
+ private def askParse(src: SourceFile, keepLoaded: Boolean = true): Response[Tree] = {
+ ask {
+ compiler.askParsedEntered(src, keepLoaded, _)
+ }
+ }
+}
+
+/** Ask the presentation compiler to reload a sequence of `sources` */
+trait AskReload extends AskCommand {
+
+ /** Reload the given source files and wait for them to be reloaded. */
+ protected def askReload(sources: Seq[SourceFile])(implicit reporter: Reporter): Response[Unit] = {
+ val sortedSources = (sources map (_.file.name)).sorted
+ reporter.println("reload: " + sortedSources.mkString(", "))
+
+ ask {
+ compiler.askReload(sources.toList, _)
+ }
+ }
+}
+
+/** Ask the presentation compiler for completion at a given position. */
+trait AskTypeCompletionAt extends AskCommand {
+ import compiler.Member
+
+ private[tests] def askTypeCompletionAt(pos: Position)(implicit reporter: Reporter): Response[List[Member]] = {
+ reporter.println("\naskTypeCompletion at " + pos.source.file.name + ((pos.line, pos.column)))
+
+ ask {
+ compiler.askTypeCompletion(pos, _)
+ }
+ }
+}
+
+/** Ask the presentation compiler for scope completion at a given position. */
+trait AskScopeCompletionAt extends AskCommand {
+ import compiler.Member
+
+ private[tests] def askScopeCompletionAt(pos: Position)(implicit reporter: Reporter): Response[List[Member]] = {
+ reporter.println("\naskScopeCompletion at " + pos.source.file.name + ((pos.line, pos.column)))
+
+ ask {
+ compiler.askScopeCompletion(pos, _)
+ }
+ }
+}
+
+/** Ask the presentation compiler for type info at a given position. */
+trait AskTypeAt extends AskCommand {
+ import compiler.Tree
+
+ private[tests] def askTypeAt(pos: Position)(implicit reporter: Reporter): Response[Tree] = {
+ reporter.println("\naskType at " + pos.source.file.name + ((pos.line, pos.column)))
+
+ ask {
+ compiler.askTypeAt(pos, _)
+ }
+ }
+}
+
+trait AskLoadedTyped extends AskCommand {
+ import compiler.Tree
+
+ protected def askLoadedTyped(source: SourceFile)(implicit reporter: Reporter): Response[Tree] = {
+ ask {
+ compiler.askLoadedTyped(source, _)
+ }
+ }
+
+}
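The `ask` helper turns the callback style of the `askXXX` methods into a `Response` a test can block on. A minimal sketch of a further command trait built on it (the trait and method names are hypothetical, not part of this patch):

    import scala.reflect.internal.util.SourceFile

    trait AskUnitOf extends AskCommand {
      import compiler.Tree

      // wrap the side-effecting askLoadedTyped call in a Response and block on it
      protected def typedTreeOf(source: SourceFile): Either[Tree, Throwable] =
        ask[Tree] { r => compiler.askLoadedTyped(source, r) }.get
    }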
diff --git a/src/interactive/scala/tools/nsc/interactive/tests/core/CoreTestDefs.scala b/src/interactive/scala/tools/nsc/interactive/tests/core/CoreTestDefs.scala
new file mode 100644
index 0000000000..bc490d8d45
--- /dev/null
+++ b/src/interactive/scala/tools/nsc/interactive/tests/core/CoreTestDefs.scala
@@ -0,0 +1,128 @@
+package scala.tools.nsc
+package interactive
+package tests.core
+
+import scala.reflect.internal.util.Position
+
+/** Set of core test definitions that are executed for each test run. */
+private[tests] trait CoreTestDefs
+ extends PresentationCompilerRequestsWorkingMode {
+
+ import scala.tools.nsc.interactive.Global
+
+ /** Ask the presentation compiler for completion at all locations
+ * (in all sources) where the defined `marker` is found. */
+ class TypeCompletionAction(override val compiler: Global)
+ extends PresentationCompilerTestDef
+ with AskTypeCompletionAt {
+
+ override def runTest() {
+ askAllSources(TypeCompletionMarker) { pos =>
+ askTypeCompletionAt(pos)
+ } { (pos, members) =>
+ withResponseDelimiter {
+ reporter.println("[response] askTypeCompletion at " + format(pos))
+          // we skip getClass because it changed signature between JDK 1.5 and 1.6, so there is no
+          // universal check file that we can provide for this to work
+ reporter.println("retrieved %d members".format(members.size))
+ compiler ask { () =>
+ val filtered = members.filterNot(member => (member.sym.name string_== "getClass") || member.sym.isConstructor)
+ reporter println (filtered.map(_.forceInfoString).sorted mkString "\n")
+ }
+ }
+ }
+ }
+ }
+
+ /** Ask the presentation compiler for completion at all locations
+ * (in all sources) where the defined `marker` is found. */
+ class ScopeCompletionAction(override val compiler: Global)
+ extends PresentationCompilerTestDef
+ with AskScopeCompletionAt {
+
+ override def runTest() {
+ askAllSources(ScopeCompletionMarker) { pos =>
+ askScopeCompletionAt(pos)
+ } { (pos, members) =>
+ withResponseDelimiter {
+ reporter.println("[response] askScopeCompletion at " + format(pos))
+ try {
+          // exclude members not from source (they have no position), for more focused and self-contained tests.
+ def eligible(sym: compiler.Symbol) = sym.pos != compiler.NoPosition
+ val filtered = members.filter(member => eligible(member.sym))
+
+ reporter.println("retrieved %d members".format(filtered.size))
+ compiler ask { () =>
+ reporter.println(filtered.map(_.forceInfoString).sorted mkString "\n")
+ }
+ } catch {
+ case t: Throwable =>
+ t.printStackTrace()
+ }
+
+ }
+ }
+ }
+ }
+
+ /** Ask the presentation compiler for type info at all locations
+ * (in all sources) where the defined `marker` is found. */
+ class TypeAction(override val compiler: Global)
+ extends PresentationCompilerTestDef
+ with AskTypeAt {
+
+ override def runTest() {
+ askAllSources(TypeMarker) { pos =>
+ askTypeAt(pos)
+ } { (pos, tree) =>
+ withResponseDelimiter {
+ reporter.println("[response] askTypeAt " + format(pos))
+ compiler.ask(() => reporter.println(tree))
+ }
+ }
+ }
+ }
+
+ /** Ask the presentation compiler for hyperlink at all locations
+ * (in all sources) where the defined `marker` is found. */
+ class HyperlinkAction(override val compiler: Global)
+ extends PresentationCompilerTestDef
+ with AskTypeAt
+ with AskTypeCompletionAt {
+
+ override def runTest() {
+ askAllSources(HyperlinkMarker) { pos =>
+ askTypeAt(pos)(NullReporter)
+ } { (pos, tree) =>
+ if(tree.symbol == compiler.NoSymbol) {
+ reporter.println("\nNo symbol is associated with tree: "+tree)
+ }
+ else {
+ reporter.println("\naskHyperlinkPos for `" + tree.symbol.name + "` at " + format(pos) + " " + pos.source.file.name)
+ val r = new Response[Position]
+ // `tree.symbol.sourceFile` was discovered to be null when testing using virtpatmat on the akka presentation test, where a position had shifted to point to `Int`
+ // askHyperlinkPos for `Int` at (73,19) pi.scala --> class Int in package scala has null sourceFile!
+ val treePath = if (tree.symbol.sourceFile ne null) tree.symbol.sourceFile.path else null
+ val treeName = if (tree.symbol.sourceFile ne null) tree.symbol.sourceFile.name else null
+
+ sourceFiles.find(_.path == treePath) match {
+ case Some(source) =>
+ compiler.askLinkPos(tree.symbol, source, r)
+ r.get match {
+ case Left(pos) =>
+ val resolvedPos = if (tree.symbol.pos.isDefined) tree.symbol.pos else pos
+ withResponseDelimiter {
+ reporter.println("[response] found askHyperlinkPos for `" + tree.symbol.name + "` at " + format(resolvedPos) + " " + tree.symbol.sourceFile.name)
+ }
+ case Right(ex) =>
+ ex.printStackTrace()
+ }
+ case None =>
+            reporter.println("[error] could not locate sourcefile `" + treeName + "`. " +
+              "Hint: Does the looked up definition come from a binary?")
+ }
+ }
+ }
+ }
+ }
+}
diff --git a/src/interactive/scala/tools/nsc/interactive/tests/core/PresentationCompilerInstance.scala b/src/interactive/scala/tools/nsc/interactive/tests/core/PresentationCompilerInstance.scala
new file mode 100644
index 0000000000..29e546f9fe
--- /dev/null
+++ b/src/interactive/scala/tools/nsc/interactive/tests/core/PresentationCompilerInstance.scala
@@ -0,0 +1,33 @@
+package scala.tools.nsc
+package interactive
+package tests.core
+
+import reporters.{Reporter => CompilerReporter}
+
+/** Trait encapsulating the creation of a presentation compiler's instance.*/
+private[tests] trait PresentationCompilerInstance extends TestSettings {
+ protected val settings = new Settings
+
+ protected val compilerReporter: CompilerReporter = new InteractiveReporter {
+ override def compiler = PresentationCompilerInstance.this.compiler
+ }
+
+ protected def createGlobal: Global = new Global(settings, compilerReporter)
+
+ protected lazy val compiler: Global = {
+ prepareSettings(settings)
+ createGlobal
+ }
+
+ /**
+ * Called before instantiating the presentation compiler's instance.
+ * You should provide an implementation of this method if you need
+ * to customize the `settings` used to instantiate the presentation compiler.
+ * */
+ protected def prepareSettings(settings: Settings) {}
+
+ protected def printClassPath(implicit reporter: Reporter) {
+ reporter.println("\tbootClassPath: %s".format(settings.bootclasspath.value))
+ reporter.println("\tverbose: %b".format(settings.verbose.value))
+ }
+}
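A hypothetical refinement (the trait name and option choices are illustrative, not part of this patch) showing the `prepareSettings` hook in use; it runs once, just before the lazy `compiler` is created, and would live alongside the other helpers in `tests.core`:

    private[tests] trait VerboseCompilerInstance extends PresentationCompilerInstance {
      override protected def prepareSettings(settings: Settings) {
        settings.usejavacp.value = true   // assumption: take the classpath from the running JVM
        settings.verbose.value = true
      }
    }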
diff --git a/src/interactive/scala/tools/nsc/interactive/tests/core/PresentationCompilerRequestsWorkingMode.scala b/src/interactive/scala/tools/nsc/interactive/tests/core/PresentationCompilerRequestsWorkingMode.scala
new file mode 100644
index 0000000000..b5ae5f2d75
--- /dev/null
+++ b/src/interactive/scala/tools/nsc/interactive/tests/core/PresentationCompilerRequestsWorkingMode.scala
@@ -0,0 +1,62 @@
+package scala.tools.nsc
+package interactive
+package tests.core
+
+import scala.reflect.internal.util.Position
+import scala.reflect.internal.util.SourceFile
+
+trait PresentationCompilerRequestsWorkingMode extends TestResources {
+
+ protected def synchronousRequests: Boolean
+
+ protected def askAllSources[T] = if (synchronousRequests) askAllSourcesSync[T] _ else askAllSourcesAsync[T] _
+
+ /** Perform an operation on all sources at all positions that match the given
+   * `marker`. For instance, askAllSources(TypeMarker)(askTypeAt) { (pos, tree) => println(tree) }
+   * would ask the type at all positions marked with `TypeMarker.marker` and print each result.
+ */
+ private def askAllSourcesAsync[T](marker: TestMarker)(askAt: Position => Response[T])(f: (Position, T) => Unit) {
+ val positions = allPositionsOf(str = marker.marker)
+ val responses = for (pos <- positions) yield askAt(pos)
+
+ for ((pos, r) <- positions zip responses) withResponse(pos, r)(f)
+ }
+
+ /** Synchronous version of askAllSources. Each position is treated in turn, waiting for the
+ * response before going to the next one.
+ */
+ private def askAllSourcesSync[T](marker: TestMarker)(askAt: Position => Response[T])(f: (Position, T) => Unit) {
+ val positions = allPositionsOf(str = marker.marker)
+ for (pos <- positions) withResponse(pos, askAt(pos))(f)
+ }
+
+ /** All positions of the given string in all source files. */
+ private def allPositionsOf(srcs: Seq[SourceFile] = sourceFiles, str: String): Seq[Position] =
+ for (s <- srcs; p <- positionsOf(s, str)) yield p
+
+ /** Return all positions of the given str in the given source file. */
+ private def positionsOf(source: SourceFile, str: String): Seq[Position] = {
+ val buf = new scala.collection.mutable.ListBuffer[Position]
+ var pos = source.content.indexOfSlice(str)
+ while (pos >= 0) {
+ buf += source.position(pos - 1) // we need the position before the first character of this marker
+ pos = source.content.indexOfSlice(str, pos + 1)
+ }
+ buf.toList
+ }
+
+ private def withResponse[T](pos: Position, response: Response[T])(f: (Position, T) => Unit) {
+ /** Return the filename:line:col version of this position. */
+ def showPos(pos: Position): String =
+ "%s:%d:%d".format(pos.source.file.name, pos.line, pos.column)
+
+ response.get(TIMEOUT) match {
+ case Some(Left(t)) =>
+ f(pos, t)
+ case None =>
+ println("TIMEOUT: " + showPos(pos))
+ case Some(r) =>
+ println("ERROR: " + r)
+ }
+ }
+}
diff --git a/src/interactive/scala/tools/nsc/interactive/tests/core/PresentationCompilerTestDef.scala b/src/interactive/scala/tools/nsc/interactive/tests/core/PresentationCompilerTestDef.scala
new file mode 100644
index 0000000000..4d5b4e1129
--- /dev/null
+++ b/src/interactive/scala/tools/nsc/interactive/tests/core/PresentationCompilerTestDef.scala
@@ -0,0 +1,18 @@
+package scala.tools.nsc.interactive.tests.core
+
+import scala.reflect.internal.util.Position
+
+trait PresentationCompilerTestDef {
+
+ private[tests] def runTest(): Unit
+
+ protected def withResponseDelimiter(block: => Unit)(implicit reporter: Reporter) {
+ def printDelimiter() = reporter.println("=" * 80)
+ printDelimiter()
+ block
+ printDelimiter()
+ }
+
+ protected def format(pos: Position): String =
+ (if(pos.isDefined) "(%d,%d)".format(pos.line, pos.column) else "<no position>")
+}
diff --git a/src/interactive/scala/tools/nsc/interactive/tests/core/Reporter.scala b/src/interactive/scala/tools/nsc/interactive/tests/core/Reporter.scala
new file mode 100644
index 0000000000..631504cda5
--- /dev/null
+++ b/src/interactive/scala/tools/nsc/interactive/tests/core/Reporter.scala
@@ -0,0 +1,15 @@
+package scala.tools.nsc.interactive.tests.core
+
+private[tests] trait Reporter {
+ def println(msg: Any): Unit
+}
+
+/** Reporter that simply prints all messages in the standard output.*/
+private[tests] object ConsoleReporter extends Reporter {
+ def println(msg: Any) { Console.println(msg) }
+}
+
+/** Reporter that swallows all passed messages. */
+private[tests] object NullReporter extends Reporter {
+ def println(msg: Any) {}
+} \ No newline at end of file
diff --git a/src/interactive/scala/tools/nsc/interactive/tests/core/SourcesCollector.scala b/src/interactive/scala/tools/nsc/interactive/tests/core/SourcesCollector.scala
new file mode 100644
index 0000000000..40cfc111a1
--- /dev/null
+++ b/src/interactive/scala/tools/nsc/interactive/tests/core/SourcesCollector.scala
@@ -0,0 +1,20 @@
+package scala.tools.nsc.interactive.tests.core
+
+import scala.reflect.internal.util.{SourceFile,BatchSourceFile}
+import scala.tools.nsc.io.{AbstractFile,Path}
+
+private[tests] object SourcesCollector {
+ type SourceFilter = Path => Boolean
+
+ /**
+   * All files below the `base` directory that pass the `filter`.
+   * A typical `filter` (see `TestResources`) accepts only .scala and .java files.
+ * */
+ def apply(base: Path, filter: SourceFilter): Array[SourceFile] = {
+ assert(base.isDirectory, base + " is not a directory")
+ base.walk.filter(filter).map(source).toList.toArray.sortBy(_.file.name)
+ }
+
+ private def source(file: Path): SourceFile = source(AbstractFile.getFile(file.toFile))
+ private def source(file: AbstractFile): SourceFile = new BatchSourceFile(file)
+}
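An illustrative call (the directory path is hypothetical), matching the way `TestResources` drives the collector:

    import scala.tools.nsc.io.Path

    val sources = SourcesCollector(
      Path("test/files/presentation/some-test/src"),
      p => p.extension == "scala" || p.extension == "java")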
diff --git a/src/interactive/scala/tools/nsc/interactive/tests/core/TestMarker.scala b/src/interactive/scala/tools/nsc/interactive/tests/core/TestMarker.scala
new file mode 100644
index 0000000000..3f9b40277c
--- /dev/null
+++ b/src/interactive/scala/tools/nsc/interactive/tests/core/TestMarker.scala
@@ -0,0 +1,29 @@
+package scala.tools.nsc.interactive.tests.core
+
+case class DuplicateTestMarker(msg: String) extends Exception(msg)
+
+object TestMarker {
+ import scala.collection.mutable.Map
+ private val markers: Map[String, TestMarker] = Map.empty
+
+ private def checkForDuplicate(marker: TestMarker) {
+ markers.get(marker.marker) match {
+ case None => markers(marker.marker) = marker
+ case Some(otherMarker) =>
+        val msg = "Marker `%s` is already used by %s. Please choose a different marker for %s".format(marker.marker, otherMarker, marker)
+ throw new DuplicateTestMarker(msg)
+ }
+ }
+}
+
+abstract case class TestMarker(marker: String) {
+ TestMarker.checkForDuplicate(this)
+}
+
+object TypeCompletionMarker extends TestMarker("/*!*/")
+
+object ScopeCompletionMarker extends TestMarker("/*_*/")
+
+object TypeMarker extends TestMarker("/*?*/")
+
+object HyperlinkMarker extends TestMarker("/*#*/")
diff --git a/src/interactive/scala/tools/nsc/interactive/tests/core/TestResources.scala b/src/interactive/scala/tools/nsc/interactive/tests/core/TestResources.scala
new file mode 100644
index 0000000000..887c3cf29b
--- /dev/null
+++ b/src/interactive/scala/tools/nsc/interactive/tests/core/TestResources.scala
@@ -0,0 +1,12 @@
+package scala.tools.nsc.interactive.tests.core
+
+import scala.tools.nsc.io.Path
+import scala.reflect.internal.util.SourceFile
+
+/** Resources used by the test. */
+private[tests] trait TestResources extends TestSettings {
+ /** collected source files that are to be used by the test runner */
+ protected lazy val sourceFiles: Array[SourceFile] = SourcesCollector(baseDir / sourceDir, isScalaOrJavaSource)
+
+  private def isScalaOrJavaSource(file: Path): Boolean = file.extension == "scala" || file.extension == "java"
+} \ No newline at end of file
diff --git a/src/interactive/scala/tools/nsc/interactive/tests/core/TestSettings.scala b/src/interactive/scala/tools/nsc/interactive/tests/core/TestSettings.scala
new file mode 100644
index 0000000000..681204172b
--- /dev/null
+++ b/src/interactive/scala/tools/nsc/interactive/tests/core/TestSettings.scala
@@ -0,0 +1,19 @@
+package scala.tools.nsc.interactive.tests.core
+
+import scala.tools.nsc.io.Path
+
+/** Common settings for the test. */
+private[tests] trait TestSettings {
+ protected final val TIMEOUT = 10000 // timeout in milliseconds
+
+ /** The root directory for this test suite, usually the test kind ("test/files/presentation"). */
+ protected val outDir = Path(Option(System.getProperty("partest.cwd")).getOrElse("."))
+
+ /** The base directory for this test, usually a subdirectory of "test/files/presentation/" */
+ protected val baseDir = Option(System.getProperty("partest.testname")).map(outDir / _).getOrElse(Path("."))
+
+ /** Where source files are placed. */
+ protected val sourceDir = "src"
+
+ protected implicit val reporter: Reporter = ConsoleReporter
+}