Diffstat (limited to 'src/compiler/scala/tools/nsc/ast')
-rwxr-xr-x  src/compiler/scala/tools/nsc/ast/DocComments.scala | 51
-rw-r--r--  src/compiler/scala/tools/nsc/ast/NodePrinters.scala | 30
-rw-r--r--  src/compiler/scala/tools/nsc/ast/Positions.scala | 11
-rw-r--r--  src/compiler/scala/tools/nsc/ast/Printers.scala | 123
-rw-r--r--  src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala | 47
-rw-r--r--  src/compiler/scala/tools/nsc/ast/TreeDSL.scala | 216
-rw-r--r--  src/compiler/scala/tools/nsc/ast/TreeGen.scala | 212
-rw-r--r--  src/compiler/scala/tools/nsc/ast/TreeInfo.scala | 74
-rw-r--r--  src/compiler/scala/tools/nsc/ast/Trees.scala | 184
-rw-r--r--  src/compiler/scala/tools/nsc/ast/parser/CommonTokens.scala | 112
-rwxr-xr-x  src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala | 90
-rw-r--r--  src/compiler/scala/tools/nsc/ast/parser/Parsers.scala | 1578
-rw-r--r--  src/compiler/scala/tools/nsc/ast/parser/Scanners.scala | 423
-rwxr-xr-x  src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala | 27
-rw-r--r--  src/compiler/scala/tools/nsc/ast/parser/SyntaxAnalyzer.scala | 92
-rw-r--r--  src/compiler/scala/tools/nsc/ast/parser/Tokens.scala | 186
-rw-r--r--  src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala | 504
-rw-r--r--  src/compiler/scala/tools/nsc/ast/parser/xml/MarkupParserCommon.scala | 211
-rwxr-xr-x  src/compiler/scala/tools/nsc/ast/parser/xml/Utility.scala | 163
19 files changed, 1980 insertions, 2354 deletions
diff --git a/src/compiler/scala/tools/nsc/ast/DocComments.scala b/src/compiler/scala/tools/nsc/ast/DocComments.scala
index 6e39fc9aa1..6d9b41ec45 100755
--- a/src/compiler/scala/tools/nsc/ast/DocComments.scala
+++ b/src/compiler/scala/tools/nsc/ast/DocComments.scala
@@ -7,10 +7,7 @@ package scala.tools.nsc
package ast
import symtab._
-import reporters._
-import scala.reflect.internal.util.{Position, NoPosition}
import util.DocStrings._
-import scala.reflect.internal.Chars._
import scala.collection.mutable
/*
@@ -21,8 +18,14 @@ trait DocComments { self: Global =>
val cookedDocComments = mutable.HashMap[Symbol, String]()
- /** The raw doc comment map */
- val docComments = mutable.HashMap[Symbol, DocComment]()
+ /** The raw doc comment map
+ *
+ *  In the IDE, background compilation runs get interrupted by
+ *  reloading new source files. This map is weak so that the doc comments
+ *  of symbols cached elsewhere (e.g. in baseTypeSeq) can be collected
+ *  between periodic doc reloads, avoiding memory leaks.
+ */
+ val docComments = mutable.WeakHashMap[Symbol, DocComment]()
def clearDocComments() {
cookedDocComments.clear()
@@ -30,11 +33,6 @@ trait DocComments { self: Global =>
defs.clear()
}
- /** Associate comment with symbol `sym` at position `pos`. */
- def docComment(sym: Symbol, docStr: String, pos: Position = NoPosition) =
- if ((sym ne null) && (sym ne NoSymbol))
- docComments += (sym -> DocComment(docStr, pos))
-
/** The raw doc comment of symbol `sym`, as it appears in the source text, "" if missing.
*/
def rawDocComment(sym: Symbol): String =
@@ -96,11 +94,6 @@ trait DocComments { self: Global =>
expandVariables(cookedDocComment(sym, docStr), sym, site1)
}
- /** The cooked doc comment of symbol `sym` after variable expansion, or "" if missing.
- * @param sym The symbol for which doc comment is returned (site is always the containing class)
- */
- def expandedDocComment(sym: Symbol): String = expandedDocComment(sym, sym.enclClass)
-
/** The list of use cases of doc comment of symbol `sym` seen as a member of class
* `site`. Each use case consists of a synthetic symbol (which is entered nowhere else),
* of an expanded doc comment string, and of its position.
@@ -129,12 +122,6 @@ trait DocComments { self: Global =>
getDocComment(sym) map getUseCases getOrElse List()
}
- def useCases(sym: Symbol): List[(Symbol, String, Position)] = useCases(sym, sym.enclClass)
-
- /** Returns the javadoc format of doc comment string `s`, including wiki expansion
- */
- def toJavaDoc(s: String): String = expandWiki(s)
-
private val wikiReplacements = List(
("""(\n\s*\*?)(\s*\n)""" .r, """$1 <p>$2"""),
("""<([^\w/])""" .r, """&lt;$1"""),
@@ -275,7 +262,7 @@ trait DocComments { self: Global =>
cleanupSectionText(parent.substring(sectionTextBounds._1, sectionTextBounds._2))
case None =>
reporter.info(sym.pos, "The \"" + getSectionHeader + "\" annotation of the " + sym +
- " comment contains @inheritdoc, but the corresponding section in the parent is not defined.", true)
+ " comment contains @inheritdoc, but the corresponding section in the parent is not defined.", force = true)
"<invalid inheritdoc annotation>"
}
@@ -313,7 +300,6 @@ trait DocComments { self: Global =>
/** Lookup definition of variable.
*
* @param vble The variable for which a definition is searched
- * @param owner The current owner in which variable definitions are searched.
* @param site The class for which doc comments are generated
*/
def lookupVariable(vble: String, site: Symbol): Option[String] = site match {
@@ -330,12 +316,12 @@ trait DocComments { self: Global =>
}
/** Expand variable occurrences in string `str`, until a fix point is reached or
- * a expandLimit is exceeded.
+ * an expandLimit is exceeded.
*
- * @param str The string to be expanded
- * @param sym The symbol for which doc comments are generated
- * @param site The class for which doc comments are generated
- * @return Expanded string
+ * @param initialStr The string to be expanded
+ * @param sym The symbol for which doc comments are generated
+ * @param site The class for which doc comments are generated
+ * @return Expanded string
*/
protected def expandVariables(initialStr: String, sym: Symbol, site: Symbol): String = {
val expandLimit = 10
@@ -372,7 +358,10 @@ trait DocComments { self: Global =>
case vname =>
lookupVariable(vname, site) match {
case Some(replacement) => replaceWith(replacement)
- case None => reporter.warning(sym.pos, "Variable " + vname + " undefined in comment for " + sym + " in " + site)
+ case None =>
+ val pos = docCommentPos(sym)
+ val loc = pos withPoint (pos.start + vstart + 1)
+ reporter.warning(loc, s"Variable $vname undefined in comment for $sym in $site")
}
}
}
@@ -470,7 +459,7 @@ trait DocComments { self: Global =>
//val (classes, pkgs) = site.ownerChain.span(!_.isPackageClass)
//val sites = (classes ::: List(pkgs.head, rootMirror.RootClass)))
//findIn(sites)
- findIn(site.ownerChain ::: List(definitions.EmptyPackage))
+ findIn(site.ownerChain ::: List(rootMirror.EmptyPackage))
}
def getType(str: String, variable: String): Type = {
@@ -507,7 +496,7 @@ trait DocComments { self: Global =>
result
}
- /**
+ /*
* work around the backticks issue suggested by Simon in
* https://groups.google.com/forum/?hl=en&fromgroups#!topic/scala-internals/z7s1CCRCz74
* ideally, we'd have a removeWikiSyntax method in the CommentFactory to completely eliminate the wiki markup
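
The HashMap-to-WeakHashMap change above is motivated by the IDE memory-leak note in the new doc comment. As a minimal stand-alone sketch of the difference (not compiler code; Key is a hypothetical stand-in for Symbol), a weak map lets an entry be collected once nothing else strongly references its key:

    import scala.collection.mutable

    object WeakCacheSketch {
      final class Key(val name: String)            // hypothetical stand-in for a compiler Symbol

      def main(args: Array[String]): Unit = {
        val strong = mutable.HashMap[Key, String]()
        val weak   = mutable.WeakHashMap[Key, String]()

        var k: Key = new Key("foo")
        strong(k) = "/** doc */"
        weak(k)   = "/** doc */"

        k = null                                   // drop the only strong reference to the key
        System.gc()                                // best-effort request; collection is not guaranteed

        // The strong map still pins its entry; the weak map's entry may now be gone.
        println(s"strong size = ${strong.size}, weak size = ${weak.size}")
      }
    }
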
diff --git a/src/compiler/scala/tools/nsc/ast/NodePrinters.scala b/src/compiler/scala/tools/nsc/ast/NodePrinters.scala
index deea4de707..9c8e13a1a9 100644
--- a/src/compiler/scala/tools/nsc/ast/NodePrinters.scala
+++ b/src/compiler/scala/tools/nsc/ast/NodePrinters.scala
@@ -32,7 +32,7 @@ abstract class NodePrinters {
}
trait DefaultPrintAST extends PrintAST {
- val printPos = settings.Xprintpos.value || settings.Yposdebug.value
+ val printPos = settings.Xprintpos || settings.Yposdebug
def showNameAndPos(tree: NameTree) = showPosition(tree) + showName(tree.name)
def showDefTreeName(tree: DefTree) = showName(tree.name)
@@ -100,9 +100,9 @@ abstract class NodePrinters {
def stringify(tree: Tree): String = {
buf.clear()
- if (settings.XshowtreesStringified.value) buf.append(tree.toString + EOL)
- if (settings.XshowtreesCompact.value) {
- buf.append(showRaw(tree, printIds = settings.uniqid.value, printTypes = settings.printtypes.value))
+ if (settings.XshowtreesStringified) buf.append(tree.toString + EOL)
+ if (settings.XshowtreesCompact) {
+ buf.append(showRaw(tree, printIds = settings.uniqid, printTypes = settings.printtypes))
} else {
level = 0
traverse(tree)
@@ -168,6 +168,13 @@ abstract class NodePrinters {
}
}
+ def typeApplyCommon(tree: Tree, fun: Tree, args: List[Tree]) {
+ printMultiline(tree) {
+ traverse(fun)
+ traverseList("[]", "type argument")(args)
+ }
+ }
+
def treePrefix(tree: Tree) = showPosition(tree) + tree.productPrefix
def printMultiline(tree: Tree)(body: => Unit) {
printMultiline(treePrefix(tree), showAttributes(tree))(body)
@@ -203,9 +210,11 @@ abstract class NodePrinters {
showPosition(tree)
tree match {
- case AppliedTypeTree(tpt, args) => applyCommon(tree, tpt, args)
- case ApplyDynamic(fun, args) => applyCommon(tree, fun, args)
- case Apply(fun, args) => applyCommon(tree, fun, args)
+ case ApplyDynamic(fun, args) => applyCommon(tree, fun, args)
+ case Apply(fun, args) => applyCommon(tree, fun, args)
+
+ case TypeApply(fun, args) => typeApplyCommon(tree, fun, args)
+ case AppliedTypeTree(tpt, args) => typeApplyCommon(tree, tpt, args)
case Throw(Ident(name)) =>
printSingle(tree, name)
@@ -273,7 +282,7 @@ abstract class NodePrinters {
traverseList("[]", "type parameter")(tparams)
vparamss match {
case Nil => println("Nil")
- case Nil :: Nil => println("List(Nil)")
+ case ListOfNil => println("List(Nil)")
case ps :: Nil =>
printLine("", "1 parameter list")
ps foreach traverse
@@ -312,11 +321,6 @@ abstract class NodePrinters {
}
case This(qual) =>
printSingle(tree, qual)
- case TypeApply(fun, args) =>
- printMultiline(tree) {
- traverse(fun)
- traverseList("[]", "type argument")(args)
- }
case tt @ TypeTree() =>
println(showTypeTree(tt))
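
Several hunks above drop `.value` when reading boolean settings (e.g. `settings.Xprintpos || settings.Yposdebug`). Here is a toy sketch of how such a shorthand can work via an implicit conversion; the names below are hypothetical illustrations, not the compiler's actual Settings API:

    import scala.language.implicitConversions

    object SettingsSketch {
      final class BooleanSetting(val name: String) { var value: Boolean = false }

      // Hypothetical conversion, for illustration only; the real compiler Settings
      // class provides its own way of treating a boolean setting as a Boolean.
      implicit def settingToBoolean(s: BooleanSetting): Boolean = s.value

      def main(args: Array[String]): Unit = {
        val Xprintpos = new BooleanSetting("-Xprintpos")
        val Yposdebug = new BooleanSetting("-Yposdebug")
        Yposdebug.value = true
        val printPos = Xprintpos || Yposdebug      // reads like the updated hunks above
        println(printPos)                          // true
      }
    }
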
diff --git a/src/compiler/scala/tools/nsc/ast/Positions.scala b/src/compiler/scala/tools/nsc/ast/Positions.scala
index d8fb632f73..beab801edf 100644
--- a/src/compiler/scala/tools/nsc/ast/Positions.scala
+++ b/src/compiler/scala/tools/nsc/ast/Positions.scala
@@ -1,16 +1,9 @@
package scala.tools.nsc
package ast
-import scala.reflect.internal.util.{ SourceFile, Position, OffsetPosition, NoPosition }
-
trait Positions extends scala.reflect.internal.Positions {
self: Global =>
- def rangePos(source: SourceFile, start: Int, point: Int, end: Int) =
- new OffsetPosition(source, point)
-
- def validatePositions(tree: Tree) {}
-
class ValidatingPosAssigner extends PosAssigner {
var pos: Position = _
override def traverse(t: Tree) {
@@ -20,7 +13,7 @@ trait Positions extends scala.reflect.internal.Positions {
// When we prune due to encountering a position, traverse the
// pruned children so we can warn about those lacking positions.
t.children foreach { c =>
- if ((c eq EmptyTree) || (c eq emptyValDef)) ()
+ if (!c.canHaveAttrs) ()
else if (c.pos == NoPosition) {
reporter.warning(t.pos, " Positioned tree has unpositioned child in phase " + globalPhase)
inform("parent: " + treeSymStatus(t))
@@ -32,6 +25,6 @@ trait Positions extends scala.reflect.internal.Positions {
}
override protected[this] lazy val posAssigner: PosAssigner =
- if (settings.Yrangepos.value && settings.debug.value || settings.Yposdebug.value) new ValidatingPosAssigner
+ if (settings.Yrangepos && settings.debug || settings.Yposdebug) new ValidatingPosAssigner
else new DefaultPosAssigner
}
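
The ValidatingPosAssigner hunk above warns when a positioned tree has an unpositioned child. A toy sketch of that validation walk, using a made-up Node type rather than the compiler's Tree:

    object PosCheckSketch {
      final case class Node(label: String, pos: Option[Int], children: List[Node])

      // Warn whenever a node that has a position has a direct child without one,
      // then keep descending, mirroring the traversal idea in ValidatingPosAssigner.
      def validate(t: Node): Unit =
        t.children foreach { c =>
          if (t.pos.isDefined && c.pos.isEmpty)
            println(s"warning: positioned node '${t.label}' has unpositioned child '${c.label}'")
          validate(c)
        }

      def main(args: Array[String]): Unit =
        validate(Node("DefDef", Some(3), List(Node("Ident", None, Nil))))
    }
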
diff --git a/src/compiler/scala/tools/nsc/ast/Printers.scala b/src/compiler/scala/tools/nsc/ast/Printers.scala
index 83222a24b4..f3def3c80c 100644
--- a/src/compiler/scala/tools/nsc/ast/Printers.scala
+++ b/src/compiler/scala/tools/nsc/ast/Printers.scala
@@ -7,8 +7,6 @@ package scala.tools.nsc
package ast
import java.io.{ OutputStream, PrintWriter, StringWriter, Writer }
-import symtab.Flags._
-import symtab.SymbolTable
trait Printers extends scala.reflect.internal.Printers { this: Global =>
@@ -22,7 +20,7 @@ trait Printers extends scala.reflect.internal.Printers { this: Global =>
printTree(
if (tree.isDef && tree.symbol != NoSymbol && tree.symbol.isInitialized) {
tree match {
- case ClassDef(_, _, _, impl @ Template(ps, emptyValDef, body))
+ case ClassDef(_, _, _, impl @ Template(ps, noSelfType, body))
if (tree.symbol.thisSym != tree.symbol) =>
ClassDef(tree.symbol, Template(ps, ValDef(tree.symbol.thisSym), body))
case ClassDef(_, _, _, impl) => ClassDef(tree.symbol, impl)
@@ -44,7 +42,7 @@ trait Printers extends scala.reflect.internal.Printers { this: Global =>
}
}
- // overflow cases missing from TreePrinter in reflect.api
+ // overflow cases missing from TreePrinter in scala.reflect.api
override def xprintTree(treePrinter: super.TreePrinter, tree: Tree) = tree match {
case DocDef(comment, definition) =>
treePrinter.print(comment.raw)
@@ -130,7 +128,7 @@ trait Printers extends scala.reflect.internal.Printers { this: Global =>
case Select(qualifier, name) =>
printTree(qualifier)
print(".")
- print(quotedName(name, true))
+ print(quotedName(name, decode = true))
// target.toString() ==> target.toString
case Apply(fn, Nil) => printTree(fn)
@@ -154,7 +152,7 @@ trait Printers extends scala.reflect.internal.Printers { this: Global =>
// If thenp or elsep has only one statement, it doesn't need more than one line.
case If(cond, thenp, elsep) =>
def ifIndented(x: Tree) = {
- indent ; println() ; printTree(x) ; undent
+ indent() ; println() ; printTree(x) ; undent()
}
val List(thenStmts, elseStmts) = List(thenp, elsep) map allStatements
@@ -168,128 +166,27 @@ trait Printers extends scala.reflect.internal.Printers { this: Global =>
if (elseStmts.nonEmpty) {
print(" else")
- indent ; println()
+ indent() ; println()
elseStmts match {
case List(x) => printTree(x)
case _ => printTree(elsep)
}
- undent ; println()
+ undent() ; println()
}
case _ => s()
}
}
}
- /** This must guarantee not to force any evaluation, so we can learn
- * a little bit about trees in the midst of compilation without altering
- * the natural course of events.
- */
- class SafeTreePrinter(out: PrintWriter) extends TreePrinter(out) {
-
- private def default(t: Tree) = t.getClass.getName.reverse.takeWhile(_ != '.').reverse
- private def params(trees: List[Tree]): String = trees map safe mkString ", "
-
- private def safe(name: Name): String = name.decode
- private def safe(tree: Tree): String = tree match {
- case Apply(fn, args) => "%s(%s)".format(safe(fn), params(args))
- case Select(qual, name) => safe(qual) + "." + safe(name)
- case This(qual) => safe(qual) + ".this"
- case Ident(name) => safe(name)
- case Literal(value) => value.stringValue
- case _ => "(?: %s)".format(default(tree))
- }
-
- override def printTree(tree: Tree) { print(safe(tree)) }
- }
-
- class TreeMatchTemplate {
- // non-trees defined in Trees
- //
- // case class ImportSelector(name: Name, namePos: Int, rename: Name, renamePos: Int)
- // case class Modifiers(flags: Long, privateWithin: Name, annotations: List[Tree], positions: Map[Long, Position])
- //
- def apply(t: Tree): Unit = t match {
- // eliminated by typer
- case Annotated(annot, arg) =>
- case AssignOrNamedArg(lhs, rhs) =>
- case DocDef(comment, definition) =>
- case Import(expr, selectors) =>
-
- // eliminated by refchecks
- case ModuleDef(mods, name, impl) =>
- case TypeTreeWithDeferredRefCheck() =>
-
- // eliminated by erasure
- case TypeDef(mods, name, tparams, rhs) =>
- case Typed(expr, tpt) =>
-
- // eliminated by cleanup
- case ApplyDynamic(qual, args) =>
-
- // eliminated by explicitouter
- case Alternative(trees) =>
- case Bind(name, body) =>
- case CaseDef(pat, guard, body) =>
- case Star(elem) =>
- case UnApply(fun, args) =>
-
- // eliminated by lambdalift
- case Function(vparams, body) =>
-
- // eliminated by uncurry
- case AppliedTypeTree(tpt, args) =>
- case CompoundTypeTree(templ) =>
- case ExistentialTypeTree(tpt, whereClauses) =>
- case SelectFromTypeTree(qual, selector) =>
- case SingletonTypeTree(ref) =>
- case TypeBoundsTree(lo, hi) =>
-
- // survivors
- case Apply(fun, args) =>
- case ArrayValue(elemtpt, trees) =>
- case Assign(lhs, rhs) =>
- case Block(stats, expr) =>
- case ClassDef(mods, name, tparams, impl) =>
- case DefDef(mods, name, tparams, vparamss, tpt, rhs) =>
- case EmptyTree =>
- case Ident(name) =>
- case If(cond, thenp, elsep) =>
- case LabelDef(name, params, rhs) =>
- case Literal(value) =>
- case Match(selector, cases) =>
- case New(tpt) =>
- case PackageDef(pid, stats) =>
- case Return(expr) =>
- case Select(qualifier, selector) =>
- case Super(qual, mix) =>
- case Template(parents, self, body) =>
- case This(qual) =>
- case Throw(expr) =>
- case Try(block, catches, finalizer) =>
- case TypeApply(fun, args) =>
- case TypeTree() =>
- case ValDef(mods, name, tpt, rhs) =>
-
- // missing from the Trees comment
- case Parens(args) => // only used during parsing
- case SelectFromArray(qual, name, erasure) => // only used during erasure
- }
- }
-
- def asString(t: Tree): String = render(t, newStandardTreePrinter, settings.printtypes.value, settings.uniqid.value, settings.Yshowsymkinds.value)
- def asCompactString(t: Tree): String = render(t, newCompactTreePrinter, settings.printtypes.value, settings.uniqid.value, settings.Yshowsymkinds.value)
- def asCompactDebugString(t: Tree): String = render(t, newCompactTreePrinter, true, true, true)
+ def asString(t: Tree): String = render(t, newStandardTreePrinter, settings.printtypes, settings.uniqid, settings.Yshowsymowners, settings.Yshowsymkinds)
+ def asCompactString(t: Tree): String = render(t, newCompactTreePrinter, settings.printtypes, settings.uniqid, settings.Yshowsymowners, settings.Yshowsymkinds)
+ def asCompactDebugString(t: Tree): String = render(t, newCompactTreePrinter, true, true, true, true)
def newStandardTreePrinter(writer: PrintWriter): TreePrinter = new TreePrinter(writer)
- def newStandardTreePrinter(stream: OutputStream): TreePrinter = newStandardTreePrinter(new PrintWriter(stream))
- def newStandardTreePrinter(): TreePrinter = newStandardTreePrinter(new PrintWriter(ConsoleWriter))
-
def newCompactTreePrinter(writer: PrintWriter): CompactTreePrinter = new CompactTreePrinter(writer)
- def newCompactTreePrinter(stream: OutputStream): CompactTreePrinter = newCompactTreePrinter(new PrintWriter(stream))
- def newCompactTreePrinter(): CompactTreePrinter = newCompactTreePrinter(new PrintWriter(ConsoleWriter))
override def newTreePrinter(writer: PrintWriter): TreePrinter =
- if (settings.Ycompacttrees.value) newCompactTreePrinter(writer)
+ if (settings.Ycompacttrees) newCompactTreePrinter(writer)
else newStandardTreePrinter(writer)
override def newTreePrinter(stream: OutputStream): TreePrinter = newTreePrinter(new PrintWriter(stream))
override def newTreePrinter(): TreePrinter = newTreePrinter(new PrintWriter(ConsoleWriter))
diff --git a/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala b/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala
index 5c954096f4..eafecf9462 100644
--- a/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala
+++ b/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala
@@ -3,7 +3,8 @@
* @author Martin Odersky
*/
-package scala.tools.nsc
+package scala
+package tools.nsc
package ast
import java.awt.{List => awtList, _}
@@ -16,8 +17,6 @@ import javax.swing.tree._
import scala.concurrent.Lock
import scala.text._
-import symtab.Flags._
-import symtab.SymbolTable
import scala.language.implicitConversions
/**
@@ -34,7 +33,7 @@ abstract class TreeBrowsers {
val borderSize = 10
- def create(): SwingBrowser = new SwingBrowser();
+ def create(): SwingBrowser = new SwingBrowser()
/** Pseudo tree class, so that all JTree nodes are treated uniformly */
case class ProgramTree(units: List[UnitTree]) extends Tree {
@@ -50,21 +49,6 @@ abstract class TreeBrowsers {
* Java Swing pretty printer for Scala abstract syntax trees.
*/
class SwingBrowser {
-
- def browse(t: Tree): Tree = {
- val tm = new ASTTreeModel(t)
-
- val frame = new BrowserFrame()
- frame.setTreeModel(tm)
-
- val lock = new Lock()
- frame.createFrame(lock)
-
- // wait for the frame to be closed
- lock.acquire
- t
- }
-
def browse(pName: String, units: Iterator[CompilationUnit]): Unit =
browse(pName, units.toList)
@@ -83,7 +67,7 @@ abstract class TreeBrowsers {
frame.createFrame(lock)
// wait for the frame to be closed
- lock.acquire
+ lock.acquire()
}
}
@@ -171,8 +155,8 @@ abstract class TreeBrowsers {
_setExpansionState(root, new TreePath(root.getModel.getRoot))
}
- def expandAll(subtree: JTree) = setExpansionState(subtree, true)
- def collapseAll(subtree: JTree) = setExpansionState(subtree, false)
+ def expandAll(subtree: JTree) = setExpansionState(subtree, expand = true)
+ def collapseAll(subtree: JTree) = setExpansionState(subtree, expand = false)
/** Create a frame that displays the AST.
@@ -184,14 +168,14 @@ abstract class TreeBrowsers {
* especially symbols/types would change while the window is visible.
*/
def createFrame(lock: Lock): Unit = {
- lock.acquire // keep the lock until the user closes the window
+ lock.acquire() // keep the lock until the user closes the window
frame.setDefaultCloseOperation(WindowConstants.DISPOSE_ON_CLOSE)
frame.addWindowListener(new WindowAdapter() {
/** Release the lock, so compilation may resume after the window is closed. */
- override def windowClosed(e: WindowEvent): Unit = lock.release
- });
+ override def windowClosed(e: WindowEvent): Unit = lock.release()
+ })
jTree = new JTree(treeModel) {
/** Return the string for a tree node. */
@@ -253,7 +237,7 @@ abstract class TreeBrowsers {
putValue(Action.ACCELERATOR_KEY, KeyStroke.getKeyStroke(KeyEvent.VK_Q, menuKey + shiftKey, false))
override def actionPerformed(e: ActionEvent) {
closeWindow()
- global.currentRun.cancel
+ global.currentRun.cancel()
}
}
)
@@ -509,7 +493,7 @@ abstract class TreeBrowsers {
/** Return a textual representation of this t's symbol */
def symbolText(t: Tree): String = {
val prefix =
- if (t.hasSymbol) "[has] "
+ if (t.hasSymbolField) "[has] "
else if (t.isDef) "[defines] "
else ""
@@ -529,11 +513,10 @@ abstract class TreeBrowsers {
* attributes */
def symbolAttributes(t: Tree): String = {
val s = t.symbol
- var att = ""
if ((s ne null) && (s != NoSymbol)) {
- var str = flagsToString(s.flags)
- if (s.isStaticMember) str = str + " isStatic ";
+ var str = s.flagString
+ if (s.isStaticMember) str = str + " isStatic "
(str + " annotations: " + s.annotations.mkString("", " ", "")
+ (if (s.isTypeSkolem) "\ndeSkolemized annotations: " + s.deSkolemize.annotations.mkString("", " ", "") else ""))
}
@@ -636,7 +619,7 @@ abstract class TreeBrowsers {
toDocument(result) :: ")")
)
- case AnnotatedType(annots, tp, _) =>
+ case AnnotatedType(annots, tp) =>
Document.group(
Document.nest(4, "AnnotatedType(" :/:
annots.mkString("[", ",", "]") :/:
@@ -649,7 +632,7 @@ abstract class TreeBrowsers {
Document.group("(" :/: symsToDocument(tparams) :/: "), ") :/:
toDocument(result) :: ")"))
- case global.analyzer.ImportType(expr) =>
+ case ImportType(expr) =>
"ImportType(" + expr.toString + ")"
diff --git a/src/compiler/scala/tools/nsc/ast/TreeDSL.scala b/src/compiler/scala/tools/nsc/ast/TreeDSL.scala
index 9a5b92e795..6dda30b5e7 100644
--- a/src/compiler/scala/tools/nsc/ast/TreeDSL.scala
+++ b/src/compiler/scala/tools/nsc/ast/TreeDSL.scala
@@ -7,7 +7,6 @@
package scala.tools.nsc
package ast
-import PartialFunction._
import symtab.Flags
import scala.language.implicitConversions
@@ -21,7 +20,6 @@ trait TreeDSL {
import global._
import definitions._
- import gen.{ scalaDot }
object CODE {
// Add a null check to a Tree => Tree function
@@ -31,30 +29,16 @@ trait TreeDSL {
def returning[T](x: T)(f: T => Unit): T = util.returning(x)(f)
object LIT extends (Any => Literal) {
+ def typed(x: Any) = apply(x) setType ConstantType(Constant(x))
def apply(x: Any) = Literal(Constant(x))
- def unapply(x: Any) = condOpt(x) { case Literal(Constant(value)) => value }
}
- // You might think these could all be vals, but empirically I have found that
- // at least in the case of UNIT the compiler breaks if you re-use trees.
- // However we need stable identifiers to have attractive pattern matching.
- // So it's inconsistent until I devise a better way.
- val TRUE = LIT(true)
- val FALSE = LIT(false)
- val ZERO = LIT(0)
- def NULL = LIT(null)
- def UNIT = LIT(())
-
- // for those preferring boring, predictable lives, without the thrills of tree-sharing
- // (but with the perk of typed trees)
- def TRUE_typed = LIT(true) setType ConstantType(Constant(true))
- def FALSE_typed = LIT(false) setType ConstantType(Constant(false))
-
- object WILD {
- def empty = Ident(nme.WILDCARD)
- def apply(tpe: Type) = Ident(nme.WILDCARD) setType tpe
- def unapply(other: Any) = cond(other) { case Ident(nme.WILDCARD) => true }
- }
+ // Boring, predictable trees.
+ def TRUE = LIT typed true
+ def FALSE = LIT typed false
+ def ZERO = LIT(0)
+ def NULL = LIT(null)
+ def UNIT = LIT(())
def fn(lhs: Tree, op: Name, args: Tree*) = Apply(Select(lhs, op), args.toList)
def fn(lhs: Tree, op: Symbol, args: Tree*) = Apply(Select(lhs, op), args.toList)
@@ -82,19 +66,15 @@ trait TreeDSL {
if (opSym == NoSymbol) ANY_==(other)
else fn(target, opSym, other)
}
- def ANY_EQ (other: Tree) = OBJ_EQ(other AS ObjectClass.tpe)
+ def ANY_EQ (other: Tree) = OBJ_EQ(other AS ObjectTpe)
def ANY_== (other: Tree) = fn(target, Any_==, other)
def ANY_!= (other: Tree) = fn(target, Any_!=, other)
- def OBJ_== (other: Tree) = fn(target, Object_==, other)
- def OBJ_!= (other: Tree) = fn(target, Object_!=, other)
def OBJ_EQ (other: Tree) = fn(target, Object_eq, other)
def OBJ_NE (other: Tree) = fn(target, Object_ne, other)
- def INT_| (other: Tree) = fn(target, getMember(IntClass, nme.OR), other)
- def INT_& (other: Tree) = fn(target, getMember(IntClass, nme.AND), other)
def INT_>= (other: Tree) = fn(target, getMember(IntClass, nme.GE), other)
def INT_== (other: Tree) = fn(target, getMember(IntClass, nme.EQ), other)
- def INT_!= (other: Tree) = fn(target, getMember(IntClass, nme.NE), other)
+ def INT_- (other: Tree) = fn(target, getMember(IntClass, nme.MINUS), other)
// generic operations on ByteClass, IntClass, LongClass
def GEN_| (other: Tree, kind: ClassSymbol) = fn(target, getMember(kind, nme.OR), other)
@@ -102,37 +82,28 @@ trait TreeDSL {
def GEN_== (other: Tree, kind: ClassSymbol) = fn(target, getMember(kind, nme.EQ), other)
def GEN_!= (other: Tree, kind: ClassSymbol) = fn(target, getMember(kind, nme.NE), other)
- def BOOL_&& (other: Tree) = fn(target, Boolean_and, other)
- def BOOL_|| (other: Tree) = fn(target, Boolean_or, other)
-
/** Apply, Select, Match **/
def APPLY(params: Tree*) = Apply(target, params.toList)
def APPLY(params: List[Tree]) = Apply(target, params)
- def MATCH(cases: CaseDef*) = Match(target, cases.toList)
def DOT(member: Name) = SelectStart(Select(target, member))
def DOT(sym: Symbol) = SelectStart(Select(target, sym))
/** Assignment */
+ // !!! This method is responsible for some tree sharing, but a diligent
+ // reviewer pointed out that we shouldn't blindly duplicate these trees
+ // as there might be DefTrees nested beneath them. It's not entirely
+ // clear how to proceed, so for now it retains the non-duplicating behavior.
def ===(rhs: Tree) = Assign(target, rhs)
- /** Methods for sequences **/
- def DROP(count: Int): Tree =
- if (count == 0) target
- else (target DOT nme.drop)(LIT(count))
-
/** Casting & type tests -- working our way toward understanding exactly
* what differs between the different forms of IS and AS.
*
* See ticket #2168 for one illustration of AS vs. AS_ANY.
*/
def AS(tpe: Type) = gen.mkAsInstanceOf(target, tpe, any = true, wrapInApply = false)
- def IS(tpe: Type) = gen.mkIsInstanceOf(target, tpe, true)
- def IS_OBJ(tpe: Type) = gen.mkIsInstanceOf(target, tpe, false)
+ def IS_OBJ(tpe: Type) = gen.mkIsInstanceOf(target, tpe, any = false)
- // XXX having some difficulty expressing nullSafe in a way that doesn't freak out value types
- // def TOSTRING() = nullSafe(fn(_: Tree, nme.toString_), LIT("null"))(target)
- def TOSTRING() = fn(target, nme.toString_)
def GETCLASS() = fn(target, Object_getClass)
}
@@ -145,98 +116,6 @@ trait TreeDSL {
def ==>(body: Tree): CaseDef = CaseDef(pat, guard, body)
}
- /** VODD, if it's not obvious, means ValOrDefDef. This is the
- * common code between a tree based on a pre-existing symbol and
- * one being built from scratch.
- */
- trait VODDStart {
- def name: Name
- def defaultMods: Modifiers
- def defaultTpt: Tree
- def defaultPos: Position
-
- type ResultTreeType <: ValOrDefDef
- def mkTree(rhs: Tree): ResultTreeType
- def ===(rhs: Tree): ResultTreeType
-
- private var _mods: Modifiers = null
- private var _tpt: Tree = null
- private var _pos: Position = null
-
- def withType(tp: Type): this.type = {
- _tpt = TypeTree(tp)
- this
- }
- def withFlags(flags: Long*): this.type = {
- if (_mods == null)
- _mods = defaultMods
-
- _mods = flags.foldLeft(_mods)(_ | _)
- this
- }
- def withPos(pos: Position): this.type = {
- _pos = pos
- this
- }
-
- final def mods = if (_mods == null) defaultMods else _mods
- final def tpt = if (_tpt == null) defaultTpt else _tpt
- final def pos = if (_pos == null) defaultPos else _pos
- }
- trait SymVODDStart extends VODDStart {
- def sym: Symbol
- def symType: Type
-
- def name = sym.name
- def defaultMods = Modifiers(sym.flags)
- def defaultTpt = TypeTree(symType) setPos sym.pos.focus
- def defaultPos = sym.pos
-
- final def ===(rhs: Tree): ResultTreeType =
- atPos(pos)(mkTree(rhs) setSymbol sym)
- }
- trait ValCreator {
- self: VODDStart =>
-
- type ResultTreeType = ValDef
- def mkTree(rhs: Tree): ValDef = ValDef(mods, name, tpt, rhs)
- }
- trait DefCreator {
- self: VODDStart =>
-
- def tparams: List[TypeDef]
- def vparamss: List[List[ValDef]]
-
- type ResultTreeType = DefDef
- def mkTree(rhs: Tree): DefDef = DefDef(mods, name, tparams, vparamss, tpt, rhs)
- }
-
- class DefSymStart(val sym: Symbol) extends SymVODDStart with DefCreator {
- def symType = sym.tpe.finalResultType
- def tparams = sym.typeParams map TypeDef
- def vparamss = mapParamss(sym)(ValDef)
- }
- class ValSymStart(val sym: Symbol) extends SymVODDStart with ValCreator {
- def symType = sym.tpe
- }
-
- trait TreeVODDStart extends VODDStart {
- def defaultMods = NoMods
- def defaultTpt = TypeTree()
- def defaultPos = NoPosition
-
- final def ===(rhs: Tree): ResultTreeType =
- if (pos == NoPosition) mkTree(rhs)
- else atPos(pos)(mkTree(rhs))
- }
-
- class ValTreeStart(val name: Name) extends TreeVODDStart with ValCreator {
- }
- class DefTreeStart(val name: Name) extends TreeVODDStart with DefCreator {
- def tparams: List[TypeDef] = Nil
- def vparamss: List[List[ValDef]] = ListOfNil
- }
-
class IfStart(cond: Tree, thenp: Tree) {
def THEN(x: Tree) = new IfStart(cond, x)
def ELSE(elsep: Tree) = If(cond, thenp, elsep)
@@ -244,84 +123,29 @@ trait TreeDSL {
}
class TryStart(body: Tree, catches: List[CaseDef], fin: Tree) {
def CATCH(xs: CaseDef*) = new TryStart(body, xs.toList, fin)
- def FINALLY(x: Tree) = Try(body, catches, x)
def ENDTRY = Try(body, catches, fin)
}
def CASE(pat: Tree): CaseStart = new CaseStart(pat, EmptyTree)
- def DEFAULT: CaseStart = new CaseStart(WILD.empty, EmptyTree)
-
- class SymbolMethods(target: Symbol) {
- def BIND(body: Tree) = Bind(target, body)
- def IS_NULL() = REF(target) OBJ_EQ NULL
- def NOT_NULL() = REF(target) OBJ_NE NULL
-
- def GET() = fn(REF(target), nme.get)
-
- // name of nth indexed argument to a method (first parameter list), defaults to 1st
- def ARG(idx: Int = 0) = Ident(target.paramss.head(idx))
- def ARGS = target.paramss.head
- def ARGNAMES = ARGS map Ident
- }
-
- /** Top level accessible. */
- def MATCHERROR(arg: Tree) = Throw(MatchErrorClass.tpe, arg)
- def THROW(sym: Symbol, msg: Tree): Throw = Throw(sym.tpe, msg.TOSTRING())
+ def DEFAULT: CaseStart = new CaseStart(Ident(nme.WILDCARD), EmptyTree)
def NEW(tpt: Tree, args: Tree*): Tree = New(tpt, List(args.toList))
- def NEW(sym: Symbol, args: Tree*): Tree = New(sym.tpe, args: _*)
-
- def DEF(name: Name, tp: Type): DefTreeStart = DEF(name) withType tp
- def DEF(name: Name): DefTreeStart = new DefTreeStart(name)
- def DEF(sym: Symbol): DefSymStart = new DefSymStart(sym)
-
- def VAL(name: Name, tp: Type): ValTreeStart = VAL(name) withType tp
- def VAL(name: Name): ValTreeStart = new ValTreeStart(name)
- def VAL(sym: Symbol): ValSymStart = new ValSymStart(sym)
-
- def VAR(name: Name, tp: Type): ValTreeStart = VAL(name, tp) withFlags Flags.MUTABLE
- def VAR(name: Name): ValTreeStart = VAL(name) withFlags Flags.MUTABLE
- def VAR(sym: Symbol): ValSymStart = VAL(sym) withFlags Flags.MUTABLE
-
- def LAZYVAL(name: Name, tp: Type): ValTreeStart = VAL(name, tp) withFlags Flags.LAZY
- def LAZYVAL(name: Name): ValTreeStart = VAL(name) withFlags Flags.LAZY
- def LAZYVAL(sym: Symbol): ValSymStart = VAL(sym) withFlags Flags.LAZY
- def AND(guards: Tree*) =
- if (guards.isEmpty) EmptyTree
- else guards reduceLeft gen.mkAnd
-
- def OR(guards: Tree*) =
- if (guards.isEmpty) EmptyTree
- else guards reduceLeft gen.mkOr
+ def NOT(tree: Tree) = Select(tree, Boolean_not)
+ def AND(guards: Tree*) = if (guards.isEmpty) EmptyTree else guards reduceLeft gen.mkAnd
def IF(tree: Tree) = new IfStart(tree, EmptyTree)
def TRY(tree: Tree) = new TryStart(tree, Nil, EmptyTree)
def BLOCK(xs: Tree*) = Block(xs.init.toList, xs.last)
- def NOT(tree: Tree) = Select(tree, Boolean_not)
- def SOME(xs: Tree*) = Apply(SomeClass.companionSymbol, makeTupleTerm(xs.toList, true))
+ def SOME(xs: Tree*) = Apply(SomeClass.companionSymbol, gen.mkTuple(xs.toList))
/** Typed trees from symbols. */
- def THIS(sym: Symbol) = gen.mkAttributedThis(sym)
- def ID(sym: Symbol) = gen.mkAttributedIdent(sym)
- def REF(sym: Symbol) = gen.mkAttributedRef(sym)
- def REF(pre: Type, sym: Symbol) = gen.mkAttributedRef(pre, sym)
-
- def makeTupleTerm(trees: List[Tree], flattenUnary: Boolean): Tree = trees match {
- case Nil => UNIT
- case List(tree) if flattenUnary => tree
- case _ => Apply(TupleClass(trees.length).companionModule, trees: _*)
- }
- def makeTupleType(trees: List[Tree], flattenUnary: Boolean): Tree = trees match {
- case Nil => gen.scalaUnitConstr
- case List(tree) if flattenUnary => tree
- case _ => AppliedTypeTree(REF(TupleClass(trees.length)), trees)
- }
+ def REF(sym: Symbol) = gen.mkAttributedRef(sym)
+ def REF(pre: Type, sym: Symbol) = gen.mkAttributedRef(pre, sym)
/** Implicits - some of these should probably disappear **/
implicit def mkTreeMethods(target: Tree): TreeMethods = new TreeMethods(target)
implicit def mkTreeMethodsFromSymbol(target: Symbol): TreeMethods = new TreeMethods(Ident(target))
- implicit def mkSymbolMethodsFromSymbol(target: Symbol): SymbolMethods = new SymbolMethods(target)
/** (foo DOT bar) might be simply a Select, but more likely it is to be immediately
* followed by an Apply. We don't want to add an actual apply method to arbitrary
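
For readers unfamiliar with the CODE DSL trimmed above, here is a stand-alone sketch of the builder shape behind IF ... THEN ... ELSE, using a toy Tree type rather than the compiler's:

    object TreeDslSketch {
      sealed trait Tree
      case object EmptyTree                                      extends Tree
      final case class Lit(value: Any)                           extends Tree
      final case class If(cond: Tree, thenp: Tree, elsep: Tree)  extends Tree

      // Same shape as IfStart above: each step returns a builder until ELSE
      // (or ENDIF) produces the finished If node.
      class IfStart(cond: Tree, thenp: Tree) {
        def THEN(x: Tree)     = new IfStart(cond, x)
        def ELSE(elsep: Tree) = If(cond, thenp, elsep)
        def ENDIF             = If(cond, thenp, Lit(()))
      }
      def IF(cond: Tree) = new IfStart(cond, EmptyTree)

      def main(args: Array[String]): Unit =
        println(IF (Lit(true)) THEN Lit(0) ELSE Lit(1))          // If(Lit(true),Lit(0),Lit(1))
    }
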
diff --git a/src/compiler/scala/tools/nsc/ast/TreeGen.scala b/src/compiler/scala/tools/nsc/ast/TreeGen.scala
index 99b82d9746..0575b9703e 100644
--- a/src/compiler/scala/tools/nsc/ast/TreeGen.scala
+++ b/src/compiler/scala/tools/nsc/ast/TreeGen.scala
@@ -8,7 +8,6 @@ package ast
import scala.collection.mutable.ListBuffer
import symtab.Flags._
-import symtab.SymbolTable
import scala.language.postfixOps
/** XXX to resolve: TreeGen only assumes global is a SymbolTable, but
@@ -20,31 +19,28 @@ abstract class TreeGen extends scala.reflect.internal.TreeGen with TreeDSL {
import global._
import definitions._
- def mkCheckInit(tree: Tree): Tree = {
- val tpe =
- if (tree.tpe != null || !tree.hasSymbol) tree.tpe
- else tree.symbol.tpe
-
- if (!global.phase.erasedTypes && settings.warnSelectNullable.value &&
- tpe <:< NotNullClass.tpe && !tpe.isNotNull)
- mkRuntimeCall(nme.checkInitialized, List(tree))
- else
- tree
- }
-
- /** Builds a fully attributed wildcard import node.
+ /** Builds a fully attributed, synthetic wildcard import node.
*/
- def mkWildcardImport(pkg: Symbol): Import = {
- assert(pkg ne null, this)
- val qual = gen.mkAttributedStableRef(pkg)
+ def mkWildcardImport(pkg: Symbol): Import =
+ mkImportFromSelector(pkg, ImportSelector.wildList)
+
+ /** Builds a fully attributed, synthetic import node.
+ * import `qualSym`.{`name` => `toName`}
+ */
+ def mkImport(qualSym: Symbol, name: Name, toName: Name): Import =
+ mkImportFromSelector(qualSym, ImportSelector(name, 0, toName, 0) :: Nil)
+
+ private def mkImportFromSelector(qualSym: Symbol, selector: List[ImportSelector]): Import = {
+ assert(qualSym ne null, this)
+ val qual = gen.mkAttributedStableRef(qualSym)
val importSym = (
NoSymbol
newImport NoPosition
setFlag SYNTHETIC
- setInfo analyzer.ImportType(qual)
+ setInfo ImportType(qual)
)
val importTree = (
- Import(qual, ImportSelector.wildList)
+ Import(qual, selector)
setSymbol importSym
setType NoType
)
@@ -52,120 +48,23 @@ abstract class TreeGen extends scala.reflect.internal.TreeGen with TreeDSL {
}
// wrap the given expression in a SoftReference so it can be gc-ed
- def mkSoftRef(expr: Tree): Tree = atPos(expr.pos)(New(SoftReferenceClass.tpe, expr))
-
- // annotate the expression with @unchecked
- def mkUnchecked(expr: Tree): Tree = atPos(expr.pos) {
- // This can't be "Annotated(New(UncheckedClass), expr)" because annotations
- // are very picky about things and it crashes the compiler with "unexpected new".
- Annotated(New(scalaDot(UncheckedClass.name), ListOfNil), expr)
- }
- // if it's a Match, mark the selector unchecked; otherwise nothing.
- def mkUncheckedMatch(tree: Tree) = tree match {
- case Match(selector, cases) => atPos(tree.pos)(Match(mkUnchecked(selector), cases))
- case _ => tree
- }
-
- def mkSynthSwitchSelector(expr: Tree): Tree = atPos(expr.pos) {
- // This can't be "Annotated(New(SwitchClass), expr)" because annotations
- // are very picky about things and it crashes the compiler with "unexpected new".
- Annotated(Ident(nme.synthSwitch), expr)
- }
-
- // TODO: would be so much nicer if we would know during match-translation (i.e., type checking)
- // whether we should emit missingCase-style apply (and isDefinedAt), instead of transforming trees post-factum
- class MatchMatcher {
- def caseMatch(orig: Tree, selector: Tree, cases: List[CaseDef], wrap: Tree => Tree): Tree = unknownTree(orig)
- def caseVirtualizedMatch(orig: Tree, _match: Tree, targs: List[Tree], scrut: Tree, matcher: Tree): Tree = unknownTree(orig)
- def caseVirtualizedMatchOpt(orig: Tree, prologue: List[Tree], cases: List[Tree], matchEndDef: Tree, wrap: Tree => Tree): Tree = unknownTree(orig)
-
- def genVirtualizedMatch(prologue: List[Tree], cases: List[Tree], matchEndDef: Tree): Tree = Block(prologue ++ cases, matchEndDef)
-
- def apply(matchExpr: Tree): Tree = matchExpr match {
- // old-style match or virtpatmat switch
- case Match(selector, cases) => // println("simple match: "+ (selector, cases) + "for:\n"+ matchExpr )
- caseMatch(matchExpr, selector, cases, identity)
- // old-style match or virtpatmat switch
- case Block((vd: ValDef) :: Nil, orig@Match(selector, cases)) => // println("block match: "+ (selector, cases, vd) + "for:\n"+ matchExpr )
- caseMatch(matchExpr, selector, cases, m => copyBlock(matchExpr, List(vd), m))
- // virtpatmat
- case Apply(Apply(TypeApply(Select(tgt, nme.runOrElse), targs), List(scrut)), List(matcher)) if opt.virtPatmat => // println("virt match: "+ (tgt, targs, scrut, matcher) + "for:\n"+ matchExpr )
- caseVirtualizedMatch(matchExpr, tgt, targs, scrut, matcher)
- // optimized version of virtpatmat
- case Block(stats, matchEndDef) if opt.virtPatmat && (stats forall treeInfo.hasSynthCaseSymbol) =>
- // the assumption is once we encounter a case, the remainder of the block will consist of cases
- // the prologue may be empty, usually it is the valdef that stores the scrut
- val (prologue, cases) = stats span (s => !s.isInstanceOf[LabelDef])
- caseVirtualizedMatchOpt(matchExpr, prologue, cases, matchEndDef, identity)
- // optimized version of virtpatmat
- case Block(outerStats, orig@Block(stats, matchEndDef)) if opt.virtPatmat && (stats forall treeInfo.hasSynthCaseSymbol) =>
- val (prologue, cases) = stats span (s => !s.isInstanceOf[LabelDef])
- caseVirtualizedMatchOpt(matchExpr, prologue, cases, matchEndDef, m => copyBlock(matchExpr, outerStats, m))
- case other =>
- unknownTree(other)
- }
-
- def unknownTree(t: Tree): Tree = throw new MatchError(t)
- def copyBlock(orig: Tree, stats: List[Tree], expr: Tree): Block = Block(stats, expr)
-
- def dropSyntheticCatchAll(cases: List[CaseDef]): List[CaseDef] =
- if (!opt.virtPatmat) cases
- else cases filter {
- case CaseDef(pat, EmptyTree, Throw(Apply(Select(New(exTpt), nme.CONSTRUCTOR), _))) if (treeInfo.isWildcardArg(pat) && (exTpt.tpe.typeSymbol eq MatchErrorClass)) => false
- case CaseDef(pat, guard, body) => true
- }
- }
-
- def mkCached(cvar: Symbol, expr: Tree): Tree = {
- val cvarRef = mkUnattributedRef(cvar)
- Block(
- List(
- If(Apply(Select(cvarRef, nme.eq), List(Literal(Constant(null)))),
- Assign(cvarRef, expr),
- EmptyTree)),
- cvarRef
- )
+ def mkSoftRef(expr: Tree): Tree = atPos(expr.pos) {
+ val constructor = SoftReferenceClass.info.nonPrivateMember(nme.CONSTRUCTOR).suchThat(_.paramss.flatten.size == 1)
+ NewFromConstructor(constructor, expr)
}
// Builds a tree of the form "{ lhs = rhs ; lhs }"
def mkAssignAndReturn(lhs: Symbol, rhs: Tree): Tree = {
- val lhsRef = mkUnattributedRef(lhs)
+ def lhsRef = if (lhs.owner.isClass) Select(This(lhs.owner), lhs) else Ident(lhs)
Block(Assign(lhsRef, rhs) :: Nil, lhsRef)
}
- def mkModuleVarDef(accessor: Symbol) = {
- val inClass = accessor.owner.isClass
- val extraFlags = if (inClass) PrivateLocal | SYNTHETIC else 0
-
- val mval = (
- accessor.owner.newVariable(nme.moduleVarName(accessor.name), accessor.pos.focus, MODULEVAR | extraFlags)
- setInfo accessor.tpe.finalResultType
- addAnnotation VolatileAttr
- )
- if (inClass)
- mval.owner.info.decls enter mval
-
- ValDef(mval)
- }
-
- // def m: T = { if (m$ eq null) m$ = new m$class(...) m$ }
- // where (...) are eventual outer accessors
- def mkCachedModuleAccessDef(accessor: Symbol, mvar: Symbol) =
- DefDef(accessor, mkCached(mvar, newModule(accessor, mvar.tpe)))
-
- def mkModuleAccessDef(accessor: Symbol, msym: Symbol) =
- DefDef(accessor, Select(This(msym.owner), msym))
-
def newModule(accessor: Symbol, tpe: Type) = {
val ps = tpe.typeSymbol.primaryConstructor.info.paramTypes
if (ps.isEmpty) New(tpe)
else New(tpe, This(accessor.owner.enclClass))
}
- // def m: T;
- def mkModuleAccessDcl(accessor: Symbol) =
- DefDef(accessor setFlag lateDEFERRED, EmptyTree)
-
def mkRuntimeCall(meth: Name, args: List[Tree]): Tree =
mkRuntimeCall(meth, Nil, args)
@@ -206,7 +105,6 @@ abstract class TreeGen extends scala.reflect.internal.TreeGen with TreeDSL {
else AppliedTypeTree(Ident(clazz), targs map TypeTree)
))
}
- def mkSuperSelect = Select(Super(This(tpnme.EMPTY), tpnme.EMPTY), nme.CONSTRUCTOR)
def wildcardStar(tree: Tree) =
atPos(tree.pos) { Typed(tree, Ident(tpnme.WILDCARD_STAR)) }
@@ -244,7 +142,7 @@ abstract class TreeGen extends scala.reflect.internal.TreeGen with TreeDSL {
* x.asInstanceOf[`pt`]() if after uncurry but before erasure
* x.$asInstanceOf[`pt`]() if at or after erasure
*/
- def mkCast(tree: Tree, pt: Type): Tree = {
+ override def mkCast(tree: Tree, pt: Type): Tree = {
debuglog("casting " + tree + ":" + tree.tpe + " to " + pt + " at phase: " + phase)
assert(!tree.tpe.isInstanceOf[MethodType], tree)
assert(pt eq pt.normalize, tree +" : "+ debugString(pt) +" ~>"+ debugString(pt.normalize))
@@ -267,25 +165,6 @@ abstract class TreeGen extends scala.reflect.internal.TreeGen with TreeDSL {
else
mkCast(tree, pt)
- def mkZeroContravariantAfterTyper(tp: Type): Tree = {
- // contravariant -- for replacing an argument in a method call
- // must use subtyping, as otherwise we miss types like `Any with Int`
- val tree =
- if (NullClass.tpe <:< tp) Literal(Constant(null))
- else if (UnitClass.tpe <:< tp) Literal(Constant())
- else if (BooleanClass.tpe <:< tp) Literal(Constant(false))
- else if (FloatClass.tpe <:< tp) Literal(Constant(0.0f))
- else if (DoubleClass.tpe <:< tp) Literal(Constant(0.0d))
- else if (ByteClass.tpe <:< tp) Literal(Constant(0.toByte))
- else if (ShortClass.tpe <:< tp) Literal(Constant(0.toShort))
- else if (IntClass.tpe <:< tp) Literal(Constant(0))
- else if (LongClass.tpe <:< tp) Literal(Constant(0L))
- else if (CharClass.tpe <:< tp) Literal(Constant(0.toChar))
- else mkCast(Literal(Constant(null)), tp)
-
- tree
- }
-
/** Translate names in Select/Ident nodes to type names.
*/
def convertToTypeName(tree: Tree): Option[RefTree] = tree match {
@@ -307,7 +186,7 @@ abstract class TreeGen extends scala.reflect.internal.TreeGen with TreeDSL {
*/
private def mkPackedValDef(expr: Tree, owner: Symbol, name: Name): (ValDef, () => Ident) = {
val packedType = typer.packedType(expr, owner)
- val sym = owner.newValue(name, expr.pos.makeTransparent, SYNTHETIC) setInfo packedType
+ val sym = owner.newValue(name.toTermName, expr.pos.makeTransparent, SYNTHETIC) setInfo packedType
(ValDef(sym, expr), () => Ident(sym) setPos sym.pos.focus setType expr.tpe)
}
@@ -368,4 +247,53 @@ abstract class TreeGen extends scala.reflect.internal.TreeGen with TreeDSL {
attrThis,
If(cond, Block(syncBody: _*), EmptyTree)) ::
stats: _*)
+
+ /** Creates a tree representing new Object { stats }.
+ * To make sure an anonymous subclass of Object is created,
+ * if there are no stats, a () is added.
+ */
+ def mkAnonymousNew(stats: List[Tree]): Tree = {
+ val stats1 = if (stats.isEmpty) List(Literal(Constant(()))) else stats
+ mkNew(Nil, noSelfType, stats1, NoPosition, NoPosition)
+ }
+
+ /**
+ * Create a method based on a Function
+ *
+ * Used both under `-Ydelambdafy:method` to create a lifted function and
+ * under `-Ydelambdafy:inline` to create the apply method on the anonymous
+ * class.
+ *
+ * It creates a method definition with value params cloned from the
+ * original lambda. Then it calls a supplied function to create
+ * the body and types the result. Finally
+ * everything is wrapped up in a DefDef
+ *
+ * @param owner The owner for the new method
+ * @param name name for the new method
+ * @param additionalFlags flags to be put on the method in addition to FINAL
+ */
+ def mkMethodFromFunction(localTyper: analyzer.Typer)
+ (fun: Function, owner: Symbol, name: TermName, additionalFlags: FlagSet = NoFlags) = {
+ val funParams = fun.vparams map (_.symbol)
+ val formals :+ restpe = fun.tpe.typeArgs
+
+ val methSym = owner.newMethod(name, fun.pos, FINAL | additionalFlags)
+
+ val paramSyms = map2(formals, fun.vparams) {
+ (tp, vparam) => methSym.newSyntheticValueParam(tp, vparam.name)
+ }
+
+ methSym setInfo MethodType(paramSyms, restpe.deconst)
+
+ fun.body.substituteSymbols(funParams, paramSyms)
+ fun.body changeOwner (fun.symbol -> methSym)
+
+ val methDef = DefDef(methSym, fun.body)
+
+ // Have to repack the type to avoid mismatches when existentials
+ // appear in the result - see SI-4869.
+ methDef.tpt setType localTyper.packedType(fun.body, methSym).deconst
+ methDef
+ }
}
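
mkMethodFromFunction above splits `fun.tpe.typeArgs` with `val formals :+ restpe = ...`: for a FunctionN type the type arguments are the parameter types followed by the result type, so `:+` peels off the result. A small sketch of that destructuring, with strings standing in for types:

    object SplitArgsSketch {
      def main(args: Array[String]): Unit = {
        // Stand-ins for the Types of Function2[Int, String, Boolean].
        val typeArgs = List("Int", "String", "Boolean")
        val formals :+ restpe = typeArgs
        println(s"formals = $formals, result = $restpe")   // formals = List(Int, String), result = Boolean
      }
    }
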
diff --git a/src/compiler/scala/tools/nsc/ast/TreeInfo.scala b/src/compiler/scala/tools/nsc/ast/TreeInfo.scala
index 1005cd1ccf..689e6405d0 100644
--- a/src/compiler/scala/tools/nsc/ast/TreeInfo.scala
+++ b/src/compiler/scala/tools/nsc/ast/TreeInfo.scala
@@ -6,10 +6,6 @@
package scala.tools.nsc
package ast
-import scala.reflect.internal.HasFlags
-import scala.reflect.internal.Flags._
-import symtab._
-
/** This class ...
*
* @author Martin Odersky
@@ -18,8 +14,65 @@ import symtab._
abstract class TreeInfo extends scala.reflect.internal.TreeInfo {
val global: Global
import global._
+ import definitions._
+
+ // arg1.op(arg2) returns (arg1, op.symbol, arg2)
+ object BinaryOp {
+ def unapply(t: Tree): Option[(Tree, Symbol, Tree)] = t match {
+ case Apply(sel @ Select(arg1, _), arg2 :: Nil) => Some((arg1, sel.symbol, arg2))
+ case _ => None
+ }
+ }
+ // recv.op[T1, ...] returns (recv, op.symbol, type argument types)
+ object TypeApplyOp {
+ def unapply(t: Tree): Option[(Tree, Symbol, List[Type])] = t match {
+ case TypeApply(sel @ Select(recv, _), targs) => Some((recv, sel.symbol, targs map (_.tpe)))
+ case _ => None
+ }
+ }
+
+ // x.asInstanceOf[T] returns (x, typeOf[T])
+ object AsInstanceOf {
+ def unapply(t: Tree): Option[(Tree, Type)] = t match {
+ case Apply(TypeApplyOp(recv, Object_asInstanceOf, tpe :: Nil), Nil) => Some((recv, tpe))
+ case _ => None
+ }
+ }
- import definitions.ThrowableClass
+ // Extractors for value classes.
+ object ValueClass {
+ def isValueClass(tpe: Type) = enteringErasure(tpe.typeSymbol.isDerivedValueClass)
+ def valueUnbox(tpe: Type) = enteringErasure(tpe.typeSymbol.derivedValueClassUnbox)
+
+ // B.unbox. Returns B.
+ object Unbox {
+ def unapply(t: Tree): Option[Tree] = t match {
+ case Apply(sel @ Select(ref, _), Nil) if valueUnbox(ref.tpe) == sel.symbol => Some(ref)
+ case _ => None
+ }
+ }
+ // new B(v). Returns B and v.
+ object Box {
+ def unapply(t: Tree): Option[(Tree, Type)] = t match {
+ case Apply(sel @ Select(New(tpt), nme.CONSTRUCTOR), v :: Nil) => Some((v, tpt.tpe.finalResultType))
+ case _ => None
+ }
+ }
+ // (new B(v)).unbox. returns v.
+ object BoxAndUnbox {
+ def unapply(t: Tree): Option[Tree] = t match {
+ case Unbox(Box(v, tpe)) if isValueClass(tpe) => Some(v)
+ case _ => None
+ }
+ }
+ // new B(v1) op new B(v2) where op is == or !=. Returns v1, op, v2.
+ object BoxAndCompare {
+ def unapply(t: Tree): Option[(Tree, Symbol, Tree)] = t match {
+ case BinaryOp(Box(v1, tpe1), op @ (Object_== | Object_!=), Box(v2, tpe2)) if isValueClass(tpe1) && tpe1 =:= tpe2 => Some((v1, op, v2))
+ case _ => None
+ }
+ }
+ }
// TODO these overrides, and the slow trickle of bugs that they solve (e.g. SI-8479),
// suggest that we should pursue an alternative design in which the DocDef nodes
@@ -44,15 +97,4 @@ abstract class TreeInfo extends scala.reflect.internal.TreeInfo {
case DocDef(_, definition) => isPureDef(definition)
case _ => super.isPureDef(tree)
}
-
- /** Does list of trees start with a definition of
- * a class of module with given name (ignoring imports)
- */
- override def firstDefinesClassOrObject(trees: List[Tree], name: Name): Boolean = trees match {
- case ClassDef(_, `name`, _, _) :: Nil => true
- case _ => super.firstDefinesClassOrObject(trees, name)
- }
-
- def isInterface(mods: HasFlags, body: List[Tree]) =
- mods.isTrait && (body forall isInterfaceMember)
}
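
The new TreeInfo extractors compose: BoxAndUnbox is just Unbox wrapped around Box. A stand-alone sketch of that extractor-composition pattern on a toy AST (not the compiler's trees):

    object ExtractorSketch {
      sealed trait Expr
      final case class Box(v: Expr)   extends Expr
      final case class Unbox(e: Expr) extends Expr
      final case class Num(n: Int)    extends Expr

      object BoxAndUnbox {
        def unapply(e: Expr): Option[Expr] = e match {
          case Unbox(Box(v)) => Some(v)            // (new B(v)).unbox ==> v
          case _             => None
        }
      }

      def main(args: Array[String]): Unit =
        Unbox(Box(Num(42))) match {
          case BoxAndUnbox(v) => println(s"eliminated box/unbox pair, kept $v")
          case other          => println(s"left alone: $other")
        }
    }
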
diff --git a/src/compiler/scala/tools/nsc/ast/Trees.scala b/src/compiler/scala/tools/nsc/ast/Trees.scala
index 0a12737572..3652f51153 100644
--- a/src/compiler/scala/tools/nsc/ast/Trees.scala
+++ b/src/compiler/scala/tools/nsc/ast/Trees.scala
@@ -6,6 +6,7 @@
package scala.tools.nsc
package ast
+import scala.reflect.ClassTag
import scala.reflect.internal.Flags.BYNAMEPARAM
import scala.reflect.internal.Flags.DEFAULTPARAM
import scala.reflect.internal.Flags.IMPLICIT
@@ -16,24 +17,6 @@ import scala.reflect.internal.Flags.TRAIT
import scala.compat.Platform.EOL
trait Trees extends scala.reflect.internal.Trees { self: Global =>
-
- def treeLine(t: Tree): String =
- if (t.pos.isDefined && t.pos.isRange) t.pos.lineContent.drop(t.pos.column - 1).take(t.pos.end - t.pos.start + 1)
- else t.summaryString
-
- def treeStatus(t: Tree, enclosingTree: Tree = null) = {
- val parent = if (enclosingTree eq null) " " else " P#%5s".format(enclosingTree.id)
-
- "[L%4s%8s] #%-6s %-15s %-10s // %s".format(t.pos.safeLine, parent, t.id, t.pos.show, t.shortClass, treeLine(t))
- }
- def treeSymStatus(t: Tree) = {
- val line = if (t.pos.isDefined) "line %-4s".format(t.pos.safeLine) else " "
- "#%-5s %s %-10s // %s".format(t.id, line, t.shortClass,
- if (t.symbol ne NoSymbol) "(" + t.symbol.fullLocationString + ")"
- else treeLine(t)
- )
- }
-
// --- additional cases --------------------------------------------------------
/** Only used during parsing */
case class Parens(args: List[Tree]) extends Tree
@@ -65,69 +48,11 @@ trait Trees extends scala.reflect.internal.Trees { self: Global =>
// --- factory methods ----------------------------------------------------------
- /** Generates a template with constructor corresponding to
- *
- * constrmods (vparams1_) ... (vparams_n) preSuper { presupers }
- * extends superclass(args_1) ... (args_n) with mixins { self => body }
- *
- * This gets translated to
- *
- * extends superclass with mixins { self =>
- * presupers' // presupers without rhs
- * vparamss // abstract fields corresponding to value parameters
- * def <init>(vparamss) {
- * presupers
- * super.<init>(args)
- * }
- * body
- * }
+ /** Factory method for a primary constructor super call `super.<init>(args_1)...(args_n)`
*/
- def Template(parents: List[Tree], self: ValDef, constrMods: Modifiers, vparamss: List[List[ValDef]], argss: List[List[Tree]], body: List[Tree], superPos: Position): Template = {
- /* Add constructor to template */
-
- // create parameters for <init> as synthetic trees.
- var vparamss1 = mmap(vparamss) { vd =>
- atPos(vd.pos.focus) {
- val mods = Modifiers(vd.mods.flags & (IMPLICIT | DEFAULTPARAM | BYNAMEPARAM) | PARAM | PARAMACCESSOR)
- ValDef(mods withAnnotations vd.mods.annotations, vd.name, vd.tpt.duplicate, vd.rhs.duplicate)
- }
- }
- val (edefs, rest) = body span treeInfo.isEarlyDef
- val (evdefs, etdefs) = edefs partition treeInfo.isEarlyValDef
- val gvdefs = evdefs map {
- case vdef @ ValDef(_, _, tpt, _) =>
- copyValDef(vdef)(
- // atPos for the new tpt is necessary, since the original tpt might have no position
- // (when missing type annotation for ValDef for example), so even though setOriginal modifies the
- // position of TypeTree, it would still be NoPosition. That's what the author meant.
- tpt = atPos(vdef.pos.focus)(TypeTree() setOriginal tpt setPos tpt.pos.focus),
- rhs = EmptyTree
- )
- }
- val lvdefs = evdefs collect { case vdef: ValDef => copyValDef(vdef)(mods = vdef.mods | PRESUPER) }
-
- val constrs = {
- if (constrMods hasFlag TRAIT) {
- if (body forall treeInfo.isInterfaceMember) List()
- else List(
- atPos(wrappingPos(superPos, lvdefs)) (
- DefDef(NoMods, nme.MIXIN_CONSTRUCTOR, List(), ListOfNil, TypeTree(), Block(lvdefs, Literal(Constant())))))
- } else {
- // convert (implicit ... ) to ()(implicit ... ) if its the only parameter section
- if (vparamss1.isEmpty || !vparamss1.head.isEmpty && vparamss1.head.head.mods.isImplicit)
- vparamss1 = List() :: vparamss1;
- val superRef: Tree = atPos(superPos)(gen.mkSuperSelect)
- val superCall = (superRef /: argss) (Apply.apply)
- List(
- atPos(wrappingPos(superPos, lvdefs ::: argss.flatten)) (
- DefDef(constrMods, nme.CONSTRUCTOR, List(), vparamss1, TypeTree(), Block(lvdefs ::: List(superCall), Literal(Constant())))))
- }
- }
- constrs foreach (ensureNonOverlapping(_, parents ::: gvdefs, focus=false))
- // Field definitions for the class - remove defaults.
- val fieldDefs = vparamss.flatten map (vd => copyValDef(vd)(mods = vd.mods &~ DEFAULTPARAM, rhs = EmptyTree))
-
- Template(parents, self, gvdefs ::: fieldDefs ::: constrs ::: etdefs ::: rest)
+ def PrimarySuperCall(argss: List[List[Tree]]): Tree = argss match {
+ case Nil => Apply(gen.mkSuperInitCall, Nil)
+ case xs :: rest => rest.foldLeft(Apply(gen.mkSuperInitCall, xs): Tree)(Apply.apply)
}
/** Construct class definition with given class symbol, value parameters,
@@ -137,21 +62,17 @@ trait Trees extends scala.reflect.internal.Trees { self: Global =>
* @param constrMods the modifiers for the class constructor, i.e. as in `class C private (...)`
* @param vparamss the value parameters -- if they have symbols they
* should be owned by `sym`
- * @param argss the supercall arguments
* @param body the template statements without primary constructor
* and value parameter fields.
*/
- def ClassDef(sym: Symbol, constrMods: Modifiers, vparamss: List[List[ValDef]], argss: List[List[Tree]], body: List[Tree], superPos: Position): ClassDef = {
+ def ClassDef(sym: Symbol, constrMods: Modifiers, vparamss: List[List[ValDef]], body: List[Tree], superPos: Position): ClassDef = {
// "if they have symbols they should be owned by `sym`"
- assert(
- mforall(vparamss)(p => (p.symbol eq NoSymbol) || (p.symbol.owner == sym)),
- ((mmap(vparamss)(_.symbol), sym))
- )
+ assert(mforall(vparamss)(_.symbol.owner == sym), (mmap(vparamss)(_.symbol), sym))
ClassDef(sym,
- Template(sym.info.parents map TypeTree,
- if (sym.thisSym == sym || phase.erasedTypes) emptyValDef else ValDef(sym.thisSym),
- constrMods, vparamss, argss, body, superPos))
+ gen.mkTemplate(sym.info.parents map TypeTree,
+ if (sym.thisSym == sym || phase.erasedTypes) noSelfType else ValDef(sym.thisSym),
+ constrMods, vparamss, body, superPos))
}
// --- subcomponents --------------------------------------------------
@@ -160,8 +81,6 @@ trait Trees extends scala.reflect.internal.Trees { self: Global =>
val global: Trees.this.type = self
} with TreeInfo
- lazy val treePrinter = newTreePrinter()
-
// --- additional cases in operations ----------------------------------
override protected def xtraverse(traverser: Traverser, tree: Tree): Unit = tree match {
@@ -184,6 +103,7 @@ trait Trees extends scala.reflect.internal.Trees { self: Global =>
def InjectDerivedValue(tree: Tree, arg: Tree): InjectDerivedValue
def TypeTreeWithDeferredRefCheck(tree: Tree): TypeTreeWithDeferredRefCheck
}
+ implicit val TreeCopierTag: ClassTag[TreeCopier] = ClassTag[TreeCopier](classOf[TreeCopier])
def newStrictTreeCopier: TreeCopier = new StrictTreeCopier
def newLazyTreeCopier: TreeCopier = new LazyTreeCopier
@@ -227,7 +147,7 @@ trait Trees extends scala.reflect.internal.Trees { self: Global =>
try unit.body = transform(unit.body)
catch {
case ex: Exception =>
- println(supplementErrorMessage("unhandled exception while transforming "+unit))
+ log(supplementErrorMessage("unhandled exception while transforming "+unit))
throw ex
}
}
@@ -258,14 +178,34 @@ trait Trees extends scala.reflect.internal.Trees { self: Global =>
}
}
- /** resets symbol and tpe fields in a tree, @see ResetAttrs
- */
-// def resetAllAttrs[A<:Tree](x:A): A = { new ResetAttrsTraverser().traverse(x); x }
-// def resetLocalAttrs[A<:Tree](x:A): A = { new ResetLocalAttrsTraverser().traverse(x); x }
-
- def resetAllAttrs(x: Tree, leaveAlone: Tree => Boolean = null): Tree = new ResetAttrs(false, leaveAlone).transform(x)
- def resetLocalAttrs(x: Tree, leaveAlone: Tree => Boolean = null): Tree = new ResetAttrs(true, leaveAlone).transform(x)
- def resetLocalAttrsKeepLabels(x: Tree, leaveAlone: Tree => Boolean = null): Tree = new ResetAttrs(true, leaveAlone, true).transform(x)
+  // Finally, no one uses resetAllAttrs anymore, so I'm removing it from the compiler.
+  // Even though it's with great pleasure that I'm doing so, I'll leave its body here to warn future generations about what happened in the past.
+ //
+ // So what actually happened in the past is that we used to have two flavors of resetAttrs: resetAllAttrs and resetLocalAttrs.
+ // resetAllAttrs destroyed all symbols and types in the tree in order to reset its state to something suitable for retypechecking
+ // and/or embedding into bigger trees / different lexical scopes. (Btw here's some background on why people would want to use
+ // reset attrs in the first place: https://groups.google.com/forum/#!topic/scala-internals/TtCTPlj_qcQ).
+ //
+ // However resetAllAttrs was more of a poison than of a treatment, because along with locally defined symbols that are the cause
+ // for almost every or maybe even every case of tree corruption, it erased external bindings that sometimes could not be restored.
+ // This is how we came up with resetLocalAttrs that left external bindings alone, and that was a big step forward.
+ // Then slowly but steadily we've evicted all usages of resetAllAttrs from our codebase in favor of resetLocalAttrs
+ // and have been living happily ever after.
+ //
+ // def resetAllAttrs(x: Tree, leaveAlone: Tree => Boolean = null): Tree = new ResetAttrs(localOnly = false, leaveAlone).transform(x)
+
+ // upd. Unfortunately this didn't work out quite as we expected. The last two users of resetAllAttrs:
+ // reification and typedLabelDef broke in very weird ways when we replaced resetAllAttrs with resetLocalAttrs
+ // (see SI-8316 change from resetAllAttrs to resetLocalAttrs in reifiers broke Slick and
+ // SI-8318 NPE in mixin in scala-continuations for more information).
+ // Given that we're supposed to release 2.11.0-RC1 in less than a week, I'm temporarily reinstating resetAllAttrs
+ // until we have time to better understand what's going on. In order to dissuade people from using it,
+ // it now comes with a new, ridiculous name.
+ /** @see ResetAttrs */
+ def brutallyResetAttrs(x: Tree, leaveAlone: Tree => Boolean = null): Tree = new ResetAttrs(brutally = true, leaveAlone).transform(x)
+
+ /** @see ResetAttrs */
+ def resetAttrs(x: Tree): Tree = new ResetAttrs(brutally = false, leaveAlone = null).transform(x)
/** A transformer which resets symbol and tpe fields of all nodes in a given tree,
* with special treatment of:
@@ -276,8 +216,10 @@ trait Trees extends scala.reflect.internal.Trees { self: Global =>
*
* (bq:) This transformer has mutable state and should be discarded after use
*/
- private class ResetAttrs(localOnly: Boolean, leaveAlone: Tree => Boolean = null, keepLabels: Boolean = false) {
- val debug = settings.debug.value
+ private class ResetAttrs(brutally: Boolean, leaveAlone: Tree => Boolean) {
+ // this used to be based on -Ydebug, but the need for logging in this code is so situational
+ // that I've reverted to a hard-coded constant here.
+ val debug = false
val trace = scala.tools.nsc.util.trace when debug
val locals = util.HashSet[Symbol](8)
@@ -298,6 +240,7 @@ trait Trees extends scala.reflect.internal.Trees { self: Global =>
registerLocal(sym.moduleClass)
registerLocal(sym.companionClass)
registerLocal(sym.companionModule)
+ registerLocal(sym.deSkolemize)
sym match {
case sym: TermSymbol => registerLocal(sym.referenced)
case _ => ;
@@ -324,6 +267,8 @@ trait Trees extends scala.reflect.internal.Trees { self: Global =>
else
super.transform {
tree match {
+ case tree if !tree.canHaveAttrs =>
+ tree
case tpt: TypeTree =>
if (tpt.original != null)
transform(tpt.original)
@@ -331,9 +276,7 @@ trait Trees extends scala.reflect.internal.Trees { self: Global =>
val refersToLocalSymbols = tpt.tpe != null && (tpt.tpe exists (tp => locals contains tp.typeSymbol))
val isInferred = tpt.wasEmpty
if (refersToLocalSymbols || isInferred) {
- val dupl = tpt.duplicate
- dupl.tpe = null
- dupl
+ tpt.duplicate.clearType()
} else {
tpt
}
@@ -358,42 +301,29 @@ trait Trees extends scala.reflect.internal.Trees { self: Global =>
// vetoXXX local variables declared below describe the conditions under which we cannot erase symbols.
//
// The first reason to not erase symbols is the threat of non-idempotency (SI-5464).
- // Here we take care of labels (SI-5562) and references to package classes (SI-5705).
+ // Here we take care of references to package classes (SI-5705).
// There are other non-idempotencies, but they are not worked around yet.
//
- // The second reason has to do with the fact that resetAttrs itself has limited usefulness.
- //
- // First of all, why do we need resetAttrs? Gor one, it's absolutely required to move trees around.
- // One cannot just take a typed tree from one lexical context and transplant it somewhere else.
- // Most likely symbols defined by those trees will become borked and the compiler will blow up (SI-5797).
- // To work around we just erase all symbols and types and then hope that we'll be able to correctly retypecheck.
- // For ones who're not affected by scalac Stockholm syndrome, this might seem to be an extremely naive fix, but well...
- //
- // Of course, sometimes erasing everything won't work, because if a given identifier got resolved to something
- // in one lexical scope, it can get resolved to something else.
- //
- // What do we do in these cases? Enter the workaround for the workaround: resetLocalAttrs, which only destroys
- // locally defined symbols, but doesn't touch references to stuff declared outside of a given tree.
- // That's what localOnly and vetoScope are for.
+ // The second reason has to do with the fact that resetAttrs needs to be less destructive.
+ // Erasing locally-defined symbols is useful to prevent tree corruption, but erasing external bindings is not,
+ // therefore we want to retain those bindings, especially given that restoring them can be impossible
+ // if we move these trees into lexical contexts different from their original locations.
if (dupl.hasSymbol) {
val sym = dupl.symbol
- val vetoScope = localOnly && !(locals contains sym)
- val vetoLabel = keepLabels && sym.isLabel
+ val vetoScope = !brutally && !(locals contains sym) && !(locals contains sym.deSkolemize)
val vetoThis = dupl.isInstanceOf[This] && sym.isPackageClass
- if (!(vetoScope || vetoLabel || vetoThis)) dupl.symbol = NoSymbol
+ if (!(vetoScope || vetoThis)) dupl.symbol = NoSymbol
}
- dupl.tpe = null
- dupl
+ dupl.clearType()
}
}
}
}
def transform(x: Tree): Tree = {
- if (localOnly)
new MarkLocals().traverse(x)
- if (localOnly && debug) {
+ if (debug) {
assert(locals.size == orderedLocals.size)
val msg = orderedLocals.toList filter {_ != NoSymbol} map {" " + _} mkString EOL
trace("locals (%d total): %n".format(orderedLocals.size))(msg)
diff --git a/src/compiler/scala/tools/nsc/ast/parser/CommonTokens.scala b/src/compiler/scala/tools/nsc/ast/parser/CommonTokens.scala
new file mode 100644
index 0000000000..5fcb02814b
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/ast/parser/CommonTokens.scala
@@ -0,0 +1,112 @@
+package scala.tools.nsc
+package ast.parser
+
+/** Common code between Scala's Tokens and JavaTokens. */
+abstract class CommonTokens {
+
+ def isIdentifier(code: Int): Boolean
+ def isLiteral(code: Int): Boolean
+
+ /** special tokens */
+ final val EMPTY = -3
+ final val UNDEF = -2
+ final val ERROR = -1
+ final val EOF = 0
+
+ /** literals */
+ final val CHARLIT = 1
+ final val INTLIT = 2
+ final val LONGLIT = 3
+ final val FLOATLIT = 4
+ final val DOUBLELIT = 5
+ final val STRINGLIT = 6
+
+ /** keywords */
+ final val NEW = 20
+ final val THIS = 21
+ final val SUPER = 23
+
+ final val NULL = 24
+ final val TRUE = 25
+ final val FALSE = 26
+
+ // J: INSTANCEOF = 27
+ // J: CONST = 28
+
+ /** modifiers */
+ // S: IMPLICIT = 40
+ // S: OVERRIDE = 41
+ // J: PUBLIC = 42
+ final val PROTECTED = 43
+ final val PRIVATE = 44
+ // S: SEALED = 45
+ final val ABSTRACT = 46
+ // J: DEFAULT = 47
+ // J: STATIC = 48
+ final val FINAL = 49
+ // J: TRANSIENT = 50
+ // J: VOLATILE = 51
+ // J: SYNCHRONIZED = 52
+ // J: NATIVE = 53
+ // J: STRICTFP = 54
+ // S: LAZY = 55
+ // J: THROWS = 56
+ // S: MACRO = 57
+
+ /** templates */
+ final val PACKAGE = 60
+ final val IMPORT = 61
+ final val CLASS = 62
+ // S: CASECLASS = 63
+ // S: OBJECT = 64
+ // S: CASEOBJECT = 65
+ // S: TRAIT, J: INTERFACE = 66
+ // J: ENUM = 67
+ final val EXTENDS = 68
+ // S: WITH, J: IMPLEMENTS = 69
+ // S: TYPE = 70
+ // S: FORSOME = 71
+ // S: DEF = 72
+ // S: VAL = 73
+ // S: VAR = 74
+
+ /** control structures */
+ final val IF = 80
+ // S: THEN = 81
+ final val ELSE = 82
+ final val WHILE = 83
+ final val DO = 84
+ final val FOR = 85
+ // S: YIELD = 86
+ // J: BREAK = 87
+ // J: CONTINUE = 88
+ // J: GOTO = 89
+ final val THROW = 90
+ final val TRY = 91
+ final val CATCH = 92
+ final val FINALLY = 93
+ // J: SWITCH = 94
+ // S: MATCH = 95
+ final val CASE = 96
+ final val RETURN = 97
+ // J: ASSERT = 98
+
+  /** parentheses */
+ final val LPAREN = 100
+ final val RPAREN = 101
+ final val LBRACKET = 102
+ final val RBRACKET = 103
+ final val LBRACE = 104
+ final val RBRACE = 105
+
+ /** special symbols */
+ final val COMMA = 120
+ final val SEMI = 121
+ final val DOT = 122
+ final val COLON = 123
+ final val EQUALS = 124
+ final val AT = 125
+ // S: <special symbols> = 130 - 139
+ // J: <special symbols> = 140 - 179
+ // J: <primitive types> = 180 - 189
+}
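As a rough illustration of how a concrete token set plugs into this class, here is a minimal sketch of a subclass; the IDENTIFIER and BACKQUOTED_IDENT codes below are assumed values chosen for the example, since the real ones are defined in Tokens/JavaTokens rather than here:

    object SketchTokens extends CommonTokens {
      final val IDENTIFIER       = 10 // assumed value, for illustration only
      final val BACKQUOTED_IDENT = 11 // assumed value, for illustration only

      def isIdentifier(code: Int) = code == IDENTIFIER || code == BACKQUOTED_IDENT
      // the literal codes defined above form the contiguous range CHARLIT..STRINGLIT
      def isLiteral(code: Int)    = code >= CHARLIT && code <= STRINGLIT
    }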
diff --git a/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala b/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala
index 553a2088a6..d3f495f280 100755
--- a/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala
@@ -10,10 +10,7 @@ import scala.collection.mutable
import mutable.{ Buffer, ArrayBuffer, ListBuffer }
import scala.util.control.ControlThrowable
import scala.tools.nsc.util.CharArrayReader
-import scala.reflect.internal.util.SourceFile
-import scala.xml.{ Text, TextBuffer }
-import scala.xml.parsing.MarkupParserCommon
-import scala.xml.Utility.{ isNameStart, isNameChar, isSpace }
+import scala.tools.nsc.ast.parser.xml.{MarkupParserCommon, Utility}
import scala.reflect.internal.Chars.{ SU, LF }
// XXX/Note: many/most of the functions in here are almost direct cut and pastes
@@ -26,12 +23,6 @@ import scala.reflect.internal.Chars.{ SU, LF }
// I rewrote most of these, but not as yet the library versions: so if you are
// tempted to touch any of these, please be aware of that situation and try not
// to let it get any worse. -- paulp
-
-/** This trait ...
- *
- * @author Burak Emir
- * @version 1.0
- */
trait MarkupParsers {
self: Parsers =>
@@ -50,8 +41,8 @@ trait MarkupParsers {
import global._
class MarkupParser(parser: SourceFileParser, final val preserveWS: Boolean) extends MarkupParserCommon {
-
- import Tokens.{ EMPTY, LBRACE, RBRACE }
+ import Utility.{ isNameStart, isSpace }
+ import Tokens.{ LBRACE, RBRACE }
type PositionType = Position
type InputType = CharArrayReader
@@ -89,7 +80,7 @@ trait MarkupParsers {
var xEmbeddedBlock = false
- private var debugLastStartElement = new mutable.Stack[(Int, String)]
+ private val debugLastStartElement = new mutable.Stack[(Int, String)]
private def debugLastPos = debugLastStartElement.top._1
private def debugLastElem = debugLastStartElement.top._2
@@ -107,7 +98,7 @@ trait MarkupParsers {
*/
def xCheckEmbeddedBlock: Boolean = {
      // attention: side effect, used in xText
- xEmbeddedBlock = (ch == '{') && { nextch; (ch != '{') }
+ xEmbeddedBlock = (ch == '{') && { nextch(); (ch != '{') }
xEmbeddedBlock
}
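The check above distinguishes a single '{', which opens an embedded Scala block inside an XML literal, from '{{', which escapes a literal brace. A rough usage illustration (from Scala 2.11 on, compiling XML literals needs the scala-xml module on the classpath):

    val embedded = <a>{1 + 1}</a>     // single brace: embedded Scala expression; the text is "2"
    val escaped  = <a>{{literal}}</a> // doubled brace: escaped; the text is "{literal}"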
@@ -123,8 +114,7 @@ trait MarkupParsers {
while (isNameStart(ch)) {
val start = curOffset
val key = xName
- xEQ
- val delim = ch
+ xEQ()
val mid = curOffset
val value: Tree = ch match {
case '"' | '\'' =>
@@ -137,7 +127,7 @@ trait MarkupParsers {
}
case '{' =>
- nextch
+ nextch()
xEmbeddedExpr
case SU =>
throw TruncatedXMLControl
@@ -150,7 +140,7 @@ trait MarkupParsers {
aMap(key) = value
if (ch != '/' && ch != '>')
- xSpace
+ xSpace()
}
aMap
}
@@ -181,22 +171,31 @@ trait MarkupParsers {
xTakeUntil(handle.comment, () => r2p(start, start, curOffset), "-->")
}
- def appendText(pos: Position, ts: Buffer[Tree], txt: String) {
- val toAppend =
- if (preserveWS) Seq(txt)
- else TextBuffer.fromString(txt).toText map (_.text)
+ def appendText(pos: Position, ts: Buffer[Tree], txt: String): Unit = {
+ def append(t: String) = ts append handle.text(pos, t)
+
+ if (preserveWS) append(txt)
+ else {
+ val sb = new StringBuilder()
+
+ txt foreach { c =>
+ if (!isSpace(c)) sb append c
+ else if (sb.isEmpty || !isSpace(sb.last)) sb append ' '
+ }
- toAppend foreach (t => ts append handle.text(pos, t))
+ val trimmed = sb.toString.trim
+ if (!trimmed.isEmpty) append(trimmed)
+ }
}
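The else branch above collapses runs of XML whitespace into single spaces and drops text that trims to nothing. The same coalescing, sketched as a standalone function (Character.isWhitespace stands in for the parser's Utility.isSpace):

    def coalesceWhitespace(txt: String): String = {
      val sb = new StringBuilder
      txt foreach { c =>
        if (!Character.isWhitespace(c)) sb append c
        else if (sb.isEmpty || !Character.isWhitespace(sb.last)) sb append ' '
      }
      sb.toString.trim
    }
    // coalesceWhitespace("  foo \n\t bar  ") == "foo bar"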
/** adds entity/character to ts as side-effect
* @precond ch == '&'
*/
def content_AMP(ts: ArrayBuffer[Tree]) {
- nextch
+ nextch()
val toAppend = ch match {
case '#' => // CharacterRef
- nextch
+ nextch()
val theChar = handle.text(tmppos, xCharRef)
xToken(';')
theChar
@@ -219,17 +218,14 @@ trait MarkupParsers {
/** Returns true if it encounters an end tag (without consuming it),
* appends trees to ts as side-effect.
- *
- * @param ts ...
- * @return ...
*/
private def content_LT(ts: ArrayBuffer[Tree]): Boolean = {
if (ch == '/')
return true // end tag
val toAppend = ch match {
- case '!' => nextch ; if (ch =='[') xCharData else xComment // CDATA or Comment
- case '?' => nextch ; xProcInstr // PI
+ case '!' => nextch() ; if (ch =='[') xCharData else xComment // CDATA or Comment
+ case '?' => nextch() ; xProcInstr // PI
case _ => element // child node
}
@@ -246,7 +242,7 @@ trait MarkupParsers {
tmppos = o2p(curOffset)
ch match {
// end tag, cdata, comment, pi or child node
- case '<' => nextch ; if (content_LT(ts)) return ts
+ case '<' => nextch() ; if (content_LT(ts)) return ts
// either the character '{' or an embedded scala block }
case '{' => content_BRACE(tmppos, ts) // }
// EntityRef or CharRef
@@ -268,7 +264,7 @@ trait MarkupParsers {
val (qname, attrMap) = xTag(())
if (ch == '/') { // empty element
xToken("/>")
- handle.element(r2p(start, start, curOffset), qname, attrMap, true, new ListBuffer[Tree])
+ handle.element(r2p(start, start, curOffset), qname, attrMap, empty = true, new ListBuffer[Tree])
}
else { // handle content
xToken('>')
@@ -278,11 +274,11 @@ trait MarkupParsers {
debugLastStartElement.push((start, qname))
val ts = content
xEndTag(qname)
- debugLastStartElement.pop
+ debugLastStartElement.pop()
val pos = r2p(start, start, curOffset)
qname match {
case "xml:group" => handle.group(pos, ts)
- case _ => handle.element(pos, qname, attrMap, false, ts)
+ case _ => handle.element(pos, qname, attrMap, empty = false, ts)
}
}
}
@@ -297,12 +293,12 @@ trait MarkupParsers {
while (ch != SU) {
if (ch == '}') {
- if (charComingAfter(nextch) == '}') nextch
+ if (charComingAfter(nextch()) == '}') nextch()
else errorBraces()
}
buf append ch
- nextch
+ nextch()
if (xCheckEmbeddedBlock || ch == '<' || ch == '&')
return done
}
@@ -349,12 +345,12 @@ trait MarkupParsers {
content_LT(ts)
// parse more XML ?
- if (charComingAfter(xSpaceOpt) == '<') {
- xSpaceOpt
+ if (charComingAfter(xSpaceOpt()) == '<') {
+ xSpaceOpt()
while (ch == '<') {
- nextch
+ nextch()
ts append element
- xSpaceOpt
+ xSpaceOpt()
}
handle.makeXMLseq(r2p(start, start, curOffset), ts)
}
@@ -375,7 +371,7 @@ trait MarkupParsers {
saving[Boolean, Tree](handle.isPattern, handle.isPattern = _) {
handle.isPattern = true
val tree = xPattern
- xSpaceOpt
+ xSpaceOpt()
tree
}
},
@@ -410,13 +406,13 @@ trait MarkupParsers {
* | Name [S] '/' '>'
*/
def xPattern: Tree = {
- var start = curOffset
+ val start = curOffset
val qname = xName
debugLastStartElement.push((start, qname))
- xSpaceOpt
+ xSpaceOpt()
val ts = new ArrayBuffer[Tree]
- val isEmptyTag = (ch == '/') && { nextch ; true }
+ val isEmptyTag = (ch == '/') && { nextch() ; true }
xToken('>')
if (!isEmptyTag) {
@@ -426,13 +422,13 @@ trait MarkupParsers {
if (xEmbeddedBlock) ts ++= xScalaPatterns
else ch match {
case '<' => // tag
- nextch
+ nextch()
if (ch != '/') ts append xPattern // child
else return false // terminate
case '{' => // embedded Scala patterns
while (ch == '{') {
- nextch
+ nextch()
ts ++= xScalaPatterns
}
assert(!xEmbeddedBlock, "problem with embedded block")
@@ -450,7 +446,7 @@ trait MarkupParsers {
while (doPattern) { } // call until false
xEndTag(qname)
- debugLastStartElement.pop
+ debugLastStartElement.pop()
}
handle.makeXMLpat(r2p(start, start, curOffset), qname, ts)
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
index b9e4109623..ffc45b21ea 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
@@ -9,12 +9,12 @@
package scala.tools.nsc
package ast.parser
-import scala.collection.mutable.{ListBuffer, StringBuilder}
-import scala.reflect.internal.{ ModifierFlags => Flags }
+import scala.collection.{ mutable, immutable }
+import mutable.{ ListBuffer, StringBuilder }
+import scala.reflect.internal.{ Precedence, ModifierFlags => Flags }
import scala.reflect.internal.Chars.{ isScalaLetter }
-import scala.reflect.internal.util.{ SourceFile, OffsetPosition }
+import scala.reflect.internal.util.{ SourceFile, Position, FreshNameCreator }
import Tokens._
-import util.FreshNameCreator
/** Historical note: JavaParsers started life as a direct copy of Parsers
* but at a time when that Parsers had been replaced by a different one.
@@ -25,20 +25,23 @@ import util.FreshNameCreator
* the beginnings of a campaign against this latest incursion by Cutty
* McPastington and his army of very similar soldiers.
*/
-trait ParsersCommon extends ScannersCommon {
+trait ParsersCommon extends ScannersCommon { self =>
val global : Global
- import global._
+ // the use of currentUnit in the parser should be avoided as it might
+ // cause unexpected behaviour when you work with two units at the
+ // same time; use Parser.unit instead
+ import global.{currentUnit => _, _}
+
+ def newLiteral(const: Any) = Literal(Constant(const))
+ def literalUnit = gen.mkSyntheticUnit()
/** This is now an abstract class, only to work around the optimizer:
* methods in traits are never inlined.
*/
abstract class ParserCommon {
val in: ScannerCommon
- def freshName(prefix: String): Name
- def freshTermName(prefix: String): TermName
- def freshTypeName(prefix: String): TypeName
- def deprecationWarning(off: Int, msg: String): Unit
- def accept(token: Int): Int
+ def deprecationWarning(off: Offset, msg: String): Unit
+ def accept(token: Token): Int
/** Methods inParensOrError and similar take a second argument which, should
* the next token not be the expected opener (e.g. LPAREN) will be returned
@@ -56,7 +59,7 @@ trait ParsersCommon extends ScannersCommon {
if (in.token == LPAREN) inParens(body)
else { accept(LPAREN) ; alt }
- @inline final def inParensOrUnit[T](body: => Tree): Tree = inParensOrError(body, Literal(Constant()))
+ @inline final def inParensOrUnit[T](body: => Tree): Tree = inParensOrError(body, literalUnit)
@inline final def inParensOrNil[T](body: => List[T]): List[T] = inParensOrError(body, Nil)
@inline final def inBraces[T](body: => T): T = {
@@ -70,7 +73,7 @@ trait ParsersCommon extends ScannersCommon {
else { accept(LBRACE) ; alt }
@inline final def inBracesOrNil[T](body: => List[T]): List[T] = inBracesOrError(body, Nil)
- @inline final def inBracesOrUnit[T](body: => Tree): Tree = inBracesOrError(body, Literal(Constant()))
+ @inline final def inBracesOrUnit[T](body: => Tree): Tree = inBracesOrError(body, literalUnit)
@inline final def dropAnyBraces[T](body: => T): T =
if (in.token == LBRACE) inBraces(body)
else body
@@ -94,7 +97,7 @@ trait ParsersCommon extends ScannersCommon {
* <ol>
* <li>
* Places all pattern variables in Bind nodes. In a pattern, for
- * identifiers <code>x</code>:<pre>
+ * identifiers `x`:<pre>
* x => x @ _
* x:T => x @ (_ : T)</pre>
* </li>
@@ -130,7 +133,9 @@ self =>
val global: Global
import global._
- case class OpInfo(operand: Tree, operator: Name, offset: Offset)
+ case class OpInfo(lhs: Tree, operator: TermName, targs: List[Tree], offset: Offset) {
+ def precedence = Precedence(operator.toString)
+ }
class SourceFileParser(val source: SourceFile) extends Parser {
@@ -141,37 +146,36 @@ self =>
if (source.isSelfContained) () => compilationUnit()
else () => scriptBody()
- def newScanner = new SourceFileScanner(source)
+ def newScanner(): Scanner = new SourceFileScanner(source)
- val in = newScanner
+ val in = newScanner()
in.init()
- private val globalFresh = new FreshNameCreator.Default
-
- def freshName(prefix: String): Name = freshTermName(prefix)
- def freshTermName(prefix: String): TermName = newTermName(globalFresh.newName(prefix))
- def freshTypeName(prefix: String): TypeName = newTypeName(globalFresh.newName(prefix))
-
- def o2p(offset: Int): Position = new OffsetPosition(source, offset)
- def r2p(start: Int, mid: Int, end: Int): Position = rangePos(source, start, mid, end)
+ def unit = global.currentUnit
// suppress warnings; silent abort on errors
- def warning(offset: Int, msg: String) {}
- def deprecationWarning(offset: Int, msg: String) {}
+ def warning(offset: Offset, msg: String) {}
+ def deprecationWarning(offset: Offset, msg: String) {}
- def syntaxError(offset: Int, msg: String): Unit = throw new MalformedInput(offset, msg)
+ def syntaxError(offset: Offset, msg: String): Unit = throw new MalformedInput(offset, msg)
def incompleteInputError(msg: String): Unit = throw new MalformedInput(source.content.length - 1, msg)
- /** the markup parser */
- lazy val xmlp = new MarkupParser(this, preserveWS = true)
-
object symbXMLBuilder extends SymbolicXMLBuilder(this, preserveWS = true) { // DEBUG choices
val global: self.global.type = self.global
- def freshName(prefix: String): Name = SourceFileParser.this.freshName(prefix)
}
- def xmlLiteral : Tree = xmlp.xLiteral
- def xmlLiteralPattern : Tree = xmlp.xLiteralPattern
+ /** the markup parser
+ * The first time this lazy val is accessed, we assume we were trying to parse an xml literal.
+ * The current position is recorded for later error reporting if it turns out
+ * that we don't have the xml library on the compilation classpath.
+ */
+ private[this] lazy val xmlp = {
+ unit.encounteredXml(o2p(in.offset))
+ new MarkupParser(this, preserveWS = true)
+ }
+
+ def xmlLiteral() : Tree = xmlp.xLiteral
+ def xmlLiteralPattern() : Tree = xmlp.xLiteralPattern
}
class OutlineParser(source: SourceFile) extends SourceFileParser(source) {
@@ -192,23 +196,19 @@ self =>
override def blockExpr(): Tree = skipBraces(EmptyTree)
- override def templateBody(isPre: Boolean) = skipBraces((emptyValDef, EmptyTree.asList))
+ override def templateBody(isPre: Boolean) = skipBraces((noSelfType, EmptyTree.asList))
}
- class UnitParser(val unit: global.CompilationUnit, patches: List[BracePatch]) extends SourceFileParser(unit.source) {
-
- def this(unit: global.CompilationUnit) = this(unit, List())
+ class UnitParser(override val unit: global.CompilationUnit, patches: List[BracePatch]) extends SourceFileParser(unit.source) { uself =>
+ def this(unit: global.CompilationUnit) = this(unit, Nil)
- override def newScanner = new UnitScanner(unit, patches)
+ override def newScanner() = new UnitScanner(unit, patches)
- override def freshTermName(prefix: String): TermName = unit.freshTermName(prefix)
- override def freshTypeName(prefix: String): TypeName = unit.freshTypeName(prefix)
-
- override def warning(offset: Int, msg: String) {
+ override def warning(offset: Offset, msg: String) {
unit.warning(o2p(offset), msg)
}
- override def deprecationWarning(offset: Int, msg: String) {
+ override def deprecationWarning(offset: Offset, msg: String) {
unit.deprecationWarning(o2p(offset), msg)
}
@@ -219,13 +219,14 @@ self =>
try body
finally smartParsing = saved
}
+ def withPatches(patches: List[BracePatch]): UnitParser = new UnitParser(unit, patches)
val syntaxErrors = new ListBuffer[(Int, String)]
def showSyntaxErrors() =
for ((offset, msg) <- syntaxErrors)
unit.error(o2p(offset), msg)
- override def syntaxError(offset: Int, msg: String) {
+ override def syntaxError(offset: Offset, msg: String) {
if (smartParsing) syntaxErrors += ((offset, msg))
else unit.error(o2p(offset), msg)
}
@@ -244,14 +245,15 @@ self =>
if (syntaxErrors.isEmpty) firstTry
else in.healBraces() match {
case Nil => showSyntaxErrors() ; firstTry
- case patches => new UnitParser(unit, patches).parse()
+ case patches => (this withPatches patches).parse()
}
}
}
- final val Local = 0
- final val InBlock = 1
- final val InTemplate = 2
+ type Location = Int
+ final val Local: Location = 0
+ final val InBlock: Location = 1
+ final val InTemplate: Location = 2
// These symbols may not yet be loaded (e.g. in the ide) so don't go
// through definitions to obtain the names.
@@ -268,20 +270,57 @@ self =>
import nme.raw
- abstract class Parser extends ParserCommon {
+ abstract class Parser extends ParserCommon { parser =>
val in: Scanner
+ def unit: CompilationUnit
+ def source: SourceFile
- def freshName(prefix: String): Name
- def freshTermName(prefix: String): TermName
- def freshTypeName(prefix: String): TypeName
- def o2p(offset: Int): Position
- def r2p(start: Int, mid: Int, end: Int): Position
+ /** Scoping operator used to temporarily look into the future.
+ * Backs up scanner data before evaluating a block and restores it after.
+ */
+ @inline final def lookingAhead[T](body: => T): T = {
+ val saved = new ScannerData {} copyFrom in
+ in.nextToken()
+ try body finally in copyFrom saved
+ }
+
+ /** Perform an operation while peeking ahead.
+ * Pushback if the operation yields an empty tree or blows to pieces.
+ */
+ @inline def peekingAhead(tree: =>Tree): Tree = {
+ @inline def peekahead() = {
+ in.prev copyFrom in
+ in.nextToken()
+ }
+ @inline def pushback() = {
+ in.next copyFrom in
+ in copyFrom in.prev
+ }
+ peekahead()
+ // try it, in case it is recoverable
+ val res = try tree catch { case e: Exception => pushback() ; throw e }
+ if (res.isEmpty) pushback()
+ res
+ }
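Both helpers rely on copying the scanner state aside before a speculative parse and restoring it afterwards. A minimal sketch of that save/advance/restore pattern, with a mutable cursor standing in for ScannerData (Cursor and its members are invented for the example):

    final class Cursor(var pos: Int) {
      def copyFrom(other: Cursor): Unit = pos = other.pos
    }

    def lookingAheadSketch[T](in: Cursor)(body: => T): T = {
      val saved = new Cursor(0)
      saved copyFrom in                   // back up the current state
      in.pos += 1                         // advance, like in.nextToken()
      try body finally in copyFrom saved  // evaluate the block, then restore
    }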
+
+ class ParserTreeBuilder extends TreeBuilder {
+ val global: self.global.type = self.global
+ def unit = parser.unit
+ def source = parser.source
+ }
+ val treeBuilder = new ParserTreeBuilder
+ import treeBuilder.{global => _, unit => _, source => _, fresh => _, _}
+
+ implicit def fresh: FreshNameCreator = unit.fresh
+
+ def o2p(offset: Offset): Position = Position.offset(source, offset)
+ def r2p(start: Offset, mid: Offset, end: Offset): Position = rangePos(source, start, mid, end)
+ def r2p(start: Offset, mid: Offset): Position = r2p(start, mid, in.lastOffset max start)
+ def r2p(offset: Offset): Position = r2p(offset, offset)
/** whether a non-continuable syntax error has been seen */
private var lastErrorOffset : Int = -1
- import treeBuilder.{global => _, _}
-
/** The types of the context bounds of type parameters of the surrounding class
*/
private var classContextBounds: List[Tree] = Nil
@@ -291,6 +330,7 @@ self =>
finally classContextBounds = saved
}
+
/** Are we inside the Scala package? Set for files that start with package scala
*/
private var inScalaPackage = false
@@ -299,112 +339,108 @@ self =>
inScalaPackage = false
currentPackage = ""
}
- private lazy val primitiveNames: Set[Name] = tpnme.ScalaValueNames.toSet
-
- private def inScalaRootPackage = inScalaPackage && currentPackage == "scala"
- private def isScalaArray(name: Name) = inScalaRootPackage && name == tpnme.Array
- private def isPrimitiveType(name: Name) = inScalaRootPackage && primitiveNames(name)
+ private def inScalaRootPackage = inScalaPackage && currentPackage == "scala"
def parseStartRule: () => Tree
- /** This is the general parse entry point.
- */
- def parse(): Tree = {
- val t = parseStartRule()
+ def parseRule[T](rule: this.type => T): T = {
+ val t = rule(this)
accept(EOF)
t
}
+ /** This is the general parse entry point.
+ */
+ def parse(): Tree = parseRule(_.parseStartRule())
+
+ /** These are alternative entry points for repl, script runner, toolbox and parsing in macros.
+ */
+ def parseStats(): List[Tree] = parseRule(_.templateStats())
+ def parseStatsOrPackages(): List[Tree] = parseRule(_.templateOrTopStatSeq())
+
/** This is the parse entry point for code which is not self-contained, e.g.
* a script which is a series of template statements. They will be
* swaddled in Trees until the AST is equivalent to the one returned
* by compilationUnit().
*/
def scriptBody(): Tree = {
- val stmts = templateStats()
- accept(EOF)
+ val stmts = parseStats()
def mainModuleName = newTermName(settings.script.value)
- /** If there is only a single object template in the file and it has a
- * suitable main method, we will use it rather than building another object
- * around it. Since objects are loaded lazily the whole script would have
- * been a no-op, so we're not taking much liberty.
+ /* If there is only a single object template in the file and it has a
+ * suitable main method, we will use it rather than building another object
+ * around it. Since objects are loaded lazily the whole script would have
+ * been a no-op, so we're not taking much liberty.
*/
def searchForMain(): Option[Tree] = {
- /** Have to be fairly liberal about what constitutes a main method since
- * nothing has been typed yet - for instance we can't assume the parameter
- * type will look exactly like "Array[String]" as it could have been renamed
- * via import, etc.
+ /* Have to be fairly liberal about what constitutes a main method since
+ * nothing has been typed yet - for instance we can't assume the parameter
+ * type will look exactly like "Array[String]" as it could have been renamed
+ * via import, etc.
*/
def isMainMethod(t: Tree) = t match {
case DefDef(_, nme.main, Nil, List(_), _, _) => true
case _ => false
}
- /** For now we require there only be one top level object. */
+ /* For now we require there only be one top level object. */
var seenModule = false
val newStmts = stmts collect {
case t @ Import(_, _) => t
case md @ ModuleDef(mods, name, template) if !seenModule && (md exists isMainMethod) =>
seenModule = true
- /** This slightly hacky situation arises because we have no way to communicate
- * back to the scriptrunner what the name of the program is. Even if we were
- * willing to take the sketchy route of settings.script.value = progName, that
- * does not work when using fsc. And to find out in advance would impose a
- * whole additional parse. So instead, if the actual object's name differs from
- * what the script is expecting, we transform it to match.
+ /* This slightly hacky situation arises because we have no way to communicate
+ * back to the scriptrunner what the name of the program is. Even if we were
+ * willing to take the sketchy route of settings.script.value = progName, that
+ * does not work when using fsc. And to find out in advance would impose a
+ * whole additional parse. So instead, if the actual object's name differs from
+ * what the script is expecting, we transform it to match.
*/
if (name == mainModuleName) md
else treeCopy.ModuleDef(md, mods, mainModuleName, template)
case _ =>
- /** If we see anything but the above, fail. */
+ /* If we see anything but the above, fail. */
return None
}
- Some(makePackaging(0, emptyPkg, newStmts))
+ Some(makeEmptyPackage(0, newStmts))
}
if (mainModuleName == newTermName(ScriptRunner.defaultScriptMain))
searchForMain() foreach { return _ }
- /** Here we are building an AST representing the following source fiction,
+ /* Here we are building an AST representing the following source fiction,
* where `moduleName` is from -Xscript (defaults to "Main") and <stmts> are
* the result of parsing the script file.
*
* {{{
* object moduleName {
- * def main(argv: Array[String]): Unit = {
- * val args = argv
+ * def main(args: Array[String]): Unit =
* new AnyRef {
* stmts
* }
- * }
* }
* }}}
*/
- import definitions._
-
- def emptyPkg = atPos(0, 0, 0) { Ident(nme.EMPTY_PACKAGE_NAME) }
def emptyInit = DefDef(
NoMods,
nme.CONSTRUCTOR,
Nil,
ListOfNil,
TypeTree(),
- Block(List(Apply(gen.mkSuperSelect, Nil)), Literal(Constant(())))
+ Block(List(Apply(gen.mkSuperInitCall, Nil)), literalUnit)
)
// def main
def mainParamType = AppliedTypeTree(Ident(tpnme.Array), List(Ident(tpnme.String)))
- def mainParameter = List(ValDef(Modifiers(Flags.PARAM), nme.argv, mainParamType, EmptyTree))
- def mainSetArgv = List(ValDef(NoMods, nme.args, TypeTree(), Ident(nme.argv)))
- def mainDef = DefDef(NoMods, nme.main, Nil, List(mainParameter), scalaDot(tpnme.Unit), Block(mainSetArgv, makeAnonymousNew(stmts)))
+ def mainParameter = List(ValDef(Modifiers(Flags.PARAM), nme.args, mainParamType, EmptyTree))
+ def mainDef = DefDef(NoMods, nme.main, Nil, List(mainParameter), scalaDot(tpnme.Unit), gen.mkAnonymousNew(stmts))
// object Main
def moduleName = newTermName(ScriptRunner scriptMain settings)
- def moduleBody = Template(List(atPos(o2p(in.offset))(scalaAnyRefConstr)), emptyValDef, List(emptyInit, mainDef))
+ def moduleBody = Template(atInPos(scalaAnyRefConstr) :: Nil, noSelfType, List(emptyInit, mainDef))
def moduleDef = ModuleDef(NoMods, moduleName, moduleBody)
// package <empty> { ... }
- makePackaging(0, emptyPkg, List(moduleDef))
+ makeEmptyPackage(0, moduleDef :: Nil)
}
/* --------------- PLACEHOLDERS ------------------------------------------- */
@@ -429,13 +465,13 @@ self =>
placeholderParams match {
case vd :: _ =>
- syntaxError(vd.pos, "unbound placeholder parameter", false)
+ syntaxError(vd.pos, "unbound placeholder parameter", skipIt = false)
placeholderParams = List()
case _ =>
}
placeholderTypes match {
case td :: _ =>
- syntaxError(td.pos, "unbound wildcard type", false)
+ syntaxError(td.pos, "unbound wildcard type", skipIt = false)
placeholderTypes = List()
case _ =>
}
@@ -468,7 +504,7 @@ self =>
/* ------------- ERROR HANDLING ------------------------------------------- */
- var assumedClosingParens = scala.collection.mutable.Map(RPAREN -> 0, RBRACKET -> 0, RBRACE -> 0)
+ val assumedClosingParens = mutable.Map(RPAREN -> 0, RBRACKET -> 0, RBRACE -> 0)
private var inFunReturnType = false
@inline private def fromWithinReturnType[T](body: => T): T = {
@@ -478,7 +514,7 @@ self =>
finally inFunReturnType = saved
}
- protected def skip(targetToken: Int) {
+ protected def skip(targetToken: Token) {
var nparens = 0
var nbraces = 0
while (true) {
@@ -506,17 +542,17 @@ self =>
in.nextToken()
}
}
- def warning(offset: Int, msg: String): Unit
+ def warning(offset: Offset, msg: String): Unit
def incompleteInputError(msg: String): Unit
private def syntaxError(pos: Position, msg: String, skipIt: Boolean) {
syntaxError(pos pointOrElse in.offset, msg, skipIt)
}
- def syntaxError(offset: Int, msg: String): Unit
+ def syntaxError(offset: Offset, msg: String): Unit
def syntaxError(msg: String, skipIt: Boolean) {
syntaxError(in.offset, msg, skipIt)
}
- def syntaxError(offset: Int, msg: String, skipIt: Boolean) {
+ def syntaxError(offset: Offset, msg: String, skipIt: Boolean) {
if (offset > lastErrorOffset) {
syntaxError(offset, msg)
// no more errors on this token.
@@ -534,15 +570,19 @@ self =>
else
syntaxError(in.offset, msg, skipIt)
}
+ def syntaxErrorOrIncompleteAnd[T](msg: String, skipIt: Boolean)(and: T): T = {
+ syntaxErrorOrIncomplete(msg, skipIt)
+ and
+ }
- def expectedMsg(token: Int): String =
- token2string(token) + " expected but " +token2string(in.token) + " found."
+ def expectedMsgTemplate(exp: String, fnd: String) = s"$exp expected but $fnd found."
+ def expectedMsg(token: Token): String = expectedMsgTemplate(token2string(token), token2string(in.token))
/** Consume one token of the specified type, or signal an error if it is not there. */
- def accept(token: Int): Int = {
+ def accept(token: Token): Offset = {
val offset = in.offset
if (in.token != token) {
- syntaxErrorOrIncomplete(expectedMsg(token), false)
+ syntaxErrorOrIncomplete(expectedMsg(token), skipIt = false)
if ((token == RPAREN || token == RBRACE || token == RBRACKET))
if (in.parenBalance(token) + assumedClosingParens(token) < 0)
assumedClosingParens(token) += 1
@@ -568,25 +608,16 @@ self =>
if (!isStatSeqEnd)
acceptStatSep()
- def errorTypeTree = TypeTree() setType ErrorType setPos o2p(in.offset)
- def errorTermTree = Literal(Constant(null)) setPos o2p(in.offset)
- def errorPatternTree = Ident(nme.WILDCARD) setPos o2p(in.offset)
+ def errorTypeTree = setInPos(TypeTree() setType ErrorType)
+ def errorTermTree = setInPos(newLiteral(null))
+ def errorPatternTree = setInPos(Ident(nme.WILDCARD))
/** Check that type parameter is not by name or repeated. */
def checkNotByNameOrVarargs(tpt: Tree) = {
if (treeInfo isByNameParamType tpt)
- syntaxError(tpt.pos, "no by-name parameter type allowed here", false)
+ syntaxError(tpt.pos, "no by-name parameter type allowed here", skipIt = false)
else if (treeInfo isRepeatedParamType tpt)
- syntaxError(tpt.pos, "no * parameter type allowed here", false)
- }
-
- /** Check that tree is a legal clause of a forSome. */
- def checkLegalExistential(t: Tree) = t match {
- case TypeDef(_, _, _, TypeBoundsTree(_, _)) |
- ValDef(_, _, _, EmptyTree) | EmptyTree =>
- ;
- case _ =>
- syntaxError(t.pos, "not a legal existential clause", false)
+ syntaxError(tpt.pos, "no * parameter type allowed here", skipIt = false)
}
/* -------------- TOKEN CLASSES ------------------------------------------- */
@@ -597,6 +628,8 @@ self =>
case _ => false
}
+ def isAnnotation: Boolean = in.token == AT
+
def isLocalModifier: Boolean = in.token match {
case ABSTRACT | FINAL | SEALED | IMPLICIT | LAZY => true
case _ => false
@@ -617,20 +650,26 @@ self =>
case INTLIT | LONGLIT | FLOATLIT | DOUBLELIT => true
case _ => false
}
- def isUnaryOp = isIdent && raw.isUnary(in.name)
- def isRawStar = isIdent && in.name == raw.STAR
- def isRawBar = isIdent && in.name == raw.BAR
+
+ def isIdentExcept(except: Name) = isIdent && in.name != except
+ def isIdentOf(name: Name) = isIdent && in.name == name
+
+ def isUnaryOp = isIdent && raw.isUnary(in.name)
+ def isRawStar = isRawIdent && in.name == raw.STAR
+ def isRawBar = isRawIdent && in.name == raw.BAR
+ def isRawIdent = in.token == IDENTIFIER
def isIdent = in.token == IDENTIFIER || in.token == BACKQUOTED_IDENT
+ def isMacro = in.token == IDENTIFIER && in.name == nme.MACROkw
- def isLiteralToken(token: Int) = token match {
+ def isLiteralToken(token: Token) = token match {
case CHARLIT | INTLIT | LONGLIT | FLOATLIT | DOUBLELIT |
STRINGLIT | INTERPOLATIONID | SYMBOLLIT | TRUE | FALSE | NULL => true
case _ => false
}
def isLiteral = isLiteralToken(in.token)
- def isExprIntroToken(token: Int): Boolean = isLiteralToken(token) || (token match {
+ def isExprIntroToken(token: Token): Boolean = isLiteralToken(token) || (token match {
case IDENTIFIER | BACKQUOTED_IDENT |
THIS | SUPER | IF | FOR | NEW | USCORE | TRY | WHILE |
DO | RETURN | THROW | LPAREN | LBRACE | XMLSTART => true
@@ -639,17 +678,17 @@ self =>
def isExprIntro: Boolean = isExprIntroToken(in.token)
- def isTypeIntroToken(token: Int): Boolean = token match {
+ def isTypeIntroToken(token: Token): Boolean = token match {
case IDENTIFIER | BACKQUOTED_IDENT | THIS |
SUPER | USCORE | LPAREN | AT => true
case _ => false
}
- def isTypeIntro: Boolean = isTypeIntroToken(in.token)
-
def isStatSeqEnd = in.token == RBRACE || in.token == EOF
- def isStatSep(token: Int): Boolean =
+ def isCaseDefEnd = in.token == RBRACE || in.token == CASE || in.token == EOF
+
+ def isStatSep(token: Token): Boolean =
token == NEWLINE || token == NEWLINES || token == SEMI
def isStatSep: Boolean = isStatSep(in.token)
@@ -657,42 +696,20 @@ self =>
/* --------- COMMENT AND ATTRIBUTE COLLECTION ----------------------------- */
- /** Join the comment associated with a definition. */
- def joinComment(trees: => List[Tree]): List[Tree] = {
- val doc = in.flushDoc
- if ((doc ne null) && doc.raw.length > 0) {
- val joined = trees map {
- t =>
- DocDef(doc, t) setPos {
- if (t.pos.isDefined) {
- val pos = doc.pos.withEnd(t.pos.endOrPoint)
- // always make the position transparent
- pos.makeTransparent
- } else {
- t.pos
- }
- }
- }
- joined.find(_.pos.isOpaqueRange) foreach {
- main =>
- val mains = List(main)
- joined foreach { t => if (t ne main) ensureNonOverlapping(t, mains) }
- }
- joined
- }
- else trees
- }
+ /** A hook for joining the comment associated with a definition.
+ * Overridden by scaladoc.
+ */
+ def joinComment(trees: => List[Tree]): List[Tree] = trees
/* ---------- TREE CONSTRUCTION ------------------------------------------- */
- def atPos[T <: Tree](offset: Int)(t: T): T =
- global.atPos(r2p(offset, offset, in.lastOffset max offset))(t)
- def atPos[T <: Tree](start: Int, point: Int)(t: T): T =
- global.atPos(r2p(start, point, in.lastOffset max start))(t)
- def atPos[T <: Tree](start: Int, point: Int, end: Int)(t: T): T =
- global.atPos(r2p(start, point, end))(t)
- def atPos[T <: Tree](pos: Position)(t: T): T =
- global.atPos(pos)(t)
+ def atPos[T <: Tree](offset: Offset)(t: T): T = atPos(r2p(offset))(t)
+ def atPos[T <: Tree](start: Offset, point: Offset)(t: T): T = atPos(r2p(start, point))(t)
+ def atPos[T <: Tree](start: Offset, point: Offset, end: Offset)(t: T): T = atPos(r2p(start, point, end))(t)
+ def atPos[T <: Tree](pos: Position)(t: T): T = global.atPos(pos)(t)
+
+ def atInPos[T <: Tree](t: T): T = atPos(o2p(in.offset))(t)
+ def setInPos[T <: Tree](t: T): T = t setPos o2p(in.offset)
/** Convert tree to formal parameter list. */
def convertToParams(tree: Tree): List[ValDef] = tree match {
@@ -705,29 +722,40 @@ self =>
def removeAsPlaceholder(name: Name) {
placeholderParams = placeholderParams filter (_.name != name)
}
+ def errorParam = makeParam(nme.ERROR, errorTypeTree setPos o2p(tree.pos.end))
tree match {
case Ident(name) =>
removeAsPlaceholder(name)
- makeParam(name, TypeTree() setPos o2p(tree.pos.endOrPoint))
+ makeParam(name.toTermName, TypeTree() setPos o2p(tree.pos.end))
case Typed(Ident(name), tpe) if tpe.isType => // get the ident!
removeAsPlaceholder(name)
- makeParam(name, tpe)
+ makeParam(name.toTermName, tpe)
+ case build.SyntacticTuple(as) =>
+ val arity = as.length
+ val example = analyzer.exampleTuplePattern(as map { case Ident(name) => name; case _ => nme.EMPTY })
+ val msg =
+ sm"""|not a legal formal parameter.
+ |Note: Tuples cannot be directly destructured in method or function parameters.
+ | Either create a single parameter accepting the Tuple${arity},
+               |      or consider a pattern matching anonymous function: `{ case $example => ... }`"""
+ syntaxError(tree.pos, msg, skipIt = false)
+ errorParam
case _ =>
- syntaxError(tree.pos, "not a legal formal parameter", false)
- makeParam(nme.ERROR, errorTypeTree setPos o2p(tree.pos.endOrPoint))
+ syntaxError(tree.pos, "not a legal formal parameter", skipIt = false)
+ errorParam
}
}
/** Convert (qual)ident to type identifier. */
def convertToTypeId(tree: Tree): Tree = atPos(tree.pos) {
convertToTypeName(tree) getOrElse {
- syntaxError(tree.pos, "identifier expected", false)
+ syntaxError(tree.pos, "identifier expected", skipIt = false)
errorTypeTree
}
}
/** {{{ part { `sep` part } }}},or if sepFirst is true, {{{ { `sep` part } }}}. */
- final def tokenSeparated[T](separator: Int, sepFirst: Boolean, part: => T): List[T] = {
+ final def tokenSeparated[T](separator: Token, sepFirst: Boolean, part: => T): List[T] = {
val ts = new ListBuffer[T]
if (!sepFirst)
ts += part
@@ -740,7 +768,7 @@ self =>
}
@inline final def commaSeparated[T](part: => T): List[T] = tokenSeparated(COMMA, sepFirst = false, part)
@inline final def caseSeparated[T](part: => T): List[T] = tokenSeparated(CASE, sepFirst = true, part)
- @inline final def readAnnots[T](part: => T): List[T] = tokenSeparated(AT, sepFirst = true, part)
+ def readAnnots(part: => Tree): List[Tree] = tokenSeparated(AT, sepFirst = true, part)
/* --------- OPERAND/OPERATOR STACK --------------------------------------- */
@@ -751,53 +779,72 @@ self =>
var opstack: List[OpInfo] = Nil
- def precedence(operator: Name): Int =
- if (operator eq nme.ERROR) -1
- else {
- val firstCh = operator.startChar
- if (isScalaLetter(firstCh)) 1
- else if (nme.isOpAssignmentName(operator)) 0
- else firstCh match {
- case '|' => 2
- case '^' => 3
- case '&' => 4
- case '=' | '!' => 5
- case '<' | '>' => 6
- case ':' => 7
- case '+' | '-' => 8
- case '*' | '/' | '%' => 9
- case _ => 10
- }
- }
+ @deprecated("Use `scala.reflect.internal.Precedence`", "2.11.0")
+ def precedence(operator: Name): Int = Precedence(operator.toString).level
- def checkSize(kind: String, size: Int, max: Int) {
- if (size > max) syntaxError("too many "+kind+", maximum = "+max, false)
+ private def opHead = opstack.head
+ private def headPrecedence = opHead.precedence
+ private def popOpInfo(): OpInfo = try opHead finally opstack = opstack.tail
+ private def pushOpInfo(top: Tree): Unit = {
+ val name = in.name
+ val offset = in.offset
+ ident()
+ val targs = if (in.token == LBRACKET) exprTypeArgs() else Nil
+ val opinfo = OpInfo(top, name, targs, offset)
+ opstack ::= opinfo
}
- def checkAssoc(offset: Int, op: Name, leftAssoc: Boolean) =
+ def checkHeadAssoc(leftAssoc: Boolean) = checkAssoc(opHead.offset, opHead.operator, leftAssoc)
+ def checkAssoc(offset: Offset, op: Name, leftAssoc: Boolean) = (
if (treeInfo.isLeftAssoc(op) != leftAssoc)
- syntaxError(
- offset, "left- and right-associative operators with same precedence may not be mixed", false)
-
- def reduceStack(isExpr: Boolean, base: List[OpInfo], top0: Tree, prec: Int, leftAssoc: Boolean): Tree = {
- var top = top0
- if (opstack != base && precedence(opstack.head.operator) == prec)
- checkAssoc(opstack.head.offset, opstack.head.operator, leftAssoc)
- while (opstack != base &&
- (prec < precedence(opstack.head.operator) ||
- leftAssoc && prec == precedence(opstack.head.operator))) {
- val opinfo = opstack.head
- opstack = opstack.tail
- val opPos = r2p(opinfo.offset, opinfo.offset, opinfo.offset+opinfo.operator.length)
- val lPos = opinfo.operand.pos
- val start = if (lPos.isDefined) lPos.startOrPoint else opPos.startOrPoint
- val rPos = top.pos
- val end = if (rPos.isDefined) rPos.endOrPoint else opPos.endOrPoint
- top = atPos(start, opinfo.offset, end) {
- makeBinop(isExpr, opinfo.operand, opinfo.operator, top, opPos)
- }
- }
- top
+ syntaxError(offset, "left- and right-associative operators with same precedence may not be mixed", skipIt = false)
+ )
+
+ def finishPostfixOp(start: Int, base: List[OpInfo], opinfo: OpInfo): Tree = {
+ if (opinfo.targs.nonEmpty)
+ syntaxError(opinfo.offset, "type application is not allowed for postfix operators")
+
+ val od = stripParens(reduceExprStack(base, opinfo.lhs))
+ makePostfixSelect(start, opinfo.offset, od, opinfo.operator)
+ }
+
+ def finishBinaryOp(isExpr: Boolean, opinfo: OpInfo, rhs: Tree): Tree = {
+ import opinfo._
+ val operatorPos: Position = Position.range(rhs.pos.source, offset, offset, offset + operator.length)
+ val pos = lhs.pos union rhs.pos union operatorPos withPoint offset
+
+ atPos(pos)(makeBinop(isExpr, lhs, operator, rhs, operatorPos, opinfo.targs))
+ }
+
+ def reduceExprStack(base: List[OpInfo], top: Tree): Tree = reduceStack(isExpr = true, base, top)
+ def reducePatternStack(base: List[OpInfo], top: Tree): Tree = reduceStack(isExpr = false, base, top)
+
+ def reduceStack(isExpr: Boolean, base: List[OpInfo], top: Tree): Tree = {
+ val opPrecedence = if (isIdent) Precedence(in.name.toString) else Precedence(0)
+ val leftAssoc = !isIdent || (treeInfo isLeftAssoc in.name)
+
+ reduceStack(isExpr, base, top, opPrecedence, leftAssoc)
+ }
+
+ def reduceStack(isExpr: Boolean, base: List[OpInfo], top: Tree, opPrecedence: Precedence, leftAssoc: Boolean): Tree = {
+ def isDone = opstack == base
+ def lowerPrecedence = !isDone && (opPrecedence < headPrecedence)
+ def samePrecedence = !isDone && (opPrecedence == headPrecedence)
+ def canReduce = lowerPrecedence || leftAssoc && samePrecedence
+
+ if (samePrecedence)
+ checkHeadAssoc(leftAssoc)
+
+ def loop(top: Tree): Tree = if (canReduce) {
+ val info = popOpInfo()
+ if (!isExpr && info.targs.nonEmpty) {
+ syntaxError(info.offset, "type application is not allowed in pattern")
+ info.targs.foreach(_.setType(ErrorType))
+ }
+ loop(finishBinaryOp(isExpr, info, top))
+ } else top
+
+ loop(top)
}
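The canReduce condition above is the heart of the operator-precedence reduction: the incoming operator forces the stack head to be reduced when it binds less tightly, or equally tightly while being left-associative. Sketched as a standalone predicate, with integer levels standing in for scala.reflect.internal.Precedence:

    // Mirrors lowerPrecedence || (leftAssoc && samePrecedence) above.
    def canReduceSketch(headLevel: Int, incomingLevel: Int, incomingLeftAssoc: Boolean): Boolean =
      incomingLevel < headLevel || (incomingLeftAssoc && incomingLevel == headLevel)

    // Parsing `a + b * c + d`: when the second `+` (level 8) arrives, `*` (level 9) sits on
    // top of the stack, so `b * c` is reduced first; then the first `+` (level 8, left-
    // associative) reduces `a + (b * c)` before the new `+` is pushed.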
/* -------- IDENTIFIERS AND LITERALS ------------------------------------------- */
@@ -814,7 +861,7 @@ self =>
def argType(): Tree
def functionArgType(): Tree
- private def tupleInfixType(start: Int) = {
+ private def tupleInfixType(start: Offset) = {
in.nextToken()
if (in.token == RPAREN) {
in.nextToken()
@@ -827,7 +874,7 @@ self =>
atPos(start, in.skipToken()) { makeFunctionTypeTree(ts, typ()) }
else {
ts foreach checkNotByNameOrVarargs
- val tuple = atPos(start) { makeTupleType(ts, flattenUnary = true) }
+ val tuple = atPos(start) { makeTupleType(ts) }
infixTypeRest(
compoundTypeRest(
annotTypeRest(
@@ -839,9 +886,14 @@ self =>
}
}
private def makeExistentialTypeTree(t: Tree) = {
- val whereClauses = refinement()
- whereClauses foreach checkLegalExistential
- ExistentialTypeTree(t, whereClauses)
+ // EmptyTrees in the result of refinement() stand for parse errors
+ // so it's okay for us to filter them out here
+ ExistentialTypeTree(t, refinement() flatMap {
+ case t @ TypeDef(_, _, _, TypeBoundsTree(_, _)) => Some(t)
+ case t @ ValDef(_, _, _, EmptyTree) => Some(t)
+ case EmptyTree => None
+ case _ => syntaxError(t.pos, "not a legal existential clause", skipIt = false); None
+ })
}
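The two cases kept by the filter correspond to the only clause forms a forSome may contain: an abstract type member with bounds and an abstract val. A small illustration (existential types may need `import scala.language.existentials` to compile without a feature warning):

    import scala.language.existentials

    type BoundedMember = List[T] forSome { type T <: AnyRef }   // TypeDef with a TypeBoundsTree
    type ValMember     = List[x.type] forSome { val x: AnyRef } // ValDef with an empty rhs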
/** {{{
@@ -889,7 +941,7 @@ self =>
def simpleType(): Tree = {
val start = in.offset
simpleTypeRest(in.token match {
- case LPAREN => atPos(start)(makeTupleType(inParens(types()), flattenUnary = true))
+ case LPAREN => atPos(start)(makeTupleType(inParens(types())))
case USCORE => wildcardType(in.skipToken())
case _ =>
path(thisOK = false, typeOK = true) match {
@@ -904,11 +956,11 @@ self =>
val nameOffset = in.offset
val name = identForType(skipIt = false)
val point = if (name == tpnme.ERROR) hashOffset else nameOffset
- atPos(t.pos.startOrPoint, point)(SelectFromTypeTree(t, name))
+ atPos(t.pos.start, point)(SelectFromTypeTree(t, name))
}
def simpleTypeRest(t: Tree): Tree = in.token match {
case HASH => simpleTypeRest(typeProjection(t))
- case LBRACKET => simpleTypeRest(atPos(t.pos.startOrPoint, t.pos.point)(AppliedTypeTree(t, typeArgs())))
+ case LBRACKET => simpleTypeRest(atPos(t.pos.start, t.pos.point)(AppliedTypeTree(t, typeArgs())))
case _ => t
}
@@ -918,49 +970,62 @@ self =>
* }}}
*/
def compoundType(): Tree = compoundTypeRest(
- if (in.token == LBRACE) atPos(o2p(in.offset))(scalaAnyRefConstr)
+ if (in.token == LBRACE) atInPos(scalaAnyRefConstr)
else annotType()
)
def compoundTypeRest(t: Tree): Tree = {
- var ts = new ListBuffer[Tree] += t
+ val ts = new ListBuffer[Tree] += t
while (in.token == WITH) {
in.nextToken()
ts += annotType()
}
newLineOptWhenFollowedBy(LBRACE)
- atPos(t.pos.startOrPoint) {
- if (in.token == LBRACE) {
- // Warn if they are attempting to refine Unit; we can't be certain it's
- // scala.Unit they're refining because at this point all we have is an
- // identifier, but at a later stage we lose the ability to tell an empty
- // refinement from no refinement at all. See bug #284.
- for (Ident(name) <- ts) name.toString match {
- case "Unit" | "scala.Unit" =>
- warning("Detected apparent refinement of Unit; are you missing an '=' sign?")
- case _ =>
- }
- CompoundTypeTree(Template(ts.toList, emptyValDef, refinement()))
- }
- else
- makeIntersectionTypeTree(ts.toList)
+ val types = ts.toList
+ val braceOffset = in.offset
+ val hasRefinement = in.token == LBRACE
+ val refinements = if (hasRefinement) refinement() else Nil
+ // Warn if they are attempting to refine Unit; we can't be certain it's
+ // scala.Unit they're refining because at this point all we have is an
+ // identifier, but at a later stage we lose the ability to tell an empty
+ // refinement from no refinement at all. See bug #284.
+ if (hasRefinement) types match {
+ case Ident(name) :: Nil if name endsWith "Unit" => warning(braceOffset, "Detected apparent refinement of Unit; are you missing an '=' sign?")
+ case _ =>
+ }
+ // The second case includes an empty refinement - refinements is empty, but
+ // it still gets a CompoundTypeTree.
+ ts.toList match {
+ case tp :: Nil if !hasRefinement => tp // single type, no refinement, already positioned
+ case tps => atPos(t.pos.start)(CompoundTypeTree(Template(tps, noSelfType, refinements)))
}
}
def infixTypeRest(t: Tree, mode: InfixMode.Value): Tree = {
- if (isIdent && in.name != nme.STAR) {
- val opOffset = in.offset
+ // Detect postfix star for repeated args.
+ // Only RPAREN can follow, but accept COMMA and EQUALS for error's sake.
+ // Take RBRACE as a paren typo.
+ def checkRepeatedParam = if (isRawStar) {
+ lookingAhead (in.token match {
+ case RPAREN | COMMA | EQUALS | RBRACE => t
+ case _ => EmptyTree
+ })
+ } else EmptyTree
+ def asInfix = {
+ val opOffset = in.offset
val leftAssoc = treeInfo.isLeftAssoc(in.name)
- if (mode != InfixMode.FirstOp) checkAssoc(opOffset, in.name, leftAssoc = mode == InfixMode.LeftOp)
- val op = identForType()
- val tycon = atPos(opOffset) { Ident(op) }
+ if (mode != InfixMode.FirstOp)
+ checkAssoc(opOffset, in.name, leftAssoc = mode == InfixMode.LeftOp)
+ val tycon = atPos(opOffset) { Ident(identForType()) }
newLineOptWhenFollowing(isTypeIntroToken)
- def mkOp(t1: Tree) = atPos(t.pos.startOrPoint, opOffset) { AppliedTypeTree(tycon, List(t, t1)) }
+ def mkOp(t1: Tree) = atPos(t.pos.start, opOffset) { AppliedTypeTree(tycon, List(t, t1)) }
if (leftAssoc)
infixTypeRest(mkOp(compoundType()), InfixMode.LeftOp)
else
mkOp(infixType(InfixMode.RightOp))
- } else t
+ }
+ if (isIdent) checkRepeatedParam orElse asInfix
+ else t
}
/** {{{
@@ -979,15 +1044,15 @@ self =>
}
/** Assumed (provisionally) to be TermNames. */
- def ident(skipIt: Boolean): Name =
+ def ident(skipIt: Boolean): Name = (
if (isIdent) {
val name = in.name.encode
in.nextToken()
name
- } else {
- syntaxErrorOrIncomplete(expectedMsg(IDENTIFIER), skipIt)
- nme.ERROR
}
+ else syntaxErrorOrIncompleteAnd(expectedMsg(IDENTIFIER), skipIt)(nme.ERROR)
+ )
+
def ident(): Name = ident(skipIt = true)
def rawIdent(): Name = try in.name finally in.nextToken()
@@ -995,11 +1060,13 @@ self =>
def identForType(): TypeName = ident().toTypeName
def identForType(skipIt: Boolean): TypeName = ident(skipIt).toTypeName
+ def identOrMacro(): Name = if (isMacro) rawIdent() else ident()
+
def selector(t: Tree): Tree = {
val point = in.offset
//assert(t.pos.isDefined, t)
if (t != EmptyTree)
- Select(t, ident(skipIt = false)) setPos r2p(t.pos.startOrPoint, point, in.lastOffset)
+ Select(t, ident(skipIt = false)) setPos r2p(t.pos.start, point, in.lastOffset)
else
errorTermTree // has already been reported
}
@@ -1053,10 +1120,10 @@ self =>
t
}
- def selectors(t: Tree, typeOK: Boolean, dotOffset: Int): Tree =
+ def selectors(t: Tree, typeOK: Boolean, dotOffset: Offset): Tree =
if (typeOK && in.token == TYPE) {
in.nextToken()
- atPos(t.pos.startOrPoint, dotOffset) { SingletonTypeTree(t) }
+ atPos(t.pos.start, dotOffset) { SingletonTypeTree(t) }
}
else {
val t1 = selector(t)
@@ -1111,72 +1178,90 @@ self =>
* | null
* }}}
*/
- def literal(isNegated: Boolean = false, inPattern: Boolean = false, start: Int = in.offset): Tree = {
- atPos(start) {
- def finish(value: Any): Tree = {
- val t = Literal(Constant(value))
- in.nextToken()
- t
- }
- if (in.token == SYMBOLLIT)
- Apply(scalaDot(nme.Symbol), List(finish(in.strVal)))
- else if (in.token == INTERPOLATIONID)
- interpolatedString(inPattern = inPattern)
- else finish(in.token match {
- case CHARLIT => in.charVal
- case INTLIT => in.intVal(isNegated).toInt
- case LONGLIT => in.intVal(isNegated)
- case FLOATLIT => in.floatVal(isNegated).toFloat
- case DOUBLELIT => in.floatVal(isNegated)
- case STRINGLIT | STRINGPART => in.strVal.intern()
- case TRUE => true
- case FALSE => false
- case NULL => null
- case _ =>
- syntaxErrorOrIncomplete("illegal literal", true)
- null
+ def literal(isNegated: Boolean = false, inPattern: Boolean = false, start: Offset = in.offset): Tree = atPos(start) {
+ def finish(value: Any): Tree = try newLiteral(value) finally in.nextToken()
+ if (in.token == SYMBOLLIT)
+ Apply(scalaDot(nme.Symbol), List(finish(in.strVal)))
+ else if (in.token == INTERPOLATIONID)
+ interpolatedString(inPattern = inPattern)
+ else finish(in.token match {
+ case CHARLIT => in.charVal
+ case INTLIT => in.intVal(isNegated).toInt
+ case LONGLIT => in.intVal(isNegated)
+ case FLOATLIT => in.floatVal(isNegated).toFloat
+ case DOUBLELIT => in.floatVal(isNegated)
+ case STRINGLIT | STRINGPART => in.strVal.intern()
+ case TRUE => true
+ case FALSE => false
+ case NULL => null
+ case _ => syntaxErrorOrIncompleteAnd("illegal literal", skipIt = true)(null)
+ })
+ }
+
+ /** Handle placeholder syntax.
+ * If evaluating the tree produces placeholders, then make it a function.
+ */
+ private def withPlaceholders(tree: =>Tree, isAny: Boolean): Tree = {
+ val savedPlaceholderParams = placeholderParams
+ placeholderParams = List()
+ var res = tree
+ if (placeholderParams.nonEmpty && !isWildcard(res)) {
+ res = atPos(res.pos)(Function(placeholderParams.reverse, res))
+ if (isAny) placeholderParams foreach (_.tpt match {
+ case tpt @ TypeTree() => tpt setType definitions.AnyTpe
+ case _ => // some ascription
})
+ placeholderParams = List()
}
+ placeholderParams = placeholderParams ::: savedPlaceholderParams
+ res
}
- private def stringOp(t: Tree, op: TermName) = {
- val str = in.strVal
+ /** Consume a USCORE and create a fresh synthetic placeholder param. */
+ private def freshPlaceholder(): Tree = {
+ val start = in.offset
+ val pname = freshTermName()
in.nextToken()
- if (str.length == 0) t
- else atPos(t.pos.startOrPoint) {
- Apply(Select(t, op), List(Literal(Constant(str))))
- }
+ val id = atPos(start)(Ident(pname))
+ val param = atPos(id.pos.focus)(gen.mkSyntheticParam(pname.toTermName))
+ placeholderParams = param :: placeholderParams
+ id
}
- private def interpolatedString(inPattern: Boolean = false): Tree = atPos(in.offset) {
- val start = in.offset
- val interpolator = in.name
+ private def interpolatedString(inPattern: Boolean): Tree = {
+ def errpolation() = syntaxErrorOrIncompleteAnd("error in interpolated string: identifier or block expected",
+ skipIt = true)(EmptyTree)
+ // Like Swiss cheese, with holes
+ def stringCheese: Tree = atPos(in.offset) {
+ val start = in.offset
+ val interpolator = in.name.encoded // ident() for INTERPOLATIONID
- val partsBuf = new ListBuffer[Tree]
- val exprBuf = new ListBuffer[Tree]
- in.nextToken()
- while (in.token == STRINGPART) {
- partsBuf += literal()
- exprBuf += {
- if (inPattern) dropAnyBraces(pattern())
- else {
- if (in.token == IDENTIFIER) atPos(in.offset)(Ident(ident()))
- else if(in.token == LBRACE) expr()
- else if(in.token == THIS) { in.nextToken(); atPos(in.offset)(This(tpnme.EMPTY)) }
- else {
- syntaxErrorOrIncomplete("error in interpolated string: identifier or block expected", true)
- EmptyTree
+ val partsBuf = new ListBuffer[Tree]
+ val exprBuf = new ListBuffer[Tree]
+ in.nextToken()
+ while (in.token == STRINGPART) {
+ partsBuf += literal()
+ exprBuf += (
+ if (inPattern) dropAnyBraces(pattern())
+ else in.token match {
+ case IDENTIFIER => atPos(in.offset)(Ident(ident()))
+ //case USCORE => freshPlaceholder() // ifonly etapolation
+ case LBRACE => expr() // dropAnyBraces(expr0(Local))
+ case THIS => in.nextToken(); atPos(in.offset)(This(tpnme.EMPTY))
+ case _ => errpolation()
}
- }
+ )
}
- }
- if (in.token == STRINGLIT) partsBuf += literal()
+ if (in.token == STRINGLIT) partsBuf += literal()
- val t1 = atPos(o2p(start)) { Ident(nme.StringContext) }
- val t2 = atPos(start) { Apply(t1, partsBuf.toList) }
- t2 setPos t2.pos.makeTransparent
- val t3 = Select(t2, interpolator) setPos t2.pos
- atPos(start) { Apply(t3, exprBuf.toList) }
+ val t1 = atPos(o2p(start)) { Ident(nme.StringContext) }
+ val t2 = atPos(start) { Apply(t1, partsBuf.toList) }
+ t2 setPos t2.pos.makeTransparent
+ val t3 = Select(t2, interpolator) setPos t2.pos
+ atPos(start) { Apply(t3, exprBuf.toList) }
+ }
+ if (inPattern) stringCheese
+ else withPlaceholders(stringCheese, isAny = true) // strinterpolator params are Any* by definition
}
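The trees `t1` through `t3` above spell out the standard `StringContext` desugaring of an interpolated string; in source terms, approximately (an illustration, not compiler code):

    object InterpolationDemo {
      def main(args: Array[String]): Unit = {
        val name = "world"
        val sugared   = s"hello, $name!"
        val desugared = StringContext("hello, ", "!").s(name)   // parts first, then the spliced args
        println(sugared == desugared)   // true
      }
    }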
/* ------------- NEW LINES ------------------------------------------------- */
@@ -1190,12 +1275,12 @@ self =>
in.nextToken()
}
- def newLineOptWhenFollowedBy(token: Int) {
+ def newLineOptWhenFollowedBy(token: Offset) {
// note: next is defined here because current == NEWLINE
if (in.token == NEWLINE && in.next.token == token) newLineOpt()
}
- def newLineOptWhenFollowing(p: Int => Boolean) {
+ def newLineOptWhenFollowing(p: Token => Boolean) {
// note: next is defined here because current == NEWLINE
if (in.token == NEWLINE && p(in.next.token)) newLineOpt()
}
@@ -1210,7 +1295,7 @@ self =>
if (in.token == COLON) { in.nextToken(); typ() }
else TypeTree()
- def typeOrInfixType(location: Int): Tree =
+ def typeOrInfixType(location: Location): Tree =
if (location == Local) typ()
else startInfixType()
@@ -1221,7 +1306,7 @@ self =>
* WildcardType ::= `_' TypeBounds
* }}}
*/
- def wildcardType(start: Int) = {
+ def wildcardType(start: Offset) = {
val pname = freshTypeName("_$")
val t = atPos(start)(Ident(pname))
val bounds = typeBounds()
@@ -1232,15 +1317,6 @@ self =>
/* ----------- EXPRESSIONS ------------------------------------------------ */
- /** {{{
- * EqualsExpr ::= `=' Expr
- * }}}
- */
- def equalsExpr(): Tree = {
- accept(EQUALS)
- expr()
- }
-
def condExpr(): Tree = {
if (in.token == LPAREN) {
in.nextToken()
@@ -1249,14 +1325,14 @@ self =>
r
} else {
accept(LPAREN)
- Literal(Constant(true))
+ newLiteral(true)
}
}
/* hook for IDE, unlike expression can be stubbed
* don't use for any tree that can be inspected in the parser!
*/
- def statement(location: Int): Tree = expr(location) // !!! still needed?
+ def statement(location: Location): Tree = expr(location) // !!! still needed?
/** {{{
* Expr ::= (Bindings | [`implicit'] Id | `_') `=>' Expr
@@ -1283,27 +1359,16 @@ self =>
*/
def expr(): Tree = expr(Local)
- def expr(location: Int): Tree = {
- var savedPlaceholderParams = placeholderParams
- placeholderParams = List()
- var res = expr0(location)
- if (!placeholderParams.isEmpty && !isWildcard(res)) {
- res = atPos(res.pos){ Function(placeholderParams.reverse, res) }
- placeholderParams = List()
- }
- placeholderParams = placeholderParams ::: savedPlaceholderParams
- res
- }
+ def expr(location: Location): Tree = withPlaceholders(expr0(location), isAny = false)
-
- def expr0(location: Int): Tree = (in.token: @scala.annotation.switch) match {
+ def expr0(location: Location): Tree = (in.token: @scala.annotation.switch) match {
case IF =>
def parseIf = atPos(in.skipToken()) {
val cond = condExpr()
newLinesOpt()
val thenp = expr()
val elsep = if (in.token == ELSE) { in.nextToken(); expr() }
- else Literal(Constant())
+ else literalUnit
If(cond, thenp, elsep)
}
parseIf
@@ -1345,14 +1410,13 @@ self =>
parseWhile
case DO =>
def parseDo = {
- val start = in.offset
atPos(in.skipToken()) {
val lname: Name = freshTermName(nme.DO_WHILE_PREFIX)
val body = expr()
if (isStatSep) in.nextToken()
accept(WHILE)
val cond = condExpr()
- makeDoWhile(lname, body, cond)
+ makeDoWhile(lname.toTermName, body, cond)
}
}
parseDo
@@ -1365,9 +1429,9 @@ self =>
newLinesOpt()
if (in.token == YIELD) {
in.nextToken()
- makeForYield(enums, expr())
+ gen.mkFor(enums, gen.Yield(expr()))
} else {
- makeFor(enums, expr())
+ gen.mkFor(enums, expr())
}
}
def adjustStart(tree: Tree) =
@@ -1378,7 +1442,7 @@ self =>
case RETURN =>
def parseReturn =
atPos(in.skipToken()) {
- Return(if (isExprIntro) expr() else Literal(Constant()))
+ Return(if (isExprIntro) expr() else literalUnit)
}
parseReturn
case THROW =>
@@ -1395,7 +1459,7 @@ self =>
if (in.token == EQUALS) {
t match {
case Ident(_) | Select(_, _) | Apply(_, _) =>
- t = atPos(t.pos.startOrPoint, in.skipToken()) { makeAssign(t, expr()) }
+ t = atPos(t.pos.start, in.skipToken()) { gen.mkAssign(t, expr()) }
case _ =>
}
} else if (in.token == COLON) {
@@ -1406,16 +1470,16 @@ self =>
val uscorePos = in.skipToken()
if (isIdent && in.name == nme.STAR) {
in.nextToken()
- t = atPos(t.pos.startOrPoint, colonPos) {
+ t = atPos(t.pos.start, colonPos) {
Typed(t, atPos(uscorePos) { Ident(tpnme.WILDCARD_STAR) })
}
} else {
- syntaxErrorOrIncomplete("`*' expected", true)
+ syntaxErrorOrIncomplete("`*' expected", skipIt = true)
}
- } else if (in.token == AT) {
+ } else if (isAnnotation) {
t = (t /: annotations(skipNewLines = false))(makeAnnotated)
} else {
- t = atPos(t.pos.startOrPoint, colonPos) {
+ t = atPos(t.pos.start, colonPos) {
val tpt = typeOrInfixType(location)
if (isWildcard(t))
(placeholderParams: @unchecked) match {
@@ -1428,18 +1492,18 @@ self =>
}
}
} else if (in.token == MATCH) {
- t = atPos(t.pos.startOrPoint, in.skipToken())(Match(stripParens(t), inBracesOrNil(caseClauses())))
+ t = atPos(t.pos.start, in.skipToken())(Match(stripParens(t), inBracesOrNil(caseClauses())))
}
// in order to allow anonymous functions as statements (as opposed to expressions) inside
// templates, we have to disambiguate them from self type declarations - bug #1565
// The case still missed is unparenthesized single argument, like "x: Int => x + 1", which
// may be impossible to distinguish from a self-type and so remains an error. (See #1564)
def lhsIsTypedParamList() = t match {
- case Parens(xs) if xs forall (_.isInstanceOf[Typed]) => true
+ case Parens(xs) if xs.forall(isTypedParam) => true
case _ => false
}
if (in.token == ARROW && (location != InTemplate || lhsIsTypedParamList)) {
- t = atPos(t.pos.startOrPoint, in.skipToken()) {
+ t = atPos(t.pos.start, in.skipToken()) {
Function(convertToParams(t), if (location != InBlock) expr() else block())
}
}
@@ -1448,12 +1512,14 @@ self =>
parseOther
}
+ def isTypedParam(t: Tree) = t.isInstanceOf[Typed]
+
/** {{{
* Expr ::= implicit Id => Expr
* }}}
*/
- def implicitClosure(start: Int, location: Int): Tree = {
+ def implicitClosure(start: Offset, location: Location): Tree = {
val param0 = convertToParam {
atPos(in.offset) {
Ident(ident()) match {
@@ -1479,28 +1545,19 @@ self =>
def postfixExpr(): Tree = {
val start = in.offset
val base = opstack
- var top = prefixExpr()
- while (isIdent) {
- top = reduceStack(isExpr = true, base, top, precedence(in.name), leftAssoc = treeInfo.isLeftAssoc(in.name))
- val op = in.name
- opstack = OpInfo(top, op, in.offset) :: opstack
- ident()
+ def loop(top: Tree): Tree = if (!isIdent) top else {
+ pushOpInfo(reduceExprStack(base, top))
newLineOptWhenFollowing(isExprIntroToken)
- if (isExprIntro) {
- val next = prefixExpr()
- if (next == EmptyTree)
- return reduceStack(isExpr = true, base, top, 0, leftAssoc = true)
- top = next
- } else {
- // postfix expression
- val topinfo = opstack.head
- opstack = opstack.tail
- val od = stripParens(reduceStack(isExpr = true, base, topinfo.operand, 0, leftAssoc = true))
- return makePostfixSelect(start, topinfo.offset, od, topinfo.operator)
- }
+ if (isExprIntro)
+ prefixExpr() match {
+ case EmptyTree => reduceExprStack(base, top)
+ case next => loop(next)
+ }
+ else finishPostfixOp(start, base, popOpInfo())
}
- reduceStack(isExpr = true, base, top, 0, leftAssoc = true)
+
+ reduceExprStack(base, loop(prefixExpr()))
}
/** {{{
@@ -1510,7 +1567,7 @@ self =>
def prefixExpr(): Tree = {
if (isUnaryOp) {
atPos(in.offset) {
- val name = nme.toUnaryName(rawIdent())
+ val name = nme.toUnaryName(rawIdent().toTermName)
if (name == nme.UNARY_- && isNumericLit)
simpleExprRest(literal(isNegated = true), canApply = true)
else
@@ -1544,15 +1601,9 @@ self =>
case IDENTIFIER | BACKQUOTED_IDENT | THIS | SUPER =>
path(thisOK = true, typeOK = false)
case USCORE =>
- val start = in.offset
- val pname = freshName("x$")
- in.nextToken()
- val id = atPos(start) (Ident(pname))
- val param = atPos(id.pos.focus){ makeSyntheticParam(pname) }
- placeholderParams = param :: placeholderParams
- id
+ freshPlaceholder()
case LPAREN =>
- atPos(in.offset)(makeParens(commaSeparated(expr)))
+ atPos(in.offset)(makeParens(commaSeparated(expr())))
case LBRACE =>
canApply = false
blockExpr()
@@ -1561,12 +1612,11 @@ self =>
val nstart = in.skipToken()
val npos = r2p(nstart, nstart, in.lastOffset)
val tstart = in.offset
- val (parents, argss, self, stats) = template(isTrait = false)
+ val (parents, self, stats) = template()
val cpos = r2p(tstart, tstart, in.lastOffset max tstart)
- makeNew(parents, self, stats, argss, npos, cpos)
+ gen.mkNew(parents, self, stats, npos, cpos)
case _ =>
- syntaxErrorOrIncomplete("illegal start of simple expression", true)
- errorTermTree
+ syntaxErrorOrIncompleteAnd("illegal start of simple expression", skipIt = true)(errorTermTree)
}
simpleExprRest(t, canApply = canApply)
}
@@ -1583,14 +1633,14 @@ self =>
case Ident(_) | Select(_, _) | Apply(_, _) =>
var app: Tree = t1
while (in.token == LBRACKET)
- app = atPos(app.pos.startOrPoint, in.offset)(TypeApply(app, exprTypeArgs()))
+ app = atPos(app.pos.start, in.offset)(TypeApply(app, exprTypeArgs()))
simpleExprRest(app, canApply = true)
case _ =>
t1
}
case LPAREN | LBRACE if (canApply) =>
- val app = atPos(t.pos.startOrPoint, in.offset) {
+ val app = atPos(t.pos.start, in.offset) {
// look for anonymous function application like (f _)(x) and
// translate to (f _).apply(x), bug #460
val sel = t match {
@@ -1603,7 +1653,7 @@ self =>
}
simpleExprRest(app, canApply = true)
case USCORE =>
- atPos(t.pos.startOrPoint, in.skipToken()) {
+ atPos(t.pos.start, in.skipToken()) {
Typed(stripParens(t), Function(Nil, EmptyTree))
}
case _ =>
@@ -1617,14 +1667,9 @@ self =>
* }}}
*/
def argumentExprs(): List[Tree] = {
- def args(): List[Tree] = commaSeparated {
- val maybeNamed = isIdent
- expr() match {
- case a @ Assign(id, rhs) if maybeNamed =>
- atPos(a.pos) { AssignOrNamedArg(id, rhs) }
- case e => e
- }
- }
+ def args(): List[Tree] = commaSeparated(
+ if (isIdent) treeInfo.assignmentToMaybeNamedArg(expr()) else expr()
+ )
in.token match {
case LBRACE => List(blockExpr())
case LPAREN => inParens(if (in.token == RPAREN) Nil else args())
@@ -1655,13 +1700,16 @@ self =>
*/
def block(): Tree = makeBlock(blockStatSeq())
+ def caseClause(): CaseDef =
+ atPos(in.offset)(makeCaseDef(pattern(), guard(), caseBlock()))
+
/** {{{
* CaseClauses ::= CaseClause {CaseClause}
* CaseClause ::= case Pattern [Guard] `=>' Block
* }}}
*/
def caseClauses(): List[CaseDef] = {
- val cases = caseSeparated { atPos(in.offset)(makeCaseDef(pattern(), guard(), caseBlock())) }
+ val cases = caseSeparated { caseClause() }
if (cases.isEmpty) // trigger error if there are no cases
accept(CASE)
@@ -1687,22 +1735,25 @@ self =>
* | val Pattern1 `=' Expr
* }}}
*/
- def enumerators(): List[Enumerator] = {
- val enums = new ListBuffer[Enumerator]
- generator(enums, eqOK = false)
+ def enumerators(): List[Tree] = {
+ val enums = new ListBuffer[Tree]
+ enums ++= enumerator(isFirst = true)
while (isStatSep) {
in.nextToken()
- if (in.token == IF) enums += makeFilter(in.offset, guard())
- else generator(enums, eqOK = true)
+ enums ++= enumerator(isFirst = false)
}
enums.toList
}
+ def enumerator(isFirst: Boolean, allowNestedIf: Boolean = true): List[Tree] =
+ if (in.token == IF && !isFirst) makeFilter(in.offset, guard()) :: Nil
+ else generator(!isFirst, allowNestedIf)
+
/** {{{
* Generator ::= Pattern1 (`<-' | `=') Expr [Guard]
* }}}
*/
- def generator(enums: ListBuffer[Enumerator], eqOK: Boolean) {
+ def generator(eqOK: Boolean, allowNestedIf: Boolean = true): List[Tree] = {
val start = in.offset
val hasVal = in.token == VAL
if (hasVal)
@@ -1720,13 +1771,22 @@ self =>
if (hasEq && eqOK) in.nextToken()
else accept(LARROW)
val rhs = expr()
- enums += makeGenerator(r2p(start, point, in.lastOffset max start), pat, hasEq, rhs)
- // why max above? IDE stress tests have shown that lastOffset could be less than start,
+
+ def loop(): List[Tree] =
+ if (in.token != IF) Nil
+ else makeFilter(in.offset, guard()) :: loop()
+
+ val tail =
+ if (allowNestedIf) loop()
+ else Nil
+
+ // why max? IDE stress tests have shown that lastOffset could be less than start,
    // I guess this happens if instead of a for-expression we sit on a closing paren.
- while (in.token == IF) enums += makeFilter(in.offset, guard())
+ val genPos = r2p(start, point, in.lastOffset max start)
+ gen.mkGenerator(genPos, pat, hasEq, rhs) :: tail
}
- def makeFilter(start: Int, tree: Tree) = Filter(r2p(start, tree.pos.point, tree.pos.endOrPoint), tree)
+ def makeFilter(start: Offset, tree: Tree) = gen.Filter(tree).setPos(r2p(start, tree.pos.point, tree.pos.end))
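`gen.mkGenerator`, `gen.Filter` and `gen.mkFor` feed the ordinary for-comprehension translation; a rough source-level sketch of one generator followed by a guard (illustrative code, not part of the patch):

    object ForDesugarDemo {
      def main(args: Array[String]): Unit = {
        val xs = List(1, 2, 3, 4)
        val viaFor   = for (x <- xs if x % 2 == 0) yield x * 10          // generator + filter
        val viaCalls = xs.withFilter(x => x % 2 == 0).map(x => x * 10)   // approximate translation
        println(viaFor == viaCalls)   // true, both are List(20, 40)
      }
    }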
/* -------- PATTERNS ------------------------------------------- */
@@ -1749,10 +1809,12 @@ self =>
in.nextToken()
if (in.token == SUBTYPE || in.token == SUPERTYPE) wildcardType(start)
else atPos(start) { Bind(tpnme.WILDCARD, EmptyTree) }
- case IDENTIFIER if nme.isVariableName(in.name) =>
- atPos(start) { Bind(identForType(), EmptyTree) }
case _ =>
- typ()
+ typ() match {
+ case Ident(name: TypeName) if nme.isVariableName(name) =>
+ atPos(start) { Bind(name, EmptyTree) }
+ case t => t
+ }
}
}
@@ -1792,7 +1854,7 @@ self =>
def pattern1(): Tree = pattern2() match {
case p @ Ident(name) if in.token == COLON =>
if (treeInfo.isVarPattern(p))
- atPos(p.pos.startOrPoint, in.skipToken())(Typed(p, compoundType()))
+ atPos(p.pos.start, in.skipToken())(Typed(p, compoundType()))
else {
syntaxError(in.offset, "Pattern variables must start with a lower-case letter. (SLS 8.1.1.)")
p
@@ -1808,7 +1870,6 @@ self =>
* }}}
*/
def pattern2(): Tree = {
- val nameOffset = in.offset
val p = pattern3()
if (in.token != AT) p
@@ -1818,7 +1879,7 @@ self =>
pattern3()
case Ident(name) if treeInfo.isVarPattern(p) =>
in.nextToken()
- atPos(p.pos.startOrPoint) { Bind(name, pattern3()) }
+ atPos(p.pos.start) { Bind(name, pattern3()) }
case _ => p
}
}
@@ -1829,71 +1890,51 @@ self =>
* }}}
*/
def pattern3(): Tree = {
- var top = simplePattern(badPattern3)
- // after peekahead
- def acceptWildStar() = atPos(top.pos.startOrPoint, in.prev.offset)(Star(stripParens(top)))
- def peekahead() = {
- in.prev copyFrom in
- in.nextToken()
- }
- def pushback() = {
- in.next copyFrom in
- in copyFrom in.prev
- }
+ val top = simplePattern(badPattern3)
+ val base = opstack
// See SI-3189, SI-4832 for motivation. Cf SI-3480 for counter-motivation.
- // TODO: dredge out the remnants of regexp patterns.
- // /{/ peek for _*) or _*} (for xml escape)
- if (isSequenceOK) {
- top match {
- case Ident(nme.WILDCARD) if (isRawStar) =>
- peekahead()
- in.token match {
- case RBRACE if (isXML) => return acceptWildStar()
- case RPAREN if (!isXML) => return acceptWildStar()
- case _ => pushback()
- }
- case _ =>
- }
+ def isCloseDelim = in.token match {
+ case RBRACE => isXML
+ case RPAREN => !isXML
+ case _ => false
}
- val base = opstack
- while (isIdent && in.name != raw.BAR) {
- top = reduceStack(isExpr = false, base, top, precedence(in.name), leftAssoc = treeInfo.isLeftAssoc(in.name))
- val op = in.name
- opstack = OpInfo(top, op, in.offset) :: opstack
- ident()
- top = simplePattern(badPattern3)
+ def checkWildStar: Tree = top match {
+ case Ident(nme.WILDCARD) if isSequenceOK && isRawStar => peekingAhead (
+ if (isCloseDelim) atPos(top.pos.start, in.prev.offset)(Star(stripParens(top)))
+ else EmptyTree
+ )
+ case _ => EmptyTree
}
- stripParens(reduceStack(isExpr = false, base, top, 0, leftAssoc = true))
+ def loop(top: Tree): Tree = reducePatternStack(base, top) match {
+ case next if isIdentExcept(raw.BAR) => pushOpInfo(next) ; loop(simplePattern(badPattern3))
+ case next => next
+ }
+ checkWildStar orElse stripParens(loop(top))
}
+
def badPattern3(): Tree = {
- def isComma = in.token == COMMA
- def isAnyBrace = in.token == RPAREN || in.token == RBRACE
- val badStart = "illegal start of simple pattern"
+ def isComma = in.token == COMMA
+ def isDelimiter = in.token == RPAREN || in.token == RBRACE
+ def isCommaOrDelimiter = isComma || isDelimiter
+ val (isUnderscore, isStar) = opstack match {
+ case OpInfo(Ident(nme.WILDCARD), nme.STAR, _, _) :: _ => (true, true)
+ case OpInfo(_, nme.STAR, _, _) :: _ => (false, true)
+ case _ => (false, false)
+ }
+ def isSeqPatternClose = isUnderscore && isStar && isSequenceOK && isDelimiter
+ val preamble = "bad simple pattern:"
+ val subtext = (isUnderscore, isStar, isSequenceOK) match {
+ case (true, true, true) if isComma => "bad use of _* (a sequence pattern must be the last pattern)"
+ case (true, true, true) if isDelimiter => "bad brace or paren after _*"
+ case (true, true, false) if isDelimiter => "bad use of _* (sequence pattern not allowed)"
+ case (false, true, true) if isDelimiter => "use _* to match a sequence"
+ case (false, true, _) if isCommaOrDelimiter => "trailing * is not a valid pattern"
+ case _ => null
+ }
+ val msg = if (subtext != null) s"$preamble $subtext" else "illegal start of simple pattern"
    // better recovery if we don't skip the delimiters of patterns
- var skip = !(isComma || isAnyBrace)
- val msg = if (!opstack.isEmpty && opstack.head.operator == nme.STAR) {
- opstack.head.operand match {
- case Ident(nme.WILDCARD) =>
- if (isSequenceOK && isComma)
- "bad use of _* (a sequence pattern must be the last pattern)"
- else if (isSequenceOK && isAnyBrace) {
- skip = true // do skip bad paren; scanner may skip bad brace already
- "bad brace or paren after _*"
- } else if (!isSequenceOK && isAnyBrace)
- "bad use of _* (sequence pattern not allowed)"
- else badStart
- case _ =>
- if (isSequenceOK && isAnyBrace)
- "use _* to match a sequence"
- else if (isComma || isAnyBrace)
- "trailing * is not a valid pattern"
- else badStart
- }
- } else {
- badStart
- }
- syntaxErrorOrIncomplete(msg, skip)
- errorPatternTree
+ val skip = !isCommaOrDelimiter || isSeqPatternClose
+ syntaxErrorOrIncompleteAnd(msg, skip)(errorPatternTree)
}
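`checkWildStar` and `badPattern3` above police the `_*` sequence wildcard, which is only legal as the last pattern in a sequence position, hence the dedicated error messages. A short illustration (names are hypothetical):

    object SeqWildcardDemo {
      def describe(xs: Seq[Int]): String = xs match {
        case Seq(head, rest @ _*) => s"head=$head, ${rest.size} more"   // _* must come last
        case _                    => "empty"
      }
      def main(args: Array[String]): Unit =
        println(describe(Seq(1, 2, 3)))   // head=1, 2 more
    }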
/** {{{
@@ -1909,19 +1950,15 @@ self =>
*
* XXX: Hook for IDE
*/
- def simplePattern(): Tree = {
+ def simplePattern(): Tree = (
// simple diagnostics for this entry point
- def badStart(): Tree = {
- syntaxErrorOrIncomplete("illegal start of simple pattern", true)
- errorPatternTree
- }
- simplePattern(badStart)
- }
+ simplePattern(() => syntaxErrorOrIncompleteAnd("illegal start of simple pattern", skipIt = true)(errorPatternTree))
+ )
def simplePattern(onError: () => Tree): Tree = {
val start = in.offset
in.token match {
case IDENTIFIER | BACKQUOTED_IDENT | THIS =>
- var t = stableId()
+ val t = stableId()
in.token match {
case INTLIT | LONGLIT | FLOATLIT | DOUBLELIT =>
t match {
@@ -1983,7 +2020,6 @@ self =>
/** Default entry points into some pattern contexts. */
def pattern(): Tree = noSeq.pattern()
- def patterns(): List[Tree] = noSeq.patterns()
def seqPatterns(): List[Tree] = seqOK.patterns()
def xmlSeqPatterns(): List[Tree] = xmlSeqOK.patterns() // Called from xml parser
def argumentPatterns(): List[Tree] = inParens {
@@ -1997,16 +2033,16 @@ self =>
/** Drop `private` modifier when followed by a qualifier.
* Contract `abstract` and `override` to ABSOVERRIDE
*/
- private def normalize(mods: Modifiers): Modifiers =
+ private def normalizeModifers(mods: Modifiers): Modifiers =
if (mods.isPrivate && mods.hasAccessBoundary)
- normalize(mods &~ Flags.PRIVATE)
+ normalizeModifers(mods &~ Flags.PRIVATE)
else if (mods hasAllFlags (Flags.ABSTRACT | Flags.OVERRIDE))
- normalize(mods &~ (Flags.ABSTRACT | Flags.OVERRIDE) | Flags.ABSOVERRIDE)
+ normalizeModifers(mods &~ (Flags.ABSTRACT | Flags.OVERRIDE) | Flags.ABSOVERRIDE)
else
mods
private def addMod(mods: Modifiers, mod: Long, pos: Position): Modifiers = {
- if (mods hasFlag mod) syntaxError(in.offset, "repeated modifier", false)
+ if (mods hasFlag mod) syntaxError(in.offset, "repeated modifier", skipIt = false)
in.nextToken()
(mods | mod) withPosition (mod, pos)
}
@@ -2023,7 +2059,7 @@ self =>
if (in.token == LBRACKET) {
in.nextToken()
if (mods.hasAccessBoundary)
- syntaxError("duplicate private/protected qualifier", false)
+ syntaxError("duplicate private/protected qualifier", skipIt = false)
result = if (in.token == THIS) { in.nextToken(); mods | Flags.LOCAL }
else Modifiers(mods.flags, identForType())
accept(RBRACKET)
@@ -2046,7 +2082,7 @@ self =>
* AccessModifier ::= (private | protected) [AccessQualifier]
* }}}
*/
- def accessModifierOpt(): Modifiers = normalize {
+ def accessModifierOpt(): Modifiers = normalizeModifers {
in.token match {
case m @ (PRIVATE | PROTECTED) => in.nextToken() ; accessQualifierOpt(Modifiers(flagTokens(m)))
case _ => NoMods
@@ -2060,7 +2096,7 @@ self =>
* | override
* }}}
*/
- def modifiers(): Modifiers = normalize {
+ def modifiers(): Modifiers = normalizeModifers {
def loop(mods: Modifiers): Modifiers = in.token match {
case PRIVATE | PROTECTED =>
loop(accessQualifierOpt(addMod(mods, flagTokens(in.token), tokenRange(in))))
@@ -2105,7 +2141,7 @@ self =>
def annotationExpr(): Tree = atPos(in.offset) {
val t = exprSimpleType()
if (in.token == LPAREN) New(t, multipleArgumentExprs())
- else New(t, ListOfNil)
+ else New(t, Nil)
}
/* -------- PARAMETERS ------------------------------------------- */
@@ -2124,56 +2160,6 @@ self =>
def paramClauses(owner: Name, contextBounds: List[Tree], ofCaseClass: Boolean): List[List[ValDef]] = {
var implicitmod = 0
var caseParam = ofCaseClass
- def param(): ValDef = {
- val start = in.offset
- val annots = annotations(skipNewLines = false)
- var mods = Modifiers(Flags.PARAM)
- if (owner.isTypeName) {
- mods = modifiers() | Flags.PARAMACCESSOR
- if (mods.isLazy) syntaxError("lazy modifier not allowed here. Use call-by-name parameters instead", false)
- in.token match {
- case v @ (VAL | VAR) =>
- mods = mods withPosition (in.token, tokenRange(in))
- if (v == VAR) mods |= Flags.MUTABLE
- in.nextToken()
- case _ =>
- if (mods.flags != Flags.PARAMACCESSOR) accept(VAL)
- if (!caseParam) mods |= Flags.PrivateLocal
- }
- if (caseParam) mods |= Flags.CASEACCESSOR
- }
- val nameOffset = in.offset
- val name = ident()
- var bynamemod = 0
- val tpt =
- if (settings.YmethodInfer.value && !owner.isTypeName && in.token != COLON) {
- TypeTree()
- } else { // XX-METHOD-INFER
- accept(COLON)
- if (in.token == ARROW) {
- if (owner.isTypeName && !mods.hasLocalFlag)
- syntaxError(
- in.offset,
- (if (mods.isMutable) "`var'" else "`val'") +
- " parameters may not be call-by-name", false)
- else if (implicitmod != 0)
- syntaxError(
- in.offset,
- "implicit parameters may not be call-by-name", false)
- else bynamemod = Flags.BYNAMEPARAM
- }
- paramType()
- }
- val default =
- if (in.token == EQUALS) {
- in.nextToken()
- mods |= Flags.DEFAULTPARAM
- expr()
- } else EmptyTree
- atPos(start, if (name == nme.ERROR) start else nameOffset) {
- ValDef((mods | implicitmod | bynamemod) withAnnotations annots, name, tpt, default)
- }
- }
def paramClause(): List[ValDef] = {
if (in.token == RPAREN)
return Nil
@@ -2182,14 +2168,14 @@ self =>
in.nextToken()
implicitmod = Flags.IMPLICIT
}
- commaSeparated(param())
+ commaSeparated(param(owner, implicitmod, caseParam ))
}
val vds = new ListBuffer[List[ValDef]]
val start = in.offset
newLineOptWhenFollowedBy(LPAREN)
if (ofCaseClass && in.token != LPAREN)
- deprecationWarning(in.lastOffset, "case classes without a parameter list have been deprecated;\n"+
- "use either case objects or case classes with `()' as parameter list.")
+ syntaxError(in.lastOffset, "case classes without a parameter list are not allowed;\n"+
+ "use either case objects or case classes with an explicit `()' as a parameter list.")
while (implicitmod == 0 && in.token == LPAREN) {
in.nextToken()
vds += paramClause()
@@ -2200,9 +2186,9 @@ self =>
val result = vds.toList
if (owner == nme.CONSTRUCTOR && (result.isEmpty || (result.head take 1 exists (_.mods.isImplicit)))) {
in.token match {
- case LBRACKET => syntaxError(in.offset, "no type parameters allowed here", false)
+ case LBRACKET => syntaxError(in.offset, "no type parameters allowed here", skipIt = false)
case EOF => incompleteInputError("auxiliary constructor needs non-implicit parameter list")
- case _ => syntaxError(start, "auxiliary constructor needs non-implicit parameter list", false)
+ case _ => syntaxError(start, "auxiliary constructor needs non-implicit parameter list", skipIt = false)
}
}
addEvidenceParams(owner, result, contextBounds)
@@ -2224,12 +2210,63 @@ self =>
if (isRawStar) {
in.nextToken()
if (useStartAsPosition) atPos(start)(repeatedApplication(t))
- else atPos(t.pos.startOrPoint, t.pos.point)(repeatedApplication(t))
+ else atPos(t.pos.start, t.pos.point)(repeatedApplication(t))
}
else t
}
}
+ def param(owner: Name, implicitmod: Int, caseParam: Boolean): ValDef = {
+ val start = in.offset
+ val annots = annotations(skipNewLines = false)
+ var mods = Modifiers(Flags.PARAM)
+ if (owner.isTypeName) {
+ mods = modifiers() | Flags.PARAMACCESSOR
+ if (mods.isLazy) syntaxError("lazy modifier not allowed here. Use call-by-name parameters instead", skipIt = false)
+ in.token match {
+ case v @ (VAL | VAR) =>
+ mods = mods withPosition (in.token.toLong, tokenRange(in))
+ if (v == VAR) mods |= Flags.MUTABLE
+ in.nextToken()
+ case _ =>
+ if (mods.flags != Flags.PARAMACCESSOR) accept(VAL)
+ if (!caseParam) mods |= Flags.PrivateLocal
+ }
+ if (caseParam) mods |= Flags.CASEACCESSOR
+ }
+ val nameOffset = in.offset
+ val name = ident()
+ var bynamemod = 0
+ val tpt =
+ if ((settings.YmethodInfer && !owner.isTypeName) && in.token != COLON) {
+ TypeTree()
+ } else { // XX-METHOD-INFER
+ accept(COLON)
+ if (in.token == ARROW) {
+ if (owner.isTypeName && !mods.isLocalToThis)
+ syntaxError(
+ in.offset,
+ (if (mods.isMutable) "`var'" else "`val'") +
+ " parameters may not be call-by-name", skipIt = false)
+ else if (implicitmod != 0)
+ syntaxError(
+ in.offset,
+ "implicit parameters may not be call-by-name", skipIt = false)
+ else bynamemod = Flags.BYNAMEPARAM
+ }
+ paramType()
+ }
+ val default =
+ if (in.token == EQUALS) {
+ in.nextToken()
+ mods |= Flags.DEFAULTPARAM
+ expr()
+ } else EmptyTree
+ atPos(start, if (name == nme.ERROR) start else nameOffset) {
+ ValDef((mods | implicitmod.toLong | bynamemod) withAnnotations annots, name.toTermName, tpt, default)
+ }
+ }
+
/** {{{
* TypeParamClauseOpt ::= [TypeParamClause]
* TypeParamClause ::= `[' VariantTypeParam {`,' VariantTypeParam} `]']
@@ -2253,7 +2290,7 @@ self =>
}
}
val nameOffset = in.offset
- // TODO AM: freshName(o2p(in.skipToken()), "_$$"), will need to update test suite
+ // TODO AM: freshTermName(o2p(in.skipToken()), "_$$"), will need to update test suite
val pname: TypeName = wildcardOrIdent().toTypeName
val param = atPos(start, nameOffset) {
val tparams = typeParamClauseOpt(pname, null) // @M TODO null --> no higher-order context bounds for now
@@ -2261,9 +2298,10 @@ self =>
}
if (contextBoundBuf ne null) {
while (in.token == VIEWBOUND) {
- contextBoundBuf += atPos(in.skipToken()) {
- makeFunctionTypeTree(List(Ident(pname)), typ())
- }
+ val msg = "Use an implicit parameter instead.\nExample: Instead of `def f[A <% Int](a: A)` use `def f[A](a: A)(implicit ev: A => Int)`."
+ if (settings.future)
+ deprecationWarning(in.offset, s"View bounds are deprecated. $msg")
+ contextBoundBuf += atPos(in.skipToken())(makeFunctionTypeTree(List(Ident(pname)), typ()))
}
while (in.token == COLON) {
contextBoundBuf += atPos(in.skipToken()) {
@@ -2283,16 +2321,18 @@ self =>
* }}}
*/
def typeBounds(): TypeBoundsTree = {
- val t = TypeBoundsTree(
- bound(SUPERTYPE, tpnme.Nothing),
- bound(SUBTYPE, tpnme.Any)
- )
- t setPos wrappingPos(List(t.hi, t.lo))
+ val lo = bound(SUPERTYPE)
+ val hi = bound(SUBTYPE)
+ val t = TypeBoundsTree(lo, hi)
+ val defined = List(t.hi, t.lo) filter (_.pos.isDefined)
+
+ if (defined.nonEmpty)
+ t setPos wrappingPos(defined)
+ else
+ t setPos o2p(in.offset)
}
- def bound(tok: Int, default: TypeName): Tree =
- if (in.token == tok) { in.nextToken(); typ() }
- else atPos(o2p(in.lastOffset)) { rootScalaDot(default) }
+ def bound(tok: Token): Tree = if (in.token == tok) { in.nextToken(); typ() } else EmptyTree
/* -------- DEFS ------------------------------------------- */
@@ -2326,8 +2366,8 @@ self =>
accept(DOT)
result
}
- /** Walks down import `foo.bar.baz.{ ... }` until it ends at a
- * an underscore, a left brace, or an undotted identifier.
+    /* Walks down import `foo.bar.baz.{ ... }` until it ends at
+     * an underscore, a left brace, or an undotted identifier.
*/
def loop(expr: Tree): Tree = {
expr setPos expr.pos.makeTransparent
@@ -2411,9 +2451,9 @@ self =>
* | type [nl] TypeDcl
* }}}
*/
- def defOrDcl(pos: Int, mods: Modifiers): List[Tree] = {
+ def defOrDcl(pos: Offset, mods: Modifiers): List[Tree] = {
if (mods.isLazy && in.token != VAL)
- syntaxError("lazy not allowed here. Only vals can be lazy", false)
+ syntaxError("lazy not allowed here. Only vals can be lazy", skipIt = false)
in.token match {
case VAL =>
patDefOrDcl(pos, mods withPosition(VAL, tokenRange(in)))
@@ -2462,17 +2502,15 @@ self =>
EmptyTree
}
def mkDefs(p: Tree, tp: Tree, rhs: Tree): List[Tree] = {
- //Console.println("DEBUG: p = "+p.toString()); // DEBUG
- val trees =
- makePatDef(newmods,
- if (tp.isEmpty) p
- else Typed(p, tp) setPos (p.pos union tp.pos),
- rhs)
+ val trees = {
+ val pat = if (tp.isEmpty) p else Typed(p, tp) setPos (p.pos union tp.pos)
+ makePatDef(newmods, pat, rhs)
+ }
if (newmods.isDeferred) {
trees match {
case List(ValDef(_, _, _, EmptyTree)) =>
- if (mods.isLazy) syntaxError(p.pos, "lazy values may not be abstract", false)
- case _ => syntaxError(p.pos, "pattern definition may not be abstract", false)
+ if (mods.isLazy) syntaxError(p.pos, "lazy values may not be abstract", skipIt = false)
+ case _ => syntaxError(p.pos, "pattern definition may not be abstract", skipIt = false)
}
}
trees
@@ -2522,7 +2560,7 @@ self =>
* }}}
*/
def funDefOrDcl(start : Int, mods: Modifiers): Tree = {
- in.nextToken
+ in.nextToken()
if (in.token == THIS) {
atPos(start, in.skipToken()) {
val vparamss = paramClauses(nme.CONSTRUCTOR, classContextBounds map (_.duplicate), ofCaseClass = false)
@@ -2536,12 +2574,12 @@ self =>
}
else {
val nameOffset = in.offset
- val name = ident()
+ val name = identOrMacro()
funDefRest(start, nameOffset, mods, name)
}
}
- def funDefRest(start: Int, nameOffset: Int, mods: Modifiers, name: Name): Tree = {
+ def funDefRest(start: Offset, nameOffset: Offset, mods: Modifiers, name: Name): Tree = {
val result = atPos(start, if (name.toTermName == nme.ERROR) start else nameOffset) {
var newmods = mods
// contextBoundBuf is for context bounded type parameters of the form
@@ -2554,16 +2592,22 @@ self =>
var restype = fromWithinReturnType(typedOpt())
val rhs =
if (isStatSep || in.token == RBRACE) {
- if (restype.isEmpty) restype = scalaUnitConstr
+ if (restype.isEmpty) {
+ if (settings.future)
+ deprecationWarning(in.lastOffset, s"Procedure syntax is deprecated. Convert procedure `$name` to method by adding `: Unit`.")
+ restype = scalaUnitConstr
+ }
newmods |= Flags.DEFERRED
EmptyTree
} else if (restype.isEmpty && in.token == LBRACE) {
+ if (settings.future)
+ deprecationWarning(in.offset, s"Procedure syntax is deprecated. Convert procedure `$name` to method by adding `: Unit =`.")
restype = scalaUnitConstr
blockExpr()
} else {
if (in.token == EQUALS) {
in.nextTokenAllow(nme.MACROkw)
- if (in.token == IDENTIFIER && in.name == nme.MACROkw) {
+ if (isMacro) {
in.nextToken()
newmods |= Flags.MACRO
}
@@ -2572,7 +2616,7 @@ self =>
}
expr()
}
- DefDef(newmods, name, tparams, vparamss, restype, rhs)
+ DefDef(newmods, name.toTermName, tparams, vparamss, restype, rhs)
}
signalParseProgress(result.pos)
result
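The two deprecation warnings added above target procedure syntax, that is, a `def` with no result type and no `=`. A hedged before/after sketch of what the messages ask for (illustrative members only):

    trait ProcedureSyntaxDemo {
      // Deprecated procedure forms the warnings above refer to:
      //   def log(msg: String) { println(msg) }   // concrete body, no `=`
      //   def stop()                              // abstract, no `: Unit`
      // Explicit equivalents:
      def log(msg: String): Unit = println(msg)
      def stop(): Unit
    }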
@@ -2585,7 +2629,7 @@ self =>
*/
def constrExpr(vparamss: List[List[ValDef]]): Tree =
if (in.token == LBRACE) constrBlock(vparamss)
- else Block(List(selfInvocation(vparamss)), Literal(Constant()))
+ else Block(selfInvocation(vparamss) :: Nil, literalUnit)
/** {{{
* SelfInvocation ::= this ArgumentExprs {ArgumentExprs}
@@ -2615,7 +2659,7 @@ self =>
else Nil
}
accept(RBRACE)
- Block(stats, Literal(Constant()))
+ Block(stats, literalUnit)
}
/** {{{
@@ -2624,11 +2668,10 @@ self =>
* TypeDcl ::= type Id [TypeParamClause] TypeBounds
* }}}
*/
- def typeDefOrDcl(start: Int, mods: Modifiers): Tree = {
+ def typeDefOrDcl(start: Offset, mods: Modifiers): Tree = {
in.nextToken()
newLinesOpt()
atPos(start, in.offset) {
- val nameOffset = in.offset
val name = identForType()
// @M! a type alias as well as an abstract type may declare type parameters
val tparams = typeParamClauseOpt(name, null)
@@ -2636,11 +2679,10 @@ self =>
case EQUALS =>
in.nextToken()
TypeDef(mods, name, tparams, typ())
- case SUPERTYPE | SUBTYPE | SEMI | NEWLINE | NEWLINES | COMMA | RBRACE =>
+ case t if t == SUPERTYPE || t == SUBTYPE || t == COMMA || t == RBRACE || isStatSep(t) =>
TypeDef(mods | Flags.DEFERRED, name, tparams, typeBounds())
case _ =>
- syntaxErrorOrIncomplete("`=', `>:', or `<:' expected", true)
- EmptyTree
+ syntaxErrorOrIncompleteAnd("`=', `>:', or `<:' expected", skipIt = true)(EmptyTree)
}
}
}
@@ -2659,8 +2701,8 @@ self =>
* | [override] trait TraitDef
* }}}
*/
- def tmplDef(pos: Int, mods: Modifiers): Tree = {
- if (mods.isLazy) syntaxError("classes cannot be lazy", false)
+ def tmplDef(pos: Offset, mods: Modifiers): Tree = {
+ if (mods.isLazy) syntaxError("classes cannot be lazy", skipIt = false)
in.token match {
case TRAIT =>
classDef(pos, (mods | Flags.TRAIT | Flags.ABSTRACT) withPosition (Flags.TRAIT, tokenRange(in)))
@@ -2673,8 +2715,7 @@ self =>
case CASEOBJECT =>
objectDef(pos, (mods | Flags.CASE) withPosition (Flags.CASE, tokenRange(in.prev /*scanner skips on 'case' to 'object', thus take prev*/)))
case _ =>
- syntaxErrorOrIncomplete("expected start of definition", true)
- EmptyTree
+ syntaxErrorOrIncompleteAnd("expected start of definition", skipIt = true)(EmptyTree)
}
}
@@ -2684,8 +2725,8 @@ self =>
* TraitDef ::= Id [TypeParamClause] RequiresTypeOpt TraitTemplateOpt
* }}}
*/
- def classDef(start: Int, mods: Modifiers): ClassDef = {
- in.nextToken
+ def classDef(start: Offset, mods: Modifiers): ClassDef = {
+ in.nextToken()
val nameOffset = in.offset
val name = identForType()
atPos(start, if (name == tpnme.ERROR) start else nameOffset) {
@@ -2693,12 +2734,13 @@ self =>
val contextBoundBuf = new ListBuffer[Tree]
val tparams = typeParamClauseOpt(name, contextBoundBuf)
classContextBounds = contextBoundBuf.toList
- val tstart = (in.offset :: classContextBounds.map(_.pos.startOrPoint)).min
+ val tstart = (in.offset :: classContextBounds.map(_.pos.start)).min
if (!classContextBounds.isEmpty && mods.isTrait) {
- syntaxError("traits cannot have type parameters with context bounds `: ...' nor view bounds `<% ...'", false)
+ val viewBoundsExist = if (settings.future) "" else " nor view bounds `<% ...'"
+ syntaxError(s"traits cannot have type parameters with context bounds `: ...'$viewBoundsExist", skipIt = false)
classContextBounds = List()
}
- val constrAnnots = constructorAnnotations()
+ val constrAnnots = if (!mods.isTrait) constructorAnnotations() else Nil
val (constrMods, vparamss) =
if (mods.isTrait) (Modifiers(Flags.TRAIT), List())
else (accessModifierOpt(), paramClauses(name, classContextBounds, ofCaseClass = mods.isCase))
@@ -2706,11 +2748,10 @@ self =>
if (mods.isTrait) {
if (settings.YvirtClasses && in.token == SUBTYPE) mods1 |= Flags.DEFERRED
} else if (in.token == SUBTYPE) {
- syntaxError("classes are not allowed to be virtual", false)
+ syntaxError("classes are not allowed to be virtual", skipIt = false)
}
val template = templateOpt(mods1, name, constrMods withAnnotations constrAnnots, vparamss, tstart)
- if (isInterface(mods1, template.body)) mods1 |= Flags.INTERFACE
- val result = ClassDef(mods1, name, tparams, template)
+ val result = gen.mkClassDef(mods1, name, tparams, template)
// Context bounds generate implicit parameters (part of the template) with types
// from tparams: we need to ensure these don't overlap
if (!classContextBounds.isEmpty)
@@ -2724,37 +2765,69 @@ self =>
* ObjectDef ::= Id ClassTemplateOpt
* }}}
*/
- def objectDef(start: Int, mods: Modifiers): ModuleDef = {
- in.nextToken
+ def objectDef(start: Offset, mods: Modifiers): ModuleDef = {
+ in.nextToken()
val nameOffset = in.offset
val name = ident()
val tstart = in.offset
atPos(start, if (name == nme.ERROR) start else nameOffset) {
val mods1 = if (in.token == SUBTYPE) mods | Flags.DEFERRED else mods
val template = templateOpt(mods1, name, NoMods, Nil, tstart)
- ModuleDef(mods1, name, template)
+ ModuleDef(mods1, name.toTermName, template)
}
}
+ /** Create a tree representing a package object, converting
+ * {{{
+ * package object foo { ... }
+ * }}}
+ * to
+ * {{{
+ * package foo {
+ * object `package` { ... }
+ * }
+ * }}}
+ */
+ def packageObjectDef(start: Offset): PackageDef = {
+ val defn = objectDef(in.offset, NoMods)
+ val pidPos = o2p(defn.pos.startOrPoint)
+ val pkgPos = r2p(start, pidPos.point)
+ gen.mkPackageObject(defn, pidPos, pkgPos)
+ }
+ def packageOrPackageObject(start: Offset): Tree = (
+ if (in.token == OBJECT)
+ joinComment(packageObjectDef(start) :: Nil).head
+ else {
+ in.flushDoc
+ makePackaging(start, pkgQualId(), inBracesOrNil(topStatSeq()))
+ }
+ )
+ // TODO - eliminate this and use "def packageObjectDef" (see call site of this
+ // method for small elaboration.)
+ def makePackageObject(start: Offset, objDef: ModuleDef): PackageDef = objDef match {
+ case ModuleDef(mods, name, impl) =>
+ makePackaging(
+ start, atPos(o2p(objDef.pos.start)){ Ident(name) }, List(ModuleDef(mods, nme.PACKAGEkw, impl)))
+ }
+
/** {{{
* ClassParents ::= AnnotType {`(' [Exprs] `)'} {with AnnotType}
* TraitParents ::= AnnotType {with AnnotType}
* }}}
*/
- def templateParents(isTrait: Boolean): (List[Tree], List[List[Tree]]) = {
- val parents = new ListBuffer[Tree] += startAnnotType()
- val argss = (
- // TODO: the insertion of ListOfNil here is where "new Foo" becomes
- // indistinguishable from "new Foo()".
- if (in.token == LPAREN && !isTrait) multipleArgumentExprs()
- else ListOfNil
- )
-
- while (in.token == WITH) {
- in.nextToken()
- parents += startAnnotType()
+ def templateParents(): List[Tree] = {
+ val parents = new ListBuffer[Tree]
+ def readAppliedParent() = {
+ val start = in.offset
+ val parent = startAnnotType()
+ parents += (in.token match {
+ case LPAREN => atPos(start)((parent /: multipleArgumentExprs())(Apply.apply))
+ case _ => parent
+ })
}
- (parents.toList, argss)
+ readAppliedParent()
+ while (in.token == WITH) { in.nextToken(); readAppliedParent() }
+ parents.toList
}
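In `readAppliedParent`, `(parent /: multipleArgumentExprs())(Apply.apply)` folds every argument list into one nested `Apply`, so `new C(a)(b)` keeps both clauses. The same fold shape over plain strings, just to show the nesting order (an illustration, not compiler code):

    object ApplyFoldDemo {
      def main(args: Array[String]): Unit = {
        val argss = List(List("a"), List("b"))
        // ("C" /: argss)(op) is argss.foldLeft("C")(op): one application per clause.
        val applied = ("C" /: argss)((fn, as) => s"$fn(${as.mkString(", ")})")
        println(applied)   // C(a)(b)
      }
    }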
/** {{{
@@ -2764,79 +2837,75 @@ self =>
* EarlyDef ::= Annotations Modifiers PatDef
* }}}
*/
- def template(isTrait: Boolean): (List[Tree], List[List[Tree]], ValDef, List[Tree]) = {
+ def template(): (List[Tree], ValDef, List[Tree]) = {
newLineOptWhenFollowedBy(LBRACE)
if (in.token == LBRACE) {
// @S: pre template body cannot stub like post body can!
val (self, body) = templateBody(isPre = true)
- if (in.token == WITH && self.isEmpty) {
- val earlyDefs: List[Tree] = body flatMap {
- case vdef @ ValDef(mods, _, _, _) if !mods.isDeferred =>
- List(copyValDef(vdef)(mods = mods | Flags.PRESUPER))
- case tdef @ TypeDef(mods, name, tparams, rhs) =>
- List(treeCopy.TypeDef(tdef, mods | Flags.PRESUPER, name, tparams, rhs))
- case docdef @ DocDef(comm, rhs) =>
- List(treeCopy.DocDef(docdef, comm, rhs))
- case stat if !stat.isEmpty =>
- syntaxError(stat.pos, "only type definitions and concrete field definitions allowed in early object initialization section", false)
- List()
- case _ => List()
- }
+ if (in.token == WITH && (self eq noSelfType)) {
+ val earlyDefs: List[Tree] = body.map(ensureEarlyDef).filter(_.nonEmpty)
in.nextToken()
- val (parents, argss) = templateParents(isTrait = isTrait)
- val (self1, body1) = templateBodyOpt(traitParentSeen = isTrait)
- (parents, argss, self1, earlyDefs ::: body1)
+ val parents = templateParents()
+ val (self1, body1) = templateBodyOpt(parenMeansSyntaxError = false)
+ (parents, self1, earlyDefs ::: body1)
} else {
- (List(), ListOfNil, self, body)
+ (List(), self, body)
}
} else {
- val (parents, argss) = templateParents(isTrait = isTrait)
- val (self, body) = templateBodyOpt(traitParentSeen = isTrait)
- (parents, argss, self, body)
- }
+ val parents = templateParents()
+ val (self, body) = templateBodyOpt(parenMeansSyntaxError = false)
+ (parents, self, body)
+ }
+ }
+
+ def ensureEarlyDef(tree: Tree): Tree = tree match {
+ case vdef @ ValDef(mods, _, _, _) if !mods.isDeferred =>
+ copyValDef(vdef)(mods = mods | Flags.PRESUPER)
+ case tdef @ TypeDef(mods, name, tparams, rhs) =>
+ deprecationWarning(tdef.pos.point, "early type members are deprecated. Move them to the regular body: the semantics are the same.")
+ treeCopy.TypeDef(tdef, mods | Flags.PRESUPER, name, tparams, rhs)
+ case docdef @ DocDef(comm, rhs) =>
+ treeCopy.DocDef(docdef, comm, rhs)
+ case stat if !stat.isEmpty =>
+ syntaxError(stat.pos, "only concrete field definitions allowed in early object initialization section", skipIt = false)
+ EmptyTree
+ case _ =>
+ EmptyTree
}
- def isInterface(mods: Modifiers, body: List[Tree]): Boolean =
- mods.isTrait && (body forall treeInfo.isInterfaceMember)
-
/** {{{
* ClassTemplateOpt ::= `extends' ClassTemplate | [[`extends'] TemplateBody]
* TraitTemplateOpt ::= TraitExtends TraitTemplate | [[`extends'] TemplateBody] | `<:' TemplateBody
* TraitExtends ::= `extends' | `<:'
* }}}
*/
- def templateOpt(mods: Modifiers, name: Name, constrMods: Modifiers, vparamss: List[List[ValDef]], tstart: Int): Template = {
- val (parents0, argss, self, body) = (
+ def templateOpt(mods: Modifiers, name: Name, constrMods: Modifiers, vparamss: List[List[ValDef]], tstart: Offset): Template = {
+ val (parents, self, body) = (
if (in.token == EXTENDS || in.token == SUBTYPE && mods.isTrait) {
in.nextToken()
- template(isTrait = mods.isTrait)
+ template()
}
else {
newLineOptWhenFollowedBy(LBRACE)
- val (self, body) = templateBodyOpt(traitParentSeen = false)
- (List(), ListOfNil, self, body)
+ val (self, body) = templateBodyOpt(parenMeansSyntaxError = mods.isTrait || name.isTermName)
+ (List(), self, body)
}
)
- def anyrefParents() = {
- val caseParents = if (mods.isCase) List(productConstr, serializableConstr) else Nil
- parents0 ::: caseParents match {
- case Nil => List(atPos(o2p(in.offset))(scalaAnyRefConstr))
- case ps => ps
- }
- }
def anyvalConstructor() = (
// Not a well-formed constructor, has to be finished later - see note
// regarding AnyVal constructor in AddInterfaces.
- DefDef(NoMods, nme.CONSTRUCTOR, Nil, ListOfNil, TypeTree(), Block(Nil, Literal(Constant())))
+ DefDef(NoMods, nme.CONSTRUCTOR, Nil, ListOfNil, TypeTree(), Block(Nil, literalUnit))
)
- val tstart0 = if (body.isEmpty && in.lastOffset < tstart) in.lastOffset else tstart
+ val parentPos = o2p(in.offset)
+ val tstart1 = if (body.isEmpty && in.lastOffset < tstart) in.lastOffset else tstart
- atPos(tstart0) {
+ atPos(tstart1) {
// Exclude only the 9 primitives plus AnyVal.
if (inScalaRootPackage && ScalaValueClassNames.contains(name))
- Template(parents0, self, anyvalConstructor :: body)
+ Template(parents, self, anyvalConstructor :: body)
else
- Template(anyrefParents, self, constrMods, vparamss, argss, body, o2p(tstart))
+ gen.mkTemplate(gen.mkParents(mods, parents, parentPos),
+ self, constrMods, vparamss, body, o2p(tstart))
}
}
@@ -2851,15 +2920,16 @@ self =>
case (self, Nil) => (self, EmptyTree.asList)
case result => result
}
- def templateBodyOpt(traitParentSeen: Boolean): (ValDef, List[Tree]) = {
+ def templateBodyOpt(parenMeansSyntaxError: Boolean): (ValDef, List[Tree]) = {
newLineOptWhenFollowedBy(LBRACE)
if (in.token == LBRACE) {
templateBody(isPre = false)
} else {
- if (in.token == LPAREN)
- syntaxError((if (traitParentSeen) "parents of traits" else "traits or objects")+
- " may not have parameters", true)
- (emptyValDef, List())
+ if (in.token == LPAREN) {
+ if (parenMeansSyntaxError) syntaxError(s"traits or objects may not have parameters", skipIt = true)
+ else abort("unexpected opening parenthesis")
+ }
+ (noSelfType, List())
}
}
@@ -2872,45 +2942,24 @@ self =>
/* -------- STATSEQS ------------------------------------------- */
/** Create a tree representing a packaging. */
- def makePackaging(start: Int, pkg: Tree, stats: List[Tree]): PackageDef = pkg match {
+ def makePackaging(start: Offset, pkg: Tree, stats: List[Tree]): PackageDef = pkg match {
case x: RefTree => atPos(start, pkg.pos.point)(PackageDef(x, stats))
}
-/*
- pkg match {
- case id @ Ident(_) =>
- PackageDef(id, stats)
- case Select(qual, name) => // drop this to flatten packages
- makePackaging(start, qual, List(PackageDef(Ident(name), stats)))
- }
- }
-*/
- /** Create a tree representing a package object, converting
- * {{{
- * package object foo { ... }
- * }}}
- * to
- * {{{
- * package foo {
- * object `package` { ... }
- * }
- * }}}
- */
- def makePackageObject(start: Int, objDef: ModuleDef): PackageDef = objDef match {
- case ModuleDef(mods, name, impl) =>
- makePackaging(
- start, atPos(o2p(objDef.pos.startOrPoint)){ Ident(name) }, List(ModuleDef(mods, nme.PACKAGEkw, impl)))
- }
+ def makeEmptyPackage(start: Offset, stats: List[Tree]): PackageDef = (
+ makePackaging(start, atPos(start, start, start)(Ident(nme.EMPTY_PACKAGE_NAME)), stats)
+ )
- /** {{{
- * Packaging ::= package QualId [nl] `{' TopStatSeq `}'
- * }}}
- */
- def packaging(start: Int): Tree = {
- val nameOffset = in.offset
- val pkg = pkgQualId()
- val stats = inBracesOrNil(topStatSeq())
- makePackaging(start, pkg, stats)
+ def statSeq(stat: PartialFunction[Token, List[Tree]], errorMsg: String = "illegal start of definition"): List[Tree] = {
+ val stats = new ListBuffer[Tree]
+ def default(tok: Token) =
+ if (isStatSep) Nil
+ else syntaxErrorOrIncompleteAnd(errorMsg, skipIt = true)(Nil)
+ while (!isStatSeqEnd) {
+ stats ++= stat.applyOrElse(in.token, default)
+ acceptStatSepOpt()
+ }
+ stats.toList
}
/** {{{
@@ -2922,54 +2971,25 @@ self =>
* |
* }}}
*/
- def topStatSeq(): List[Tree] = {
- val stats = new ListBuffer[Tree]
- while (!isStatSeqEnd) {
- stats ++= (in.token match {
- case PACKAGE =>
- val start = in.skipToken()
- if (in.token == OBJECT)
- joinComment(List(makePackageObject(start, objectDef(in.offset, NoMods))))
- else {
- in.flushDoc
- List(packaging(start))
- }
- case IMPORT =>
- in.flushDoc
- importClause()
- case x if x == AT || isTemplateIntro || isModifier =>
- joinComment(List(topLevelTmplDef))
- case _ =>
- if (!isStatSep)
- syntaxErrorOrIncomplete("expected class or object definition", true)
- Nil
- })
- acceptStatSepOpt()
- }
- stats.toList
- }
-
- /** Informal - for the repl and other direct parser accessors.
- */
- def templateStats(): List[Tree] = templateStatSeq(isPre = false)._2 match {
- case Nil => EmptyTree.asList
- case stats => stats
+ def topStatSeq(): List[Tree] = statSeq(topStat, errorMsg = "expected class or object definition")
+ def topStat: PartialFunction[Token, List[Tree]] = {
+ case PACKAGE =>
+ packageOrPackageObject(in.skipToken()) :: Nil
+ case IMPORT =>
+ in.flushDoc
+ importClause()
+ case _ if isAnnotation || isTemplateIntro || isModifier =>
+ joinComment(topLevelTmplDef :: Nil)
}
/** {{{
- * TemplateStatSeq ::= [id [`:' Type] `=>'] TemplateStat {semi TemplateStat}
- * TemplateStat ::= Import
- * | Annotations Modifiers Def
- * | Annotations Modifiers Dcl
- * | Expr1
- * | super ArgumentExprs {ArgumentExprs}
- * |
+ * TemplateStatSeq ::= [id [`:' Type] `=>'] TemplateStats
* }}}
* @param isPre specifies whether in early initializer (true) or not (false)
*/
def templateStatSeq(isPre : Boolean): (ValDef, List[Tree]) = checkNoEscapingPlaceholders {
- var self: ValDef = emptyValDef
- val stats = new ListBuffer[Tree]
+ var self: ValDef = noSelfType
+ var firstOpt: Option[Tree] = None
if (isExprIntro) {
in.flushDoc
val first = expr(InTemplate) // @S: first statement is potentially converted so cannot be stubbed.
@@ -2986,27 +3006,37 @@ self =>
}
in.nextToken()
} else {
- stats += first
+ firstOpt = Some(first)
acceptStatSepOpt()
}
}
- while (!isStatSeqEnd) {
- if (in.token == IMPORT) {
- in.flushDoc
- stats ++= importClause()
- } else if (isExprIntro) {
- in.flushDoc
- stats += statement(InTemplate)
- } else if (isDefIntro || isModifier || in.token == AT) {
- stats ++= joinComment(nonLocalDefOrDcl)
- } else if (!isStatSep) {
- syntaxErrorOrIncomplete("illegal start of definition", true)
- }
- acceptStatSepOpt()
- }
- (self, stats.toList)
+ (self, firstOpt ++: templateStats())
+ }
+
+ /** {{{
+ * TemplateStats ::= TemplateStat {semi TemplateStat}
+ * TemplateStat ::= Import
+ * | Annotations Modifiers Def
+ * | Annotations Modifiers Dcl
+ * | Expr1
+ * | super ArgumentExprs {ArgumentExprs}
+ * |
+ * }}}
+ */
+ def templateStats(): List[Tree] = statSeq(templateStat)
+ def templateStat: PartialFunction[Token, List[Tree]] = {
+ case IMPORT =>
+ in.flushDoc
+ importClause()
+ case _ if isDefIntro || isModifier || isAnnotation =>
+ joinComment(nonLocalDefOrDcl)
+ case _ if isExprIntro =>
+ in.flushDoc
+ statement(InTemplate) :: Nil
}
+ def templateOrTopStatSeq(): List[Tree] = statSeq(templateStat.orElse(topStat))
+
/** {{{
* RefineStatSeq ::= RefineStat {semi RefineStat}
* RefineStat ::= Dcl
@@ -3017,19 +3047,23 @@ self =>
def refineStatSeq(): List[Tree] = checkNoEscapingPlaceholders {
val stats = new ListBuffer[Tree]
while (!isStatSeqEnd) {
- if (isDclIntro) { // don't IDE hook
- stats ++= joinComment(defOrDcl(in.offset, NoMods))
- } else if (!isStatSep) {
- syntaxErrorOrIncomplete(
- "illegal start of declaration"+
- (if (inFunReturnType) " (possible cause: missing `=' in front of current method body)"
- else ""), true)
- }
+ stats ++= refineStat()
if (in.token != RBRACE) acceptStatSep()
}
stats.toList
}
+ def refineStat(): List[Tree] =
+ if (isDclIntro) { // don't IDE hook
+ joinComment(defOrDcl(in.offset, NoMods))
+ } else if (!isStatSep) {
+ syntaxErrorOrIncomplete(
+ "illegal start of declaration"+
+ (if (inFunReturnType) " (possible cause: missing `=' in front of current method body)"
+ else ""), skipIt = true)
+ Nil
+ } else Nil
+
/** overridable IDE hook for local definitions of blockStatSeq
* Here's an idea how to fill in start and end positions.
def localDef : List[Tree] = {
@@ -3047,13 +3081,13 @@ self =>
def localDef(implicitMod: Int): List[Tree] = {
val annots = annotations(skipNewLines = true)
val pos = in.offset
- val mods = (localModifiers() | implicitMod) withAnnotations annots
+ val mods = (localModifiers() | implicitMod.toLong) withAnnotations annots
val defs =
if (!(mods hasFlag ~(Flags.IMPLICIT | Flags.LAZY))) defOrDcl(pos, mods)
else List(tmplDef(pos, mods))
in.token match {
- case RBRACE | CASE => defs :+ (Literal(Constant()) setPos o2p(in.offset))
+ case RBRACE | CASE => defs :+ setInPos(literalUnit)
case _ => defs
}
}
@@ -3069,16 +3103,12 @@ self =>
*/
def blockStatSeq(): List[Tree] = checkNoEscapingPlaceholders {
val stats = new ListBuffer[Tree]
- while (!isStatSeqEnd && in.token != CASE) {
+ while (!isStatSeqEnd && !isCaseDefEnd) {
if (in.token == IMPORT) {
stats ++= importClause()
acceptStatSepOpt()
}
- else if (isExprIntro) {
- stats += statement(InBlock)
- if (in.token != RBRACE && in.token != CASE) acceptStatSep()
- }
- else if (isDefIntro || isLocalModifier || in.token == AT) {
+ else if (isDefIntro || isLocalModifier || isAnnotation) {
if (in.token == IMPLICIT) {
val start = in.skipToken()
if (isIdent) stats += implicitClosure(start, InBlock)
@@ -3088,12 +3118,16 @@ self =>
}
acceptStatSepOpt()
}
+ else if (isExprIntro) {
+ stats += statement(InBlock)
+ if (!isCaseDefEnd) acceptStatSep()
+ }
else if (isStatSep) {
in.nextToken()
}
else {
val addendum = if (isModifier) " (no modifiers allowed here)" else ""
- syntaxErrorOrIncomplete("illegal start of statement" + addendum, true)
+ syntaxErrorOrIncomplete("illegal start of statement" + addendum, skipIt = true)
}
}
stats.toList
@@ -3103,7 +3137,7 @@ self =>
* CompilationUnit ::= {package QualId semi} TopStatSeq
* }}}
*/
- def compilationUnit(): Tree = checkNoEscapingPlaceholders {
+ def compilationUnit(): PackageDef = checkNoEscapingPlaceholders {
def topstats(): List[Tree] = {
val ts = new ListBuffer[Tree]
while (in.token == SEMI) in.nextToken()
@@ -3111,13 +3145,15 @@ self =>
if (in.token == PACKAGE) {
in.nextToken()
if (in.token == OBJECT) {
+ // TODO - this next line is supposed to be
+ // ts += packageObjectDef(start)
+ // but this broke a scaladoc test (run/diagrams-filtering.scala) somehow.
ts ++= joinComment(List(makePackageObject(start, objectDef(in.offset, NoMods))))
if (in.token != EOF) {
acceptStatSep()
ts ++= topStatSeq()
}
} else {
- val nameOffset = in.offset
in.flushDoc
val pkg = pkgQualId()
@@ -3140,17 +3176,17 @@ self =>
resetPackage()
topstats() match {
- case List(stat @ PackageDef(_, _)) => stat
- case stats =>
+ case (stat @ PackageDef(_, _)) :: Nil => stat
+ case stats =>
val start =
if (stats forall (_ == EmptyTree)) 0
else {
val wpos = wrappingPos(stats)
- if (wpos.isDefined) wpos.startOrPoint
+ if (wpos.isDefined) wpos.start
else 0
}
- makePackaging(start, atPos(start, start, start) { Ident(nme.EMPTY_PACKAGE_NAME) }, stats)
+ makeEmptyPackage(start, stats)
}
}
}
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
index 8d295a28d0..e8d46704c3 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
@@ -5,13 +5,15 @@
package scala.tools.nsc
package ast.parser
-import scala.tools.nsc.util.CharArrayReader
+import scala.tools.nsc.util.{ CharArrayReader, CharArrayReaderData }
import scala.reflect.internal.util._
import scala.reflect.internal.Chars._
import Tokens._
-import scala.annotation.switch
-import scala.collection.mutable.{ ListBuffer, ArrayBuffer }
-import scala.xml.Utility.{ isNameStart }
+import scala.annotation.{ switch, tailrec }
+import scala.collection.{ mutable, immutable }
+import mutable.{ ListBuffer, ArrayBuffer }
+import scala.tools.nsc.ast.parser.xml.Utility.isNameStart
+import scala.language.postfixOps
/** See Parsers.scala / ParsersCommon for some explanation of ScannersCommon.
*/
@@ -19,20 +21,24 @@ trait ScannersCommon {
val global : Global
import global._
+ /** Offset into source character array */
+ type Offset = Int
+
+ type Token = Int
+
trait CommonTokenData {
- def token: Int
+ def token: Token
def name: TermName
}
trait ScannerCommon extends CommonTokenData {
// things to fill in, in addition to buf, decodeUni which come from CharArrayReader
- def warning(off: Int, msg: String): Unit
- def error (off: Int, msg: String): Unit
- def incompleteInputError(off: Int, msg: String): Unit
- def deprecationWarning(off: Int, msg: String): Unit
+ def error(off: Offset, msg: String): Unit
+ def incompleteInputError(off: Offset, msg: String): Unit
+ def deprecationWarning(off: Offset, msg: String): Unit
}
- def createKeywordArray(keywords: Seq[(Name, Int)], defaultToken: Int): (Int, Array[Int]) = {
+ def createKeywordArray(keywords: Seq[(Name, Token)], defaultToken: Token): (Token, Array[Token]) = {
val names = keywords sortBy (_._1.start) map { case (k, v) => (k.start, v) }
val low = names.head._1
val high = names.last._1
@@ -47,16 +53,10 @@ trait Scanners extends ScannersCommon {
val global : Global
import global._
- /** Offset into source character array */
- type Offset = Int
-
- /** An undefined offset */
- val NoOffset: Offset = -1
-
trait TokenData extends CommonTokenData {
/** the next token */
- var token: Int = EMPTY
+ var token: Token = EMPTY
/** the offset of the first character of the current token */
var offset: Offset = 0
@@ -73,24 +73,105 @@ trait Scanners extends ScannersCommon {
/** the base of a number */
var base: Int = 0
- def copyFrom(td: TokenData) = {
+ def copyFrom(td: TokenData): this.type = {
this.token = td.token
this.offset = td.offset
this.lastOffset = td.lastOffset
this.name = td.name
this.strVal = td.strVal
this.base = td.base
+ this
+ }
+ }
+
+   /** An interface to most of the mutable data in Scanner defined in TokenData
+ * and CharArrayReader (+ next, prev fields) with copyFrom functionality
+ * to backup/restore data (used by quasiquotes' lookingAhead).
+ */
+ trait ScannerData extends TokenData with CharArrayReaderData {
+ /** we need one token lookahead and one token history
+ */
+ val next: TokenData = new TokenData{}
+ val prev: TokenData = new TokenData{}
+
+ def copyFrom(sd: ScannerData): this.type = {
+ this.next copyFrom sd.next
+ this.prev copyFrom sd.prev
+ super[CharArrayReaderData].copyFrom(sd)
+ super[TokenData].copyFrom(sd)
+ this
}
}
- abstract class Scanner extends CharArrayReader with TokenData with ScannerCommon {
+ abstract class Scanner extends CharArrayReader with TokenData with ScannerData with ScannerCommon {
private def isDigit(c: Char) = java.lang.Character isDigit c
- def isAtEnd = charOffset >= buf.length
+ private var openComments = 0
+ protected def putCommentChar(): Unit = nextChar()
- def flush = { charOffset = offset; nextChar(); this }
+ @tailrec private def skipLineComment(): Unit = ch match {
+ case SU | CR | LF =>
+ case _ => nextChar() ; skipLineComment()
+ }
+ private def maybeOpen() {
+ putCommentChar()
+ if (ch == '*') {
+ putCommentChar()
+ openComments += 1
+ }
+ }
+ private def maybeClose(): Boolean = {
+ putCommentChar()
+ (ch == '/') && {
+ putCommentChar()
+ openComments -= 1
+ openComments == 0
+ }
+ }
+ @tailrec final def skipNestedComments(): Unit = ch match {
+ case '/' => maybeOpen() ; skipNestedComments()
+ case '*' => if (!maybeClose()) skipNestedComments()
+ case SU => incompleteInputError("unclosed comment")
+ case _ => putCommentChar() ; skipNestedComments()
+ }
+ def skipDocComment(): Unit = skipNestedComments()
+ def skipBlockComment(): Unit = skipNestedComments()
- def resume(lastCode: Int) = {
+ private def skipToCommentEnd(isLineComment: Boolean) {
+ nextChar()
+ if (isLineComment) skipLineComment()
+ else {
+ openComments = 1
+ val isDocComment = (ch == '*') && { nextChar(); true }
+ if (isDocComment) {
+ // Check for the amazing corner case of /**/
+ if (ch == '/')
+ nextChar()
+ else
+ skipDocComment()
+ }
+ else skipBlockComment()
+ }
+ }
+
+ /** @pre ch == '/'
+ * Returns true if a comment was skipped.
+ */
+ def skipComment(): Boolean = ch match {
+ case '/' | '*' => skipToCommentEnd(isLineComment = ch == '/') ; true
+ case _ => false
+ }
+ def flushDoc(): DocComment = null
+
+ /** To prevent doc comments attached to expressions from leaking out of scope
+ * onto the next documentable entity, they are discarded upon passing a right
+ * brace, bracket, or parenthesis.
+ */
+ def discardDocBuffer(): Unit = ()
+
+ def isAtEnd = charOffset >= buf.length
+
+ def resume(lastCode: Token) = {
token = lastCode
if (next.token != EMPTY && !reporter.hasErrors)
syntaxError("unexpected end of input: possible missing '}' in XML block")
@@ -98,10 +179,6 @@ trait Scanners extends ScannersCommon {
nextToken()
}
- /** the last error offset
- */
- var errOffset: Offset = NoOffset
-
/** A character buffer for literals
*/
val cbuf = new StringBuilder
@@ -119,7 +196,7 @@ trait Scanners extends ScannersCommon {
protected def emitIdentifierDeprecationWarnings = true
/** Clear buffer and set name and token */
- private def finishNamed(idtoken: Int = IDENTIFIER) {
+ private def finishNamed(idtoken: Token = IDENTIFIER) {
name = newTermName(cbuf.toString)
cbuf.clear()
token = idtoken
@@ -127,8 +204,12 @@ trait Scanners extends ScannersCommon {
val idx = name.start - kwOffset
if (idx >= 0 && idx < kwArray.length) {
token = kwArray(idx)
- if (token == IDENTIFIER && allowIdent != name && emitIdentifierDeprecationWarnings)
- deprecationWarning(name+" is now a reserved word; usage as an identifier is deprecated")
+ if (token == IDENTIFIER && allowIdent != name) {
+ if (name == nme.MACROkw)
+ syntaxError(s"$name is now a reserved word; usage as an identifier is disallowed")
+ else if (emitIdentifierDeprecationWarnings)
+ deprecationWarning(s"$name is now a reserved word; usage as an identifier is deprecated")
+ }
}
}
}
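
With the change above, using `macro` as a plain identifier becomes a hard error, while other soon-to-be keywords such as `then` still only produce a deprecation warning; backquoting remains the escape hatch. Illustrative source:

    object ReservedWordDemo {
      // val macro = 1   // hard error: "macro is now a reserved word; usage as an identifier is disallowed"
      // val then  = 2   // still only a deprecation warning
      val `macro` = 1    // backquoted identifiers remain usable
    }
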
@@ -139,29 +220,6 @@ trait Scanners extends ScannersCommon {
cbuf.clear()
}
- /** Should doc comments be built? */
- def buildDocs: Boolean = forScaladoc
-
- /** holder for the documentation comment
- */
- var docComment: DocComment = null
-
- def flushDoc: DocComment = {
- val ret = docComment
- docComment = null
- ret
- }
-
- protected def foundComment(value: String, start: Int, end: Int) = ()
- protected def foundDocComment(value: String, start: Int, end: Int) = ()
-
- private class TokenData0 extends TokenData
-
- /** we need one token lookahead and one token history
- */
- val next : TokenData = new TokenData0
- val prev : TokenData = new TokenData0
-
/** a stack of tokens which indicates whether line-ends can be statement separators
* also used for keeping track of nesting levels.
* We keep track of the closing symbol of a region. This can be
@@ -173,7 +231,7 @@ trait Scanners extends ScannersCommon {
* (the STRINGLIT appears twice in succession on the stack iff the
* expression is a multiline string literal).
*/
- var sepRegions: List[Int] = List()
+ var sepRegions: List[Token] = List()
// Get next token ------------------------------------------------------------
@@ -227,12 +285,15 @@ trait Scanners extends ScannersCommon {
case RBRACE =>
while (!sepRegions.isEmpty && sepRegions.head != RBRACE)
sepRegions = sepRegions.tail
- if (!sepRegions.isEmpty) sepRegions = sepRegions.tail
- docComment = null
+ if (!sepRegions.isEmpty)
+ sepRegions = sepRegions.tail
+
+ discardDocBuffer()
case RBRACKET | RPAREN =>
if (!sepRegions.isEmpty && sepRegions.head == lastToken)
sepRegions = sepRegions.tail
- docComment = null
+
+ discardDocBuffer()
case ARROW =>
if (!sepRegions.isEmpty && sepRegions.head == lastToken)
sepRegions = sepRegions.tail
@@ -262,11 +323,11 @@ trait Scanners extends ScannersCommon {
next.token = EMPTY
}
- /** Insert NEWLINE or NEWLINES if
- * - we are after a newline
- * - we are within a { ... } or on toplevel (wrt sepRegions)
- * - the current token can start a statement and the one before can end it
- * insert NEWLINES if we are past a blank line, NEWLINE otherwise
+ /* Insert NEWLINE or NEWLINES if
+ * - we are after a newline
+ * - we are within a { ... } or on toplevel (wrt sepRegions)
+ * - the current token can start a statement and the one before can end it
+ * insert NEWLINES if we are past a blank line, NEWLINE otherwise
*/
if (!applyBracePatch() && afterLineEnd() && inLastOfStat(lastToken) && inFirstOfStat(token) &&
(sepRegions.isEmpty || sepRegions.head == RBRACE)) {
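
The rules above decide where the scanner inserts virtual statement separators; two source-level consequences (illustrative):

    object NewlineDemo {
      val xs = List(1, 2, 3)   // ')' can end a statement and 'val' can begin one,
      val n  = xs.sum          //   so a NEWLINE (virtual semicolon) separates these lines

      val chained = xs
        .map(_ + 1)            // no NEWLINE inserted: '.' cannot begin a statement (see inFirstOfStat below)

      // a blank line between two statements yields NEWLINES rather than NEWLINE
    }
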
@@ -328,7 +389,7 @@ trait Scanners extends ScannersCommon {
// println("blank line found at "+lastOffset+":"+(lastOffset to idx).map(buf(_)).toList)
return true
}
- if (idx == end) return false
+ if (idx == end) return false
} while (ch <= ' ')
}
idx += 1; ch = buf(idx)
@@ -375,7 +436,7 @@ trait Scanners extends ScannersCommon {
getOperatorRest()
}
}
- fetchLT
+ fetchLT()
case '~' | '!' | '@' | '#' | '%' |
'^' | '*' | '+' | '-' | /*'<' | */
'>' | '?' | ':' | '=' | '&' |
@@ -399,20 +460,11 @@ trait Scanners extends ScannersCommon {
nextChar()
base = 16
} else {
- /**
- * What should leading 0 be in the future? It is potentially dangerous
- * to let it be base-10 because of history. Should it be an error? Is
- * there a realistic situation where one would need it?
- */
- if (isDigit(ch)) {
- if (opt.future) syntaxError("Non-zero numbers may not have a leading zero.")
- else deprecationWarning("Treating numbers with a leading zero as octal is deprecated.")
- }
base = 8
}
getNumber()
}
- fetchZero
+ fetchZero()
case '1' | '2' | '3' | '4' | '5' | '6' | '7' | '8' | '9' =>
base = 10
getNumber()
@@ -423,14 +475,17 @@ trait Scanners extends ScannersCommon {
if (token == INTERPOLATIONID) {
nextRawChar()
if (ch == '\"') {
- nextRawChar()
- if (ch == '\"') {
+ val lookahead = lookaheadReader
+ lookahead.nextChar()
+ if (lookahead.ch == '\"') {
+ nextRawChar() // now eat it
offset += 3
nextRawChar()
getStringPart(multiLine = true)
sepRegions = STRINGPART :: sepRegions // indicate string part
sepRegions = STRINGLIT :: sepRegions // once more to indicate multi line string part
} else {
+ nextChar()
token = STRINGLIT
strVal = ""
}
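
The lookahead introduced above decides between an empty interpolated string and the opening of a triple-quoted one; the two forms it has to distinguish (illustrative):

    object InterpolationDemo {
      val empty  = s""                          // empty interpolation: token STRINGLIT with strVal ""
      val triple = s"""has "quotes" inside"""   // triple-quoted interpolation: getStringPart(multiLine = true)
    }
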
@@ -455,7 +510,7 @@ trait Scanners extends ScannersCommon {
}
}
}
- fetchDoubleQuote
+ fetchDoubleQuote()
case '\'' =>
def fetchSingleQuote() = {
nextChar()
@@ -474,7 +529,7 @@ trait Scanners extends ScannersCommon {
}
}
}
- fetchSingleQuote
+ fetchSingleQuote()
case '.' =>
nextChar()
if ('0' <= ch && ch <= '9') {
@@ -519,72 +574,16 @@ trait Scanners extends ScannersCommon {
nextChar()
getOperatorRest()
} else {
- syntaxError("illegal character '" + ("" + '\\' + 'u' + "%04x".format(ch: Int)) + "'")
+ syntaxError("illegal character '" + ("" + '\\' + 'u' + "%04x".format(ch.toInt)) + "'")
nextChar()
}
}
- fetchOther
- }
- }
-
- private def skipComment(): Boolean = {
-
- if (ch == '/' || ch == '*') {
-
- val comment = new StringBuilder("/")
- def appendToComment() = comment.append(ch)
-
- if (ch == '/') {
- do {
- appendToComment()
- nextChar()
- } while ((ch != CR) && (ch != LF) && (ch != SU))
- } else {
- docComment = null
- var openComments = 1
- appendToComment()
- nextChar()
- appendToComment()
- var buildingDocComment = false
- if (ch == '*' && buildDocs) {
- buildingDocComment = true
- }
- while (openComments > 0) {
- do {
- do {
- if (ch == '/') {
- nextChar(); appendToComment()
- if (ch == '*') {
- nextChar(); appendToComment()
- openComments += 1
- }
- }
- if (ch != '*' && ch != SU) {
- nextChar(); appendToComment()
- }
- } while (ch != '*' && ch != SU)
- while (ch == '*') {
- nextChar(); appendToComment()
- }
- } while (ch != '/' && ch != SU)
- if (ch == '/') nextChar()
- else incompleteInputError("unclosed comment")
- openComments -= 1
- }
-
- if (buildingDocComment)
- foundDocComment(comment.toString, offset, charOffset - 2)
- }
-
- foundComment(comment.toString, offset, charOffset - 2)
- true
- } else {
- false
+ fetchOther()
}
}
/** Can token start a statement? */
- def inFirstOfStat(token: Int) = token match {
+ def inFirstOfStat(token: Token) = token match {
case EOF | CATCH | ELSE | EXTENDS | FINALLY | FORSOME | MATCH | WITH | YIELD |
COMMA | SEMI | NEWLINE | NEWLINES | DOT | COLON | EQUALS | ARROW | LARROW |
SUBTYPE | VIEWBOUND | SUPERTYPE | HASH | RPAREN | RBRACKET | RBRACE | LBRACKET =>
@@ -594,7 +593,7 @@ trait Scanners extends ScannersCommon {
}
/** Can token end a statement? */
- def inLastOfStat(token: Int) = token match {
+ def inLastOfStat(token: Token) = token match {
case CHARLIT | INTLIT | LONGLIT | FLOATLIT | DOUBLELIT | STRINGLIT | SYMBOLLIT |
IDENTIFIER | BACKQUOTED_IDENT | THIS | NULL | TRUE | FALSE | RETURN | USCORE |
TYPE | XMLSTART | RPAREN | RBRACKET | RBRACE =>
@@ -709,7 +708,7 @@ trait Scanners extends ScannersCommon {
}
}
- @annotation.tailrec private def getStringPart(multiLine: Boolean): Unit = {
+ @scala.annotation.tailrec private def getStringPart(multiLine: Boolean): Unit = {
def finishStringPart() = {
setStrVal()
token = STRINGPART
@@ -739,6 +738,10 @@ trait Scanners extends ScannersCommon {
finishStringPart()
nextRawChar()
next.token = LBRACE
+ } else if (ch == '_') {
+ finishStringPart()
+ nextRawChar()
+ next.token = USCORE
} else if (Character.isUnicodeIdentifierStart(ch)) {
finishStringPart()
do {
@@ -803,6 +806,7 @@ trait Scanners extends ScannersCommon {
if (ch == '\\') {
nextChar()
if ('0' <= ch && ch <= '7') {
+ val start = charOffset - 2
val leadch: Char = ch
var oct: Int = digit2int(ch, 8)
nextChar()
@@ -814,6 +818,12 @@ trait Scanners extends ScannersCommon {
nextChar()
}
}
+ val alt = if (oct == LF) "\\n" else "\\u%04x" format oct
+ def msg(what: String) = s"Octal escape literals are $what, use $alt instead."
+ if (settings.future)
+ syntaxError(start, msg("unsupported"))
+ else
+ deprecationWarning(start, msg("deprecated"))
putChar(oct.toChar)
} else {
ch match {
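
The new diagnostic above covers octal escapes inside character and string literals; the suggested replacement follows the `alt` computation (\n for a line feed, \uXXXX otherwise). Illustrative deprecated inputs:

    object OctalEscapeDemo {
      val lineFeed = "\12"    // deprecated (an error under settings.future); suggested replacement: "\n"
      val letterA  = "\101"   // deprecated; suggested replacement: "\u0041"
    }
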
@@ -895,7 +905,7 @@ trait Scanners extends ScannersCommon {
*/
def intVal(negated: Boolean): Long = {
if (token == CHARLIT && !negated) {
- charVal
+ charVal.toLong
} else {
var value: Long = 0
val divider = if (base == 10) 1 else 2
@@ -923,7 +933,7 @@ trait Scanners extends ScannersCommon {
}
}
- def intVal: Long = intVal(false)
+ def intVal: Long = intVal(negated = false)
/** Convert current strVal, base to double value
*/
@@ -943,9 +953,8 @@ trait Scanners extends ScannersCommon {
}
if (value > limit)
syntaxError("floating point number too large")
- if (isDeprecatedForm) {
- deprecationWarning("This lexical syntax is deprecated. From scala 2.11, a dot will only be considered part of a number if it is immediately followed by a digit.")
- }
+ if (isDeprecatedForm)
+ syntaxError("floating point number is missing digit after dot")
if (negated) -value else value
} catch {
@@ -955,7 +964,7 @@ trait Scanners extends ScannersCommon {
}
}
- def floatVal: Double = floatVal(false)
+ def floatVal: Double = floatVal(negated = false)
def checkNoLetter() {
if (isIdentifierPart(ch) && ch >= ' ')
@@ -966,14 +975,19 @@ trait Scanners extends ScannersCommon {
*/
protected def getNumber() {
val base1 = if (base < 10) 10 else base
- // read 8,9's even if format is octal, produce a malformed number error afterwards.
+ // Read 8,9's even if format is octal, produce a malformed number error afterwards.
+ // At this point, we have already read the first digit, so to tell an innocent 0 apart
+ // from an octal literal 0123... (which we want to disallow), we check whether there
+ // are any additional digits coming after the first one we have already read.
+ var notSingleZero = false
while (digit2int(ch, base1) >= 0) {
putChar(ch)
nextChar()
+ notSingleZero = true
}
token = INTLIT
- /** When we know for certain it's a number after using a touch of lookahead */
+ /* When we know for certain it's a number after using a touch of lookahead */
def restOfNumber() = {
putChar(ch)
nextChar()
@@ -986,6 +1000,9 @@ trait Scanners extends ScannersCommon {
if (base <= 10 && isEfd)
getFraction()
else {
+ // Checking for base == 8 is not enough, because base = 8 is set
+ // as soon as a 0 is read in `case '0'` of method fetchToken.
+ if (base == 8 && notSingleZero) syntaxError("Non-zero integral values may not have a leading zero.")
setStrVal()
if (isL) {
nextChar()
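
What the leading-zero check above accepts and rejects, per the notSingleZero bookkeeping (illustrative):

    object LeadingZeroDemo {
      val zero = 0       // a lone zero is still fine: notSingleZero stays false
      val hex  = 0x1F    // hex literals are unaffected (base = 16)
      // val oct = 0123  // rejected: "Non-zero integral values may not have a leading zero."
    }
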
@@ -1001,10 +1018,8 @@ trait Scanners extends ScannersCommon {
val lookahead = lookaheadReader
val c = lookahead.getc()
- /** As of scala 2.11, it isn't a number unless c here is a digit, so
- * opt.future excludes the rest of the logic.
- */
- if (opt.future && !isDigit(c))
+ /* Prohibit 1. */
+ if (!isDigit(c))
return setStrVal()
val isDefinitelyNumber = (c: @switch) match {
@@ -1012,16 +1027,16 @@ trait Scanners extends ScannersCommon {
case '0' | '1' | '2' | '3' | '4' | '5' | '6' | '7' | '8' | '9' =>
true
- /** Backquoted idents like 22.`foo`. */
+ /* Backquoted idents like 22.`foo`. */
case '`' =>
return setStrVal() /** Note the early return */
- /** These letters may be part of a literal, or a method invocation on an Int.
+ /* These letters may be part of a literal, or a method invocation on an Int.
*/
case 'd' | 'D' | 'f' | 'F' =>
!isIdentifierPart(lookahead.getc())
- /** A little more special handling for e.g. 5e7 */
+ /* A little more special handling for e.g. 5e7 */
case 'e' | 'E' =>
val ch = lookahead.getc()
!isIdentifierPart(ch) || (isDigit(ch) || ch == '+' || ch == '-')
@@ -1058,7 +1073,6 @@ trait Scanners extends ScannersCommon {
def syntaxError(off: Offset, msg: String) {
error(off, msg)
token = ERROR
- errOffset = off
}
/** generate an error at the current token offset
@@ -1071,7 +1085,6 @@ trait Scanners extends ScannersCommon {
def incompleteInputError(msg: String) {
incompleteInputError(offset, msg)
token = EOF
- errOffset = offset
}
override def toString() = token match {
@@ -1114,7 +1127,7 @@ trait Scanners extends ScannersCommon {
def applyBracePatch(): Boolean = false
/** overridden in UnitScanners */
- def parenBalance(token: Int) = 0
+ def parenBalance(token: Token) = 0
/** overridden in UnitScanners */
def healBraces(): List[BracePatch] = List()
@@ -1129,7 +1142,7 @@ trait Scanners extends ScannersCommon {
// ------------- keyword configuration -----------------------------------
- private val allKeywords = List[(Name, Int)](
+ private val allKeywords = List[(Name, Token)](
nme.ABSTRACTkw -> ABSTRACT,
nme.CASEkw -> CASE,
nme.CATCHkw -> CATCH,
@@ -1183,8 +1196,8 @@ trait Scanners extends ScannersCommon {
nme.MACROkw -> IDENTIFIER,
nme.THENkw -> IDENTIFIER)
- private var kwOffset: Int = -1
- private val kwArray: Array[Int] = {
+ private var kwOffset: Offset = -1
+ private val kwArray: Array[Token] = {
val (offset, arr) = createKeywordArray(allKeywords, IDENTIFIER)
kwOffset = offset
arr
@@ -1195,7 +1208,7 @@ trait Scanners extends ScannersCommon {
// Token representation ----------------------------------------------------
/** Returns the string representation of given token. */
- def token2string(token: Int): String = (token: @switch) match {
+ def token2string(token: Token): String = (token: @switch) match {
case IDENTIFIER | BACKQUOTED_IDENT => "identifier"
case CHARLIT => "character literal"
case INTLIT => "integer literal"
@@ -1226,17 +1239,16 @@ trait Scanners extends ScannersCommon {
}
}
- class MalformedInput(val offset: Int, val msg: String) extends Exception
+ class MalformedInput(val offset: Offset, val msg: String) extends Exception
/** A scanner for a given source file not necessarily attached to a compilation unit.
  * Useful for looking inside source files that are not currently compiled to see what's there
*/
class SourceFileScanner(val source: SourceFile) extends Scanner {
val buf = source.content
- override val decodeUni: Boolean = !settings.nouescape.value
+ override val decodeUni: Boolean = !settings.nouescape
// suppress warnings, throw exception on errors
- def warning(off: Offset, msg: String): Unit = ()
def deprecationWarning(off: Offset, msg: String): Unit = ()
def error (off: Offset, msg: String): Unit = throw new MalformedInput(off, msg)
def incompleteInputError(off: Offset, msg: String): Unit = throw new MalformedInput(off, msg)
@@ -1244,10 +1256,9 @@ trait Scanners extends ScannersCommon {
/** A scanner over a given compilation unit
*/
- class UnitScanner(unit: CompilationUnit, patches: List[BracePatch]) extends SourceFileScanner(unit.source) {
+ class UnitScanner(val unit: CompilationUnit, patches: List[BracePatch]) extends SourceFileScanner(unit.source) {
def this(unit: CompilationUnit) = this(unit, List())
- override def warning(off: Offset, msg: String) = unit.warning(unit.position(off), msg)
override def deprecationWarning(off: Offset, msg: String) = unit.deprecationWarning(unit.position(off), msg)
override def error (off: Offset, msg: String) = unit.error(unit.position(off), msg)
override def incompleteInputError(off: Offset, msg: String) = unit.incompleteInputError(unit.position(off), msg)
@@ -1256,7 +1267,7 @@ trait Scanners extends ScannersCommon {
lazy val parensAnalyzer = new ParensAnalyzer(unit, List())
- override def parenBalance(token: Int) = parensAnalyzer.balance(token)
+ override def parenBalance(token: Token) = parensAnalyzer.balance(token)
override def healBraces(): List[BracePatch] = {
var patches: List[BracePatch] = List()
@@ -1293,23 +1304,21 @@ trait Scanners extends ScannersCommon {
}
}
}
-
- override def foundComment(value: String, start: Int, end: Int) {
- val pos = new RangePosition(unit.source, start, start, end)
- unit.comment(pos, value)
- }
-
- override def foundDocComment(value: String, start: Int, end: Int) {
- val docPos = new RangePosition(unit.source, start, start, end)
- docComment = new DocComment(value, docPos)
- unit.comment(docPos, value)
- }
}
class ParensAnalyzer(unit: CompilationUnit, patches: List[BracePatch]) extends UnitScanner(unit, patches) {
- var balance = scala.collection.mutable.Map(RPAREN -> 0, RBRACKET -> 0, RBRACE -> 0)
+ val balance = mutable.Map(RPAREN -> 0, RBRACKET -> 0, RBRACE -> 0)
+
+ /** The source code with braces and line starts annotated with [NN] showing the index */
+ private def markedSource = {
+ val code = unit.source.content
+ val braces = code.indices filter (idx => "{}\n" contains code(idx)) toSet;
+ val mapped = code.indices map (idx => if (braces(idx)) s"${code(idx)}[$idx]" else "" + code(idx))
+ mapped.mkString("")
+ }
init()
+ log(s"ParensAnalyzer for ${unit.source} of length ${unit.source.content.length}\n```\n$markedSource\n```")
/** The offset of the first token on this line, or next following line if blank
*/
@@ -1385,23 +1394,30 @@ trait Scanners extends ScannersCommon {
bpbuf += current
}
}
+ def bracePairString(bp: BracePair, indent: Int): String = {
+ val rangeString = {
+ import bp._
+ val lline = line(loff)
+ val rline = line(roff)
+ val tokens = List(lline, lindent, rline, rindent) map (n => if (n < 0) "??" else "" + n)
+ "%s:%s to %s:%s".format(tokens: _*)
+ }
+ val outer = (" " * indent) + rangeString
+ val inners = bp.nested map (bracePairString(_, indent + 2))
- def printBP(bp: BracePair, indent: Int) {
- println(" "*indent+line(bp.loff)+":"+bp.lindent+" to "+line(bp.roff)+":"+bp.rindent)
- if (bp.nested.nonEmpty)
- for (bp1 <- bp.nested) {
- printBP(bp1, indent + 2)
- }
+ if (inners.isEmpty) outer
+ else inners.mkString(outer + "\n", "\n", "")
}
-// println("lineStart = "+lineStart)//DEBUG
-// println("bracepairs = ")
-// for (bp <- bpbuf.toList) printBP(bp, 0)
+ def bpString = bpbuf.toList map ("\n" + bracePairString(_, 0)) mkString ""
+ def startString = lineStart.mkString("line starts: [", ", ", "]")
+
+ log(s"\n$startString\n$bpString")
bpbuf.toList
}
var tabSeen = false
- def line(offset: Int): Int = {
+ def line(offset: Offset): Int = {
def findLine(lo: Int, hi: Int): Int = {
val mid = (lo + hi) / 2
if (offset < lineStart(mid)) findLine(lo, mid - 1)
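
The binary search above maps a character offset to its line via the sorted lineStart array; a self-contained sketch of the same lookup (the helper name lineOf is hypothetical):

    object LineLookupSketch {
      // lineOf(Array(0, 10, 25), 12) == 1, since offsets 10..24 fall on line 1
      def lineOf(lineStart: Array[Int], offset: Int): Int = {
        @scala.annotation.tailrec
        def findLine(lo: Int, hi: Int): Int = {
          val mid = (lo + hi) / 2
          if (offset < lineStart(mid)) findLine(lo, mid - 1)
          else if (mid + 1 < lineStart.length && offset >= lineStart(mid + 1)) findLine(mid + 1, hi)
          else mid
        }
        if (offset <= 0) 0 else findLine(0, lineStart.length - 1)
      }
    }
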
@@ -1412,7 +1428,7 @@ trait Scanners extends ScannersCommon {
else findLine(0, lineStart.length - 1)
}
- def column(offset: Int): Int = {
+ def column(offset: Offset): Int = {
var col = 0
var i = offset - 1
while (i >= 0 && buf(i) != CR && buf(i) != LF) {
@@ -1429,18 +1445,6 @@ trait Scanners extends ScannersCommon {
else bp :: insertPatch(bps, patch)
}
- def leftColumn(offset: Int) =
- if (offset == -1) -1 else column(lineStart(line(offset)))
-
- def rightColumn(offset: Int, default: Int) =
- if (offset == -1) -1
- else {
- val rlin = line(offset)
- if (lineStart(rlin) == offset) column(offset)
- else if (rlin + 1 < lineStart.length) column(lineStart(rlin + 1))
- else default
- }
-
def insertRBrace(): List[BracePatch] = {
def insert(bps: List[BracePair]): List[BracePatch] = bps match {
case List() => patches
@@ -1455,7 +1459,7 @@ trait Scanners extends ScannersCommon {
while (lin < lineStart.length && column(lineStart(lin)) > lindent)
lin += 1
if (lin < lineStart.length) {
- val patches1 = insertPatch(patches, BracePatch(lineStart(lin), true))
+ val patches1 = insertPatch(patches, BracePatch(lineStart(lin), inserted = true))
//println("patch for "+bp+"/"+imbalanceMeasure+"/"+new ParensAnalyzer(unit, patches1).imbalanceMeasure)
/*if (improves(patches1))*/
patches1
@@ -1476,27 +1480,16 @@ trait Scanners extends ScannersCommon {
else {
val patches1 = delete(nested)
if (patches1 ne patches) patches1
- else insertPatch(patches, BracePatch(roff, false))
+ else insertPatch(patches, BracePatch(roff, inserted = false))
}
}
delete(bracePairs)
}
- def imbalanceMeasure: Int = {
- def measureList(bps: List[BracePair]): Int =
- (bps map measure).sum
- def measure(bp: BracePair): Int =
- (if (bp.lindent != bp.rindent) 1 else 0) + measureList(bp.nested)
- measureList(bracePairs)
- }
-
- def improves(patches1: List[BracePatch]): Boolean =
- imbalanceMeasure > new ParensAnalyzer(unit, patches1).imbalanceMeasure
-
// don't emit deprecation warnings about identifiers like `macro` or `then`
// when skimming through the source file trying to heal braces
override def emitIdentifierDeprecationWarnings = false
- override def error(offset: Int, msg: String) {}
+ override def error(offset: Offset, msg: String) {}
}
}
diff --git a/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala b/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala
index e8ef670222..1abc0c860c 100755
--- a/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala
@@ -7,11 +7,8 @@ package scala.tools.nsc
package ast.parser
import scala.collection.{ mutable, immutable }
-import scala.xml.{ EntityRef, Text }
-import scala.xml.XML.{ xmlns }
import symtab.Flags.MUTABLE
import scala.reflect.internal.util.StringOps.splitWhere
-import scala.language.implicitConversions
/** This class builds instance of `Tree` that represent XML.
*
@@ -133,7 +130,7 @@ abstract class SymbolicXMLBuilder(p: Parsers#Parser, preserveWS: Boolean) {
case (Some(pre), rest) => (const(pre), const(rest))
case _ => (wild, const(n))
}
- mkXML(pos, true, prepat, labpat, null, null, false, args)
+ mkXML(pos, isPattern = true, prepat, labpat, null, null, empty = false, args)
}
protected def convertToTextPat(t: Tree): Tree = t match {
@@ -144,14 +141,12 @@ abstract class SymbolicXMLBuilder(p: Parsers#Parser, preserveWS: Boolean) {
(buf map convertToTextPat).toList
def parseAttribute(pos: Position, s: String): Tree = {
- val ts = scala.xml.Utility.parseAttributeValue(s) map {
- case Text(s) => text(pos, s)
- case EntityRef(s) => entityRef(pos, s)
- }
- ts.length match {
- case 0 => gen.mkNil
- case 1 => ts.head
- case _ => makeXMLseq(pos, ts.toList)
+ import xml.Utility.parseAttributeValue
+
+ parseAttributeValue(s, text(pos, _), entityRef(pos, _)) match {
+ case Nil => gen.mkNil
+ case t :: Nil => t
+ case ts => makeXMLseq(pos, ts.toList)
}
}
@@ -169,7 +164,7 @@ abstract class SymbolicXMLBuilder(p: Parsers#Parser, preserveWS: Boolean) {
}
/** Returns (Some(prefix) | None, rest) based on position of ':' */
- def splitPrefix(name: String): (Option[String], String) = splitWhere(name, _ == ':', true) match {
+ def splitPrefix(name: String): (Option[String], String) = splitWhere(name, _ == ':', doDropIndex = true) match {
case Some((pre, rest)) => (Some(pre), rest)
case _ => (None, name)
}
@@ -197,9 +192,9 @@ abstract class SymbolicXMLBuilder(p: Parsers#Parser, preserveWS: Boolean) {
uri1
}
- /** Extract all the namespaces from the attribute map. */
+ /* Extract all the namespaces from the attribute map. */
val namespaces: List[Tree] =
- for (z <- attrMap.keys.toList ; if z startsWith xmlns) yield {
+ for (z <- attrMap.keys.toList ; if z startsWith "xmlns") yield {
val ns = splitPrefix(z) match {
case (Some(_), rest) => rest
case _ => null
@@ -247,7 +242,7 @@ abstract class SymbolicXMLBuilder(p: Parsers#Parser, preserveWS: Boolean) {
val body = mkXML(
pos.makeTransparent,
- false,
+ isPattern = false,
const(pre),
const(newlabel),
makeSymbolicAttrs,
diff --git a/src/compiler/scala/tools/nsc/ast/parser/SyntaxAnalyzer.scala b/src/compiler/scala/tools/nsc/ast/parser/SyntaxAnalyzer.scala
index 8a9ce8907e..3a695c6f59 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/SyntaxAnalyzer.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/SyntaxAnalyzer.scala
@@ -11,26 +11,98 @@ import javac._
/** An nsc sub-component.
*/
abstract class SyntaxAnalyzer extends SubComponent with Parsers with MarkupParsers with Scanners with JavaParsers with JavaScanners {
+ import global._
val phaseName = "parser"
-
def newPhase(prev: Phase): StdPhase = new ParserPhase(prev)
- class ParserPhase(prev: scala.tools.nsc.Phase) extends StdPhase(prev) {
+ abstract class MemberDefTraverser extends Traverser {
+ def onMember(defn: MemberDef): Unit
+
+ private var depth: Int = 0
+ private def lower[T](body: => T): T = {
+ depth += 1
+ try body finally depth -= 1
+ }
+ def currentDepth = depth
+
+ /** Prune this tree and all trees beneath it. Can be overridden. */
+ def prune(md: MemberDef): Boolean = (
+ md.mods.isSynthetic
+ || md.mods.isParamAccessor
+ || nme.isConstructorName(md.name)
+ || (md.name containsName nme.ANON_CLASS_NAME)
+ )
+
+ override def traverse(t: Tree): Unit = t match {
+ case md: MemberDef if prune(md) =>
+ case md @ PackageDef(_, stats) => traverseTrees(stats)
+ case md: ImplDef => onMember(md) ; lower(traverseTrees(md.impl.body))
+ case md: ValOrDefDef => onMember(md) ; lower(traverse(md.rhs))
+ case _ => super.traverse(t)
+ }
+ }
+
+ class MemberPosReporter(unit: CompilationUnit) extends MemberDefTraverser {
+ private var outputFn: MemberDef => String = outputForScreen
+ val path = unit.source.file.path
+
+ // If a single line, outputs the line; if it spans multiple lines
+ // outputs NN,NN with start and end lines, e.g. 15,25.
+ def outputPos(md: MemberDef): String = {
+ val pos = md.pos
+ val start = pos.focusStart.line
+ val end = pos.focusEnd.line
+
+ if (start == end) "" + start else s"$start,$end"
+ }
+ def outputForSed(md: MemberDef): String = {
+ val pos_s = "%-12s" format outputPos(md) + "p"
+ s"$pos_s $path # ${md.keyword} ${md.name}"
+ }
+ def outputForScreen(md: MemberDef): String = {
+ val pos_s = "%-20s" format " " * currentDepth + outputPos(md)
+ s"$pos_s ${md.keyword} ${md.name}"
+ }
+
+ def onMember(md: MemberDef) = println(outputFn(md))
+ // It recognizes "sed" and "anything else".
+ def show(style: String) {
+ if (style == "sed") {
+ outputFn = outputForSed
+ traverse(unit.body)
+ }
+ else {
+ outputFn = outputForScreen
+ println(path)
+ traverse(unit.body)
+ }
+ println("")
+ }
+ }
+
+ private def initialUnitBody(unit: CompilationUnit): Tree = {
+ if (unit.isJava) new JavaUnitParser(unit).parse()
+ else if (global.reporter.incompleteHandled) newUnitParser(unit).parse()
+ else newUnitParser(unit).smartParse()
+ }
+
+ class ParserPhase(prev: Phase) extends StdPhase(prev) {
override val checkable = false
override val keepsTypeParams = false
- def apply(unit: global.CompilationUnit) {
- import global._
+ def apply(unit: CompilationUnit) {
informProgress("parsing " + unit)
- unit.body =
- if (unit.isJava) new JavaUnitParser(unit).parse()
- else if (reporter.incompleteHandled) new UnitParser(unit).parse()
- else new UnitParser(unit).smartParse()
+ // if the body is already filled in, don't overwrite it
+ // otherwise compileLate is going to overwrite bodies of synthetic source files
+ if (unit.body == EmptyTree)
+ unit.body = initialUnitBody(unit)
- if (settings.Yrangepos.value && !reporter.hasErrors)
+ if (settings.Yrangepos && !reporter.hasErrors)
validatePositions(unit.body)
+
+ if (settings.Ymemberpos.isSetByUser)
+ new MemberPosReporter(unit) show (style = settings.Ymemberpos.value)
}
}
}
-
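
A standalone sketch of the single-line versus start,end formatting described for MemberPosReporter.outputPos above (the helper name is hypothetical):

    object MemberPosFormatSketch {
      // a member on a single line prints as "15"; one spanning lines 15 to 25 prints as "15,25"
      def outputPosFormat(startLine: Int, endLine: Int): String =
        if (startLine == endLine) startLine.toString else s"$startLine,$endLine"
    }
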
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Tokens.scala b/src/compiler/scala/tools/nsc/ast/parser/Tokens.scala
index c3fd414426..e624aec88c 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Tokens.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Tokens.scala
@@ -6,159 +6,57 @@
package scala.tools.nsc
package ast.parser
-import scala.annotation.switch
-
-/** Common code between JavaTokens and Tokens. Not as much (and not as concrete)
- * as one might like because JavaTokens for no clear reason chose new numbers for
- * identical token sets.
- */
-abstract class Tokens {
- import scala.reflect.internal.Chars._
-
- /** special tokens */
- final val EMPTY = -3
- final val UNDEF = -2
- final val ERROR = -1
- final val EOF = 0
-
- /** literals */
- final val CHARLIT = 1
- final val INTLIT = 2
- final val LONGLIT = 3
- final val FLOATLIT = 4
- final val DOUBLELIT = 5
- final val STRINGLIT = 6
-
- def LPAREN: Int
- def RBRACE: Int
-
- def isIdentifier(code: Int): Boolean
- def isLiteral(code: Int): Boolean
- def isKeyword(code: Int): Boolean
- def isSymbol(code: Int): Boolean
-
- final def isSpace(at: Char) = at == ' ' || at == '\t'
- final def isNewLine(at: Char) = at == CR || at == LF || at == FF
- final def isBrace(code: Int) = code >= LPAREN && code <= RBRACE
- final def isOpenBrace(code: Int) = isBrace(code) && (code % 2 == 0)
- final def isCloseBrace(code: Int) = isBrace(code) && (code % 2 == 1)
-}
-
-object Tokens extends Tokens {
- final val STRINGPART = 7 // a part of an interpolated string
+object Tokens extends CommonTokens {
+ final val STRINGPART = 7 // a part of an interpolated string
final val SYMBOLLIT = 8
final val INTERPOLATIONID = 9 // the lead identifier of an interpolated string
- def isLiteral(code: Int) =
- code >= CHARLIT && code <= INTERPOLATIONID
-
+ def isLiteral(code: Int) = code >= CHARLIT && code <= INTERPOLATIONID
/** identifiers */
final val IDENTIFIER = 10
final val BACKQUOTED_IDENT = 11
- def isIdentifier(code: Int) =
- code >= IDENTIFIER && code <= BACKQUOTED_IDENT
-
- @switch def canBeginExpression(code: Int) = code match {
- case IDENTIFIER|BACKQUOTED_IDENT|USCORE => true
- case LBRACE|LPAREN|LBRACKET|COMMENT => true
- case IF|DO|WHILE|FOR|NEW|TRY|THROW => true
- case NULL|THIS|TRUE|FALSE => true
- case code => isLiteral(code)
- }
-
- /** keywords */
- final val IF = 20
- final val FOR = 21
- final val ELSE = 22
- final val THIS = 23
- final val NULL = 24
- final val NEW = 25
- final val WITH = 26
- final val SUPER = 27
- final val CASE = 28
- final val CASECLASS = 29
- final val CASEOBJECT = 30
- final val VAL = 31
- final val ABSTRACT = 32
- final val FINAL = 33
- final val PRIVATE = 34
- final val PROTECTED = 35
- final val OVERRIDE = 36
- final val IMPLICIT = 37
- final val VAR = 38
- final val DEF = 39
- final val TYPE = 40
- final val EXTENDS = 41
- final val TRUE = 42
- final val FALSE = 43
- final val OBJECT = 44
- final val CLASS = 45
-
- final val IMPORT = 46
- final val PACKAGE = 47
- final val YIELD = 48
- final val DO = 49
- final val TRAIT = 50
- final val SEALED = 51
- final val THROW = 52
- final val TRY = 53
- final val CATCH = 54
- final val FINALLY = 55
- final val WHILE = 56
- final val RETURN = 57
- final val MATCH = 58
- final val FORSOME = 59
- final val LAZY = 61
- final val MACRO = 62 // not yet used in 2.10
- final val THEN = 63 // not yet used in 2.10
-
- def isKeyword(code: Int) =
- code >= IF && code <= LAZY
-
- @switch def isDefinition(code: Int) = code match {
- case CLASS|TRAIT|OBJECT => true
- case CASECLASS|CASEOBJECT => true
- case DEF|VAL|VAR => true
- case TYPE => true
- case _ => false
- }
+ def isIdentifier(code: Int) = code == IDENTIFIER || code == BACKQUOTED_IDENT // used by ide
+
+ /** modifiers */
+ final val IMPLICIT = 40
+ final val OVERRIDE = 41
+ final val SEALED = 45
+ final val LAZY = 55
+ final val MACRO = 57
+
+ /** templates */
+ final val CASECLASS = 63
+ final val OBJECT = 64
+ final val CASEOBJECT = 65
+ final val TRAIT = 66
+ final val WITH = 69
+ final val TYPE = 70
+ final val FORSOME = 71
+ final val DEF = 72
+ final val VAL = 73
+ final val VAR = 74
+
+ /** control structures */
+ final val THEN = 81
+ final val YIELD = 86
+ final val MATCH = 95
/** special symbols */
- final val COMMA = 70
- final val SEMI = 71
- final val DOT = 72
- final val USCORE = 73
- final val COLON = 74
- final val EQUALS = 75
- final val LARROW = 76
- final val ARROW = 77
- final val NEWLINE = 78
- final val NEWLINES = 79
- final val SUBTYPE = 80
- final val SUPERTYPE = 81
- final val HASH = 82
- final val AT = 83
- final val VIEWBOUND = 84
-
- def isSymbol(code: Int) =
- code >= COMMA && code <= VIEWBOUND
-
- /** parenthesis */
- final val LPAREN = 90
- final val RPAREN = 91
- final val LBRACKET = 92
- final val RBRACKET = 93
- final val LBRACE = 94
- final val RBRACE = 95
-
- /** XML mode */
- final val XMLSTART = 96
+ final val HASH = 130
+ final val USCORE = 131
+ final val ARROW = 132
+ final val LARROW = 133
+ final val SUBTYPE = 134
+ final val SUPERTYPE = 135
+ final val VIEWBOUND = 136
+ final val NEWLINE = 137
+ final val NEWLINES = 138
+ final val XMLSTART = 139
/** for IDE only */
- final val COMMENT = 97
-
- final val WHITESPACE = 105
- final val IGNORE = 106
- final val ESCAPE = 109
+ final val COMMENT = 200
+ final val WHITESPACE = 201
+ final val IGNORE = 202
+ final val ESCAPE = 203
}
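
A quick sanity check of the regrouped token ids above, assuming the literal ids 1-6 stay in the new CommonTokens base and a matching scala-compiler jar is on the classpath (illustrative only):

    import scala.tools.nsc.ast.parser.Tokens._

    object TokenCheck extends App {
      assert(isIdentifier(IDENTIFIER) && isIdentifier(BACKQUOTED_IDENT))                     // ids 10 and 11
      assert(isLiteral(STRINGPART) && isLiteral(INTERPOLATIONID) && !isLiteral(IDENTIFIER))  // 7, 9 vs 10
    }
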
diff --git a/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala b/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala
index 1412bff0ab..6e5a3f6ef7 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala
@@ -8,119 +8,29 @@ package ast.parser
import symtab.Flags._
import scala.collection.mutable.ListBuffer
+import scala.reflect.internal.util.{Position, SourceFile, FreshNameCreator}
/** Methods for building trees, used in the parser. All the trees
* returned by this class must be untyped.
*/
abstract class TreeBuilder {
-
val global: Global
import global._
- def freshName(): Name = freshName("x$")
- def freshTermName(): TermName = freshTermName("x$")
+ def unit: CompilationUnit
+ def source: SourceFile
- def freshName(prefix: String): Name
- def freshTermName(prefix: String): TermName
- def freshTypeName(prefix: String): TypeName
- def o2p(offset: Int): Position
- def r2p(start: Int, point: Int, end: Int): Position
+ implicit def fresh: FreshNameCreator = unit.fresh
+ def o2p(offset: Int): Position = Position.offset(source, offset)
+ def r2p(start: Int, mid: Int, end: Int): Position = rangePos(source, start, mid, end)
- def rootId(name: Name) = gen.rootId(name)
def rootScalaDot(name: Name) = gen.rootScalaDot(name)
def scalaDot(name: Name) = gen.scalaDot(name)
def scalaAnyRefConstr = scalaDot(tpnme.AnyRef)
- def scalaAnyValConstr = scalaDot(tpnme.AnyVal)
- def scalaAnyConstr = scalaDot(tpnme.Any)
def scalaUnitConstr = scalaDot(tpnme.Unit)
- def productConstr = scalaDot(tpnme.Product)
- def productConstrN(n: Int) = scalaDot(newTypeName("Product" + n))
- def serializableConstr = scalaDot(tpnme.Serializable)
def convertToTypeName(t: Tree) = gen.convertToTypeName(t)
- /** Convert all occurrences of (lower-case) variables in a pattern as follows:
- * x becomes x @ _
- * x: T becomes x @ (_: T)
- */
- private object patvarTransformer extends Transformer {
- override def transform(tree: Tree): Tree = tree match {
- case Ident(name) if (treeInfo.isVarPattern(tree) && name != nme.WILDCARD) =>
- atPos(tree.pos)(Bind(name, atPos(tree.pos.focus) (Ident(nme.WILDCARD))))
- case Typed(id @ Ident(name), tpt) if (treeInfo.isVarPattern(id) && name != nme.WILDCARD) =>
- atPos(tree.pos.withPoint(id.pos.point)) {
- Bind(name, atPos(tree.pos.withStart(tree.pos.point)) {
- Typed(Ident(nme.WILDCARD), tpt)
- })
- }
- case Apply(fn @ Apply(_, _), args) =>
- treeCopy.Apply(tree, transform(fn), transformTrees(args))
- case Apply(fn, args) =>
- treeCopy.Apply(tree, fn, transformTrees(args))
- case Typed(expr, tpt) =>
- treeCopy.Typed(tree, transform(expr), tpt)
- case Bind(name, body) =>
- treeCopy.Bind(tree, name, transform(body))
- case Alternative(_) | Star(_) =>
- super.transform(tree)
- case _ =>
- tree
- }
- }
-
- /** Traverse pattern and collect all variable names with their types in buffer
- * The variables keep their positions; whereas the pattern is converted to be
- * synthetic for all nodes that contain a variable position.
- */
- class GetVarTraverser extends Traverser {
- val buf = new ListBuffer[(Name, Tree, Position)]
-
- def namePos(tree: Tree, name: Name): Position =
- if (!tree.pos.isRange || name.containsName(nme.raw.DOLLAR)) tree.pos.focus
- else {
- val start = tree.pos.start
- val end = start + name.decode.length
- r2p(start, start, end)
- }
-
- override def traverse(tree: Tree): Unit = {
- def seenName(name: Name) = buf exists (_._1 == name)
- def add(name: Name, t: Tree) = if (!seenName(name)) buf += ((name, t, namePos(tree, name)))
- val bl = buf.length
-
- tree match {
- case Bind(nme.WILDCARD, _) =>
- super.traverse(tree)
-
- case Bind(name, Typed(tree1, tpt)) =>
- val newTree = if (treeInfo.mayBeTypePat(tpt)) TypeTree() else tpt.duplicate
- add(name, newTree)
- traverse(tree1)
-
- case Bind(name, tree1) =>
- // can assume only name range as position, as otherwise might overlap
- // with binds embedded in pattern tree1
- add(name, TypeTree())
- traverse(tree1)
-
- case _ =>
- super.traverse(tree)
- }
- if (buf.length > bl)
- tree setPos tree.pos.makeTransparent
- }
- def apply(tree: Tree) = {
- traverse(tree)
- buf.toList
- }
- }
-
- /** Returns list of all pattern variables, possibly with their types,
- * without duplicates
- */
- private def getVariables(tree: Tree): List[(Name, Tree, Position)] =
- new GetVarTraverser apply tree
-
def byNameApplication(tpe: Tree): Tree =
AppliedTypeTree(rootScalaDot(tpnme.BYNAME_PARAM_CLASS_NAME), List(tpe))
def repeatedApplication(tpe: Tree): Tree =
@@ -129,25 +39,12 @@ abstract class TreeBuilder {
def makeImportSelector(name: Name, nameOffset: Int): ImportSelector =
ImportSelector(name, nameOffset, name, nameOffset)
- private def makeTuple(trees: List[Tree], isType: Boolean): Tree = {
- val tupString = "Tuple" + trees.length
- Apply(scalaDot(if (isType) newTypeName(tupString) else newTermName(tupString)), trees)
- }
+ def makeTupleTerm(elems: List[Tree]) = gen.mkTuple(elems)
- def makeTupleTerm(trees: List[Tree], flattenUnary: Boolean): Tree = trees match {
- case Nil => Literal(Constant())
- case List(tree) if flattenUnary => tree
- case _ => makeTuple(trees, false)
- }
-
- def makeTupleType(trees: List[Tree], flattenUnary: Boolean): Tree = trees match {
- case Nil => scalaUnitConstr
- case List(tree) if flattenUnary => tree
- case _ => AppliedTypeTree(scalaDot(newTypeName("Tuple" + trees.length)), trees)
- }
+ def makeTupleType(elems: List[Tree]) = gen.mkTupleType(elems)
def stripParens(t: Tree) = t match {
- case Parens(ts) => atPos(t.pos) { makeTupleTerm(ts, true) }
+ case Parens(ts) => atPos(t.pos) { makeTupleTerm(ts) }
case _ => t
}
@@ -157,323 +54,67 @@ abstract class TreeBuilder {
def makeSelfDef(name: TermName, tpt: Tree): ValDef =
ValDef(Modifiers(PRIVATE), name, tpt, EmptyTree)
- /** If tree is a variable pattern, return Some("its name and type").
- * Otherwise return none */
- private def matchVarPattern(tree: Tree): Option[(Name, Tree)] = {
- def wildType(t: Tree): Option[Tree] = t match {
- case Ident(x) if x.toTermName == nme.WILDCARD => Some(TypeTree())
- case Typed(Ident(x), tpt) if x.toTermName == nme.WILDCARD => Some(tpt)
- case _ => None
- }
- tree match {
- case Ident(name) => Some((name, TypeTree()))
- case Bind(name, body) => wildType(body) map (x => (name, x))
- case Typed(Ident(name), tpt) => Some((name, tpt))
- case _ => None
- }
- }
-
/** Create tree representing (unencoded) binary operation expression or pattern. */
- def makeBinop(isExpr: Boolean, left: Tree, op: TermName, right: Tree, opPos: Position): Tree = {
- def mkNamed(args: List[Tree]) =
- if (isExpr) args map {
- case a @ Assign(id @ Ident(name), rhs) =>
- atPos(a.pos) { AssignOrNamedArg(id, rhs) }
- case e => e
- } else args
+ def makeBinop(isExpr: Boolean, left: Tree, op: TermName, right: Tree, opPos: Position, targs: List[Tree] = Nil): Tree = {
+ require(isExpr || targs.isEmpty || targs.exists(_.isErroneous), s"Incompatible args to makeBinop: !isExpr but targs=$targs")
+
+ def mkSelection(t: Tree) = {
+ def sel = atPos(opPos union t.pos)(Select(stripParens(t), op.encode))
+ if (targs.isEmpty) sel else atPos(left.pos)(TypeApply(sel, targs))
+ }
+ def mkNamed(args: List[Tree]) = if (isExpr) args map treeInfo.assignmentToMaybeNamedArg else args
val arguments = right match {
case Parens(args) => mkNamed(args)
- case _ => List(right)
+ case _ => List(right)
}
if (isExpr) {
if (treeInfo.isLeftAssoc(op)) {
- Apply(atPos(opPos union left.pos) { Select(stripParens(left), op.encode) }, arguments)
+ Apply(mkSelection(left), arguments)
} else {
val x = freshTermName()
Block(
- List(ValDef(Modifiers(SYNTHETIC), x, TypeTree(), stripParens(left))),
- Apply(atPos(opPos union right.pos) { Select(stripParens(right), op.encode) }, List(Ident(x))))
+ List(ValDef(Modifiers(SYNTHETIC | ARTIFACT), x, TypeTree(), stripParens(left))),
+ Apply(mkSelection(right), List(Ident(x))))
}
} else {
Apply(Ident(op.encode), stripParens(left) :: arguments)
}
}
- /** Creates a tree representing new Object { stats }.
- * To make sure an anonymous subclass of Object is created,
- * if there are no stats, a () is added.
- */
- def makeAnonymousNew(stats: List[Tree]): Tree = {
- val stats1 = if (stats.isEmpty) List(Literal(Constant(()))) else stats
- makeNew(Nil, emptyValDef, stats1, ListOfNil, NoPosition, NoPosition)
- }
-
- /** Create positioned tree representing an object creation <new parents { stats }
- * @param npos the position of the new
- * @param cpos the position of the anonymous class starting with parents
- */
- def makeNew(parents: List[Tree], self: ValDef, stats: List[Tree], argss: List[List[Tree]],
- npos: Position, cpos: Position): Tree =
- if (parents.isEmpty)
- makeNew(List(scalaAnyRefConstr), self, stats, argss, npos, cpos)
- else if (parents.tail.isEmpty && stats.isEmpty)
- atPos(npos union cpos) { New(parents.head, argss) }
- else {
- val x = tpnme.ANON_CLASS_NAME
- atPos(npos union cpos) {
- Block(
- List(
- atPos(cpos) {
- ClassDef(
- Modifiers(FINAL), x, Nil,
- Template(parents, self, NoMods, ListOfNil, argss, stats, cpos.focus))
- }),
- atPos(npos) {
- New(
- Ident(x) setPos npos.focus,
- ListOfNil)
- }
- )
- }
- }
-
- /** Create a tree representing an assignment <lhs = rhs> */
- def makeAssign(lhs: Tree, rhs: Tree): Tree = lhs match {
- case Apply(fn, args) =>
- Apply(atPos(fn.pos) { Select(fn, nme.update) }, args ::: List(rhs))
- case _ =>
- Assign(lhs, rhs)
- }
-
/** Tree for `od op`, start is start0 if od.pos is borked. */
def makePostfixSelect(start0: Int, end: Int, od: Tree, op: Name): Tree = {
- val start = if (od.pos.isDefined) od.pos.startOrPoint else start0
+ val start = if (od.pos.isDefined) od.pos.start else start0
atPos(r2p(start, end, end + op.length)) { new PostfixSelect(od, op.encode) }
}
- /** A type tree corresponding to (possibly unary) intersection type */
- def makeIntersectionTypeTree(tps: List[Tree]): Tree =
- if (tps.tail.isEmpty) tps.head
- else CompoundTypeTree(Template(tps, emptyValDef, Nil))
-
/** Create tree representing a while loop */
def makeWhile(startPos: Int, cond: Tree, body: Tree): Tree = {
val lname = freshTermName(nme.WHILE_PREFIX)
def default = wrappingPos(List(cond, body)) match {
- case p if p.isDefined => p.endOrPoint
+ case p if p.isDefined => p.end
case _ => startPos
}
val continu = atPos(o2p(body.pos pointOrElse default)) { Apply(Ident(lname), Nil) }
- val rhs = If(cond, Block(List(body), continu), Literal(Constant()))
+ val rhs = If(cond, Block(List(body), continu), Literal(Constant(())))
LabelDef(lname, Nil, rhs)
}
/** Create tree representing a do-while loop */
def makeDoWhile(lname: TermName, body: Tree, cond: Tree): Tree = {
val continu = Apply(Ident(lname), Nil)
- val rhs = Block(List(body), If(cond, continu, Literal(Constant())))
+ val rhs = Block(List(body), If(cond, continu, Literal(Constant(()))))
LabelDef(lname, Nil, rhs)
}
/** Create block of statements `stats` */
- def makeBlock(stats: List[Tree]): Tree =
- if (stats.isEmpty) Literal(Constant())
- else if (!stats.last.isTerm) Block(stats, Literal(Constant()))
- else if (stats.length == 1) stats.head
- else Block(stats.init, stats.last)
-
- def makeFilter(tree: Tree, condition: Tree, scrutineeName: String): Tree = {
- val cases = List(
- CaseDef(condition, EmptyTree, Literal(Constant(true))),
- CaseDef(Ident(nme.WILDCARD), EmptyTree, Literal(Constant(false)))
- )
- val matchTree = makeVisitor(cases, false, scrutineeName)
-
- atPos(tree.pos)(Apply(Select(tree, nme.withFilter), matchTree :: Nil))
- }
-
- /** Create tree for for-comprehension generator <val pat0 <- rhs0> */
- def makeGenerator(pos: Position, pat: Tree, valeq: Boolean, rhs: Tree): Enumerator = {
- val pat1 = patvarTransformer.transform(pat)
- val rhs1 =
- if (valeq || treeInfo.isVarPatternDeep(pat)) rhs
- else makeFilter(rhs, pat1.duplicate, nme.CHECK_IF_REFUTABLE_STRING)
-
- if (valeq) ValEq(pos, pat1, rhs1)
- else ValFrom(pos, pat1, rhs1)
- }
+ def makeBlock(stats: List[Tree]): Tree = gen.mkBlock(stats)
def makeParam(pname: TermName, tpe: Tree) =
ValDef(Modifiers(PARAM), pname, tpe, EmptyTree)
- def makeSyntheticParam(pname: TermName) =
- ValDef(Modifiers(PARAM | SYNTHETIC), pname, TypeTree(), EmptyTree)
-
def makeSyntheticTypeParam(pname: TypeName, bounds: Tree) =
TypeDef(Modifiers(DEFERRED | SYNTHETIC), pname, Nil, bounds)
- abstract class Enumerator { def pos: Position }
- case class ValFrom(pos: Position, pat: Tree, rhs: Tree) extends Enumerator
- case class ValEq(pos: Position, pat: Tree, rhs: Tree) extends Enumerator
- case class Filter(pos: Position, test: Tree) extends Enumerator
-
- /** Create tree for for-comprehension <for (enums) do body> or
- * <for (enums) yield body> where mapName and flatMapName are chosen
- * corresponding to whether this is a for-do or a for-yield.
- * The creation performs the following rewrite rules:
- *
- * 1.
- *
- * for (P <- G) E ==> G.foreach (P => E)
- *
- * Here and in the following (P => E) is interpreted as the function (P => E)
- * if P is a variable pattern and as the partial function { case P => E } otherwise.
- *
- * 2.
- *
- * for (P <- G) yield E ==> G.map (P => E)
- *
- * 3.
- *
- * for (P_1 <- G_1; P_2 <- G_2; ...) ...
- * ==>
- * G_1.flatMap (P_1 => for (P_2 <- G_2; ...) ...)
- *
- * 4.
- *
- * for (P <- G; E; ...) ...
- * =>
- * for (P <- G.filter (P => E); ...) ...
- *
- * 5. For N < MaxTupleArity:
- *
- * for (P_1 <- G; P_2 = E_2; val P_N = E_N; ...)
- * ==>
- * for (TupleN(P_1, P_2, ... P_N) <-
- * for (x_1 @ P_1 <- G) yield {
- * val x_2 @ P_2 = E_2
- * ...
- * val x_N & P_N = E_N
- * TupleN(x_1, ..., x_N)
- * } ...)
- *
- * If any of the P_i are variable patterns, the corresponding `x_i @ P_i' is not generated
- * and the variable constituting P_i is used instead of x_i
- *
- * @param mapName The name to be used for maps (either map or foreach)
- * @param flatMapName The name to be used for flatMaps (either flatMap or foreach)
- * @param enums The enumerators in the for expression
- * @param body The body of the for expression
- */
- private def makeFor(mapName: TermName, flatMapName: TermName, enums: List[Enumerator], body: Tree): Tree = {
-
- /** make a closure pat => body.
- * The closure is assigned a transparent position with the point at pos.point and
- * the limits given by pat and body.
- */
- def makeClosure(pos: Position, pat: Tree, body: Tree): Tree = {
- def splitpos = wrappingPos(List(pat, body)).withPoint(pos.point).makeTransparent
- matchVarPattern(pat) match {
- case Some((name, tpt)) =>
- Function(
- List(atPos(pat.pos) { ValDef(Modifiers(PARAM), name.toTermName, tpt, EmptyTree) }),
- body) setPos splitpos
- case None =>
- atPos(splitpos) {
- makeVisitor(List(CaseDef(pat, EmptyTree, body)), false)
- }
- }
- }
-
- /** Make an application qual.meth(pat => body) positioned at `pos`.
- */
- def makeCombination(pos: Position, meth: TermName, qual: Tree, pat: Tree, body: Tree): Tree =
- Apply(Select(qual, meth) setPos qual.pos, List(makeClosure(pos, pat, body))) setPos pos
-
- /** Optionally, if pattern is a `Bind`, the bound name, otherwise None.
- */
- def patternVar(pat: Tree): Option[Name] = pat match {
- case Bind(name, _) => Some(name)
- case _ => None
- }
-
- /** If `pat` is not yet a `Bind` wrap it in one with a fresh name
- */
- def makeBind(pat: Tree): Tree = pat match {
- case Bind(_, _) => pat
- case _ => Bind(freshName(), pat) setPos pat.pos
- }
-
- /** A reference to the name bound in Bind `pat`.
- */
- def makeValue(pat: Tree): Tree = pat match {
- case Bind(name, _) => Ident(name) setPos pat.pos.focus
- }
-
- /** The position of the closure that starts with generator at position `genpos`.
- */
- def closurePos(genpos: Position) = {
- val end = body.pos match {
- case NoPosition => genpos.point
- case bodypos => bodypos.endOrPoint
- }
- r2p(genpos.startOrPoint, genpos.point, end)
- }
-
-// val result =
- enums match {
- case ValFrom(pos, pat, rhs) :: Nil =>
- makeCombination(closurePos(pos), mapName, rhs, pat, body)
- case ValFrom(pos, pat, rhs) :: (rest @ (ValFrom(_, _, _) :: _)) =>
- makeCombination(closurePos(pos), flatMapName, rhs, pat,
- makeFor(mapName, flatMapName, rest, body))
- case ValFrom(pos, pat, rhs) :: Filter(_, test) :: rest =>
- makeFor(mapName, flatMapName,
- ValFrom(pos, pat, makeCombination(rhs.pos union test.pos, nme.withFilter, rhs, pat.duplicate, test)) :: rest,
- body)
- case ValFrom(pos, pat, rhs) :: rest =>
- val valeqs = rest.take(definitions.MaxTupleArity - 1).takeWhile(_.isInstanceOf[ValEq]);
- assert(!valeqs.isEmpty)
- val rest1 = rest.drop(valeqs.length)
- val pats = valeqs map { case ValEq(_, pat, _) => pat }
- val rhss = valeqs map { case ValEq(_, _, rhs) => rhs }
- val defpat1 = makeBind(pat)
- val defpats = pats map makeBind
- val pdefs = (defpats, rhss).zipped flatMap makePatDef
- val ids = (defpat1 :: defpats) map makeValue
- val rhs1 = makeForYield(
- List(ValFrom(pos, defpat1, rhs)),
- Block(pdefs, atPos(wrappingPos(ids)) { makeTupleTerm(ids, true) }) setPos wrappingPos(pdefs))
- val allpats = (pat :: pats) map (_.duplicate)
- val vfrom1 = ValFrom(r2p(pos.startOrPoint, pos.point, rhs1.pos.endOrPoint), atPos(wrappingPos(allpats)) { makeTuple(allpats, false) } , rhs1)
- makeFor(mapName, flatMapName, vfrom1 :: rest1, body)
- case _ =>
- EmptyTree //may happen for erroneous input
- }
-// println("made for "+result)
-// result
- }
-
- /** Create tree for for-do comprehension <for (enums) body> */
- def makeFor(enums: List[Enumerator], body: Tree): Tree =
- makeFor(nme.foreach, nme.foreach, enums, body)
-
- /** Create tree for for-yield comprehension <for (enums) yield body> */
- def makeForYield(enums: List[Enumerator], body: Tree): Tree =
- makeFor(nme.map, nme.flatMap, enums, body)
-
- /** Create tree for a lifted expression XX-LIFTING
- */
- def makeLifted(gs: List[ValFrom], body: Tree): Tree = {
- def combine(gs: List[ValFrom]): ValFrom = (gs: @unchecked) match {
- case g :: Nil => g
- case ValFrom(pos1, pat1, rhs1) :: gs2 =>
- val ValFrom(pos2, pat2, rhs2) = combine(gs2)
- ValFrom(pos1, makeTuple(List(pat1, pat2), false), Apply(Select(rhs1, nme.zip), List(rhs2)))
- }
- makeForYield(List(combine(gs)), body)
- }
-
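The removed makeFor helpers implement the usual for-comprehension translation: a lone generator becomes a map (or foreach in the for-do case), a generator followed by further generators becomes a flatMap, and a guard is routed through withFilter. A minimal source-level sketch of that expansion (plain library code rather than compiler trees; the values are illustrative):

object ForDesugarDemo {
  def main(args: Array[String]): Unit = {
    val xs = List(1, 2, 3)
    val ys = List(10, 20)

    // As written by the user:
    val r1 = for (x <- xs; if x % 2 == 1; y <- ys) yield x + y

    // Roughly what makeFor builds, spelled out at the source level:
    // the guard goes through withFilter, the outer generator through flatMap,
    // the innermost one through map.
    val r2 = xs.withFilter(x => x % 2 == 1).flatMap(x => ys.map(y => x + y))

    println(r1 == r2) // true; both are List(11, 21, 13, 23)
  }
}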
/** Create tree for a pattern alternative */
def makeAlternative(ts: List[Tree]): Tree = {
def alternatives(t: Tree): List[Tree] = t match {
@@ -483,21 +124,9 @@ abstract class TreeBuilder {
Alternative(ts flatMap alternatives)
}
- /** Create visitor <x => x match cases> */
- def makeVisitor(cases: List[CaseDef], checkExhaustive: Boolean): Tree =
- makeVisitor(cases, checkExhaustive, "x$")
-
- /** Create visitor <x => x match cases> */
- def makeVisitor(cases: List[CaseDef], checkExhaustive: Boolean, prefix: String): Tree = {
- val x = freshTermName(prefix)
- val id = Ident(x)
- val sel = if (checkExhaustive) id else gen.mkUnchecked(id)
- Function(List(makeSyntheticParam(x)), Match(sel, cases))
- }
-
/** Create tree for case definition <case pat if guard => rhs> */
def makeCaseDef(pat: Tree, guard: Tree, rhs: Tree): CaseDef =
- CaseDef(patvarTransformer.transform(pat), guard, rhs)
+ CaseDef(gen.patvarTransformer.transform(pat), guard, rhs)
/** Creates tree representing:
* { case x: Throwable =>
@@ -506,9 +135,9 @@ abstract class TreeBuilder {
* }
*/
def makeCatchFromExpr(catchExpr: Tree): CaseDef = {
- val binder = freshTermName("x")
+ val binder = freshTermName()
val pat = Bind(binder, Typed(Ident(nme.WILDCARD), Ident(tpnme.Throwable)))
- val catchDef = ValDef(NoMods, freshTermName("catchExpr"), TypeTree(), catchExpr)
+ val catchDef = ValDef(Modifiers(ARTIFACT), freshTermName("catchExpr"), TypeTree(), catchExpr)
val catchFn = Ident(catchDef.name)
val body = atPos(catchExpr.pos.makeTransparent)(Block(
List(catchDef),
@@ -521,79 +150,8 @@ abstract class TreeBuilder {
makeCaseDef(pat, EmptyTree, body)
}
- /** Create tree for pattern definition <val pat0 = rhs> */
- def makePatDef(pat: Tree, rhs: Tree): List[Tree] =
- makePatDef(Modifiers(0), pat, rhs)
-
- /** Create tree for pattern definition <mods val pat0 = rhs> */
- def makePatDef(mods: Modifiers, pat: Tree, rhs: Tree): List[Tree] = matchVarPattern(pat) match {
- case Some((name, tpt)) =>
- List(atPos(pat.pos union rhs.pos) {
- ValDef(mods, name.toTermName, tpt, rhs)
- })
-
- case None =>
- // in case there is exactly one variable x_1 in pattern
- // val/var p = e ==> val/var x_1 = e.match (case p => (x_1))
- //
- // in case there are zero or more than one variables in pattern
- // val/var p = e ==> private synthetic val t$ = e.match (case p => (x_1, ..., x_N))
- // val/var x_1 = t$._1
- // ...
- // val/var x_N = t$._N
-
- val rhsUnchecked = gen.mkUnchecked(rhs)
-
- // TODO: clean this up -- there is too much information packed into makePatDef's `pat` argument
- // when it's a simple identifier (case Some((name, tpt)) -- above),
- // pat should have the type ascription that was specified by the user
- // however, in `case None` (here), we must be careful not to generate illegal pattern trees (such as `(a, b): Tuple2[Int, String]`)
- // i.e., this must hold: pat1 match { case Typed(expr, tp) => assert(expr.isInstanceOf[Ident]) case _ => }
- // if we encounter such an erroneous pattern, we strip off the type ascription from pat and propagate the type information to rhs
- val (pat1, rhs1) = patvarTransformer.transform(pat) match {
- // move the Typed ascription to the rhs
- case Typed(expr, tpt) if !expr.isInstanceOf[Ident] =>
- val rhsTypedUnchecked =
- if (tpt.isEmpty) rhsUnchecked
- else Typed(rhsUnchecked, tpt) setPos (rhs.pos union tpt.pos)
- (expr, rhsTypedUnchecked)
- case ok =>
- (ok, rhsUnchecked)
- }
- val vars = getVariables(pat1)
- val matchExpr = atPos((pat1.pos union rhs.pos).makeTransparent) {
- Match(
- rhs1,
- List(
- atPos(pat1.pos) {
- CaseDef(pat1, EmptyTree, makeTupleTerm(vars map (_._1) map Ident.apply, true))
- }
- ))
- }
- vars match {
- case List((vname, tpt, pos)) =>
- List(atPos(pat.pos union pos union rhs.pos) {
- ValDef(mods, vname.toTermName, tpt, matchExpr)
- })
- case _ =>
- val tmp = freshTermName()
- val firstDef =
- atPos(matchExpr.pos) {
- ValDef(Modifiers(PrivateLocal | SYNTHETIC | (mods.flags & LAZY)),
- tmp, TypeTree(), matchExpr)
- }
- var cnt = 0
- val restDefs = for ((vname, tpt, pos) <- vars) yield atPos(pos) {
- cnt += 1
- ValDef(mods, vname.toTermName, tpt, Select(Ident(tmp), newTermName("_" + cnt)))
- }
- firstDef :: restDefs
- }
- }
-
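The comment block above spells out the two shapes makePatDef produces. Expressed as ordinary source instead of trees, the rewrite is roughly the following (a sketch; x$1 stands in for the compiler's fresh synthetic name, and positions, flags and the @unchecked wrapping are elided):

object PatDefDemo {
  // Written:           val Some(x) = opt
  // Roughly generated: the single bound variable is the result of a match
  val opt: Option[Int] = Some(42)
  val x = (opt: @unchecked) match { case Some(x) => x }

  // Written:           val (a, b) = pair
  // Roughly generated: a synthetic tuple val plus one val per bound variable
  val pair = (1, "one")
  val x$1 = (pair: @unchecked) match { case (a, b) => (a, b) }
  val a = x$1._1
  val b = x$1._2
}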
/** Create a tree representing the function type (argtpes) => restpe */
- def makeFunctionTypeTree(argtpes: List[Tree], restpe: Tree): Tree =
- AppliedTypeTree(rootScalaDot(newTypeName("Function" + argtpes.length)), argtpes ::: List(restpe))
+ def makeFunctionTypeTree(argtpes: List[Tree], restpe: Tree): Tree = gen.mkFunctionTypeTree(argtpes, restpe)
/** Append implicit parameter section if `contextBounds` nonempty */
def addEvidenceParams(owner: Name, vparamss: List[List[ValDef]], contextBounds: List[Tree]): List[List[ValDef]] = {
@@ -610,4 +168,6 @@ abstract class TreeBuilder {
vparamss ::: List(evidenceParams)
}
}
+
+ def makePatDef(mods: Modifiers, pat: Tree, rhs: Tree) = gen.mkPatDef(mods, pat, rhs)
}
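addEvidenceParams, kept above, is the piece that turns context bounds into a trailing implicit parameter section. At the source level the effect is roughly the following (a sketch; ev stands in for the freshly generated name, which the compiler spells evidence$1):

object EvidenceDemo {
  // Written, with a context bound:
  def max1[T: Ordering](xs: List[T]): T = xs.max

  // After addEvidenceParams: the bound is dropped and an implicit
  // parameter section is appended.
  def max2[T](xs: List[T])(implicit ev: Ordering[T]): T = xs.max(ev)

  def main(args: Array[String]): Unit = {
    println(max1(List(3, 1, 2))) // 3
    println(max2(List(3, 1, 2))) // 3
  }
}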
diff --git a/src/compiler/scala/tools/nsc/ast/parser/xml/MarkupParserCommon.scala b/src/compiler/scala/tools/nsc/ast/parser/xml/MarkupParserCommon.scala
new file mode 100644
index 0000000000..82dce9f1f8
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/ast/parser/xml/MarkupParserCommon.scala
@@ -0,0 +1,211 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.tools.nsc.ast.parser.xml
+
+/** This is not a public trait - it contains common code shared
+ * between the library level XML parser and the compiler's.
+ * All members should be accessed through those.
+ */
+private[scala] trait MarkupParserCommon {
+ import Utility._
+ import scala.reflect.internal.Chars.SU
+
+ protected def unreachable = scala.sys.error("Cannot be reached.")
+
+ type PositionType // Int, Position
+ type ElementType // NodeSeq, Tree
+ type NamespaceType // NamespaceBinding, Any
+ type AttributesType // (MetaData, NamespaceBinding), mutable.Map[String, Tree]
+
+ def mkAttributes(name: String, pscope: NamespaceType): AttributesType
+ def mkProcInstr(position: PositionType, name: String, text: String): ElementType
+
+ /** parse a start or empty tag.
+ * [40] STag ::= '<' Name { S Attribute } [S]
+ * [44] EmptyElemTag ::= '<' Name { S Attribute } [S]
+ */
+ protected def xTag(pscope: NamespaceType): (String, AttributesType) = {
+ val name = xName
+ xSpaceOpt()
+
+ (name, mkAttributes(name, pscope))
+ }
+
+  /** '<?' ProcInstr ::= Name [S ({Char} - ({Char}'?>' {Char}))] '?>'
+ *
+ * see [15]
+ */
+ def xProcInstr: ElementType = {
+ val n = xName
+ xSpaceOpt()
+ xTakeUntil(mkProcInstr(_, n, _), () => tmppos, "?>")
+ }
+
+ /** attribute value, terminated by either `'` or `"`. value may not contain `<`.
+ @param endCh either `'` or `"`
+ */
+ def xAttributeValue(endCh: Char): String = {
+ val buf = new StringBuilder
+ while (ch != endCh) {
+ // well-formedness constraint
+ if (ch == '<') return errorAndResult("'<' not allowed in attrib value", "")
+ else if (ch == SU) truncatedError("")
+ else buf append ch_returning_nextch
+ }
+ ch_returning_nextch
+ // @todo: normalize attribute value
+ buf.toString
+ }
+
+ /** [42] '<' xmlEndTag ::= '<' '/' Name S? '>'
+ */
+ def xEndTag(startName: String) {
+ xToken('/')
+ if (xName != startName)
+ errorNoEnd(startName)
+
+ xSpaceOpt()
+ xToken('>')
+ }
+
+ /** actually, Name ::= (Letter | '_' | ':') (NameChar)* but starting with ':' cannot happen
+ * Name ::= (Letter | '_') (NameChar)*
+ *
+ * see [5] of XML 1.0 specification
+ *
+ * pre-condition: ch != ':' // assured by definition of XMLSTART token
+   * post-condition: name neither starts nor ends in ':'
+ */
+ def xName: String = {
+ if (ch == SU)
+ truncatedError("")
+ else if (!isNameStart(ch))
+ return errorAndResult("name expected, but char '%s' cannot start a name" format ch, "")
+
+ val buf = new StringBuilder
+
+ do buf append ch_returning_nextch
+ while (isNameChar(ch))
+
+ if (buf.last == ':') {
+ reportSyntaxError( "name cannot end in ':'" )
+ buf.toString dropRight 1
+ }
+ else buf.toString
+ }
+
+ /** CharRef ::= "&#" '0'..'9' {'0'..'9'} ";"
+ * | "&#x" '0'..'9'|'A'..'F'|'a'..'f' { hexdigit } ";"
+ *
+ * see [66]
+ */
+ def xCharRef(ch: () => Char, nextch: () => Unit): String =
+ Utility.parseCharRef(ch, nextch, reportSyntaxError _, truncatedError _)
+
+ def xCharRef(it: Iterator[Char]): String = {
+ var c = it.next()
+ Utility.parseCharRef(() => c, () => { c = it.next() }, reportSyntaxError _, truncatedError _)
+ }
+
+ def xCharRef: String = xCharRef(() => ch, () => nextch())
+
+ /** Create a lookahead reader which does not influence the input */
+ def lookahead(): BufferedIterator[Char]
+
+ /** The library and compiler parsers had the interesting distinction of
+ * different behavior for nextch (a function for which there are a total
+ * of two plausible behaviors, so we know the design space was fully
+ * explored.) One of them returned the value of nextch before the increment
+ * and one of them the new value. So to unify code we have to at least
+ * temporarily abstract over the nextchs.
+ */
+ def ch: Char
+ def nextch(): Unit
+ protected def ch_returning_nextch: Char
+ def eof: Boolean
+
+ // def handle: HandleType
+ var tmppos: PositionType
+
+ def xHandleError(that: Char, msg: String): Unit
+ def reportSyntaxError(str: String): Unit
+ def reportSyntaxError(pos: Int, str: String): Unit
+
+ def truncatedError(msg: String): Nothing
+ def errorNoEnd(tag: String): Nothing
+
+ protected def errorAndResult[T](msg: String, x: T): T = {
+ reportSyntaxError(msg)
+ x
+ }
+
+ def xToken(that: Char) {
+ if (ch == that) nextch()
+ else xHandleError(that, "'%s' expected instead of '%s'".format(that, ch))
+ }
+ def xToken(that: Seq[Char]) { that foreach xToken }
+
+ /** scan [S] '=' [S]*/
+ def xEQ() = { xSpaceOpt(); xToken('='); xSpaceOpt() }
+
+ /** skip optional space S? */
+ def xSpaceOpt() = while (isSpace(ch) && !eof) nextch()
+
+ /** scan [3] S ::= (#x20 | #x9 | #xD | #xA)+ */
+ def xSpace() =
+ if (isSpace(ch)) { nextch(); xSpaceOpt() }
+ else xHandleError(ch, "whitespace expected")
+
+ /** Apply a function and return the passed value */
+ def returning[T](x: T)(f: T => Unit): T = { f(x); x }
+
+ /** Execute body with a variable saved and restored after execution */
+ def saving[A, B](getter: A, setter: A => Unit)(body: => B): B = {
+ val saved = getter
+ try body
+ finally setter(saved)
+ }
+
+ /** Take characters from input stream until given String "until"
+ * is seen. Once seen, the accumulated characters are passed
+ * along with the current Position to the supplied handler function.
+ */
+ protected def xTakeUntil[T](
+ handler: (PositionType, String) => T,
+ positioner: () => PositionType,
+ until: String): T =
+ {
+ val sb = new StringBuilder
+ val head = until.head
+ val rest = until.tail
+
+ while (true) {
+ if (ch == head && peek(rest))
+ return handler(positioner(), sb.toString)
+ else if (ch == SU)
+ truncatedError("") // throws TruncatedXMLControl in compiler
+
+ sb append ch
+ nextch()
+ }
+ unreachable
+ }
+
+ /** Create a non-destructive lookahead reader and see if the head
+ * of the input would match the given String. If yes, return true
+ * and drop the entire String from input; if no, return false
+ * and leave input unchanged.
+ */
+ private def peek(lookingFor: String): Boolean =
+ (lookahead() take lookingFor.length sameElements lookingFor.iterator) && {
+ // drop the chars from the real reader (all lookahead + orig)
+ (0 to lookingFor.length) foreach (_ => nextch())
+ true
+ }
+}
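The saving combinator in the trait above snapshots a piece of mutable parser state, runs the body, and restores the state in a finally block even if the body throws. A self-contained sketch outside the parser (the combinator is copied out of the trait so it runs on its own; the indent field is invented for the example):

object SavingDemo {
  def saving[A, B](getter: A, setter: A => Unit)(body: => B): B = {
    val saved = getter
    try body
    finally setter(saved)
  }

  var indent = 0 // stand-in for a piece of mutable parser state

  def main(args: Array[String]): Unit = {
    val res = saving(indent, (i: Int) => indent = i) {
      indent += 4
      s"body ran at indent $indent"
    }
    println(res)    // body ran at indent 4
    println(indent) // 0, restored by the finally in saving
  }
}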
diff --git a/src/compiler/scala/tools/nsc/ast/parser/xml/Utility.scala b/src/compiler/scala/tools/nsc/ast/parser/xml/Utility.scala
new file mode 100755
index 0000000000..6dcfa173df
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/ast/parser/xml/Utility.scala
@@ -0,0 +1,163 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.tools.nsc.ast.parser.xml
+
+import scala.collection.mutable
+
+
+/**
+ * The `Utility` object provides utility functions for processing instances
+ * of bound and not bound XML classes, as well as escaping text nodes.
+ *
+ * @author Burak Emir
+ */
+object Utility {
+ import scala.reflect.internal.Chars.SU
+
+ private val unescMap = Map(
+ "lt" -> '<',
+ "gt" -> '>',
+ "amp" -> '&',
+ "quot" -> '"',
+ "apos" -> '\''
+ )
+
+ /**
+   * Appends the unescaped character for the entity reference `ref` to `s`:
+   * `amp` becomes `&`, `lt` becomes `<`, etc.
+ *
+ * @return `'''null'''` if `ref` was not a predefined entity.
+ */
+ private final def unescape(ref: String, s: StringBuilder): StringBuilder =
+ ((unescMap get ref) map (s append _)).orNull
+
+ def parseAttributeValue[T](value: String, text: String => T, entityRef: String => T): List[T] = {
+ val sb = new StringBuilder
+ var rfb: StringBuilder = null
+ val nb = new mutable.ListBuffer[T]()
+
+ val it = value.iterator
+ while (it.hasNext) {
+ var c = it.next()
+ // entity! flush buffer into text node
+ if (c == '&') {
+ c = it.next()
+ if (c == '#') {
+ c = it.next()
+ val theChar = parseCharRef ({ ()=> c },{ () => c = it.next() },{s => throw new RuntimeException(s)}, {s => throw new RuntimeException(s)})
+ sb.append(theChar)
+ }
+ else {
+ if (rfb eq null) rfb = new StringBuilder()
+ rfb append c
+ c = it.next()
+ while (c != ';') {
+ rfb.append(c)
+ c = it.next()
+ }
+ val ref = rfb.toString()
+ rfb.clear()
+ unescape(ref,sb) match {
+ case null =>
+ if (!sb.isEmpty) { // flush buffer
+ nb += text(sb.toString())
+ sb.clear()
+ }
+ nb += entityRef(ref) // add entityref
+ case _ =>
+ }
+ }
+ }
+ else sb append c
+ }
+
+ if(!sb.isEmpty) // flush buffer
+ nb += text(sb.toString())
+
+ nb.toList
+ }
+
+ /**
+ * {{{
+ * CharRef ::= "&amp;#" '0'..'9' {'0'..'9'} ";"
+ * | "&amp;#x" '0'..'9'|'A'..'F'|'a'..'f' { hexdigit } ";"
+ * }}}
+ * See [66]
+ */
+ def parseCharRef(ch: () => Char, nextch: () => Unit, reportSyntaxError: String => Unit, reportTruncatedError: String => Unit): String = {
+ val hex = (ch() == 'x') && { nextch(); true }
+ val base = if (hex) 16 else 10
+ var i = 0
+ while (ch() != ';') {
+ ch() match {
+ case '0' | '1' | '2' | '3' | '4' | '5' | '6' | '7' | '8' | '9' =>
+ i = i * base + ch().asDigit
+ case 'a' | 'b' | 'c' | 'd' | 'e' | 'f'
+ | 'A' | 'B' | 'C' | 'D' | 'E' | 'F' =>
+ if (! hex)
+ reportSyntaxError("hex char not allowed in decimal char ref\n" +
+ "Did you mean to write &#x ?")
+ else
+ i = i * base + ch().asDigit
+ case SU =>
+ reportTruncatedError("")
+ case _ =>
+ reportSyntaxError("character '" + ch() + "' not allowed in char ref\n")
+ }
+ nextch()
+ }
+ new String(Array(i), 0, 1)
+ }
+
+ /** {{{
+ * (#x20 | #x9 | #xD | #xA)
+ * }}} */
+ final def isSpace(ch: Char): Boolean = ch match {
+ case '\u0009' | '\u000A' | '\u000D' | '\u0020' => true
+ case _ => false
+ }
+
+ /** {{{
+ * NameChar ::= Letter | Digit | '.' | '-' | '_' | ':'
+ * | CombiningChar | Extender
+ * }}}
+ * See [4] and Appendix B of XML 1.0 specification.
+ */
+ def isNameChar(ch: Char) = {
+ import java.lang.Character._
+ // The constants represent groups Mc, Me, Mn, Lm, and Nd.
+
+ isNameStart(ch) || (getType(ch).toByte match {
+ case COMBINING_SPACING_MARK |
+ ENCLOSING_MARK | NON_SPACING_MARK |
+ MODIFIER_LETTER | DECIMAL_DIGIT_NUMBER => true
+ case _ => ".-:" contains ch
+ })
+ }
+
+ /** {{{
+ * NameStart ::= ( Letter | '_' )
+ * }}}
+ * where Letter means in one of the Unicode general
+ * categories `{ Ll, Lu, Lo, Lt, Nl }`.
+ *
+ * We do not allow a name to start with `:`.
+ * See [3] and Appendix B of XML 1.0 specification
+ */
+ def isNameStart(ch: Char) = {
+ import java.lang.Character._
+
+ getType(ch).toByte match {
+ case LOWERCASE_LETTER |
+ UPPERCASE_LETTER | OTHER_LETTER |
+ TITLECASE_LETTER | LETTER_NUMBER => true
+ case _ => ch == '_'
+ }
+ }
+}
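parseCharRef consumes the body of a decimal or hexadecimal character reference (everything after "&#", up to but not including the closing ';') through the ch/nextch callbacks and returns the referenced character as a one-character String. A small driver over a plain iterator, illustrating the callback protocol only (it assumes the compiler's Utility object is on the classpath and is not how the parsers themselves invoke it):

import scala.tools.nsc.ast.parser.xml.Utility

object CharRefDemo {
  // refBody is everything after "&#", e.g. "65;" or "x41;".
  def decode(refBody: String): String = {
    val it = refBody.iterator
    var c  = it.next()
    Utility.parseCharRef(
      () => c,
      () => c = it.next(),
      msg => sys.error(msg),                 // reportSyntaxError
      msg => sys.error("truncated: " + msg)) // reportTruncatedError
  }

  def main(args: Array[String]): Unit = {
    println(decode("65;"))  // A (decimal)
    println(decode("x41;")) // A (hexadecimal)
  }
}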