aboutsummaryrefslogtreecommitdiff
path: root/compiler/src/dotty/tools/dotc/parsing
diff options
context:
space:
mode:
authorFelix Mulder <felix.mulder@gmail.com>2016-11-02 11:08:28 +0100
committerGuillaume Martres <smarter@ubuntu.com>2016-11-22 01:35:07 +0100
commit8a61ff432543a29234193cd1f7c14abd3f3d31a0 (patch)
treea8147561d307af862c295cfc8100d271063bb0dd /compiler/src/dotty/tools/dotc/parsing
parent6a455fe6da5ff9c741d91279a2dc6fe2fb1b472f (diff)
downloaddotty-8a61ff432543a29234193cd1f7c14abd3f3d31a0.tar.gz
dotty-8a61ff432543a29234193cd1f7c14abd3f3d31a0.tar.bz2
dotty-8a61ff432543a29234193cd1f7c14abd3f3d31a0.zip
Move compiler and compiler tests to compiler dir
Diffstat (limited to 'compiler/src/dotty/tools/dotc/parsing')
-rw-r--r--compiler/src/dotty/tools/dotc/parsing/CharArrayReader.scala132
-rw-r--r--compiler/src/dotty/tools/dotc/parsing/JavaParsers.scala898
-rw-r--r--compiler/src/dotty/tools/dotc/parsing/JavaScanners.scala538
-rw-r--r--compiler/src/dotty/tools/dotc/parsing/JavaTokens.scala92
-rw-r--r--compiler/src/dotty/tools/dotc/parsing/MarkupParserCommon.scala257
-rw-r--r--compiler/src/dotty/tools/dotc/parsing/MarkupParsers.scala466
-rw-r--r--compiler/src/dotty/tools/dotc/parsing/Parsers.scala2309
-rw-r--r--compiler/src/dotty/tools/dotc/parsing/Scanners.scala1014
-rw-r--r--compiler/src/dotty/tools/dotc/parsing/ScriptParsers.scala145
-rw-r--r--compiler/src/dotty/tools/dotc/parsing/SymbolicXMLBuilder.scala264
-rw-r--r--compiler/src/dotty/tools/dotc/parsing/Tokens.scala238
-rw-r--r--compiler/src/dotty/tools/dotc/parsing/TreeBuilder.scala.unused535
-rw-r--r--compiler/src/dotty/tools/dotc/parsing/Utility.scala170
-rw-r--r--compiler/src/dotty/tools/dotc/parsing/package.scala33
14 files changed, 7091 insertions, 0 deletions
diff --git a/compiler/src/dotty/tools/dotc/parsing/CharArrayReader.scala b/compiler/src/dotty/tools/dotc/parsing/CharArrayReader.scala
new file mode 100644
index 000000000..b84e2eb47
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/parsing/CharArrayReader.scala
@@ -0,0 +1,132 @@
+package dotty.tools
+package dotc
+package parsing
+
+import scala.reflect.internal.Chars._
+
abstract class CharArrayReader { self =>

  /** The character buffer this reader walks over. */
  val buf: Array[Char]

  /** Offset at which reading begins (0 unless overridden). */
  protected def startFrom = 0

  /** Switch whether unicode escapes should be decoded. */
  protected def decodeUni: Boolean = true

  /** An error routine to call on bad unicode escapes \\uxxxx. */
  protected def error(msg: String, offset: Int): Unit

  /** The last character read. */
  var ch: Char = _

  /** The offset one past the last read character. */
  var charOffset: Int = startFrom

  /** The offset before the last read character. */
  var lastCharOffset: Int = startFrom

  /** The start offset of the current line. */
  var lineStartOffset: Int = startFrom

  /** The start offset of the line before the current one. */
  var lastLineStartOffset: Int = startFrom

  // Offset just past the most recently decoded \uxxxx escape, or -1 if none.
  private var lastUnicodeOffset = -1

  /** Was the last character read produced by a unicode escape \\uxxxx? */
  def isUnicodeEscape = charOffset == lastUnicodeOffset

  /** Advance one character; reducing CR;LF pairs to just LF. */
  final def nextChar(): Unit = {
    val pos = charOffset
    lastCharOffset = pos
    if (pos >= buf.length)
      ch = SU // sentinel for end of input
    else {
      val c = buf(pos)
      ch = c
      charOffset = pos + 1
      if (c == '\\') potentialUnicode()
      else if (c < ' ') { skipCR(); potentialLineEnd() }
    }
  }

  /** Advance and return the new current character. */
  def getc() = { nextChar(); ch }

  /** Advance one character, leaving CR;LF pairs intact.
   *  This is for use in multi-line strings, so there are no
   *  "potential line ends" here.
   */
  final def nextRawChar(): Unit = {
    val pos = charOffset
    lastCharOffset = pos
    if (pos >= buf.length)
      ch = SU
    else {
      val c = buf(pos)
      ch = c
      charOffset = pos + 1
      if (c == '\\') potentialUnicode()
    }
  }

  /** Interpret \\uxxxx escapes. */
  private def potentialUnicode(): Unit = {
    // An escape only counts if the backslash itself is preceded by an
    // even number of backslashes.
    def evenSlashPrefix: Boolean = {
      var p = charOffset - 2
      while (p >= 0 && buf(p) == '\\') p -= 1
      (charOffset - p) % 2 == 0
    }
    // Read one hex digit, reporting an error (and not advancing) on bad input.
    def udigit: Int =
      if (charOffset >= buf.length) {
        // Since the positioning code is very insistent about throwing exceptions,
        // we have to decrement the position so our error message can be seen, since
        // we are one past EOF. This happens with e.g. val x = \ u 1 <EOF>
        error("incomplete unicode escape", charOffset - 1)
        SU
      }
      else {
        val d = digit2int(buf(charOffset), 16)
        if (d >= 0) charOffset += 1
        else error("error in unicode escape", charOffset)
        d
      }
    if (charOffset < buf.length && buf(charOffset) == 'u' && decodeUni && evenSlashPrefix) {
      // Any number of 'u's may introduce the four hex digits.
      charOffset += 1
      while (charOffset < buf.length && buf(charOffset) == 'u') charOffset += 1
      val code = udigit << 12 | udigit << 8 | udigit << 4 | udigit
      lastUnicodeOffset = charOffset
      ch = code.toChar
    }
  }

  /** Replace a CR;LF pair by a single LF. */
  private def skipCR(): Unit =
    if (ch == CR && charOffset < buf.length && buf(charOffset) == LF) {
      charOffset += 1
      ch = LF
    }

  /** Record line-start bookkeeping when the current char ends a line. */
  private def potentialLineEnd(): Unit =
    if (ch == LF || ch == FF) {
      lastLineStartOffset = lineStartOffset
      lineStartOffset = charOffset
    }

  /** Has the whole buffer been consumed? */
  def isAtEnd = charOffset >= buf.length

  /** A new reader that takes off at the current character position */
  def lookaheadReader = new CharArrayLookaheadReader

  class CharArrayLookaheadReader extends CharArrayReader {
    val buf = self.buf
    charOffset = self.charOffset
    ch = self.ch
    override def decodeUni = self.decodeUni
    def error(msg: String, offset: Int) = self.error(msg, offset)
  }
}
diff --git a/compiler/src/dotty/tools/dotc/parsing/JavaParsers.scala b/compiler/src/dotty/tools/dotc/parsing/JavaParsers.scala
new file mode 100644
index 000000000..0f63b25bb
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/parsing/JavaParsers.scala
@@ -0,0 +1,898 @@
+package dotty.tools
+package dotc
+package parsing
+
+import dotty.tools.dotc.core.Constants.Constant
+import dotty.tools.dotc.core.Flags
+import dotty.tools.dotc.core.Flags.FlagSet
+
+import scala.language.implicitConversions
+
+import JavaTokens._
+import JavaScanners._
+import Scanners.Offset
+import Parsers._
+import core._
+import Contexts._
+import Names._
+import NameOps._
+import Types._
+import Symbols._
+import ast.Trees._
+import Decorators._
+import StdNames._
+import dotty.tools.dotc.reporting.diagnostic.messages.IdentifierExpected
+import dotty.tools.dotc.util.SourceFile
+import util.Positions._
+import annotation.switch
+import scala.collection.mutable.ListBuffer
+import scala.reflect.internal.util.Collections._
+
+object JavaParsers {
+
+ import ast.untpd._
+
+ class JavaParser(source: SourceFile)(implicit ctx: Context) extends ParserCommon(source) {
+
    val definitions = ctx.definitions
    import definitions._

    /** The scanner supplying the token stream for this parser. */
    val in: JavaScanner = new JavaScanner(source)

    /** The simple name of the package of the currently parsed file */
    private var thisPackageName: TypeName = tpnme.EMPTY

    /** This is the general parse entry point.
     *  Overridden by ScriptParser
     *
     *  @return the tree for the whole compilation unit, after checking
     *          that all input has been consumed
     */
    def parse(): Tree = {
      val t = compilationUnit()
      accept(EOF)
      t
    }

    // -------- error handling ---------------------------------------

    /** Skip forward to a plausible recovery point after an error: stops at
     *  EOF, at a top-level `;`, or at an unmatched closing brace. Opening
     *  parens/braces seen while skipping are counted so that delimiters
     *  inside skipped code do not end the skip prematurely.
     */
    protected def skip(): Unit = {
      var nparens = 0
      var nbraces = 0
      while (true) {
        in.token match {
          case EOF =>
            return
          case SEMI =>
            if (nparens == 0 && nbraces == 0) return
          case RPAREN =>
            nparens -= 1
          case RBRACE =>
            if (nbraces == 0) return
            nbraces -= 1
          case LPAREN =>
            nparens += 1
          case LBRACE =>
            nbraces += 1
          case _ =>
        }
        in.nextToken()
      }
    }

    /** Report a syntax error at the current offset. */
    def syntaxError(msg: String, skipIt: Boolean): Unit = {
      syntaxError(in.offset, msg, skipIt)
    }

    /** Report a syntax error at `pos` unless an error has already been
     *  reported at or past that position; optionally skip to a recovery
     *  point afterwards.
     */
    def syntaxError(pos: Int, msg: String, skipIt: Boolean): Unit = {
      if (pos > lastErrorOffset) {
        syntaxError(msg, pos)
        // no more errors on this token.
        lastErrorOffset = in.offset
      }
      if (skipIt)
        skip()
    }

    /** A type tree carrying ErrorType, positioned at the current offset. */
    def errorTypeTree = TypeTree().withType(ErrorType) withPos Position(in.offset)
+
    // --------- tree building -----------------------------

    /** `scala.annotation.<name>` */
    def scalaAnnotationDot(name: Name) = Select(scalaDot(nme.annotation), name)

    /** `java.<name>`, selected from the root package. */
    def javaDot(name: Name): Tree =
      Select(rootDot(nme.java), name)

    /** `java.lang.<name>` */
    def javaLangDot(name: Name): Tree =
      Select(javaDot(nme.lang), name)

    /** `java.lang.Object` */
    def javaLangObject(): Tree = javaLangDot(tpnme.Object)

    /** `Array[<tpt>]` */
    def arrayOf(tpt: Tree) =
      AppliedTypeTree(Ident(nme.Array.toTypeName), List(tpt))

    /** `???` — used as a placeholder body, since Java bodies are not translated. */
    def unimplementedExpr = Ident("???".toTermName)

    /** Assemble a template from `parents` and `stats`, pulling the first
     *  constructor (if any) out of the statement list; a default constructor
     *  is synthesized when none is present.
     */
    def makeTemplate(parents: List[Tree], stats: List[Tree], tparams: List[TypeDef], needsDummyConstr: Boolean) = {
      // Returns (first constructor, remaining statements in original order).
      def pullOutFirstConstr(stats: List[Tree]): (Tree, List[Tree]) = stats match {
        case (meth: DefDef) :: rest if meth.name == CONSTRUCTOR => (meth, rest)
        case first :: rest =>
          val (constr, tail) = pullOutFirstConstr(rest)
          (constr, first :: tail)
        case nil => (EmptyTree, nil)
      }
      var (constr1, stats1) = pullOutFirstConstr(stats)
      if (constr1 == EmptyTree) constr1 = makeConstructor(List(), tparams)
      // A dummy first constructor is needed for Java classes so that the real constructors see the
      // import of the companion object. The constructor has parameter of type Unit so no Java code
      // can call it.
      if (needsDummyConstr) {
        stats1 = constr1 :: stats1
        constr1 = makeConstructor(List(scalaDot(tpnme.Unit)), tparams, Flags.JavaDefined | Flags.PrivateLocal)
      }
      Template(constr1.asInstanceOf[DefDef], parents, EmptyValDef, stats1)
    }

    /** A synthetic parameter named `x$<count>` of type `tpt`. */
    def makeSyntheticParam(count: Int, tpt: Tree): ValDef =
      makeParam(nme.syntheticParamName(count), tpt)

    /** A Java-defined parameter accessor `name: tpt`. */
    def makeParam(name: TermName, tpt: Tree): ValDef =
      ValDef(name, tpt, EmptyTree).withMods(Modifiers(Flags.JavaDefined | Flags.ParamAccessor))

    /** A constructor taking `formals` (as synthetic params) with the given flags. */
    def makeConstructor(formals: List[Tree], tparams: List[TypeDef], flags: FlagSet = Flags.JavaDefined) = {
      val vparams = mapWithIndex(formals)((p, i) => makeSyntheticParam(i + 1, p))
      DefDef(nme.CONSTRUCTOR, tparams, List(vparams), TypeTree(), EmptyTree).withMods(Modifiers(flags))
    }
+
    // ------------- general parsing ---------------------------

    /** Skip a paren- or brace-enclosed sequence of things */
    def skipAhead(): Unit = {
      var nparens = 0
      var nbraces = 0
      do {
        in.token match {
          case LPAREN =>
            nparens += 1
          case LBRACE =>
            nbraces += 1
          case _ =>
        }
        in.nextToken()
        in.token match {
          case RPAREN =>
            nparens -= 1
          case RBRACE =>
            nbraces -= 1
          case _ =>
        }
      } while (in.token != EOF && (nparens > 0 || nbraces > 0))
    }

    /** Skip forward until the current token is one of `tokens` (or EOF),
     *  stepping over balanced `{...}` / `(...)` groups as whole units.
     */
    def skipTo(tokens: Int*): Unit = {
      while (!(tokens contains in.token) && in.token != EOF) {
        if (in.token == LBRACE) { skipAhead(); accept(RBRACE) }
        else if (in.token == LPAREN) { skipAhead(); accept(RPAREN) }
        else in.nextToken()
      }
    }

    /** Consume one token of the specified type, or
     *  signal an error if it is not there.
     *
     *  @return The offset at the start of the token to accept
     */
    def accept(token: Int): Int = {
      val offset = in.offset
      if (in.token != token) {
        val offsetToReport = in.offset
        val msg =
          tokenString(token) + " expected but " +
          tokenString(in.token) + " found."

        syntaxError(offsetToReport, msg, skipIt = true)
      }
      // After error recovery the expected token may now be current; if so, eat it.
      if (in.token == token) in.nextToken()
      offset
    }

    /** Accept a closing `>`. Because the scanner lexes `>>`, `>>>`, `>=` etc.
     *  as single tokens, a composite token is "peeled": one `>` is consumed
     *  and the current token is rewritten to the remainder.
     */
    def acceptClosingAngle(): Unit = {
      val closers: PartialFunction[Int, Int] = {
        case GTGTGTEQ => GTGTEQ
        case GTGTGT => GTGT
        case GTGTEQ => GTEQ
        case GTGT => GT
        case GTEQ => EQUALS
      }
      if (closers isDefinedAt in.token) in.token = closers(in.token)
      else accept(GT)
    }

    /** Parse an identifier as a type name. */
    def identForType(): TypeName = ident().toTypeName

    /** Parse an identifier; report an error and yield `nme.ERROR` when the
     *  current token is not an identifier.
     */
    def ident(): Name =
      if (in.token == IDENTIFIER) {
        val name = in.name
        in.nextToken()
        name
      } else {
        accept(IDENTIFIER)
        nme.ERROR
      }

    /** Parse one or more `p`s separated by token `sep`. */
    def repsep[T <: Tree](p: () => T, sep: Int): List[T] = {
      val buf = ListBuffer[T](p())
      while (in.token == sep) {
        in.nextToken()
        buf += p()
      }
      buf.toList
    }
+
    /** Convert (qual)ident to type identifier
     */
    def convertToTypeId(tree: Tree): Tree = convertToTypeName(tree) match {
      case Some(t) => t withPos tree.pos
      case _ => tree match {
        case AppliedTypeTree(_, _) | Select(_, _) =>
          tree
        case _ =>
          syntaxError(IdentifierExpected(tree.show), tree.pos)
          errorTypeTree
      }
    }

    /** Translate names in Select/Ident nodes to type names.
     */
    def convertToTypeName(tree: Tree): Option[RefTree] = tree match {
      case Select(qual, name) => Some(Select(qual, name.toTypeName))
      case Ident(name) => Some(Ident(name.toTypeName))
      case _ => None
    }

    // -------------------- specific parsing routines ------------------

    /** QualId ::= Ident {`.` Ident} */
    def qualId(): RefTree = {
      var t: RefTree = atPos(in.offset) { Ident(ident()) }
      while (in.token == DOT) {
        in.nextToken()
        t = atPos(t.pos.start, in.offset) { Select(t, ident()) }
      }
      t
    }

    /** Wrap `tpt` in one `Array[_]` layer per `[]` pair that follows. */
    def optArrayBrackets(tpt: Tree): Tree =
      if (in.token == LBRACKET) {
        val tpt1 = atPos(tpt.pos.start, in.offset) { arrayOf(tpt) }
        in.nextToken()
        accept(RBRACKET)
        optArrayBrackets(tpt1)
      } else tpt

    /** Parse a Java primitive type. */
    def basicType(): Tree =
      atPos(in.offset) {
        in.token match {
          case BYTE => in.nextToken(); TypeTree(ByteType)
          case SHORT => in.nextToken(); TypeTree(ShortType)
          case CHAR => in.nextToken(); TypeTree(CharType)
          case INT => in.nextToken(); TypeTree(IntType)
          case LONG => in.nextToken(); TypeTree(LongType)
          case FLOAT => in.nextToken(); TypeTree(FloatType)
          case DOUBLE => in.nextToken(); TypeTree(DoubleType)
          case BOOLEAN => in.nextToken(); TypeTree(BooleanType)
          case _ => syntaxError("illegal start of type", skipIt = true); errorTypeTree
        }
      }

    /** Parse a type: a primitive, or a (possibly qualified, possibly
     *  parameterized) reference type, with optional array brackets.
     */
    def typ(): Tree =
      optArrayBrackets {
        if (in.token == FINAL) in.nextToken()
        if (in.token == IDENTIFIER) {
          var t = typeArgs(atPos(in.offset)(Ident(ident())))
          // typeSelect generates Select nodes if the lhs is an Ident or Select,
          // For other nodes it always assumes that the selected item is a type.
          def typeSelect(t: Tree, name: Name) = t match {
            case Ident(_) | Select(_, _) => Select(t, name)
            case _ => Select(t, name.toTypeName)
          }
          while (in.token == DOT) {
            in.nextToken()
            t = typeArgs(atPos(t.pos.start, in.offset)(typeSelect(t, ident())))
          }
          convertToTypeId(t)
        } else {
          basicType()
        }
      }

    /** Parse optional `<...>` type arguments applied to `t`; wildcard
     *  arguments (`? extends T` / `? super T`) become TypeBoundsTrees.
     */
    def typeArgs(t: Tree): Tree = {
      var wildnum = 0
      def typeArg(): Tree =
        if (in.token == QMARK) {
          val offset = in.offset
          in.nextToken()
          val hi = if (in.token == EXTENDS) { in.nextToken() ; typ() } else EmptyTree
          val lo = if (in.token == SUPER) { in.nextToken() ; typ() } else EmptyTree
          atPos(offset) {
            /*
            TypeDef(
              Modifiers(Flags.JavaDefined | Flags.Deferred),
              typeName("_$" +(wildnum += 1)),
              List(),
              TypeBoundsTree(lo, hi))
            */
            TypeBoundsTree(lo, hi)
          }
        } else {
          typ()
        }
      if (in.token == LT) {
        in.nextToken()
        val t1 = convertToTypeId(t)
        val args = repsep(typeArg, COMMA)
        acceptClosingAngle()
        atPos(t1.pos.start) {
          AppliedTypeTree(t1, args)
        }
      } else t
    }

    /** Parse (and discard) a sequence of leading annotations. */
    def annotations(): List[Tree] = {
      //var annots = new ListBuffer[Tree]
      while (in.token == AT) {
        in.nextToken()
        annotation()
      }
      List() // don't pass on annotations for now
    }

    /** Annotation ::= TypeName [`(` AnnotationArgument {`,` AnnotationArgument} `)`]
     */
    def annotation(): Unit = {
      qualId()
      // Argument lists are skipped, not parsed.
      if (in.token == LPAREN) { skipAhead(); accept(RPAREN) }
      else if (in.token == LBRACE) { skipAhead(); accept(RBRACE) }
    }
+
    /** Parse a (possibly empty) sequence of Java modifiers and annotations
     *  into a Modifiers value. Members without an explicit access modifier
     *  get package-private visibility (`privateWithin` = enclosing package)
     *  unless they sit in an interface.
     */
    def modifiers(inInterface: Boolean): Modifiers = {
      var flags = Flags.JavaDefined
      // assumed true unless we see public/private/protected
      var isPackageAccess = true
      var annots: List[Tree] = Nil
      // native/transient/volatile are mapped to Scala annotations.
      def addAnnot(sym: ClassSymbol) =
        annots :+= atPos(in.offset) {
          in.nextToken()
          New(TypeTree(sym.typeRef))
        }

      while (true) {
        in.token match {
          case AT if (in.lookaheadToken != INTERFACE) =>
            in.nextToken()
            annotation()
          case PUBLIC =>
            isPackageAccess = false
            in.nextToken()
          case PROTECTED =>
            flags |= Flags.Protected
            in.nextToken()
          case PRIVATE =>
            isPackageAccess = false
            flags |= Flags.Private
            in.nextToken()
          case STATIC =>
            flags |= Flags.JavaStatic
            in.nextToken()
          case ABSTRACT =>
            flags |= Flags.Abstract
            in.nextToken()
          case FINAL =>
            flags |= Flags.Final
            in.nextToken()
          case DEFAULT =>
            flags |= Flags.DefaultMethod
            in.nextToken()
          case NATIVE =>
            addAnnot(NativeAnnot)
          case TRANSIENT =>
            addAnnot(TransientAnnot)
          case VOLATILE =>
            addAnnot(VolatileAnnot)
          case SYNCHRONIZED | STRICTFP =>
            // No Scala counterpart; simply dropped.
            in.nextToken()
          case _ =>
            val privateWithin: TypeName =
              if (isPackageAccess && !inInterface) thisPackageName
              else tpnme.EMPTY

            return Modifiers(flags, privateWithin) withAnnotations annots
        }
      }
      assert(false, "should not be here")
      throw new RuntimeException
    }

    /** Parse optional `<...>` type parameter declarations. */
    def typeParams(flags: FlagSet = Flags.JavaDefined | Flags.PrivateLocal | Flags.Param): List[TypeDef] =
      if (in.token == LT) {
        in.nextToken()
        val tparams = repsep(() => typeParam(flags), COMMA)
        acceptClosingAngle()
        tparams
      } else List()

    /** Parse one type parameter, with an optional `extends` upper bound. */
    def typeParam(flags: FlagSet): TypeDef =
      atPos(in.offset) {
        val name = identForType()
        val hi = if (in.token == EXTENDS) { in.nextToken() ; bound() } else EmptyTree
        TypeDef(name, TypeBoundsTree(EmptyTree, hi)).withMods(Modifiers(flags))
      }

    /** Parse an intersection bound `T { & T }`, folded with AndTypeTree. */
    def bound(): Tree =
      atPos(in.offset) {
        val buf = ListBuffer[Tree](typ())
        while (in.token == AMP) {
          in.nextToken()
          buf += typ()
        }
        val ts = buf.toList
        if (ts.tail.isEmpty) ts.head
        else ts.reduce(AndTypeTree(_,_))
      }
+
    /** Parse a parenthesized, comma-separated formal parameter list. */
    def formalParams(): List[ValDef] = {
      accept(LPAREN)
      val vparams = if (in.token == RPAREN) List() else repsep(formalParam, COMMA)
      accept(RPAREN)
      vparams
    }

    /** Parse a single formal parameter; a trailing `...` (varargs) is
     *  translated into a repeated-parameter `T*` via PostfixOp.
     */
    def formalParam(): ValDef = {
      val start = in.offset
      if (in.token == FINAL) in.nextToken()
      annotations()
      var t = typ()
      if (in.token == DOTDOTDOT) {
        in.nextToken()
        t = atPos(t.pos.start) {
          PostfixOp(t, nme.raw.STAR)
        }
      }
      atPos(start, in.offset) {
        varDecl(Modifiers(Flags.JavaDefined | Flags.Param), t, ident().toTermName)
      }
    }

    /** Parse (and discard) an optional `throws` clause. */
    def optThrows(): Unit = {
      if (in.token == THROWS) {
        in.nextToken()
        repsep(typ, COMMA)
      }
    }

    /** Skip over a method body, yielding a `???` placeholder expression. */
    def methodBody(): Tree = atPos(in.offset) {
      skipAhead()
      accept(RBRACE) // skip block
      unimplementedExpr
    }

    /** Does `token` start an interface-like declaration (interface or `@interface`)? */
    def definesInterface(token: Int) = token == INTERFACE || token == AT
+
    /** Parse a term member: a constructor, a method, or one or more fields.
     *  Constructors are recognized by a `(` directly after a type that is a
     *  bare Ident (the class name) outside an interface.
     */
    def termDecl(start: Offset, mods: Modifiers, parentToken: Int, parentTParams: List[TypeDef]): List[Tree] = {
      val inInterface = definesInterface(parentToken)
      val tparams = if (in.token == LT) typeParams(Flags.JavaDefined | Flags.Param) else List()
      val isVoid = in.token == VOID
      var rtpt =
        if (isVoid)
          atPos(in.offset) {
            in.nextToken()
            TypeTree(UnitType)
          }
        else typ()
      var nameOffset = in.offset
      val rtptName = rtpt match {
        case Ident(name) => name
        case _ => nme.EMPTY
      }
      if (in.token == LPAREN && rtptName != nme.EMPTY && !inInterface) {
        // constructor declaration
        val vparams = formalParams()
        optThrows()
        List {
          atPos(start) {
            DefDef(nme.CONSTRUCTOR, parentTParams,
              List(vparams), TypeTree(), methodBody()).withMods(mods)
          }
        }
      } else {
        var mods1 = mods
        if (mods is Flags.Abstract) mods1 = mods &~ Flags.Abstract
        nameOffset = in.offset
        val name = ident()
        if (in.token == LPAREN) {
          // method declaration
          val vparams = formalParams()
          if (!isVoid) rtpt = optArrayBrackets(rtpt)
          optThrows()
          // Interface methods may only have a body when marked `default`.
          val bodyOk = !inInterface || (mods is Flags.DefaultMethod)
          val body =
            if (bodyOk && in.token == LBRACE) {
              methodBody()
            } else {
              if (parentToken == AT && in.token == DEFAULT) {
                // Annotation element with a default value: record it via the
                // AnnotationDefault attribute annotation and skip the value.
                val annot =
                  atPos(nameOffset) {
                    New(Select(scalaDot(nme.runtime), tpnme.AnnotationDefaultATTR), Nil)
                  }
                mods1 = mods1 withAddedAnnotation annot
                val unimplemented = unimplementedExpr
                skipTo(SEMI)
                accept(SEMI)
                unimplemented
              } else {
                accept(SEMI)
                EmptyTree
              }
            }
          //if (inInterface) mods1 |= Flags.Deferred
          List {
            atPos(start, nameOffset) {
              DefDef(name.toTermName, tparams, List(vparams), rtpt, body).withMods(mods1 | Flags.Method)
            }
          }
        } else {
          if (inInterface) mods1 |= Flags.Final | Flags.JavaStatic
          val result = fieldDecls(start, nameOffset, mods1, rtpt, name)
          accept(SEMI)
          result
        }
      }
    }
+
    /** Parse a sequence of field declarations, separated by commas.
     *  This one is tricky because a comma might also appear in an
     *  initializer. Since we don't parse initializers we don't know
     *  what the comma signifies.
     *  We solve this with a second list buffer `maybe` which contains
     *  potential variable definitions.
     *  Once we have reached the end of the statement, we know whether
     *  these potential definitions are real or not.
     */
    def fieldDecls(start: Offset, firstNameOffset: Offset, mods: Modifiers, tpt: Tree, name: Name): List[Tree] = {
      val buf = ListBuffer[Tree](
        atPos(start, firstNameOffset) { varDecl(mods, tpt, name.toTermName) })
      val maybe = new ListBuffer[Tree] // potential variable definitions.
      while (in.token == COMMA) {
        in.nextToken()
        if (in.token == IDENTIFIER) { // if there's an ident after the comma ...
          val nextNameOffset = in.offset
          val name = ident()
          if (in.token == EQUALS || in.token == SEMI) { // ... followed by a `=` or `;`, we know it's a real variable definition
            buf ++= maybe
            buf += atPos(start, nextNameOffset) { varDecl(mods, tpt, name.toTermName) }
            maybe.clear()
          } else if (in.token == COMMA) { // ... if there's a comma after the ident, it could be a real vardef or not.
            maybe += atPos(start, nextNameOffset) { varDecl(mods, tpt, name.toTermName) }
          } else { // ... if there's something else we were still in the initializer of the
                   // previous var def; skip to next comma or semicolon.
            skipTo(COMMA, SEMI)
            maybe.clear()
          }
        } else { // ... if there's no ident following the comma we were still in the initializer of the
                 // previous var def; skip to next comma or semicolon.
          skipTo(COMMA, SEMI)
          maybe.clear()
        }
      }
      if (in.token == SEMI) {
        buf ++= maybe // every potential vardef that survived until here is real.
      }
      buf.toList
    }

    /** Finish a variable declaration after its type and name: optional array
     *  brackets and a skipped initializer. Non-final variables are marked
     *  Mutable; parameters keep an empty right-hand side.
     */
    def varDecl(mods: Modifiers, tpt: Tree, name: TermName): ValDef = {
      val tpt1 = optArrayBrackets(tpt)
      if (in.token == EQUALS && !(mods is Flags.Param)) skipTo(COMMA, SEMI)
      val mods1 = if (mods is Flags.Final) mods else mods | Flags.Mutable
      ValDef(name, tpt1, if (mods is Flags.Param) EmptyTree else unimplementedExpr).withMods(mods1)
    }
+
    /** Dispatch a member declaration: type declarations (class/enum/
     *  interface/annotation) versus term declarations (fields, methods,
     *  constructors).
     */
    def memberDecl(start: Offset, mods: Modifiers, parentToken: Int, parentTParams: List[TypeDef]): List[Tree] = in.token match {
      case CLASS | ENUM | INTERFACE | AT =>
        typeDecl(start, if (definesInterface(parentToken)) mods | Flags.JavaStatic else mods)
      case _ =>
        termDecl(start, mods, parentToken, parentTParams)
    }

    /** Build the companion object of `cdef`, holding its static members. */
    def makeCompanionObject(cdef: TypeDef, statics: List[Tree]): Tree =
      atPos(cdef.pos) {
        assert(cdef.pos.exists)
        ModuleDef(cdef.name.toTermName,
          makeTemplate(List(), statics, List(), false)).withMods((cdef.mods & (Flags.AccessFlags | Flags.JavaDefined)).toTermFlags)
      }

    /** `import <companion-of-cdef>._` */
    def importCompanionObject(cdef: TypeDef): Tree =
      Import(Ident(cdef.name.toTermName).withPos(NoPosition), Ident(nme.WILDCARD) :: Nil)

    // Importing the companion object members cannot be done uncritically: see
    // ticket #2377 wherein a class contains two static inner classes, each of which
    // has a static inner class called "Builder" - this results in an ambiguity error
    // when each performs the import in the enclosing class's scope.
    //
    // To address this I moved the import Companion._ inside the class, as the first
    // statement. This should work without compromising the enclosing scope, but may (?)
    // end up suffering from the same issues it does in scala - specifically that this
    // leaves auxiliary constructors unable to access members of the companion object
    // as unqualified identifiers.
    def addCompanionObject(statics: List[Tree], cdef: TypeDef): List[Tree] = {
      // if there are no statics we can use the original cdef, but we always
      // create the companion so import A._ is not an error (see ticket #1700)
      val cdefNew =
        if (statics.isEmpty) cdef
        else {
          val template = cdef.rhs.asInstanceOf[Template]
          cpy.TypeDef(cdef)(cdef.name,
            cpy.Template(template)(template.constr, template.parents, template.self,
              importCompanionObject(cdef) :: template.body)).withMods(cdef.mods)
        }

      List(makeCompanionObject(cdefNew, statics), cdefNew)
    }
+
    /** Parse an import declaration, translating it into a Scala Import tree.
     *  Non-static imports are rooted at `_root_`; a trailing `*` becomes a
     *  wildcard selector.
     */
    def importDecl(): List[Tree] = {
      val start = in.offset
      accept(IMPORT)
      val buf = new ListBuffer[Name]
      // Collects the dotted name into `buf`; returns the offset of the last part.
      def collectIdents() : Int = {
        if (in.token == ASTERISK) {
          val starOffset = in.offset
          in.nextToken()
          buf += nme.WILDCARD
          starOffset
        } else {
          val nameOffset = in.offset
          buf += ident()
          if (in.token == DOT) {
            in.nextToken()
            collectIdents()
          } else nameOffset
        }
      }
      if (in.token == STATIC) in.nextToken()
      else buf += nme.ROOTPKG
      val lastnameOffset = collectIdents()
      accept(SEMI)
      val names = buf.toList
      if (names.length < 2) {
        syntaxError(start, "illegal import", skipIt = false)
        List()
      } else {
        val qual = ((Ident(names.head): Tree) /: names.tail.init) (Select(_, _))
        val lastname = names.last
        val ident = Ident(lastname) withPos Position(lastnameOffset)
//      val selector = lastname match {
//        case nme.WILDCARD => Pair(ident, Ident(null) withPos Position(-1))
//        case _            => Pair(ident, ident)
//      }
        val imp = atPos(start) { Import(qual, List(ident)) }
        imp :: Nil
      }
    }

    /** Parse an optional `implements` clause. */
    def interfacesOpt() =
      if (in.token == IMPLEMENTS) {
        in.nextToken()
        repsep(typ, COMMA)
      } else {
        List()
      }
+
    /** ClassDecl ::= CLASS Ident TypeParams [EXTENDS Type] [IMPLEMENTS Types] ClassBody
     *  Returns the class plus its companion object holding the statics.
     */
    def classDecl(start: Offset, mods: Modifiers): List[Tree] = {
      accept(CLASS)
      val nameOffset = in.offset
      val name = identForType()
      val tparams = typeParams()
      val superclass =
        if (in.token == EXTENDS) {
          in.nextToken()
          typ()
        } else {
          javaLangObject()
        }
      val interfaces = interfacesOpt()
      val (statics, body) = typeBody(CLASS, name, tparams)
      val cls = atPos(start, nameOffset) {
        TypeDef(name, makeTemplate(superclass :: interfaces, body, tparams, true)).withMods(mods)
      }
      addCompanionObject(statics, cls)
    }

    /** InterfaceDecl ::= INTERFACE Ident TypeParams [EXTENDS Types] InterfaceBody
     *  Returns the trait plus its companion object holding the statics.
     */
    def interfaceDecl(start: Offset, mods: Modifiers): List[Tree] = {
      accept(INTERFACE)
      val nameOffset = in.offset
      val name = identForType()
      val tparams = typeParams()
      val parents =
        if (in.token == EXTENDS) {
          in.nextToken()
          repsep(typ, COMMA)
        } else {
          List(javaLangObject())
        }
      val (statics, body) = typeBody(INTERFACE, name, tparams)
      val iface = atPos(start, nameOffset) {
        TypeDef(
          name,
          makeTemplate(parents, body, tparams, false)).withMods(mods | Flags.Trait | Flags.JavaInterface | Flags.Abstract)
      }
      addCompanionObject(statics, iface)
    }

    /** Parse a brace-enclosed member list; see typeBodyDecls for the result. */
    def typeBody(leadingToken: Int, parentName: Name, parentTParams: List[TypeDef]): (List[Tree], List[Tree]) = {
      accept(LBRACE)
      val defs = typeBodyDecls(leadingToken, parentName, parentTParams)
      accept(RBRACE)
      defs
    }

    /** Parse member declarations until `}` or EOF.
     *  @return a pair (static members, instance members); for interfaces,
     *          type-alias forwarders to the statics are appended to the
     *          instance members.
     */
    def typeBodyDecls(parentToken: Int, parentName: Name, parentTParams: List[TypeDef]): (List[Tree], List[Tree]) = {
      val inInterface = definesInterface(parentToken)
      val statics = new ListBuffer[Tree]
      val members = new ListBuffer[Tree]
      while (in.token != RBRACE && in.token != EOF) {
        val start = in.offset
        var mods = atPos(start) { modifiers(inInterface) }
        if (in.token == LBRACE) {
          skipAhead() // skip init block, we just assume we have seen only static
          accept(RBRACE)
        } else if (in.token == SEMI) {
          in.nextToken()
        } else {
          if (in.token == ENUM || definesInterface(in.token)) mods |= Flags.JavaStatic
          val decls = memberDecl(start, mods, parentToken, parentTParams)
          (if ((mods is Flags.JavaStatic) || inInterface && !(decls exists (_.isInstanceOf[DefDef])))
             statics
           else
             members) ++= decls
        }
      }
      // For interfaces, expose static nested types through protected aliases.
      def forwarders(sdef: Tree): List[Tree] = sdef match {
        case TypeDef(name, _) if (parentToken == INTERFACE) =>
          var rhs: Tree = Select(Ident(parentName.toTermName), name)
          List(TypeDef(name, rhs).withMods(Modifiers(Flags.Protected)))
        case _ =>
          List()
      }
      val sdefs = statics.toList
      val idefs = members.toList ::: (sdefs flatMap forwarders)
      (sdefs, idefs)
    }
    /** Parents used for a translated Java annotation class. */
    def annotationParents = List(
      scalaAnnotationDot(tpnme.Annotation),
      Select(javaLangDot(nme.annotation), tpnme.Annotation),
      scalaAnnotationDot(tpnme.ClassfileAnnotation)
    )

    /** AnnotationDecl ::= `@` INTERFACE Ident AnnotationBody
     *  Annotation elements (parsed as DefDefs) become constructor parameters
     *  of the generated annotation class.
     */
    def annotationDecl(start: Offset, mods: Modifiers): List[Tree] = {
      accept(AT)
      accept(INTERFACE)
      val nameOffset = in.offset
      val name = identForType()
      val (statics, body) = typeBody(AT, name, List())
      val constructorParams = body.collect {
        case dd: DefDef => makeParam(dd.name, dd.tpt)
      }
      val constr = DefDef(nme.CONSTRUCTOR,
        List(), List(constructorParams), TypeTree(), EmptyTree).withMods(Modifiers(Flags.JavaDefined))
      val body1 = body.filterNot(_.isInstanceOf[DefDef])
      val templ = makeTemplate(annotationParents, constr :: body1, List(), false)
      val annot = atPos(start, nameOffset) {
        TypeDef(name, templ).withMods(mods | Flags.Abstract)
      }
      addCompanionObject(statics, annot)
    }

    /** EnumDecl ::= ENUM Ident [IMPLEMENTS Types] `{` EnumConsts [`;` Members] `}`
     *  Constants and statics go into the companion object, together with
     *  synthesized `values` and `valueOf` methods.
     */
    def enumDecl(start: Offset, mods: Modifiers): List[Tree] = {
      accept(ENUM)
      val nameOffset = in.offset
      val name = identForType()
      def enumType = Ident(name)
      val interfaces = interfacesOpt()
      accept(LBRACE)
      val buf = new ListBuffer[Tree]
      def parseEnumConsts(): Unit = {
        if (in.token != RBRACE && in.token != SEMI && in.token != EOF) {
          buf += enumConst(enumType)
          if (in.token == COMMA) {
            in.nextToken()
            parseEnumConsts()
          }
        }
      }
      parseEnumConsts()
      val consts = buf.toList
      val (statics, body) =
        if (in.token == SEMI) {
          in.nextToken()
          typeBodyDecls(ENUM, name, List())
        } else {
          (List(), List())
        }
      val predefs = List(
        DefDef(
          nme.values, List(),
          ListOfNil,
          arrayOf(enumType),
          unimplementedExpr).withMods(Modifiers(Flags.JavaDefined | Flags.JavaStatic | Flags.Method)),
        DefDef(
          nme.valueOf, List(),
          List(List(makeParam("x".toTermName, TypeTree(StringType)))),
          enumType,
          unimplementedExpr).withMods(Modifiers(Flags.JavaDefined | Flags.JavaStatic | Flags.Method)))
      accept(RBRACE)
      /*
      val superclazz =
        AppliedTypeTree(javaLangDot(tpnme.Enum), List(enumType))
      */
      val superclazz = Apply(TypeApply(
        Select(New(javaLangDot(tpnme.Enum)), nme.CONSTRUCTOR), List(enumType)),
        List(Literal(Constant(null)),Literal(Constant(0))))
      val enum = atPos(start, nameOffset) {
        TypeDef(name,
          makeTemplate(superclazz :: interfaces, body, List(), true)).withMods(mods | Flags.Enum)
      }
      addCompanionObject(consts ::: statics ::: predefs, enum)
    }

    /** Parse one enum constant; constructor arguments and a class body, if
     *  present, are skipped.
     */
    def enumConst(enumType: Tree) = {
      annotations()
      atPos(in.offset) {
        val name = ident()
        if (in.token == LPAREN) {
          // skip arguments
          skipAhead()
          accept(RPAREN)
        }
        if (in.token == LBRACE) {
          // skip classbody
          skipAhead()
          accept(RBRACE)
        }
        ValDef(name.toTermName, enumType, unimplementedExpr).withMods(Modifiers(Flags.Enum | Flags.Stable | Flags.JavaDefined | Flags.JavaStatic))
      }
    }
+
    /** Dispatch a type declaration on its leading keyword. */
    def typeDecl(start: Offset, mods: Modifiers): List[Tree] = in.token match {
      case ENUM => enumDecl(start, mods)
      case INTERFACE => interfaceDecl(start, mods)
      case AT => annotationDecl(start, mods)
      case CLASS => classDecl(start, mods)
      case _ => in.nextToken(); syntaxError("illegal start of type declaration", skipIt = true); List(errorTypeTree)
    }

    /** CompilationUnit ::= [package QualId semi] TopStatSeq
     */
    def compilationUnit(): Tree = {
      val start = in.offset
      val pkg: RefTree =
        if (in.token == AT || in.token == PACKAGE) {
          // Package-level annotations are parsed and dropped.
          annotations()
          accept(PACKAGE)
          val pkg = qualId()
          accept(SEMI)
          pkg
        } else {
          Ident(nme.EMPTY_PACKAGE)
        }
      // Remember the package name for package-private visibility (see modifiers).
      thisPackageName = convertToTypeName(pkg) match {
        case Some(t) => t.name.toTypeName
        case _ => tpnme.EMPTY
      }
      val buf = new ListBuffer[Tree]
      while (in.token == IMPORT)
        buf ++= importDecl()
      while (in.token != EOF && in.token != RBRACE) {
        while (in.token == SEMI) in.nextToken()
        if (in.token != EOF) {
          val start = in.offset
          val mods = atPos(start) { modifiers(inInterface = false) }
          buf ++= typeDecl(start, mods)
        }
      }
      val unit = atPos(start) { PackageDef(pkg, buf.toList) }
      accept(EOF)
      unit
    }
+ }
+}
diff --git a/compiler/src/dotty/tools/dotc/parsing/JavaScanners.scala b/compiler/src/dotty/tools/dotc/parsing/JavaScanners.scala
new file mode 100644
index 000000000..83e16627c
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/parsing/JavaScanners.scala
@@ -0,0 +1,538 @@
+package dotty.tools
+package dotc
+package parsing
+
+import core.Names._, core.Contexts._, core.Decorators._, util.Positions._
+import Scanners._
+import util.SourceFile
+import JavaTokens._
+import scala.annotation.{ switch, tailrec }
+import scala.reflect.internal.Chars._
+
+object JavaScanners {
+
+ class JavaScanner(source: SourceFile, override val startFrom: Offset = 0)(implicit ctx: Context) extends ScannerCommon(source)(ctx) {
+
+ def toToken(idx: Int): Token =
+ if (idx >= 0 && idx <= lastKeywordStart) kwArray(idx) else IDENTIFIER
+
+ private class JavaTokenData0 extends TokenData
+
+ /** we need one token lookahead
+ */
+ val next : TokenData = new JavaTokenData0
+ val prev : TokenData = new JavaTokenData0
+
+ // Get next token ------------------------------------------------------------
+
+ def nextToken(): Unit = {
+ if (next.token == EMPTY) {
+ lastOffset = lastCharOffset
+ fetchToken()
+ }
+ else {
+ this copyFrom next
+ next.token = EMPTY
+ }
+ }
+
+ def lookaheadToken: Int = {
+ prev copyFrom this
+ nextToken()
+ val t = token
+ next copyFrom this
+ this copyFrom prev
+ t
+ }
+
+ /** read next token
+ */
+ private def fetchToken(): Unit = {
+ offset = charOffset - 1
+ ch match {
+ case ' ' | '\t' | CR | LF | FF =>
+ nextChar()
+ fetchToken()
+ case _ =>
+ (ch: @switch) match {
+ case 'A' | 'B' | 'C' | 'D' | 'E' |
+ 'F' | 'G' | 'H' | 'I' | 'J' |
+ 'K' | 'L' | 'M' | 'N' | 'O' |
+ 'P' | 'Q' | 'R' | 'S' | 'T' |
+ 'U' | 'V' | 'W' | 'X' | 'Y' |
+ 'Z' | '$' | '_' |
+ 'a' | 'b' | 'c' | 'd' | 'e' |
+ 'f' | 'g' | 'h' | 'i' | 'j' |
+ 'k' | 'l' | 'm' | 'n' | 'o' |
+ 'p' | 'q' | 'r' | 's' | 't' |
+ 'u' | 'v' | 'w' | 'x' | 'y' |
+ 'z' =>
+ putChar(ch)
+ nextChar()
+ getIdentRest()
+
+ case '0' =>
+ putChar(ch)
+ nextChar()
+ if (ch == 'x' || ch == 'X') {
+ nextChar()
+ base = 16
+ } else {
+ base = 8
+ }
+ getNumber()
+
+ case '1' | '2' | '3' | '4' |
+ '5' | '6' | '7' | '8' | '9' =>
+ base = 10
+ getNumber()
+
+ case '\"' =>
+ nextChar()
+ while (ch != '\"' && (isUnicodeEscape || ch != CR && ch != LF && ch != SU)) {
+ getlitch()
+ }
+ if (ch == '\"') {
+ token = STRINGLIT
+ setStrVal()
+ nextChar()
+ } else {
+ error("unclosed string literal")
+ }
+
+ case '\'' =>
+ nextChar()
+ getlitch()
+ if (ch == '\'') {
+ nextChar()
+ token = CHARLIT
+ setStrVal()
+ } else {
+ error("unclosed character literal")
+ }
+
+ case '=' =>
+ token = EQUALS
+ nextChar()
+ if (ch == '=') {
+ token = EQEQ
+ nextChar()
+ }
+
+ case '>' =>
+ token = GT
+ nextChar()
+ if (ch == '=') {
+ token = GTEQ
+ nextChar()
+ } else if (ch == '>') {
+ token = GTGT
+ nextChar()
+ if (ch == '=') {
+ token = GTGTEQ
+ nextChar()
+ } else if (ch == '>') {
+ token = GTGTGT
+ nextChar()
+ if (ch == '=') {
+ token = GTGTGTEQ
+ nextChar()
+ }
+ }
+ }
+
+ case '<' =>
+ token = LT
+ nextChar()
+ if (ch == '=') {
+ token = LTEQ
+ nextChar()
+ } else if (ch == '<') {
+ token = LTLT
+ nextChar()
+ if (ch == '=') {
+ token = LTLTEQ
+ nextChar()
+ }
+ }
+
+ case '!' =>
+ token = BANG
+ nextChar()
+ if (ch == '=') {
+ token = BANGEQ
+ nextChar()
+ }
+
+ case '~' =>
+ token = TILDE
+ nextChar()
+
+ case '?' =>
+ token = QMARK
+ nextChar()
+
+ case ':' =>
+ token = COLON
+ nextChar()
+
+ case '@' =>
+ token = AT
+ nextChar()
+
+ case '&' =>
+ token = AMP
+ nextChar()
+ if (ch == '&') {
+ token = AMPAMP
+ nextChar()
+ } else if (ch == '=') {
+ token = AMPEQ
+ nextChar()
+ }
+
+ case '|' =>
+ token = BAR
+ nextChar()
+ if (ch == '|') {
+ token = BARBAR
+ nextChar()
+ } else if (ch == '=') {
+ token = BAREQ
+ nextChar()
+ }
+
+ case '+' =>
+ token = PLUS
+ nextChar()
+ if (ch == '+') {
+ token = PLUSPLUS
+ nextChar()
+ } else if (ch == '=') {
+ token = PLUSEQ
+ nextChar()
+ }
+
+ case '-' =>
+ token = MINUS
+ nextChar()
+ if (ch == '-') {
+ token = MINUSMINUS
+ nextChar()
+ } else if (ch == '=') {
+ token = MINUSEQ
+ nextChar()
+ }
+
+ case '*' =>
+ token = ASTERISK
+ nextChar()
+ if (ch == '=') {
+ token = ASTERISKEQ
+ nextChar()
+ }
+
+ case '/' =>
+ nextChar()
+ if (!skipComment()) {
+ token = SLASH
+ nextChar()
+ if (ch == '=') {
+ token = SLASHEQ
+ nextChar()
+ }
+ } else fetchToken()
+
+ case '^' =>
+ token = HAT
+ nextChar()
+ if (ch == '=') {
+ token = HATEQ
+ nextChar()
+ }
+
+ case '%' =>
+ token = PERCENT
+ nextChar()
+ if (ch == '=') {
+ token = PERCENTEQ
+ nextChar()
+ }
+
+ case '.' =>
+ token = DOT
+ nextChar()
+ if ('0' <= ch && ch <= '9') {
+ putChar('.');
+ getFraction()
+ } else if (ch == '.') {
+ nextChar()
+ if (ch == '.') {
+ nextChar()
+ token = DOTDOTDOT
+ } else error("`.' character expected")
+ }
+
+ case ';' =>
+ token = SEMI
+ nextChar()
+
+ case ',' =>
+ token = COMMA
+ nextChar()
+
+ case '(' =>
+ token = LPAREN
+ nextChar()
+
+ case '{' =>
+ token = LBRACE
+ nextChar()
+
+ case ')' =>
+ token = RPAREN
+ nextChar()
+
+ case '}' =>
+ token = RBRACE
+ nextChar()
+
+ case '[' =>
+ token = LBRACKET
+ nextChar()
+
+ case ']' =>
+ token = RBRACKET
+ nextChar()
+
+ case SU =>
+ if (isAtEnd) token = EOF
+ else {
+ error("illegal character")
+ nextChar()
+ }
+
+ case _ =>
+ if (Character.isUnicodeIdentifierStart(ch)) {
+ putChar(ch)
+ nextChar()
+ getIdentRest()
+ } else {
+ error("illegal character: " + ch.toInt)
+ nextChar()
+ }
+ }
+ }
+ }
+
+ protected def skipComment(): Boolean = {
+ @tailrec def skipLineComment(): Unit = ch match {
+ case CR | LF | SU =>
+ case _ => nextChar(); skipLineComment()
+ }
+ @tailrec def skipJavaComment(): Unit = ch match {
+ case SU => incompleteInputError("unclosed comment")
+ case '*' => nextChar(); if (ch == '/') nextChar() else skipJavaComment()
+ case _ => nextChar(); skipJavaComment()
+ }
+ ch match {
+ case '/' => nextChar(); skipLineComment(); true
+ case '*' => nextChar(); skipJavaComment(); true
+ case _ => false
+ }
+ }
+
+ // Identifiers ---------------------------------------------------------------
+
+ private def getIdentRest(): Unit = {
+ while (true) {
+ (ch: @switch) match {
+ case 'A' | 'B' | 'C' | 'D' | 'E' |
+ 'F' | 'G' | 'H' | 'I' | 'J' |
+ 'K' | 'L' | 'M' | 'N' | 'O' |
+ 'P' | 'Q' | 'R' | 'S' | 'T' |
+ 'U' | 'V' | 'W' | 'X' | 'Y' |
+ 'Z' | '$' |
+ 'a' | 'b' | 'c' | 'd' | 'e' |
+ 'f' | 'g' | 'h' | 'i' | 'j' |
+ 'k' | 'l' | 'm' | 'n' | 'o' |
+ 'p' | 'q' | 'r' | 's' | 't' |
+ 'u' | 'v' | 'w' | 'x' | 'y' |
+ 'z' |
+ '0' | '1' | '2' | '3' | '4' |
+ '5' | '6' | '7' | '8' | '9' =>
+ putChar(ch)
+ nextChar()
+
+ case '_' =>
+ putChar(ch)
+ nextChar()
+ getIdentRest()
+ return
+ case SU =>
+ finishNamed()
+ return
+ case _ =>
+ if (Character.isUnicodeIdentifierPart(ch)) {
+ putChar(ch)
+ nextChar()
+ } else {
+ finishNamed()
+ return
+ }
+ }
+ }
+ }
+
+ // Literals -----------------------------------------------------------------
+
+ /** read next character in character or string literal:
+ */
+ protected def getlitch() =
+ if (ch == '\\') {
+ nextChar()
+ if ('0' <= ch && ch <= '7') {
+ val leadch: Char = ch
+ var oct: Int = digit2int(ch, 8)
+ nextChar()
+ if ('0' <= ch && ch <= '7') {
+ oct = oct * 8 + digit2int(ch, 8)
+ nextChar()
+ if (leadch <= '3' && '0' <= ch && ch <= '7') {
+ oct = oct * 8 + digit2int(ch, 8)
+ nextChar()
+ }
+ }
+ putChar(oct.asInstanceOf[Char])
+ } else {
+ ch match {
+ case 'b' => putChar('\b')
+ case 't' => putChar('\t')
+ case 'n' => putChar('\n')
+ case 'f' => putChar('\f')
+ case 'r' => putChar('\r')
+ case '\"' => putChar('\"')
+ case '\'' => putChar('\'')
+ case '\\' => putChar('\\')
+ case _ =>
+ error("invalid escape character", charOffset - 1)
+ putChar(ch)
+ }
+ nextChar()
+ }
+ } else {
+ putChar(ch)
+ nextChar()
+ }
+
+ /** read fractional part and exponent of floating point number
+ * if one is present.
+ */
+ protected def getFraction(): Unit = {
+ token = DOUBLELIT
+ while ('0' <= ch && ch <= '9') {
+ putChar(ch)
+ nextChar()
+ }
+ if (ch == 'e' || ch == 'E') {
+ val lookahead = lookaheadReader
+ lookahead.nextChar()
+ if (lookahead.ch == '+' || lookahead.ch == '-') {
+ lookahead.nextChar()
+ }
+ if ('0' <= lookahead.ch && lookahead.ch <= '9') {
+ putChar(ch)
+ nextChar()
+ if (ch == '+' || ch == '-') {
+ putChar(ch)
+ nextChar()
+ }
+ while ('0' <= ch && ch <= '9') {
+ putChar(ch)
+ nextChar()
+ }
+ }
+ token = DOUBLELIT
+ }
+ if (ch == 'd' || ch == 'D') {
+ putChar(ch)
+ nextChar()
+ token = DOUBLELIT
+ } else if (ch == 'f' || ch == 'F') {
+ putChar(ch)
+ nextChar()
+ token = FLOATLIT
+ }
+ setStrVal()
+ }
+
+ /** read a number into name and set base
+ */
+ protected def getNumber(): Unit = {
+ while (digit2int(ch, if (base < 10) 10 else base) >= 0) {
+ putChar(ch)
+ nextChar()
+ }
+ token = INTLIT
+ if (base <= 10 && ch == '.') {
+ val lookahead = lookaheadReader
+ lookahead.nextChar()
+ lookahead.ch match {
+ case '0' | '1' | '2' | '3' | '4' | '5' | '6' | '7' |
+ '8' | '9' | 'd' | 'D' | 'e' | 'E' | 'f' | 'F' =>
+ putChar(ch)
+ nextChar()
+ return getFraction()
+ case _ =>
+ if (!isIdentifierStart(lookahead.ch)) {
+ putChar(ch)
+ nextChar()
+ return getFraction()
+ }
+ }
+ }
+ if (base <= 10 &&
+ (ch == 'e' || ch == 'E' ||
+ ch == 'f' || ch == 'F' ||
+ ch == 'd' || ch == 'D')) {
+ return getFraction()
+ }
+ setStrVal()
+ if (ch == 'l' || ch == 'L') {
+ nextChar()
+ token = LONGLIT
+ }
+ }
+
+ // Errors -----------------------------------------------------------------
+
+ override def toString() = token match {
+ case IDENTIFIER =>
+ "id(" + name + ")"
+ case CHARLIT =>
+ "char(" + intVal + ")"
+ case INTLIT =>
+ "int(" + intVal + ")"
+ case LONGLIT =>
+ "long(" + intVal + ")"
+ case FLOATLIT =>
+ "float(" + floatVal + ")"
+ case DOUBLELIT =>
+ "double(" + floatVal + ")"
+ case STRINGLIT =>
+ "string(" + name + ")"
+ case SEMI =>
+ ";"
+ case COMMA =>
+ ","
+ case _ =>
+ tokenString(token)
+ }
+
+ /* Initialization: read first char, then first token */
+ nextChar()
+ nextToken()
+ }
+
+ val (lastKeywordStart, kwArray) = buildKeywordArray(keywords)
+}
diff --git a/compiler/src/dotty/tools/dotc/parsing/JavaTokens.scala b/compiler/src/dotty/tools/dotc/parsing/JavaTokens.scala
new file mode 100644
index 000000000..9530e0516
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/parsing/JavaTokens.scala
@@ -0,0 +1,92 @@
+package dotty.tools
+package dotc
+package parsing
+
+import collection.immutable.BitSet
+
+object JavaTokens extends TokensCommon {
+ final val minToken = EMPTY
+ final val maxToken = DOUBLE
+
+ final val javaOnlyKeywords = tokenRange(INSTANCEOF, ASSERT)
+ final val sharedKeywords = BitSet( IF, FOR, ELSE, THIS, NULL, NEW, SUPER, ABSTRACT, FINAL, PRIVATE, PROTECTED,
+ OVERRIDE, EXTENDS, TRUE, FALSE, CLASS, IMPORT, PACKAGE, DO, THROW, TRY, CATCH, FINALLY, WHILE, RETURN )
+ final val primTypes = tokenRange(VOID, DOUBLE)
+ final val keywords = sharedKeywords | javaOnlyKeywords | primTypes
+
+ /** keywords */
+ final val INSTANCEOF = 101; enter(INSTANCEOF, "instanceof")
+ final val CONST = 102; enter(CONST, "const")
+
+ /** templates */
+ final val INTERFACE = 105; enter(INTERFACE, "interface")
+ final val ENUM = 106; enter(ENUM, "enum")
+ final val IMPLEMENTS = 107; enter(IMPLEMENTS, "implements")
+
+ /** modifiers */
+ final val PUBLIC = 110; enter(PUBLIC, "public")
+ final val DEFAULT = 111; enter(DEFAULT, "default")
+ final val STATIC = 112; enter(STATIC, "static")
+ final val TRANSIENT = 113; enter(TRANSIENT, "transient")
+ final val VOLATILE = 114; enter(VOLATILE, "volatile")
+ final val SYNCHRONIZED = 115; enter(SYNCHRONIZED, "synchronized")
+ final val NATIVE = 116; enter(NATIVE, "native")
+ final val STRICTFP = 117; enter(STRICTFP, "strictfp")
+ final val THROWS = 118; enter(THROWS, "throws")
+
+ /** control structures */
+ final val BREAK = 130; enter(BREAK, "break")
+ final val CONTINUE = 131; enter(CONTINUE, "continue")
+ final val GOTO = 132; enter(GOTO, "goto")
+ final val SWITCH = 133; enter(SWITCH, "switch")
+ final val ASSERT = 134; enter(ASSERT, "assert")
+
+ /** special symbols */
+ final val EQEQ = 140
+ final val BANGEQ = 141
+ final val LT = 142
+ final val GT = 143
+ final val LTEQ = 144
+ final val GTEQ = 145
+ final val BANG = 146
+ final val QMARK = 147
+ final val AMP = 148
+ final val BAR = 149
+ final val PLUS = 150
+ final val MINUS = 151
+ final val ASTERISK = 152
+ final val SLASH = 153
+ final val PERCENT = 154
+ final val HAT = 155
+ final val LTLT = 156
+ final val GTGT = 157
+ final val GTGTGT = 158
+ final val AMPAMP = 159
+ final val BARBAR = 160
+ final val PLUSPLUS = 161
+ final val MINUSMINUS = 162
+ final val TILDE = 163
+ final val DOTDOTDOT = 164
+ final val AMPEQ = 165
+ final val BAREQ = 166
+ final val PLUSEQ = 167
+ final val MINUSEQ = 168
+ final val ASTERISKEQ = 169
+ final val SLASHEQ = 170
+ final val PERCENTEQ = 171
+ final val HATEQ = 172
+ final val LTLTEQ = 173
+ final val GTGTEQ = 174
+ final val GTGTGTEQ = 175
+
+ /** primitive types */
+ final val VOID = 180; enter(VOID, "void")
+ final val BOOLEAN = 181; enter(BOOLEAN, "boolean")
+ final val BYTE = 182; enter(BYTE, "byte")
+ final val SHORT = 183; enter(SHORT, "short")
+ final val CHAR = 184; enter(CHAR, "char")
+ final val INT = 185; enter(INT, "int")
+ final val LONG = 186; enter(LONG, "long")
+ final val FLOAT = 187; enter(FLOAT, "float")
+ final val DOUBLE = 188; enter(DOUBLE, "double")
+}
diff --git a/compiler/src/dotty/tools/dotc/parsing/MarkupParserCommon.scala b/compiler/src/dotty/tools/dotc/parsing/MarkupParserCommon.scala
new file mode 100644
index 000000000..ce2c41797
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/parsing/MarkupParserCommon.scala
@@ -0,0 +1,257 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+package dotty.tools.dotc
+package parsing
+
+import Utility._
+import scala.reflect.internal.Chars.SU
+
+
+
+/** This is not a public trait - it contains common code shared
+ * between the library level XML parser and the compiler's.
+ * All members should be accessed through those.
+ */
+private[dotty] trait MarkupParserCommon {
+ protected def unreachable = scala.sys.error("Cannot be reached.")
+
+ // type HandleType // MarkupHandler, SymbolicXMLBuilder
+ type InputType // Source, CharArrayReader
+ type PositionType // Int, Position
+ type ElementType // NodeSeq, Tree
+ type NamespaceType // NamespaceBinding, Any
+ type AttributesType // (MetaData, NamespaceBinding), mutable.Map[String, Tree]
+
+ def mkAttributes(name: String, pscope: NamespaceType): AttributesType
+ def mkProcInstr(position: PositionType, name: String, text: String): ElementType
+
+ /** parse a start or empty tag.
+ * [40] STag ::= '<' Name { S Attribute } [S]
+ * [44] EmptyElemTag ::= '<' Name { S Attribute } [S]
+ */
+ protected def xTag(pscope: NamespaceType): (String, AttributesType) = {
+ val name = xName
+ xSpaceOpt
+
+ (name, mkAttributes(name, pscope))
+ }
+
+  /** '<?' ProcInstr ::= Name [S ({Char} - ({Char}'?>' {Char}))] '?>'
+ *
+ * see [15]
+ */
+ def xProcInstr: ElementType = {
+ val n = xName
+ xSpaceOpt
+ xTakeUntil(mkProcInstr(_, n, _), () => tmppos, "?>")
+ }
+
+ /** attribute value, terminated by either `'` or `"`. value may not contain `<`.
+ @param endCh either `'` or `"`
+ */
+ def xAttributeValue(endCh: Char): String = {
+ val buf = new StringBuilder
+ while (ch != endCh) {
+ // well-formedness constraint
+ if (ch == '<') return errorAndResult("'<' not allowed in attrib value", "")
+ else if (ch == SU) truncatedError("")
+ else buf append ch_returning_nextch
+ }
+ ch_returning_nextch
+ // @todo: normalize attribute value
+ buf.toString
+ }
+
+ def xAttributeValue(): String = {
+ val str = xAttributeValue(ch_returning_nextch)
+ // well-formedness constraint
+ normalizeAttributeValue(str)
+ }
+
+ private def takeUntilChar(it: Iterator[Char], end: Char): String = {
+ val buf = new StringBuilder
+ while (it.hasNext) it.next match {
+ case `end` => return buf.toString
+ case ch => buf append ch
+ }
+ scala.sys.error("Expected '%s'".format(end))
+ }
+
+ /** [42] '<' xmlEndTag ::= '<' '/' Name S? '>'
+ */
+ def xEndTag(startName: String): Unit = {
+ xToken('/')
+ if (xName != startName)
+ errorNoEnd(startName)
+
+ xSpaceOpt
+ xToken('>')
+ }
+
+ /** actually, Name ::= (Letter | '_' | ':') (NameChar)* but starting with ':' cannot happen
+ * Name ::= (Letter | '_') (NameChar)*
+ *
+ * see [5] of XML 1.0 specification
+ *
+ * pre-condition: ch != ':' // assured by definition of XMLSTART token
+   *  post-condition: the name neither starts nor ends with ':'
+ */
+ def xName: String = {
+ if (ch == SU)
+ truncatedError("")
+ else if (!isNameStart(ch))
+ return errorAndResult("name expected, but char '%s' cannot start a name" format ch, "")
+
+ val buf = new StringBuilder
+
+ do buf append ch_returning_nextch
+ while (isNameChar(ch))
+
+ if (buf.last == ':') {
+ reportSyntaxError( "name cannot end in ':'" )
+ buf.toString dropRight 1
+ }
+ else buf.toString
+ }
+
+ private def attr_unescape(s: String) = s match {
+ case "lt" => "<"
+ case "gt" => ">"
+ case "amp" => "&"
+ case "apos" => "'"
+ case "quot" => "\""
+ case "quote" => "\""
+ case _ => "&" + s + ";"
+ }
+
+ /** Replaces only character references right now.
+ * see spec 3.3.3
+ */
+ private def normalizeAttributeValue(attval: String): String = {
+ val buf = new StringBuilder
+ val it = attval.iterator.buffered
+
+ while (it.hasNext) buf append (it.next match {
+ case ' ' | '\t' | '\n' | '\r' => " "
+ case '&' if it.head == '#' => it.next ; xCharRef(it)
+ case '&' => attr_unescape(takeUntilChar(it, ';'))
+ case c => c
+ })
+
+ buf.toString
+ }
+
+ /** CharRef ::= "&#" '0'..'9' {'0'..'9'} ";"
+ * | "&#x" '0'..'9'|'A'..'F'|'a'..'f' { hexdigit } ";"
+ *
+ * see [66]
+ */
+ def xCharRef(ch: () => Char, nextch: () => Unit): String =
+ Utility.parseCharRef(ch, nextch, reportSyntaxError _, truncatedError _)
+
+ def xCharRef(it: Iterator[Char]): String = {
+ var c = it.next
+ Utility.parseCharRef(() => c, () => { c = it.next }, reportSyntaxError _, truncatedError _)
+ }
+
+ def xCharRef: String = xCharRef(() => ch, () => nextch)
+
+ /** Create a lookahead reader which does not influence the input */
+ def lookahead(): BufferedIterator[Char]
+
+ /** The library and compiler parsers had the interesting distinction of
+ * different behavior for nextch (a function for which there are a total
+ * of two plausible behaviors, so we know the design space was fully
+ * explored.) One of them returned the value of nextch before the increment
+ * and one of them the new value. So to unify code we have to at least
+ * temporarily abstract over the nextchs.
+ */
+ def ch: Char
+ def nextch(): Unit
+ protected def ch_returning_nextch: Char
+ def eof: Boolean
+
+ // def handle: HandleType
+ var tmppos: PositionType
+
+ def xHandleError(that: Char, msg: String): Unit
+ def reportSyntaxError(str: String): Unit
+ def reportSyntaxError(pos: Int, str: String): Unit
+
+ def truncatedError(msg: String): Nothing
+ def errorNoEnd(tag: String): Nothing
+
+ protected def errorAndResult[T](msg: String, x: T): T = {
+ reportSyntaxError(msg)
+ x
+ }
+
+ def xToken(that: Char): Unit = {
+ if (ch == that) nextch
+ else xHandleError(that, "'%s' expected instead of '%s'".format(that, ch))
+ }
+ def xToken(that: Seq[Char]): Unit = { that foreach xToken }
+
+ /** scan [S] '=' [S]*/
+ def xEQ() = { xSpaceOpt; xToken('='); xSpaceOpt }
+
+ /** skip optional space S? */
+ def xSpaceOpt() = while (isSpace(ch) && !eof) nextch
+
+ /** scan [3] S ::= (#x20 | #x9 | #xD | #xA)+ */
+ def xSpace() =
+ if (isSpace(ch)) { nextch; xSpaceOpt }
+ else xHandleError(ch, "whitespace expected")
+
+ /** Apply a function and return the passed value */
+ def returning[T](x: T)(f: T => Unit): T = { f(x); x }
+
+ /** Execute body with a variable saved and restored after execution */
+ def saving[A, B](getter: A, setter: A => Unit)(body: => B): B = {
+ val saved = getter
+ try body
+ finally setter(saved)
+ }
+
+ /** Take characters from input stream until given String "until"
+ * is seen. Once seen, the accumulated characters are passed
+ * along with the current Position to the supplied handler function.
+ */
+ protected def xTakeUntil[T](
+ handler: (PositionType, String) => T,
+ positioner: () => PositionType,
+ until: String): T =
+ {
+ val sb = new StringBuilder
+ val head = until.head
+ val rest = until.tail
+
+ while (true) {
+ if (ch == head && peek(rest))
+ return handler(positioner(), sb.toString)
+ else if (ch == SU)
+ truncatedError("") // throws TruncatedXMLControl in compiler
+
+ sb append ch
+ nextch
+ }
+ unreachable
+ }
+
+ /** Create a non-destructive lookahead reader and see if the head
+ * of the input would match the given String. If yes, return true
+ * and drop the entire String from input; if no, return false
+ * and leave input unchanged.
+ */
+ private def peek(lookingFor: String): Boolean =
+ (lookahead() take lookingFor.length sameElements lookingFor.iterator) && {
+ // drop the chars from the real reader (all lookahead + orig)
+ (0 to lookingFor.length) foreach (_ => nextch)
+ true
+ }
+}
diff --git a/compiler/src/dotty/tools/dotc/parsing/MarkupParsers.scala b/compiler/src/dotty/tools/dotc/parsing/MarkupParsers.scala
new file mode 100644
index 000000000..f648b9e2c
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/parsing/MarkupParsers.scala
@@ -0,0 +1,466 @@
+package dotty.tools
+package dotc
+package parsing
+
+import scala.collection.mutable
+import mutable.{ Buffer, ArrayBuffer, ListBuffer }
+import scala.util.control.ControlThrowable
+import scala.reflect.internal.Chars.SU
+import Parsers._
+import util.Positions._
+import core._
+import Constants._
+import Utility._
+
+
+// XXX/Note: many/most of the functions in here are almost direct cut and pastes
+// from another file - scala.xml.parsing.MarkupParser, it looks like.
+// (It was like that when I got here.) They used to be commented "[Duplicate]" but
+// since approximately all of them were, I snipped it as noise. As far as I can
+// tell this wasn't for any particularly good reason, but slightly different
+// compiler and library parser interfaces meant it would take some setup.
+//
+// I rewrote most of these, but not as yet the library versions: so if you are
+// tempted to touch any of these, please be aware of that situation and try not
+// to let it get any worse. -- paulp
+
+/** This trait ...
+ *
+ * @author Burak Emir
+ * @version 1.0
+ */
+object MarkupParsers {
+
+ import ast.untpd._
+
+ case object MissingEndTagControl extends ControlThrowable {
+ override def getMessage = "start tag was here: "
+ }
+
+ case object ConfusedAboutBracesControl extends ControlThrowable {
+ override def getMessage = " I encountered a '}' where I didn't expect one, maybe this tag isn't closed <"
+ }
+
+ case object TruncatedXMLControl extends ControlThrowable {
+ override def getMessage = "input ended while parsing XML"
+ }
+
+ class MarkupParser(parser: Parser, final val preserveWS: Boolean) extends MarkupParserCommon {
+
+ import Tokens.{ LBRACE, RBRACE }
+
+ type PositionType = Position
+ type InputType = CharArrayReader
+ type ElementType = Tree
+ type AttributesType = mutable.Map[String, Tree]
+ type NamespaceType = Any // namespaces ignored
+
+ def mkAttributes(name: String, other: NamespaceType): AttributesType = xAttributes
+
+ val eof = false
+
+ def truncatedError(msg: String): Nothing = throw TruncatedXMLControl
+ def xHandleError(that: Char, msg: String) =
+ if (ch == SU) throw TruncatedXMLControl
+ else reportSyntaxError(msg)
+
+ var input : CharArrayReader = _
+ def lookahead(): BufferedIterator[Char] =
+ (input.buf drop input.charOffset).iterator.buffered
+
+ import parser.{ symbXMLBuilder => handle }
+
+ def curOffset : Int = input.charOffset - 1
+ var tmppos : Position = NoPosition
+ def ch = input.ch
+    /** this method assigns the next character to ch and advances in the input */
+ def nextch(): Unit = { input.nextChar() }
+
+ protected def ch_returning_nextch: Char = {
+ val result = ch; input.nextChar(); result
+ }
+
+ def mkProcInstr(position: Position, name: String, text: String): ElementType =
+ parser.symbXMLBuilder.procInstr(position, name, text)
+
+ var xEmbeddedBlock = false
+
+ private var debugLastStartElement = new mutable.Stack[(Int, String)]
+ private def debugLastPos = debugLastStartElement.top._1
+ private def debugLastElem = debugLastStartElement.top._2
+
+ private def errorBraces() = {
+ reportSyntaxError("in XML content, please use '}}' to express '}'")
+ throw ConfusedAboutBracesControl
+ }
+ def errorNoEnd(tag: String) = {
+ reportSyntaxError("expected closing tag of " + tag)
+ throw MissingEndTagControl
+ }
+
+ /** checks whether next character starts a Scala block, if yes, skip it.
+ * @return true if next character starts a scala block
+ */
+ def xCheckEmbeddedBlock: Boolean = {
+      // attention: side effect, used in xText
+ xEmbeddedBlock = (ch == '{') && { nextch; (ch != '{') }
+ xEmbeddedBlock
+ }
+
+ /** parse attribute and add it to listmap
+ * [41] Attributes ::= { S Name Eq AttValue }
+ * AttValue ::= `'` { _ } `'`
+ * | `"` { _ } `"`
+ * | `{` scalablock `}`
+ */
+ def xAttributes = {
+ val aMap = mutable.LinkedHashMap[String, Tree]()
+
+ while (isNameStart(ch)) {
+ val start = curOffset
+ val key = xName
+ xEQ
+ val delim = ch
+ val mid = curOffset
+ val value: Tree = ch match {
+ case '"' | '\'' =>
+ val tmp = xAttributeValue(ch_returning_nextch)
+
+ try handle.parseAttribute(Position(start, curOffset, mid), tmp)
+ catch {
+ case e: RuntimeException =>
+ errorAndResult("error parsing attribute value", parser.errorTermTree)
+ }
+
+ case '{' =>
+ nextch
+ xEmbeddedExpr
+ case SU =>
+ throw TruncatedXMLControl
+ case _ =>
+ errorAndResult("' or \" delimited attribute value or '{' scala-expr '}' expected", Literal(Constant("<syntax-error>")))
+ }
+ // well-formedness constraint: unique attribute names
+ if (aMap contains key)
+ reportSyntaxError("attribute %s may only be defined once" format key)
+
+ aMap(key) = value
+ if (ch != '/' && ch != '>')
+ xSpace
+ }
+ aMap
+ }
+
+ /** '<! CharData ::= [CDATA[ ( {char} - {char}"]]>"{char} ) ']]>'
+ *
+ * see [15]
+ */
+ def xCharData: Tree = {
+ val start = curOffset
+ xToken("[CDATA[")
+ val mid = curOffset
+ xTakeUntil(handle.charData, () => Position(start, curOffset, mid), "]]>")
+ }
+
+ def xUnparsed: Tree = {
+ val start = curOffset
+ xTakeUntil(handle.unparsed, () => Position(start, curOffset, start), "</xml:unparsed>")
+ }
+
+ /** Comment ::= '<!--' ((Char - '-') | ('-' (Char - '-')))* '-->'
+ *
+ * see [15]
+ */
+ def xComment: Tree = {
+ val start = curOffset - 2 // Rewinding to include "<!"
+ xToken("--")
+ xTakeUntil(handle.comment, () => Position(start, curOffset, start), "-->")
+ }
+
+ def appendText(pos: Position, ts: Buffer[Tree], txt: String): Unit = {
+ def append(t: String) = ts append handle.text(pos, t)
+
+ if (preserveWS) append(txt)
+ else {
+ val sb = new StringBuilder()
+
+ txt foreach { c =>
+ if (!isSpace(c)) sb append c
+ else if (sb.isEmpty || !isSpace(sb.last)) sb append ' '
+ }
+
+ val trimmed = sb.toString.trim
+ if (!trimmed.isEmpty) append(trimmed)
+ }
+ }
+
+ /** adds entity/character to ts as side-effect
+ * @precond ch == '&'
+ */
+ def content_AMP(ts: ArrayBuffer[Tree]): Unit = {
+ nextch
+ val toAppend = ch match {
+ case '#' => // CharacterRef
+ nextch
+ val theChar = handle.text(tmppos, xCharRef)
+ xToken(';')
+ theChar
+ case _ => // EntityRef
+ val n = xName
+ xToken(';')
+ handle.entityRef(tmppos, n)
+ }
+
+ ts append toAppend
+ }
+
+ /**
+ * @precond ch == '{'
+ * @postcond: xEmbeddedBlock == false!
+ */
+ def content_BRACE(p: Position, ts: ArrayBuffer[Tree]): Unit =
+ if (xCheckEmbeddedBlock) ts append xEmbeddedExpr
+ else appendText(p, ts, xText)
+
+ /** Returns true if it encounters an end tag (without consuming it),
+ * appends trees to ts as side-effect.
+ *
+ * @param ts ...
+ * @return ...
+ */
+ private def content_LT(ts: ArrayBuffer[Tree]): Boolean = {
+ if (ch == '/')
+ return true // end tag
+
+ val toAppend = ch match {
+ case '!' => nextch ; if (ch =='[') xCharData else xComment // CDATA or Comment
+ case '?' => nextch ; xProcInstr // PI
+ case _ => element // child node
+ }
+
+ ts append toAppend
+ false
+ }
+
+ def content: Buffer[Tree] = {
+ val ts = new ArrayBuffer[Tree]
+ while (true) {
+ if (xEmbeddedBlock)
+ ts append xEmbeddedExpr
+ else {
+ tmppos = Position(curOffset)
+ ch match {
+ // end tag, cdata, comment, pi or child node
+ case '<' => nextch ; if (content_LT(ts)) return ts
+ // either the character '{' or an embedded scala block }
+ case '{' => content_BRACE(tmppos, ts) // }
+ // EntityRef or CharRef
+ case '&' => content_AMP(ts)
+ case SU => return ts
+ // text content - here xEmbeddedBlock might be true
+ case _ => appendText(tmppos, ts, xText)
+ }
+ }
+ }
+ unreachable
+ }
+
+ /** '<' element ::= xmlTag1 '>' { xmlExpr | '{' simpleExpr '}' } ETag
+ * | xmlTag1 '/' '>'
+ */
+ def element: Tree = {
+ val start = curOffset
+ val (qname, attrMap) = xTag(())
+ if (ch == '/') { // empty element
+ xToken("/>")
+ handle.element(Position(start, curOffset, start), qname, attrMap, true, new ListBuffer[Tree])
+ }
+ else { // handle content
+ xToken('>')
+ if (qname == "xml:unparsed")
+ return xUnparsed
+
+ debugLastStartElement.push((start, qname))
+ val ts = content
+ xEndTag(qname)
+ debugLastStartElement.pop
+ val pos = Position(start, curOffset, start)
+ qname match {
+ case "xml:group" => handle.group(pos, ts)
+ case _ => handle.element(pos, qname, attrMap, false, ts)
+ }
+ }
+ }
+
+ /** parse character data.
+ * precondition: xEmbeddedBlock == false (we are not in a scala block)
+ */
+ private def xText: String = {
+ assert(!xEmbeddedBlock, "internal error: encountered embedded block")
+ val buf = new StringBuilder
+ def done = buf.toString
+
+ while (ch != SU) {
+ if (ch == '}') {
+ if (charComingAfter(nextch) == '}') nextch
+ else errorBraces()
+ }
+
+ buf append ch
+ nextch
+ if (xCheckEmbeddedBlock || ch == '<' || ch == '&')
+ return done
+ }
+ done
+ }
+
+ /** Some try/catch/finally logic used by xLiteral and xLiteralPattern. */
+ private def xLiteralCommon(f: () => Tree, ifTruncated: String => Unit): Tree = {
+ try return f()
+ catch {
+ case c @ TruncatedXMLControl =>
+ ifTruncated(c.getMessage)
+ case c @ (MissingEndTagControl | ConfusedAboutBracesControl) =>
+ parser.syntaxError(c.getMessage + debugLastElem + ">", debugLastPos)
+ case _: ArrayIndexOutOfBoundsException =>
+ parser.syntaxError("missing end tag in XML literal for <%s>" format debugLastElem, debugLastPos)
+ }
+ finally parser.in resume Tokens.XMLSTART
+
+ parser.errorTermTree
+ }
+
+ /** Use a lookahead parser to run speculative body, and return the first char afterward. */
+ private def charComingAfter(body: => Unit): Char = {
+ try {
+ input = input.lookaheadReader
+ body
+ ch
+ }
+ finally input = parser.in
+ }
+
+ /** xLiteral = element { element }
+ * @return Scala representation of this xml literal
+ */
+ def xLiteral: Tree = xLiteralCommon(
+ () => {
+ input = parser.in
+ handle.isPattern = false
+
+ val ts = new ArrayBuffer[Tree]
+ val start = curOffset
+ tmppos = Position(curOffset) // Iuli: added this line, as it seems content_LT uses tmppos when creating trees
+ content_LT(ts)
+
+ // parse more XML ?
+ if (charComingAfter(xSpaceOpt) == '<') {
+ xSpaceOpt
+ while (ch == '<') {
+ nextch
+ ts append element
+ xSpaceOpt
+ }
+ handle.makeXMLseq(Position(start, curOffset, start), ts)
+ }
+ else {
+ assert(ts.length == 1)
+ ts(0)
+ }
+ },
+ msg => parser.incompleteInputError(msg)
+ )
+
+ /** @see xmlPattern. resynchronizes after successful parse
+ * @return this xml pattern
+ */
+ def xLiteralPattern: Tree = xLiteralCommon(
+ () => {
+ input = parser.in
+ saving[Boolean, Tree](handle.isPattern, handle.isPattern = _) {
+ handle.isPattern = true
+ val tree = xPattern
+ xSpaceOpt
+ tree
+ }
+ },
+ msg => parser.syntaxError(msg, curOffset)
+ )
+
+ def escapeToScala[A](op: => A, kind: String) = {
+ xEmbeddedBlock = false
+ val res = saving[List[Int], A](parser.in.sepRegions, parser.in.sepRegions = _) {
+ parser.in resume LBRACE
+ op
+ }
+ if (parser.in.token != RBRACE)
+ reportSyntaxError(" expected end of Scala " + kind)
+
+ res
+ }
+
+ def xEmbeddedExpr: Tree = escapeToScala(parser.block(), "block")
+
+ /** xScalaPatterns ::= patterns
+ */
+ def xScalaPatterns: List[Tree] = escapeToScala(parser.patterns(), "pattern")
+
+ def reportSyntaxError(pos: Int, str: String) = parser.syntaxError(str, pos)
+ def reportSyntaxError(str: String): Unit = {
+ reportSyntaxError(curOffset, "in XML literal: " + str)
+ nextch()
+ }
+
+ /** '<' xPattern ::= Name [S] { xmlPattern | '{' pattern3 '}' } ETag
+ * | Name [S] '/' '>'
+ */
+ def xPattern: Tree = {
+ var start = curOffset
+ val qname = xName
+ debugLastStartElement.push((start, qname))
+ xSpaceOpt
+
+ val ts = new ArrayBuffer[Tree]
+
+ val isEmptyTag = ch == '/'
+ if (isEmptyTag) nextch()
+ xToken('>')
+
+ if (!isEmptyTag) {
+ // recurses until it hits a termination condition, then returns
+ def doPattern: Boolean = {
+ val start1 = curOffset
+ if (xEmbeddedBlock) ts ++= xScalaPatterns
+ else ch match {
+ case '<' => // tag
+ nextch
+ if (ch != '/') ts append xPattern // child
+ else return false // terminate
+
+ case '{' => // embedded Scala patterns
+ while (ch == '{') {
+ nextch
+ ts ++= xScalaPatterns
+ }
+ assert(!xEmbeddedBlock, "problem with embedded block")
+
+ case SU =>
+ throw TruncatedXMLControl
+
+ case _ => // text
+ appendText(Position(start1, curOffset, start1), ts, xText)
+ // here xEmbeddedBlock might be true:
+ // if (xEmbeddedBlock) throw new ApplicationError("after:" + text); // assert
+ }
+ true
+ }
+
+ while (doPattern) { } // call until false
+ xEndTag(qname)
+ debugLastStartElement.pop
+ }
+
+ handle.makeXMLpat(Position(start, curOffset, start), qname, ts)
+ }
+ } /* class MarkupParser */
+}
diff --git a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala
new file mode 100644
index 000000000..fa0576c7a
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala
@@ -0,0 +1,2309 @@
+package dotty.tools
+package dotc
+package parsing
+
+import scala.collection.mutable.ListBuffer
+import scala.collection.immutable.BitSet
+import util.{ SourceFile, SourcePosition }
+import Tokens._
+import Scanners._
+import MarkupParsers._
+import core._
+import Flags._
+import Contexts._
+import Names._
+import ast.Positioned
+import ast.Trees._
+import Decorators._
+import StdNames._
+import util.Positions._
+import Constants._
+import ScriptParsers._
+import Comments._
+import scala.annotation.{tailrec, switch}
+import util.DotClass
+import rewrite.Rewrites.patch
+
+object Parsers {
+
+ import ast.untpd._
+ import reporting.diagnostic.Message
+ import reporting.diagnostic.messages._
+
+  /** An entry of the operator stack: the left operand parsed so far, the
+   *  pending infix operator, and the operator's source offset.
+   */
+  case class OpInfo(operand: Tree, operator: Name, offset: Offset)
+
+  /** Per-kind balance counters for open parens/brackets/braces, indexed by
+   *  paren token relative to `firstParen`. Incremented on open and
+   *  decremented on close (see `enclosed` and `skip`).
+   */
+  class ParensCounters {
+    // The array reference is never reassigned; only its elements are mutated.
+    private val parCounts = new Array[Int](lastParen - firstParen)
+
+    /** Current balance for paren token `tok`. */
+    def count(tok: Token) = parCounts(tok - firstParen)
+    /** Adjust the balance for `tok` by `delta`. */
+    def change(tok: Token, delta: Int) = parCounts(tok - firstParen) += delta
+    /** True iff no paren kind currently has a positive balance. */
+    def nonePositive: Boolean = parCounts forall (_ <= 0)
+  }
+
+  /** Syntactic context of the expression being parsed; varies parsing rules. */
+  @sharable object Location extends Enumeration {
+    val InParens, InBlock, InPattern, ElseWhere = Value
+  }
+
+  /** Kind of definition that owns a parameter clause being parsed. */
+  @sharable object ParamOwner extends Enumeration {
+    val Class, Type, TypeParam, Def = Value
+  }
+
+  /** The parse starting point depends on whether the source file is self-contained:
+   *  if not, the AST will be supplemented.
+   */
+  def parser(source: SourceFile)(implicit ctx: Context) =
+    if (source.isSelfContained) new ScriptParser(source)
+    else new Parser(source)
+
+  /** Functionality shared by `Parser` and `ScriptParser`: tree positioning
+   *  and syntax-error reporting against `source`.
+   */
+  abstract class ParserCommon(val source: SourceFile)(implicit ctx: Context) extends DotClass {
+
+    val in: ScannerCommon
+
+    /* ------------- POSITIONS ------------------------------------------- */
+
+    /** Positions tree.
+     *  If `t` does not have a position yet, set its position to the given one.
+     */
+    def atPos[T <: Positioned](pos: Position)(t: T): T =
+      if (t.pos.isSourceDerived) t else t.withPos(pos)
+
+    def atPos[T <: Positioned](start: Offset, point: Offset, end: Offset)(t: T): T =
+      atPos(Position(start, end, point))(t)
+
+    /** If the last read offset is strictly greater than `start`, position tree
+     *  to position spanning from `start` to last read offset, with given point.
+     *  If the last offset is less than or equal to start, the tree `t` did not
+     *  consume any source for its construction. In this case, don't position it yet,
+     *  but wait for its position to be determined by `setChildPositions` when the
+     *  parent node is positioned.
+     */
+    def atPos[T <: Positioned](start: Offset, point: Offset)(t: T): T =
+      if (in.lastOffset > start) atPos(start, point, in.lastOffset)(t) else t
+
+    def atPos[T <: Positioned](start: Offset)(t: T): T =
+      atPos(start, start)(t)
+
+    /** Start offset of the current token's name; skips the opening backquote
+     *  of a backquoted identifier.
+     */
+    def nameStart: Offset =
+      if (in.token == BACKQUOTED_IDENT) in.offset + 1 else in.offset
+
+    /** The source position at `off` (defaults to the current token's offset). */
+    def sourcePos(off: Int = in.offset): SourcePosition =
+      source atPos Position(off)
+
+
+    /* ------------- ERROR HANDLING ------------------------------------------- */
+    /** The offset where the last syntax error was reported, or if a skip to a
+     *  safepoint occurred afterwards, the offset of the safe point.
+     */
+    protected var lastErrorOffset : Int = -1
+
+    /** Issue an error at given offset if beyond last error offset
+     *  and update lastErrorOffset.
+     */
+    def syntaxError(msg: => Message, offset: Int = in.offset): Unit =
+      if (offset > lastErrorOffset) {
+        syntaxError(msg, Position(offset))
+        lastErrorOffset = in.offset
+      }
+
+    /** Unconditionally issue an error at given position, without
+     *  updating lastErrorOffset.
+     */
+    def syntaxError(msg: => Message, pos: Position): Unit =
+      ctx.error(msg, source atPos pos)
+
+  }
+
+ class Parser(source: SourceFile)(implicit ctx: Context) extends ParserCommon(source) {
+
+ val in: Scanner = new Scanner(source)
+
+ val openParens = new ParensCounters
+
+    /** This is the general parse entry point.
+     *  Overridden by ScriptParser
+     */
+    def parse(): Tree = {
+      val t = compilationUnit()
+      accept(EOF)
+      t
+    }
+
+/* -------------- TOKEN CLASSES ------------------------------------------- */
+
+    // Classification predicates over the current token `in.token`.
+    /** Is the current token an identifier (plain or backquoted)? */
+    def isIdent = in.token == IDENTIFIER || in.token == BACKQUOTED_IDENT
+    /** Is the current token a plain identifier with exactly the name `name`? */
+    def isIdent(name: Name) = in.token == IDENTIFIER && in.name == name
+    def isSimpleLiteral = simpleLiteralTokens contains in.token
+    def isLiteral = literalTokens contains in.token
+    def isNumericLit = numericLitTokens contains in.token
+    def isModifier = modifierTokens contains in.token
+    def isExprIntro = canStartExpressionTokens contains in.token
+    def isTemplateIntro = templateIntroTokens contains in.token
+    def isDclIntro = dclIntroTokens contains in.token
+    def isStatSeqEnd = in.token == RBRACE || in.token == EOF
+    def mustStartStat = mustStartStatTokens contains in.token
+
+    /** Can the current token start a definition, given the extra allowed
+     *  modifier tokens in `allowedMods`?
+     */
+    def isDefIntro(allowedMods: BitSet) =
+      in.token == AT || (allowedMods contains in.token) || (defIntroTokens contains in.token)
+
+    /** Is the current token a statement separator (newline(s) or semicolon)? */
+    def isStatSep: Boolean =
+      in.token == NEWLINE || in.token == NEWLINES || in.token == SEMI
+
+/* ------------- ERROR HANDLING ------------------------------------------- */
+
+    /** The offset of the last time when a statement on a new line was definitely
+     *  encountered in the current scope or an outer scope.
+     */
+    private var lastStatOffset = -1
+
+    /** Record the current offset as a definite statement start, provided the
+     *  current token must begin a statement and we are just after a line end.
+     */
+    def setLastStatOffset() =
+      if (mustStartStat && in.isAfterLineEnd)
+        lastStatOffset = in.offset
+
+    /** Is offset1 less or equally indented than offset2?
+     *  Holds iff the characters between the preceding end-of-line and `offset1`
+     *  form a prefix of the characters between the preceding end-of-line and
+     *  `offset2`.
+     */
+    def isLeqIndented(offset1: Int, offset2: Int): Boolean = {
+      var idx1 = source.startOfLine(offset1)
+      var idx2 = source.startOfLine(offset2)
+      while (idx1 != offset1 && idx2 < offset2 && source(idx1) == source(idx2)) {
+        idx1 += 1
+        idx2 += 1
+      }
+      idx1 == offset1
+    }
+
+    /** Skip on error to next safe point.
+     *  Safe points are:
+     *  - Closing braces, provided they match an opening brace before the error point.
+     *  - Closing parens and brackets, provided they match an opening paren or bracket
+     *    before the error point and there are no intervening other kinds of parens.
+     *  - Semicolons and newlines, provided there are no intervening braces.
+     *  - Definite statement starts on new lines, provided they are not more indented
+     *    than the last known statement start before the error point.
+     */
+    protected def skip(): Unit = {
+      val skippedParens = new ParensCounters  // parens opened while skipping, not yet closed
+      while (true) {
+        (in.token: @switch) match {
+          case EOF =>
+            return
+          case SEMI | NEWLINE | NEWLINES =>
+            if (skippedParens.count(LBRACE) == 0) return
+          case RBRACE =>
+            if (openParens.count(LBRACE) > 0 && skippedParens.count(LBRACE) == 0)
+              return
+            skippedParens.change(LBRACE, -1)
+          case RPAREN =>
+            if (openParens.count(LPAREN) > 0 && skippedParens.nonePositive)
+              return
+            skippedParens.change(LPAREN, -1)
+          case RBRACKET =>
+            if (openParens.count(LBRACKET) > 0 && skippedParens.nonePositive)
+              return
+            skippedParens.change(LBRACKET, -1)
+          case LBRACE =>
+            skippedParens.change(LBRACE, + 1)
+          case LPAREN =>
+            skippedParens.change(LPAREN, + 1)
+          case LBRACKET=>
+            skippedParens.change(LBRACKET, + 1)
+          case _ =>
+            // definite statement start on a new line, no deeper than the last known one
+            if (mustStartStat &&
+                in.isAfterLineEnd() &&
+                isLeqIndented(in.offset, lastStatOffset max 0))
+              return
+        }
+        in.nextToken()
+      }
+    }
+
+    /** Issue a warning at the given source position. */
+    def warning(msg: => Message, sourcePos: SourcePosition) =
+      ctx.warning(msg, sourcePos)
+
+    /** Issue a warning at the given offset (defaults to the current token). */
+    def warning(msg: => Message, offset: Int = in.offset) =
+      ctx.warning(msg, source atPos Position(offset))
+
+    /** Issue a deprecation warning at the given offset. */
+    def deprecationWarning(msg: => Message, offset: Int = in.offset) =
+      ctx.deprecationWarning(msg, source atPos Position(offset))
+
+    /** Issue an error at current offset that input is incomplete */
+    def incompleteInputError(msg: => Message) =
+      ctx.incompleteInputError(msg, source atPos Position(in.offset))
+
+    /** If at end of file, issue an incompleteInputError.
+     *  Otherwise issue a syntax error and skip to next safe point.
+     */
+    def syntaxErrorOrIncomplete(msg: => Message) =
+      if (in.token == EOF) incompleteInputError(msg)
+      else {
+        syntaxError(msg)
+        skip()
+        lastErrorOffset = in.offset
+      } // DEBUG
+
+    /** Error message for a missing token of class `token`. */
+    private def expectedMsg(token: Int): String =
+      expectedMessage(showToken(token))
+    private def expectedMessage(what: String): String =
+      s"$what expected but ${showToken(in.token)} found"
+
+    /** Consume one token of the specified type, or
+     *  signal an error if it is not there.
+     *
+     *  @return The offset at the start of the token to accept
+     */
+    def accept(token: Int): Int = {
+      val offset = in.offset
+      if (in.token != token) {
+        syntaxErrorOrIncomplete(expectedMsg(token))
+      }
+      // re-check: error recovery via skip() may have positioned us on `token`
+      if (in.token == token) in.nextToken()
+      offset
+    }
+
+ /** semi = nl {nl} | `;'
+ * nl = `\n' // where allowed
+ */
+ def acceptStatSep(): Unit = in.token match {
+ case NEWLINE | NEWLINES => in.nextToken()
+ case _ => accept(SEMI)
+ }
+
+ def acceptStatSepUnlessAtEnd(altEnd: Token = EOF) =
+ if (!isStatSeqEnd && in.token != altEnd) acceptStatSep()
+
+ def errorTermTree = atPos(in.offset) { Literal(Constant(null)) }
+
+ private var inFunReturnType = false
+ private def fromWithinReturnType[T](body: => T): T = {
+ val saved = inFunReturnType
+ try {
+ inFunReturnType = true
+ body
+ } finally inFunReturnType = saved
+ }
+
+ def migrationWarningOrError(msg: String, offset: Int = in.offset) =
+ if (in.isScala2Mode)
+ ctx.migrationWarning(msg, source atPos Position(offset))
+ else
+ syntaxError(msg, offset)
+
+/* ---------- TREE CONSTRUCTION ------------------------------------------- */
+
+ /** Convert tree to formal parameter list
+ */
+ def convertToParams(tree: Tree): List[ValDef] = tree match {
+ case Parens(t) => convertToParam(t) :: Nil
+ case Tuple(ts) => ts map (convertToParam(_))
+ case t => convertToParam(t) :: Nil
+ }
+
+ /** Convert tree to formal parameter
+ */
+ def convertToParam(tree: Tree, mods: Modifiers = Modifiers(), expected: String = "formal parameter"): ValDef = tree match {
+ case Ident(name) =>
+ makeParameter(name.asTermName, TypeTree(), mods) withPos tree.pos
+ case Typed(Ident(name), tpt) =>
+ makeParameter(name.asTermName, tpt, mods) withPos tree.pos
+ case _ =>
+ syntaxError(s"not a legal $expected", tree.pos)
+ makeParameter(nme.ERROR, tree, mods)
+ }
+
+ /** Convert (qual)ident to type identifier
+ */
+ def convertToTypeId(tree: Tree): Tree = tree match {
+ case id @ Ident(name) =>
+ cpy.Ident(id)(name.toTypeName)
+ case id @ Select(qual, name) =>
+ cpy.Select(id)(qual, name.toTypeName)
+ case _ =>
+ syntaxError(IdentifierExpected(tree.show), tree.pos)
+ tree
+ }
+
+/* --------------- PLACEHOLDERS ------------------------------------------- */
+
+ /** The implicit parameters introduced by `_` in the current expression.
+ * Parameters appear in reverse order.
+ */
+ var placeholderParams: List[ValDef] = Nil
+
+ def checkNoEscapingPlaceholders[T](op: => T): T = {
+ val savedPlaceholderParams = placeholderParams
+ placeholderParams = Nil
+
+ try op
+ finally {
+ placeholderParams match {
+ case vd :: _ => syntaxError(UnboundPlaceholderParameter(), vd.pos)
+ case _ =>
+ }
+ placeholderParams = savedPlaceholderParams
+ }
+ }
+
+ def isWildcard(t: Tree): Boolean = t match {
+ case Ident(name1) => placeholderParams.nonEmpty && name1 == placeholderParams.head.name
+ case Typed(t1, _) => isWildcard(t1)
+ case Annotated(t1, _) => isWildcard(t1)
+ case Parens(t1) => isWildcard(t1)
+ case _ => false
+ }
+
+/* -------------- XML ---------------------------------------------------- */
+
+ /** the markup parser */
+ lazy val xmlp = new MarkupParser(this, true)
+
+ object symbXMLBuilder extends SymbolicXMLBuilder(this, true) // DEBUG choices
+
+ def xmlLiteral() : Tree = xmlp.xLiteral
+ def xmlLiteralPattern() : Tree = xmlp.xLiteralPattern
+
+/* -------- COMBINATORS -------------------------------------------------------- */
+
+    /** Parse `body` between token `tok` and its matching closing token,
+     *  keeping `openParens` up to date for error recovery in `skip()`.
+     */
+    def enclosed[T](tok: Token, body: => T): T = {
+      accept(tok)
+      openParens.change(tok, 1)
+      try body
+      finally {
+        accept(tok + 1)  // the closing token directly follows the opener in the token encoding
+        openParens.change(tok, -1)
+      }
+    }
+
+    def inParens[T](body: => T): T = enclosed(LPAREN, body)
+    def inBraces[T](body: => T): T = enclosed(LBRACE, body)
+    def inBrackets[T](body: => T): T = enclosed(LBRACKET, body)
+
+    /** Like `inBraces`, but save and restore `lastStatOffset`, so error
+     *  recovery inside the braces does not leak to the enclosing scope.
+     */
+    def inDefScopeBraces[T](body: => T): T = {
+      val saved = lastStatOffset
+      try inBraces(body)
+      finally lastStatOffset = saved
+    }
+
+    /** part { `separator` part }
+     */
+    def tokenSeparated[T](separator: Int, part: () => T): List[T] = {
+      val ts = new ListBuffer[T] += part()
+      while (in.token == separator) {
+        in.nextToken()
+        ts += part()
+      }
+      ts.toList
+    }
+
+    def commaSeparated[T](part: () => T): List[T] = tokenSeparated(COMMA, part)
+
+/* --------- OPERAND/OPERATOR STACK --------------------------------------- */
+
+    /** Stack of pending infix operators with their left operands; grows while
+     *  parsing an operator sequence and is folded back by `reduceStack`.
+     */
+    var opStack: List[OpInfo] = Nil
+
+    /** Report an error if `op` at `offset` does not have the associativity
+     *  `leftAssoc` expected for its precedence level.
+     */
+    def checkAssoc(offset: Int, op: Name, leftAssoc: Boolean) =
+      if (isLeftAssoc(op) != leftAssoc)
+        syntaxError(
+          "left- and right-associative operators with same precedence may not be mixed", offset)
+
+    /** Pop operators from `opStack` down to `base`, folding into nested
+     *  `InfixOp` trees around `top` every operator that binds more strongly
+     *  than `prec` (at least as strongly, if `leftAssoc`).
+     */
+    def reduceStack(base: List[OpInfo], top: Tree, prec: Int, leftAssoc: Boolean): Tree = {
+      if (opStack != base && precedence(opStack.head.operator) == prec)
+        checkAssoc(opStack.head.offset, opStack.head.operator, leftAssoc)
+      def recur(top: Tree): Tree = {
+        if (opStack == base) top
+        else {
+          val opInfo = opStack.head
+          val opPrec = precedence(opInfo.operator)
+          if (prec < opPrec || leftAssoc && prec == opPrec) {
+            opStack = opStack.tail
+            recur {
+              val opPos = Position(opInfo.offset, opInfo.offset + opInfo.operator.length, opInfo.offset)
+              atPos(opPos union opInfo.operand.pos union top.pos) {
+                InfixOp(opInfo.operand, opInfo.operator, top)
+              }
+            }
+          }
+          else top
+        }
+      }
+      recur(top)
+    }
+
+    /** operand { infixop operand} [postfixop],
+     *  respecting rules of associativity and precedence.
+     *  @param notAnOperator a token that does not count as operator.
+     *  @param maybePostfix postfix operators are allowed.
+     */
+    def infixOps(
+        first: Tree, canStartOperand: Token => Boolean, operand: () => Tree,
+        isType: Boolean = false,
+        notAnOperator: Name = nme.EMPTY,
+        maybePostfix: Boolean = false): Tree = {
+      val base = opStack
+      var top = first
+      while (isIdent && in.name != notAnOperator) {
+        val op = if (isType) in.name.toTypeName else in.name
+        top = reduceStack(base, top, precedence(op), isLeftAssoc(op))
+        opStack = OpInfo(top, op, in.offset) :: opStack
+        ident()
+        newLineOptWhenFollowing(canStartOperand)
+        if (maybePostfix && !canStartOperand(in.token)) {
+          // no operand follows: reinterpret the just-pushed operator as postfix
+          val topInfo = opStack.head
+          opStack = opStack.tail
+          val od = reduceStack(base, topInfo.operand, 0, true)
+          return atPos(od.pos.start, topInfo.offset) {
+            PostfixOp(od, topInfo.operator)
+          }
+        }
+        top = operand()
+      }
+      reduceStack(base, top, 0, true)
+    }
+
+/* -------- IDENTIFIERS AND LITERALS ------------------------------------------- */
+
+ /** Accept identifier and return its name as a term name. */
+ def ident(): TermName =
+ if (isIdent) {
+ val name = in.name
+ in.nextToken()
+ name
+ } else {
+ syntaxErrorOrIncomplete(expectedMsg(IDENTIFIER))
+ nme.ERROR
+ }
+
+ /** Accept identifier and return Ident with its name as a term name. */
+ def termIdent(): Ident = atPos(in.offset) {
+ makeIdent(in.token, ident())
+ }
+
+ /** Accept identifier and return Ident with its name as a type name. */
+ def typeIdent(): Ident = atPos(in.offset) {
+ makeIdent(in.token, ident().toTypeName)
+ }
+
+ private def makeIdent(tok: Token, name: Name) =
+ if (tok == BACKQUOTED_IDENT) BackquotedIdent(name)
+ else Ident(name)
+
+ def wildcardIdent(): Ident =
+ atPos(accept(USCORE)) { Ident(nme.WILDCARD) }
+
+ def termIdentOrWildcard(): Ident =
+ if (in.token == USCORE) wildcardIdent() else termIdent()
+
+ /** Accept identifier acting as a selector on given tree `t`. */
+ def selector(t: Tree): Tree =
+ atPos(t.pos.start, in.offset) { Select(t, ident()) }
+
+ /** Selectors ::= ident { `.' ident()
+ *
+ * Accept `.' separated identifiers acting as a selectors on given tree `t`.
+ * @param finish An alternative parse in case the next token is not an identifier.
+ * If the alternative does not apply, its tree argument is returned unchanged.
+ */
+ def selectors(t: Tree, finish: Tree => Tree): Tree = {
+ val t1 = finish(t)
+ if (t1 ne t) t1 else dotSelectors(selector(t), finish)
+ }
+
+ /** DotSelectors ::= { `.' ident()
+ *
+ * Accept `.' separated identifiers acting as a selectors on given tree `t`.
+ * @param finish An alternative parse in case the token following a `.' is not an identifier.
+ * If the alternative does not apply, its tree argument is returned unchanged.
+ */
+ def dotSelectors(t: Tree, finish: Tree => Tree = id) =
+ if (in.token == DOT) { in.nextToken(); selectors(t, finish) }
+ else t
+
+ private val id: Tree => Tree = x => x
+
+ /** Path ::= StableId
+ * | [Ident `.'] this
+ *
+ * @param thisOK If true, [Ident `.'] this is acceptable as the path.
+ * If false, another selection is required after the `this`.
+ * @param finish An alternative parse in case the token following a `.' is not an identifier.
+ * If the alternative does not apply, its tree argument is returned unchanged.
+ */
+ def path(thisOK: Boolean, finish: Tree => Tree = id): Tree = {
+ val start = in.offset
+ def handleThis(qual: Ident) = {
+ in.nextToken()
+ val t = atPos(start) { This(qual) }
+ if (!thisOK && in.token != DOT) syntaxError("`.' expected")
+ dotSelectors(t, finish)
+ }
+ def handleSuper(qual: Ident) = {
+ in.nextToken()
+ val mix = mixinQualifierOpt()
+ val t = atPos(start) { Super(This(qual), mix) }
+ accept(DOT)
+ dotSelectors(selector(t), finish)
+ }
+ if (in.token == THIS) handleThis(EmptyTypeIdent)
+ else if (in.token == SUPER) handleSuper(EmptyTypeIdent)
+ else {
+ val t = termIdent()
+ if (in.token == DOT) {
+ def qual = cpy.Ident(t)(t.name.toTypeName)
+ in.nextToken()
+ if (in.token == THIS) handleThis(qual)
+ else if (in.token == SUPER) handleSuper(qual)
+ else selectors(t, finish)
+ }
+ else t
+ }
+ }
+
+ /** MixinQualifier ::= `[' Id `]'
+ */
+ def mixinQualifierOpt(): Ident =
+ if (in.token == LBRACKET) inBrackets(atPos(in.offset) { typeIdent() })
+ else EmptyTypeIdent
+
+ /** StableId ::= Id
+ * | Path `.' Id
+ * | [id '.'] super [`[' id `]']`.' id
+ */
+ def stableId(): Tree =
+ path(thisOK = false)
+
+ /** QualId ::= Id {`.' Id}
+ */
+ def qualId(): Tree =
+ dotSelectors(termIdent())
+
+ /** SimpleExpr ::= literal
+ * | symbol
+ * | null
+ * @param negOffset The offset of a preceding `-' sign, if any.
+ * If the literal is not negated, negOffset = in.offset.
+ */
+ def literal(negOffset: Int = in.offset, inPattern: Boolean = false): Tree = {
+ def finish(value: Any): Tree = {
+ val t = atPos(negOffset) { Literal(Constant(value)) }
+ in.nextToken()
+ t
+ }
+ val isNegated = negOffset < in.offset
+ atPos(negOffset) {
+ if (in.token == SYMBOLLIT) atPos(in.skipToken()) { SymbolLit(in.strVal) }
+ else if (in.token == INTERPOLATIONID) interpolatedString()
+ else finish(in.token match {
+ case CHARLIT => in.charVal
+ case INTLIT => in.intVal(isNegated).toInt
+ case LONGLIT => in.intVal(isNegated)
+ case FLOATLIT => in.floatVal(isNegated).toFloat
+ case DOUBLELIT => in.floatVal(isNegated)
+ case STRINGLIT | STRINGPART => in.strVal
+ case TRUE => true
+ case FALSE => false
+ case NULL => null
+ case _ =>
+ syntaxErrorOrIncomplete(IllegalLiteral())
+ null
+ })
+ }
+ }
+
+ private def interpolatedString(inPattern: Boolean = false): Tree = atPos(in.offset) {
+ val segmentBuf = new ListBuffer[Tree]
+ val interpolator = in.name
+ in.nextToken()
+ while (in.token == STRINGPART) {
+ segmentBuf += Thicket(
+ literal(),
+ atPos(in.offset) {
+ if (in.token == IDENTIFIER)
+ termIdent()
+ else if (in.token == THIS) {
+ in.nextToken()
+ This(EmptyTypeIdent)
+ }
+ else if (in.token == LBRACE)
+ if (inPattern) Block(Nil, inBraces(pattern()))
+ else expr()
+ else {
+ ctx.error(InterpolatedStringError())
+ EmptyTree
+ }
+ })
+ }
+ if (in.token == STRINGLIT) segmentBuf += literal()
+ InterpolatedString(interpolator, segmentBuf.toList)
+ }
+
+/* ------------- NEW LINES ------------------------------------------------- */
+
+ def newLineOpt(): Unit = {
+ if (in.token == NEWLINE) in.nextToken()
+ }
+
+ def newLinesOpt(): Unit = {
+ if (in.token == NEWLINE || in.token == NEWLINES)
+ in.nextToken()
+ }
+
+ def newLineOptWhenFollowedBy(token: Int): Unit = {
+ // note: next is defined here because current == NEWLINE
+ if (in.token == NEWLINE && in.next.token == token) newLineOpt()
+ }
+
+ def newLineOptWhenFollowing(p: Int => Boolean): Unit = {
+ // note: next is defined here because current == NEWLINE
+ if (in.token == NEWLINE && p(in.next.token)) newLineOpt()
+ }
+
+/* ------------- TYPES ------------------------------------------------------ */
+ /** Same as [[typ]], but if this results in a wildcard it emits a syntax error and
+ * returns a tree for type `Any` instead.
+ */
+ def toplevelTyp(): Tree = {
+ val t = typ()
+ findWildcardType(t) match {
+ case Some(wildcardPos) =>
+ syntaxError("unbound wildcard type", wildcardPos)
+ scalaAny
+ case None => t
+ }
+ }
+
+ /** Type ::= FunArgTypes `=>' Type
+ * | HkTypeParamClause `->' Type
+ * | InfixType
+ * FunArgTypes ::= InfixType
+ * | `(' [ FunArgType {`,' FunArgType } ] `)'
+ */
+ def typ(): Tree = {
+ val start = in.offset
+ val t =
+ if (in.token == LPAREN) {
+ in.nextToken()
+ if (in.token == RPAREN) {
+ in.nextToken()
+ atPos(start, accept(ARROW)) { Function(Nil, typ()) }
+ }
+ else {
+ openParens.change(LPAREN, 1)
+ val ts = commaSeparated(funArgType)
+ openParens.change(LPAREN, -1)
+ accept(RPAREN)
+ if (in.token == ARROW)
+ atPos(start, in.skipToken()) { Function(ts, typ()) }
+ else {
+ for (t <- ts)
+ if (t.isInstanceOf[ByNameTypeTree])
+ syntaxError(ByNameParameterNotSupported())
+ val tuple = atPos(start) { makeTupleOrParens(ts) }
+ infixTypeRest(refinedTypeRest(withTypeRest(simpleTypeRest(tuple))))
+ }
+ }
+ }
+ else if (in.token == LBRACKET) {
+ val start = in.offset
+ val tparams = typeParamClause(ParamOwner.TypeParam)
+ if (in.token == ARROW)
+ atPos(start, in.skipToken())(PolyTypeTree(tparams, typ()))
+ else { accept(ARROW); typ() }
+ }
+ else infixType()
+
+ in.token match {
+ case ARROW => atPos(start, in.skipToken()) { Function(List(t), typ()) }
+ case FORSOME => syntaxError("existential types no longer supported; use a wildcard type or dependent type instead"); t
+ case _ => t
+ }
+ }
+
+ /** InfixType ::= RefinedType {id [nl] refinedType}
+ */
+ def infixType(): Tree = infixTypeRest(refinedType())
+
+ def infixTypeRest(t: Tree): Tree =
+ infixOps(t, canStartTypeTokens, refinedType, isType = true, notAnOperator = nme.raw.STAR)
+
+ /** RefinedType ::= WithType {Annotation | [nl] Refinement}
+ */
+ val refinedType: () => Tree = () => refinedTypeRest(withType())
+
+ def refinedTypeRest(t: Tree): Tree = {
+ newLineOptWhenFollowedBy(LBRACE)
+ if (in.token == LBRACE) refinedTypeRest(atPos(t.pos.start) { RefinedTypeTree(t, refinement()) })
+ else t
+ }
+
+ /** WithType ::= AnnotType {`with' AnnotType} (deprecated)
+ */
+ def withType(): Tree = withTypeRest(annotType())
+
+ def withTypeRest(t: Tree): Tree =
+ if (in.token == WITH) {
+ deprecationWarning(DeprecatedWithOperator())
+ in.nextToken()
+ AndTypeTree(t, withType())
+ }
+ else t
+
+ /** AnnotType ::= SimpleType {Annotation}
+ */
+ def annotType(): Tree = annotTypeRest(simpleType())
+
+ def annotTypeRest(t: Tree): Tree =
+ if (in.token == AT) annotTypeRest(atPos(t.pos.start) { Annotated(t, annot()) })
+ else t
+
+ /** SimpleType ::= SimpleType TypeArgs
+ * | SimpleType `#' Id
+ * | StableId
+ * | Path `.' type
+ * | `(' ArgTypes `)'
+ * | `_' TypeBounds
+ * | Refinement
+ * | Literal
+ */
+ def simpleType(): Tree = simpleTypeRest {
+ if (in.token == LPAREN)
+ atPos(in.offset) { makeTupleOrParens(inParens(argTypes())) }
+ else if (in.token == LBRACE)
+ atPos(in.offset) { RefinedTypeTree(EmptyTree, refinement()) }
+ else if (isSimpleLiteral) { SingletonTypeTree(literal()) }
+ else if (in.token == USCORE) {
+ val start = in.skipToken()
+ typeBounds().withPos(Position(start, in.lastOffset, start))
+ }
+ else path(thisOK = false, handleSingletonType) match {
+ case r @ SingletonTypeTree(_) => r
+ case r => convertToTypeId(r)
+ }
+ }
+
+ val handleSingletonType: Tree => Tree = t =>
+ if (in.token == TYPE) {
+ in.nextToken()
+ atPos(t.pos.start) { SingletonTypeTree(t) }
+ } else t
+
+ private def simpleTypeRest(t: Tree): Tree = in.token match {
+ case HASH => simpleTypeRest(typeProjection(t))
+ case LBRACKET => simpleTypeRest(atPos(t.pos.start) { AppliedTypeTree(t, typeArgs(namedOK = true)) })
+ case _ => t
+ }
+
+ private def typeProjection(t: Tree): Tree = {
+ accept(HASH)
+ val id = typeIdent()
+ atPos(t.pos.start, id.pos.start) { Select(t, id.name) }
+ }
+
+ /** NamedTypeArg ::= id `=' Type
+ */
+ val namedTypeArg = () => {
+ val name = ident()
+ accept(EQUALS)
+ NamedArg(name.toTypeName, typ())
+ }
+
+ /** ArgTypes ::= Type {`,' Type}
+ * | NamedTypeArg {`,' NamedTypeArg}
+ */
+ def argTypes(namedOK: Boolean = false) = {
+ def otherArgs(first: Tree, arg: () => Tree): List[Tree] = {
+ val rest =
+ if (in.token == COMMA) {
+ in.nextToken()
+ commaSeparated(arg)
+ }
+ else Nil
+ first :: rest
+ }
+ if (namedOK && in.token == IDENTIFIER)
+ typ() match {
+ case Ident(name) if in.token == EQUALS =>
+ in.nextToken()
+ otherArgs(NamedArg(name, typ()), namedTypeArg)
+ case firstArg =>
+ if (in.token == EQUALS) println(s"??? $firstArg")
+ otherArgs(firstArg, typ)
+ }
+ else commaSeparated(typ)
+ }
+
+ /** FunArgType ::= Type | `=>' Type
+ */
+ val funArgType = () =>
+ if (in.token == ARROW) atPos(in.skipToken()) { ByNameTypeTree(typ()) }
+ else typ()
+
+ /** ParamType ::= [`=>'] ParamValueType
+ */
+ def paramType(): Tree =
+ if (in.token == ARROW) atPos(in.skipToken()) { ByNameTypeTree(paramValueType()) }
+ else paramValueType()
+
+ /** ParamValueType ::= Type [`*']
+ */
+ def paramValueType(): Tree = {
+ val t = toplevelTyp()
+ if (isIdent(nme.raw.STAR)) {
+ in.nextToken()
+ atPos(t.pos.start) { PostfixOp(t, nme.raw.STAR) }
+ } else t
+ }
+
+ /** TypeArgs ::= `[' Type {`,' Type} `]'
+ * NamedTypeArgs ::= `[' NamedTypeArg {`,' NamedTypeArg} `]'
+ */
+ def typeArgs(namedOK: Boolean = false): List[Tree] = inBrackets(argTypes(namedOK))
+
+ /** Refinement ::= `{' RefineStatSeq `}'
+ */
+ def refinement(): List[Tree] = inBraces(refineStatSeq())
+
+ /** TypeBounds ::= [`>:' Type] [`<:' Type]
+ */
+ def typeBounds(): TypeBoundsTree =
+ atPos(in.offset) { TypeBoundsTree(bound(SUPERTYPE), bound(SUBTYPE)) }
+
+ private def bound(tok: Int): Tree =
+ if (in.token == tok) { in.nextToken(); toplevelTyp() }
+ else EmptyTree
+
+ /** TypeParamBounds ::= TypeBounds {`<%' Type} {`:' Type}
+ */
+ def typeParamBounds(pname: TypeName): Tree = {
+ val t = typeBounds()
+ val cbs = contextBounds(pname)
+ if (cbs.isEmpty) t
+ else atPos((t.pos union cbs.head.pos).start) { ContextBounds(t, cbs) }
+ }
+
+ def contextBounds(pname: TypeName): List[Tree] = in.token match {
+ case COLON =>
+ atPos(in.skipToken) {
+ AppliedTypeTree(toplevelTyp(), Ident(pname))
+ } :: contextBounds(pname)
+ case VIEWBOUND =>
+ deprecationWarning("view bounds `<%' are deprecated, use a context bound `:' instead")
+ atPos(in.skipToken) {
+ Function(Ident(pname) :: Nil, toplevelTyp())
+ } :: contextBounds(pname)
+ case _ =>
+ Nil
+ }
+
+ def typedOpt(): Tree =
+ if (in.token == COLON) { in.nextToken(); toplevelTyp() }
+ else TypeTree()
+
+ def typeDependingOn(location: Location.Value): Tree =
+ if (location == Location.InParens) typ()
+ else if (location == Location.InPattern) refinedType()
+ else infixType()
+
+ /** Checks whether `t` is a wildcard type.
+ * If it is, returns the [[Position]] where the wildcard occurs.
+ */
+ @tailrec
+ private final def findWildcardType(t: Tree): Option[Position] = t match {
+ case TypeBoundsTree(_, _) => Some(t.pos)
+ case Parens(t1) => findWildcardType(t1)
+ case Annotated(t1, _) => findWildcardType(t1)
+ case _ => None
+ }
+
+/* ----------- EXPRESSIONS ------------------------------------------------ */
+
+    /** EqualsExpr ::= `=' Expr
+     */
+    def equalsExpr(): Tree = {
+      accept(EQUALS)
+      expr()
+    }
+
+    /** Parse the condition of an if/while: either a parenthesized expression,
+     *  after which `altToken` (THEN or DO) is optional, or a bare expression
+     *  that must be followed by `altToken`.
+     */
+    def condExpr(altToken: Token): Tree = {
+      if (in.token == LPAREN) {
+        val t = atPos(in.offset) { Parens(inParens(exprInParens())) }
+        if (in.token == altToken) in.nextToken()
+        t
+      } else {
+        val t = expr()
+        accept(altToken)
+        t
+      }
+    }
+
+ /** Expr ::= FunParams `=>' Expr
+ * | Expr1
+ * FunParams ::= Bindings
+ * | [`implicit'] Id
+ * | `_'
+ * ExprInParens ::= PostfixExpr `:' Type
+ * | Expr
+ * BlockResult ::= (FunParams | [`implicit'] Id `:' InfixType) => Block
+ * | Expr1
+ * Expr1 ::= `if' `(' Expr `)' {nl} Expr [[semi] else Expr]
+ * | `if' Expr `then' Expr [[semi] else Expr]
+ * | `while' `(' Expr `)' {nl} Expr
+ * | `while' Expr `do' Expr
+ * | `do' Expr [semi] `while' Expr
+ * | `try' Expr Catches [`finally' Expr]
+ * | `try' Expr [`finally' Expr]
+ * | `throw' Expr
+ * | `return' [Expr]
+ * | ForExpr
+ * | [SimpleExpr `.'] Id `=' Expr
+ * | SimpleExpr1 ArgumentExprs `=' Expr
+ * | PostfixExpr [Ascription]
+ * | PostfixExpr `match' `{' CaseClauses `}'
+ * Bindings ::= `(' [Binding {`,' Binding}] `)'
+ * Binding ::= (Id | `_') [`:' Type]
+ * Ascription ::= `:' CompoundType
+ * | `:' Annotation {Annotation}
+ * | `:' `_' `*'
+ */
+ val exprInParens = () => expr(Location.InParens)
+
+ def expr(): Tree = expr(Location.ElseWhere)
+
+ def expr(location: Location.Value): Tree = {
+ val saved = placeholderParams
+ placeholderParams = Nil
+ val t = expr1(location)
+ if (in.token == ARROW) {
+ placeholderParams = saved
+ closureRest(t.pos.start, location, convertToParams(t))
+ }
+ else if (isWildcard(t)) {
+ placeholderParams = placeholderParams ::: saved
+ t
+ }
+ else
+ try
+ if (placeholderParams.isEmpty) t
+ else new WildcardFunction(placeholderParams.reverse, t)
+ finally placeholderParams = saved
+ }
+
+ def expr1(location: Location.Value = Location.ElseWhere): Tree = in.token match {
+ case IF =>
+ atPos(in.skipToken()) {
+ val cond = condExpr(THEN)
+ newLinesOpt()
+ val thenp = expr()
+ val elsep = if (in.token == ELSE) { in.nextToken(); expr() }
+ else EmptyTree
+ If(cond, thenp, elsep)
+ }
+ case WHILE =>
+ atPos(in.skipToken()) {
+ val cond = condExpr(DO)
+ newLinesOpt()
+ val body = expr()
+ WhileDo(cond, body)
+ }
+ case DO =>
+ atPos(in.skipToken()) {
+ val body = expr()
+ if (isStatSep) in.nextToken()
+ accept(WHILE)
+ val cond = expr()
+ DoWhile(body, cond)
+ }
+ case TRY =>
+ val tryOffset = in.offset
+ atPos(in.skipToken()) {
+ val body = expr()
+ val (handler, handlerStart) =
+ if (in.token == CATCH) {
+ val pos = in.offset
+ in.nextToken()
+ (expr(), pos)
+ } else (EmptyTree, -1)
+
+ handler match {
+ case Block(Nil, EmptyTree) =>
+ assert(handlerStart != -1)
+ syntaxError(
+ new EmptyCatchBlock(body),
+ Position(handlerStart, handler.pos.end)
+ )
+ case _ =>
+ }
+
+ val finalizer =
+ if (in.token == FINALLY) { accept(FINALLY); expr() }
+ else {
+ if (handler.isEmpty) warning(
+ EmptyCatchAndFinallyBlock(body),
+ source atPos Position(tryOffset, body.pos.end)
+ )
+ EmptyTree
+ }
+ ParsedTry(body, handler, finalizer)
+ }
+ case THROW =>
+ atPos(in.skipToken()) { Throw(expr()) }
+ case RETURN =>
+ atPos(in.skipToken()) { Return(if (isExprIntro) expr() else EmptyTree, EmptyTree) }
+ case FOR =>
+ forExpr()
+ case IMPLICIT =>
+ implicitClosure(in.skipToken(), location)
+ case _ =>
+ expr1Rest(postfixExpr(), location)
+ }
+
+  /** Continuation of `expr1`: handles assignment (`=`), type ascription (`:`)
+   *  and `match` following an already-parsed term `t`.
+   */
+  def expr1Rest(t: Tree, location: Location.Value) = in.token match {
+    case EQUALS =>
+      t match {
+        // Only identifiers, selections and applications are valid assignment targets;
+        // anything else leaves `t` unchanged and the `=` unconsumed.
+        case Ident(_) | Select(_, _) | Apply(_, _) =>
+          atPos(t.pos.start, in.skipToken()) { Assign(t, expr()) }
+        case _ =>
+          t
+      }
+    case COLON =>
+      ascription(t, location)
+    case MATCH =>
+      atPos(t.pos.start, in.skipToken()) {
+        inBraces(Match(t, caseClauses()))
+      }
+    case _ =>
+      t
+  }
+
+  /** Parses what follows the `:` after term `t`: a sequence wildcard (`_*`),
+   *  one or more annotations, or an ascribed type. Consumes the `:` itself.
+   */
+  def ascription(t: Tree, location: Location.Value) = atPos(t.pos.start, in.skipToken()) {
+    in.token match {
+      case USCORE =>
+        val uscoreStart = in.skipToken()
+        if (isIdent(nme.raw.STAR)) {
+          in.nextToken()
+          // `x: _*` is only legal directly before the closing paren of an argument list.
+          if (in.token != RPAREN) syntaxError(SeqWildcardPatternPos(), uscoreStart)
+          Typed(t, atPos(uscoreStart) { Ident(tpnme.WILDCARD_STAR) })
+        } else {
+          syntaxErrorOrIncomplete(IncorrectRepeatedParameterSyntax())
+          t
+        }
+      case AT if location != Location.InPattern =>
+        // Fold each parsed annotation onto `t` (`/:` is foldLeft).
+        (t /: annotations())(Annotated)
+      case _ =>
+        val tpt = typeDependingOn(location)
+        if (isWildcard(t) && location != Location.InPattern) {
+          // `t` is a placeholder `_`: push the ascribed type onto its synthetic
+          // parameter, which sits at the head of `placeholderParams`.
+          val vd :: rest = placeholderParams
+          placeholderParams =
+            cpy.ValDef(vd)(tpt = tpt).withPos(vd.pos union tpt.pos) :: rest
+        }
+        Typed(t, tpt)
+    }
+  }
+
+  /** Expr ::= implicit Id `=>' Expr
+   *  BlockResult ::= implicit Id [`:' InfixType] `=>' Block
+   *
+   *  `start` is the offset of the `implicit` keyword; `implicitMod`, when present,
+   *  is the already-parsed syntactic `implicit` modifier to attach.
+   */
+  def implicitClosure(start: Int, location: Location.Value, implicitMod: Option[Mod] = None): Tree = {
+    var mods = atPos(start) { Modifiers(Implicit) }
+    if (implicitMod.nonEmpty) mods = mods.withAddedMod(implicitMod.get)
+    val id = termIdent()
+    val paramExpr =
+      // A type ascription on the parameter is only allowed in block context.
+      if (location == Location.InBlock && in.token == COLON)
+        atPos(id.pos.start, in.skipToken()) { Typed(id, infixType()) }
+      else
+        id
+    closureRest(start, location, convertToParam(paramExpr, mods) :: Nil)
+  }
+
+  /** Parses `=> body` and builds the closure over the already-parsed `params`. */
+  def closureRest(start: Int, location: Location.Value, params: List[Tree]): Tree =
+    atPos(start, in.offset) {
+      accept(ARROW)
+      Function(params, if (location == Location.InBlock) block() else expr())
+    }
+
+  /** PostfixExpr ::= InfixExpr [Id [nl]]
+   *  InfixExpr ::= PrefixExpr
+   *              | InfixExpr Id [nl] InfixExpr
+   */
+  def postfixExpr(): Tree =
+    infixOps(prefixExpr(), canStartExpressionTokens, prefixExpr, maybePostfix = true)
+
+  /** PrefixExpr ::= [`-' | `+' | `~' | `!'] SimpleExpr
+   */
+  val prefixExpr = () =>
+    if (isIdent && nme.raw.isUnary(in.name)) {
+      val start = in.offset
+      val name = ident()
+      // `-` immediately followed by a numeric literal parses as a negative
+      // literal rather than a unary operator application.
+      if (name == nme.raw.MINUS && isNumericLit)
+        simpleExprRest(literal(start), canApply = true)
+      else
+        atPos(start) { PrefixOp(name, simpleExpr()) }
+    }
+    else simpleExpr()
+
+  /** SimpleExpr ::= new Template
+   *               | BlockExpr
+   *               | SimpleExpr1 [`_']
+   *  SimpleExpr1 ::= literal
+   *               | xmlLiteral
+   *               | Path
+   *               | `(' [ExprsInParens] `)'
+   *               | SimpleExpr `.' Id
+   *               | SimpleExpr (TypeArgs | NamedTypeArgs)
+   *               | SimpleExpr1 ArgumentExprs
+   */
+  def simpleExpr(): Tree = {
+    // `canApply` suppresses treating a following `(`/`{` as an argument list;
+    // it is cleared after block expressions and `new`.
+    var canApply = true
+    val t = in.token match {
+      case XMLSTART =>
+        xmlLiteral()
+      case IDENTIFIER | BACKQUOTED_IDENT | THIS | SUPER =>
+        path(thisOK = true)
+      case USCORE =>
+        // Placeholder syntax: allocate a fresh synthetic parameter and record it
+        // in `placeholderParams`; the enclosing expression closes over it later.
+        val start = in.skipToken()
+        val pname = ctx.freshName(nme.USCORE_PARAM_PREFIX).toTermName
+        val param = ValDef(pname, TypeTree(), EmptyTree).withFlags(SyntheticTermParam)
+          .withPos(Position(start))
+        placeholderParams = param :: placeholderParams
+        atPos(start) { Ident(pname) }
+      case LPAREN =>
+        atPos(in.offset) { makeTupleOrParens(inParens(exprsInParensOpt())) }
+      case LBRACE =>
+        canApply = false
+        blockExpr()
+      case NEW =>
+        canApply = false
+        val start = in.skipToken()
+        val (impl, missingBody) = template(emptyConstructor)
+        impl.parents match {
+          // A single parent with no template body (`new C` / `new C(...)`)
+          // is represented directly as an application of the parent constructor.
+          case parent :: Nil if missingBody =>
+            if (parent.isType) ensureApplied(wrapNew(parent)) else parent
+          case _ =>
+            New(impl.withPos(Position(start, in.lastOffset)))
+        }
+      case _ =>
+        if (isLiteral) literal()
+        else {
+          syntaxErrorOrIncomplete(IllegalStartSimpleExpr(tokenString(in.token)))
+          errorTermTree
+        }
+    }
+    simpleExprRest(t, canApply)
+  }
+
+  /** Parses selections, type applications, argument lists and a trailing `_`
+   *  (method value) following an already-parsed simple expression `t`.
+   */
+  def simpleExprRest(t: Tree, canApply: Boolean = true): Tree = {
+    if (canApply) newLineOptWhenFollowedBy(LBRACE)
+    in.token match {
+      case DOT =>
+        in.nextToken()
+        simpleExprRest(selector(t), canApply = true)
+      case LBRACKET =>
+        val tapp = atPos(t.pos.start, in.offset) { TypeApply(t, typeArgs(namedOK = true)) }
+        simpleExprRest(tapp, canApply = true)
+      case LPAREN | LBRACE if canApply =>
+        val app = atPos(t.pos.start, in.offset) { Apply(t, argumentExprs()) }
+        simpleExprRest(app, canApply = true)
+      case USCORE =>
+        // Eta-expansion syntax `f _`.
+        atPos(t.pos.start, in.skipToken()) { PostfixOp(t, nme.WILDCARD) }
+      case _ =>
+        t
+    }
+  }
+
+  /** ExprsInParens ::= ExprInParens {`,' ExprInParens}
+   *
+   *  Returns Nil when the next token already closes the parens.
+   */
+  def exprsInParensOpt(): List[Tree] =
+    if (in.token == RPAREN) Nil else commaSeparated(exprInParens)
+
+  /** ParArgumentExprs ::= `(' [ExprsInParens] `)'
+   *                    | `(' [ExprsInParens `,'] PostfixExpr `:' `_' `*' ')' \
+   */
+  def parArgumentExprs(): List[Tree] =
+    inParens(if (in.token == RPAREN) Nil else commaSeparated(argumentExpr))
+
+  /** ArgumentExprs ::= ParArgumentExprs
+   *                 | [nl] BlockExpr
+   */
+  def argumentExprs(): List[Tree] =
+    if (in.token == LBRACE) blockExpr() :: Nil else parArgumentExprs()
+
+  /** A single argument; an assignment `id = rhs` in argument position is
+   *  reinterpreted as a named argument.
+   */
+  val argumentExpr = () => exprInParens() match {
+    case a @ Assign(Ident(id), rhs) => cpy.NamedArg(a)(id, rhs)
+    case e => e
+  }
+
+  /** ArgumentExprss ::= {ArgumentExprs}
+   *
+   *  Applies `fn` to each successive argument list (paren or brace form).
+   */
+  def argumentExprss(fn: Tree): Tree = {
+    newLineOptWhenFollowedBy(LBRACE)
+    if (in.token == LPAREN || in.token == LBRACE) argumentExprss(Apply(fn, argumentExprs()))
+    else fn
+  }
+
+  /** ParArgumentExprss ::= {ParArgumentExprs}
+   *
+   *  Like `argumentExprss` but accepts only parenthesized argument lists.
+   */
+  def parArgumentExprss(fn: Tree): Tree =
+    if (in.token == LPAREN) parArgumentExprss(Apply(fn, parArgumentExprs()))
+    else fn
+
+  /** BlockExpr ::= `{' (CaseClauses | Block) `}'
+   *
+   *  A brace block starting with `case` parses as a match without a scrutinee
+   *  (i.e. a partial-function literal).
+   */
+  def blockExpr(): Tree = atPos(in.offset) {
+    inDefScopeBraces {
+      if (in.token == CASE) Match(EmptyTree, caseClauses())
+      else block()
+    }
+  }
+
+  /** Block ::= BlockStatSeq
+   *  @note Return tree does not carry source position.
+   */
+  def block(): Tree = {
+    val stats = blockStatSeq()
+    // A trailing expression statement becomes the block's result expression.
+    def isExpr(stat: Tree) = !(stat.isDef || stat.isInstanceOf[Import])
+    if (stats.nonEmpty && isExpr(stats.last)) Block(stats.init, stats.last)
+    else Block(stats, EmptyTree)
+  }
+
+  /** Guard ::= if PostfixExpr
+   *
+   *  Returns EmptyTree when no guard is present.
+   */
+  def guard(): Tree =
+    if (in.token == IF) { in.nextToken(); postfixExpr() }
+    else EmptyTree
+
+  /** Enumerators ::= Generator {semi Enumerator | Guard}
+   */
+  def enumerators(): List[Tree] = generator() :: enumeratorsRest()
+
+  /** Parses the enumerators following the first generator: a statement separator
+   *  introduces a further enumerator, `if` introduces a guard.
+   */
+  def enumeratorsRest(): List[Tree] =
+    if (isStatSep) { in.nextToken(); enumerator() :: enumeratorsRest() }
+    else if (in.token == IF) guard() :: enumeratorsRest()
+    else Nil
+
+  /** Enumerator ::= Generator
+   *              | Guard
+   *              | Pattern1 `=' Expr
+   */
+  def enumerator(): Tree =
+    if (in.token == IF) guard()
+    else {
+      val pat = pattern1()
+      // `pat = expr` is a value definition within the comprehension;
+      // otherwise it must continue as a generator (`pat <- expr`).
+      if (in.token == EQUALS) atPos(pat.pos.start, in.skipToken()) { GenAlias(pat, expr()) }
+      else generatorRest(pat)
+    }
+
+  /** Generator ::= Pattern `<-' Expr
+   */
+  def generator(): Tree = generatorRest(pattern1())
+
+  /** Parses `<- expr` for an already-parsed generator pattern `pat`. */
+  def generatorRest(pat: Tree) =
+    atPos(pat.pos.start, accept(LARROW)) { GenFrom(pat, expr()) }
+
+  /** ForExpr ::= `for' (`(' Enumerators `)' | `{' Enumerators `}')
+   *               {nl} [`yield'] Expr
+   *            | `for' Enumerators (`do' Expr | `yield' Expr)
+   *
+   *  Disambiguates `for (pat <- ...)` (wrapped enumerators) from
+   *  `for (a, b) <- ...` (a tuple pattern of an unwrapped generator).
+   */
+  def forExpr(): Tree = atPos(in.skipToken()) {
+    // true while we still assume the `(`...`)` wraps the whole enumerator list
+    var wrappedEnums = true
+    val enums =
+      if (in.token == LBRACE) inBraces(enumerators())
+      else if (in.token == LPAREN) {
+        val lparenOffset = in.skipToken()
+        openParens.change(LPAREN, 1)
+        val pats = patternsOpt()
+        val pat =
+          if (in.token == RPAREN || pats.length > 1) {
+            // The parens closed before `<-`: they delimited a (tuple) pattern,
+            // not the enumerator list.
+            wrappedEnums = false
+            accept(RPAREN)
+            openParens.change(LPAREN, -1)
+            atPos(lparenOffset) { makeTupleOrParens(pats) } // note: alternatives `|' need to be weeded out by typer.
+          }
+          else pats.head
+        val res = generatorRest(pat) :: enumeratorsRest()
+        if (wrappedEnums) {
+          accept(RPAREN)
+          openParens.change(LPAREN, -1)
+        }
+        res
+      } else {
+        wrappedEnums = false
+        enumerators()
+      }
+    newLinesOpt()
+    if (in.token == YIELD) { in.nextToken(); ForYield(enums, expr()) }
+    else if (in.token == DO) { in.nextToken(); ForDo(enums, expr()) }
+    else {
+      // Unwrapped enumerators require an explicit `do` or `yield`.
+      if (!wrappedEnums) syntaxErrorOrIncomplete(YieldOrDoExpectedInForComprehension())
+      ForDo(enums, expr())
+    }
+  }
+
+  /** CaseClauses ::= CaseClause {CaseClause}
+   *
+   *  Parses one or more `case` clauses; the first is parsed unconditionally.
+   */
+  def caseClauses(): List[CaseDef] = {
+    val buf = new ListBuffer[CaseDef]
+    buf += caseClause()
+    while (in.token == CASE) buf += caseClause()
+    buf.toList
+  }
+
+  /** CaseClause ::= case Pattern [Guard] `=>' Block
+   */
+  def caseClause(): CaseDef = atPos(in.offset) {
+    accept(CASE)
+    CaseDef(pattern(), guard(), atPos(accept(ARROW)) { block() })
+  }
+
+  /* -------- PATTERNS ------------------------------------------- */
+
+  /** Pattern ::= Pattern1 { `|' Pattern1 }
+   */
+  val pattern = () => {
+    val pat = pattern1()
+    if (isIdent(nme.raw.BAR))
+      atPos(pat.pos.start) { Alternative(pat :: patternAlts()) }
+    else pat
+  }
+
+  /** Parses the remaining `| Pattern1` alternatives, if any. */
+  def patternAlts(): List[Tree] =
+    if (isIdent(nme.raw.BAR)) { in.nextToken(); pattern1() :: patternAlts() }
+    else Nil
+
+  /** Pattern1 ::= PatVar Ascription
+   *             | Pattern2
+   */
+  def pattern1(): Tree = {
+    val p = pattern2()
+    // Only a variable pattern may carry a type ascription.
+    if (isVarPattern(p) && in.token == COLON) ascription(p, Location.InPattern)
+    else p
+  }
+
+  /** Pattern2 ::= [varid `@'] InfixPattern
+   */
+  val pattern2 = () => infixPattern() match {
+    case p @ Ident(name) if isVarPattern(p) && in.token == AT =>
+      val offset = in.skipToken()
+
+      // compatibility for Scala2 `x @ _*` syntax
+      infixPattern() match {
+        case pt @ Ident(tpnme.WILDCARD_STAR) =>
+          migrationWarningOrError("The syntax `x @ _*' is no longer supported; use `x : _*' instead", p.pos.start)
+          atPos(p.pos.start, offset) { Typed(p, pt) }
+        // NOTE(review): the inner `p` shadows the outer binder `p`, so the Bind
+        // position starts at the inner pattern rather than the binder — confirm
+        // this is intentional.
+        case p =>
+          atPos(p.pos.start, offset) { Bind(name, p) }
+      }
+    case p @ Ident(tpnme.WILDCARD_STAR) =>
+      // compatibility for Scala2 `_*` syntax
+      migrationWarningOrError("The syntax `_*' is no longer supported; use `x : _*' instead", p.pos.start)
+      atPos(p.pos.start) { Typed(Ident(nme.WILDCARD), p) }
+    case p =>
+      p
+  }
+
+  /** InfixPattern ::= SimplePattern {Id [nl] SimplePattern}
+   *
+   *  `|` is excluded as an infix operator here; it separates alternatives.
+   */
+  def infixPattern(): Tree =
+    infixOps(simplePattern(), canStartExpressionTokens, simplePattern, notAnOperator = nme.raw.BAR)
+
+  /** SimplePattern ::= PatVar
+   *                 | Literal
+   *                 | XmlPattern
+   *                 | `(' [Patterns] `)'
+   *                 | SimplePattern1 [TypeArgs] [ArgumentPatterns]
+   *  SimplePattern1 ::= Path
+   *                 | `{' Block `}'
+   *                 | SimplePattern1 `.' Id
+   *  PatVar ::= Id
+   *          | `_'
+   */
+  val simplePattern = () => in.token match {
+    case IDENTIFIER | BACKQUOTED_IDENT | THIS =>
+      path(thisOK = true) match {
+        // A lone `-` in front of a numeric literal is a negative literal pattern.
+        case id @ Ident(nme.raw.MINUS) if isNumericLit => literal(id.pos.start)
+        case t => simplePatternRest(t)
+      }
+    case USCORE =>
+      // (local name `wildIndent` is presumably a typo for `wildIdent`)
+      val wildIndent = wildcardIdent()
+
+      // compatibility for Scala2 `x @ _*` and `_*` syntax
+      // `x: _*' is parsed in `ascription'
+      if (isIdent(nme.raw.STAR)) {
+        in.nextToken()
+        if (in.token != RPAREN) syntaxError(SeqWildcardPatternPos(), wildIndent.pos)
+        atPos(wildIndent.pos) { Ident(tpnme.WILDCARD_STAR) }
+      } else wildIndent
+    case LPAREN =>
+      atPos(in.offset) { makeTupleOrParens(inParens(patternsOpt())) }
+    case LBRACE =>
+      dotSelectors(blockExpr())
+    case XMLSTART =>
+      xmlLiteralPattern()
+    case _ =>
+      if (isLiteral) literal()
+      else {
+        syntaxErrorOrIncomplete(IllegalStartOfSimplePattern())
+        errorTermTree
+      }
+  }
+
+  /** Parses optional type arguments and an argument pattern list following
+   *  an already-parsed extractor path `t`.
+   */
+  def simplePatternRest(t: Tree): Tree = {
+    var p = t
+    if (in.token == LBRACKET)
+      p = atPos(t.pos.start, in.offset) { TypeApply(p, typeArgs()) }
+    if (in.token == LPAREN)
+      p = atPos(t.pos.start, in.offset) { Apply(p, argumentPatterns()) }
+    p
+  }
+
+  /** Patterns ::= Pattern [`,' Pattern]
+   */
+  def patterns() = commaSeparated(pattern)
+
+  /** Like `patterns`, but returns Nil when the next token closes the parens. */
+  def patternsOpt(): List[Tree] =
+    if (in.token == RPAREN) Nil else patterns()
+
+
+  /** ArgumentPatterns ::= `(' [Patterns] `)'
+   *                    | `(' [Patterns `,'] Pattern2 `:' `_' `*' ')
+   */
+  def argumentPatterns(): List[Tree] =
+    inParens(patternsOpt)
+
+/* -------- MODIFIERS and ANNOTATIONS ------------------------------------------- */
+
+  /** Maps a modifier token to its syntactic `Mod` representation.
+   *  Callers only pass modifier tokens; other tokens are a MatchError.
+   */
+  private def modOfToken(tok: Int): Mod = tok match {
+    case ABSTRACT => Mod.Abstract()
+    case FINAL => Mod.Final()
+    case IMPLICIT => Mod.Implicit(ImplicitCommon)
+    case INLINE => Mod.Inline()
+    case LAZY => Mod.Lazy()
+    case OVERRIDE => Mod.Override()
+    case PRIVATE => Mod.Private()
+    case PROTECTED => Mod.Protected()
+    case SEALED => Mod.Sealed()
+  }
+
+  /** Drop `private' modifier when followed by a qualifier.
+   *  Contract `abstract' and `override' to ABSOVERRIDE
+   */
+  private def normalize(mods: Modifiers): Modifiers =
+    if ((mods is Private) && mods.hasPrivateWithin)
+      normalize(mods &~ Private)
+    else if (mods is AbstractAndOverride)
+      normalize(addFlag(mods &~ (Abstract | Override), AbsOverride))
+    else
+      mods
+
+  /** Consumes the current modifier token and adds it to `mods`,
+   *  reporting an error if the same modifier already appears.
+   */
+  private def addModifier(mods: Modifiers): Modifiers = {
+    val tok = in.token
+    val mod = atPos(in.skipToken()) { modOfToken(tok) }
+
+    if (mods is mod.flags) syntaxError(RepeatedModifier(mod.flags.toString))
+    addMod(mods, mod)
+  }
+
+  /** Two flag sets are compatible when either is empty or both apply to the
+   *  same kind of symbol (term or type).
+   */
+  private def compatible(flags1: FlagSet, flags2: FlagSet): Boolean = (
+    flags1.isEmpty
+    || flags2.isEmpty
+    || flags1.isTermFlags && flags2.isTermFlags
+    || flags1.isTypeFlags && flags2.isTypeFlags
+  )
+
+  /** Adds `flag` to `mods`, reporting an error (and keeping a best-effort
+   *  result) when the combination is term/type incompatible.
+   */
+  def addFlag(mods: Modifiers, flag: FlagSet): Modifiers = {
+    def incompatible(kind: String) = {
+      syntaxError(s"modifier(s) `${mods.flags}' not allowed for $kind")
+      Modifiers(flag)
+    }
+    if (compatible(mods.flags, flag)) mods | flag
+    else flag match {
+      case Trait => incompatible("trait")
+      case Method => incompatible("method")
+      case Mutable => incompatible("variable")
+      case _ =>
+        syntaxError(s"illegal modifier combination: ${mods.flags} and $flag")
+        mods
+    }
+  }
+
+  /** Always add the syntactic `mod`, but check and conditionally add semantic `mod.flags`
+   */
+  def addMod(mods: Modifiers, mod: Mod): Modifiers =
+    addFlag(mods, mod.flags).withAddedMod(mod)
+
+  /** AccessQualifier ::= "[" (Id | this) "]"
+   */
+  def accessQualifierOpt(mods: Modifiers): Modifiers =
+    if (in.token == LBRACKET) {
+      if ((mods is Local) || mods.hasPrivateWithin)
+        syntaxError("duplicate private/protected qualifier")
+      inBrackets {
+        // `private[this]` becomes the Local flag; `private[Id]` records the within-scope.
+        if (in.token == THIS) { in.nextToken(); mods | Local }
+        else mods.withPrivateWithin(ident().toTypeName)
+      }
+    } else mods
+
+  /** {Annotation} {Modifier}
+   *  Modifiers ::= {Modifier}
+   *  LocalModifiers ::= {LocalModifier}
+   *  AccessModifier ::= (private | protected) [AccessQualifier]
+   *  Modifier ::= LocalModifier
+   *            | AccessModifier
+   *            | override
+   *  LocalModifier ::= abstract | final | sealed | implicit | lazy
+   *
+   *  Repeatedly consumes tokens from `allowed`, accumulating onto `start`;
+   *  newlines are skipped once some modifier/annotation has been seen.
+   */
+  def modifiers(allowed: BitSet = modifierTokens, start: Modifiers = Modifiers()): Modifiers = {
+    def loop(mods: Modifiers): Modifiers = {
+      if (allowed contains in.token) {
+        val isAccessMod = accessModifierTokens contains in.token
+        val mods1 = addModifier(mods)
+        loop(if (isAccessMod) accessQualifierOpt(mods1) else mods1)
+      } else if (in.token == NEWLINE && (mods.hasFlags || mods.hasAnnotations)) {
+        in.nextToken()
+        loop(mods)
+      } else {
+        mods
+      }
+    }
+    normalize(loop(start))
+  }
+
+  /** Wrap annotation or constructor in New(...).<init> */
+  def wrapNew(tpt: Tree) = Select(New(tpt), nme.CONSTRUCTOR)
+
+  /** Adjust start of annotation or constructor to position of preceding @ or new */
+  def adjustStart(start: Offset)(tree: Tree): Tree = {
+    val tree1 = tree match {
+      case Apply(fn, args) => cpy.Apply(tree)(adjustStart(start)(fn), args)
+      case Select(qual, name) => cpy.Select(tree)(adjustStart(start)(qual), name)
+      case _ => tree
+    }
+    if (start < tree1.pos.start) tree1.withPos(tree1.pos.withStart(start))
+    else tree1
+  }
+
+  /** Annotation ::= `@' SimpleType {ParArgumentExprs}
+   */
+  def annot() =
+    adjustStart(accept(AT)) {
+      if (in.token == INLINE) in.token = BACKQUOTED_IDENT // allow for now
+      ensureApplied(parArgumentExprss(wrapNew(simpleType())))
+    }
+
+  /** Parses zero or more annotations, optionally skipping newlines before `@`. */
+  def annotations(skipNewLines: Boolean = false): List[Tree] = {
+    if (skipNewLines) newLineOptWhenFollowedBy(AT)
+    if (in.token == AT) annot() :: annotations(skipNewLines)
+    else Nil
+  }
+
+  /** Parses annotations and wraps them in an otherwise-empty Modifiers. */
+  def annotsAsMods(skipNewLines: Boolean = false): Modifiers =
+    Modifiers() withAnnotations annotations(skipNewLines)
+
+  /** Parses annotations followed by modifiers from `allowed`, as used
+   *  before a definition.
+   */
+  def defAnnotsMods(allowed: BitSet): Modifiers =
+    modifiers(allowed, annotsAsMods(skipNewLines = true))
+
+  /* -------- PARAMETERS ------------------------------------------- */
+
+  /** ClsTypeParamClause::= `[' ClsTypeParam {`,' ClsTypeParam} `]'
+   *  ClsTypeParam ::= {Annotation} [{Modifier} type] [`+' | `-']
+   *                   Id [HkTypeParamClause] TypeParamBounds
+   *
+   *  DefTypeParamClause::= `[' DefTypeParam {`,' DefTypeParam} `]'
+   *  DefTypeParam ::= {Annotation} Id [HkTypeParamClause] TypeParamBounds
+   *
+   *  TypTypeParamCaluse::= `[' TypTypeParam {`,' TypTypeParam} `]'
+   *  TypTypeParam ::= {Annotation} Id [HkTypePamClause] TypeBounds
+   *
+   *  HkTypeParamClause ::= `[' HkTypeParam {`,' HkTypeParam} `]'
+   *  HkTypeParam ::= {Annotation} ['+' | `-'] (Id[HkTypePamClause] | _') TypeBounds
+   *
+   *  `ownerKind` selects which of the variants above is parsed.
+   */
+  def typeParamClause(ownerKind: ParamOwner.Value): List[TypeDef] = inBrackets {
+    def typeParam(): TypeDef = {
+      val isConcreteOwner = ownerKind == ParamOwner.Class || ownerKind == ParamOwner.Def
+      val start = in.offset
+      var mods = annotsAsMods()
+      if (ownerKind == ParamOwner.Class) {
+        mods = modifiers(start = mods)
+        mods =
+          atPos(start, in.offset) {
+            // `type` marks the parameter as a type member accessor; otherwise
+            // class type params are private[this] and may carry no modifiers.
+            if (in.token == TYPE) {
+              val mod = atPos(in.skipToken()) { Mod.Type() }
+              (mods | Param | ParamAccessor).withAddedMod(mod)
+            } else {
+              if (mods.hasFlags) syntaxError(TypeParamsTypeExpected(mods, ident()))
+              mods | Param | PrivateLocal
+            }
+          }
+      }
+      else mods = atPos(start) (mods | Param)
+      if (ownerKind != ParamOwner.Def) {
+        // Variance annotations are not allowed on method type parameters.
+        if (isIdent(nme.raw.PLUS)) mods |= Covariant
+        else if (isIdent(nme.raw.MINUS)) mods |= Contravariant
+        if (mods is VarianceFlags) in.nextToken()
+      }
+      atPos(start, nameStart) {
+        val name =
+          if (isConcreteOwner || in.token != USCORE) ident().toTypeName
+          else {
+            // Anonymous (`_`) higher-kinded parameter: invent a fresh name.
+            in.nextToken()
+            ctx.freshName(nme.USCORE_PARAM_PREFIX).toTypeName
+          }
+        val hkparams = typeParamClauseOpt(ParamOwner.TypeParam)
+        val bounds =
+          if (isConcreteOwner) typeParamBounds(name)
+          else typeBounds()
+        TypeDef(name, lambdaAbstract(hkparams, bounds)).withMods(mods)
+      }
+    }
+    commaSeparated(typeParam)
+  }
+
+  /** Parses a type parameter clause if one starts here, else Nil. */
+  def typeParamClauseOpt(ownerKind: ParamOwner.Value): List[TypeDef] =
+    if (in.token == LBRACKET) typeParamClause(ownerKind) else Nil
+
+  /** ClsParamClauses ::= {ClsParamClause} [[nl] `(' `implicit' ClsParams `)']
+   *  ClsParamClause ::= [nl] `(' [ClsParams] ')'
+   *  ClsParams ::= ClsParam {`' ClsParam}
+   *  ClsParam ::= {Annotation} [{Modifier} (`val' | `var') | `inline'] Param
+   *  DefParamClauses ::= {DefParamClause} [[nl] `(' `implicit' DefParams `)']
+   *  DefParamClause ::= [nl] `(' [DefParams] ')'
+   *  DefParams ::= DefParam {`,' DefParam}
+   *  DefParam ::= {Annotation} [`inline'] Param
+   *  Param ::= id `:' ParamType [`=' Expr]
+   *
+   *  `owner` is the name of the enclosing definition: a type name selects the
+   *  class-parameter variant, `nme.CONSTRUCTOR` triggers the auxiliary-constructor
+   *  checks at the end. `ofCaseClass` relaxes the val/var requirement for the
+   *  first clause of a case class.
+   */
+  def paramClauses(owner: Name, ofCaseClass: Boolean = false): List[List[ValDef]] = {
+    var implicitMod: Mod = null
+    var firstClauseOfCaseClass = ofCaseClass
+    var implicitOffset = -1 // use once
+    def param(): ValDef = {
+      val start = in.offset
+      var mods = annotsAsMods()
+      if (owner.isTypeName) {
+        mods = modifiers(start = mods) | ParamAccessor
+        mods =
+          atPos(start, in.offset) {
+            if (in.token == VAL) {
+              val mod = atPos(in.skipToken()) { Mod.Val() }
+              mods.withAddedMod(mod)
+            } else if (in.token == VAR) {
+              val mod = atPos(in.skipToken()) { Mod.Var() }
+              addMod(mods, mod)
+            } else {
+              // No val/var: only allowed unadorned (or inline); plain params of
+              // a non-case first clause become private[this] accessors.
+              if (!(mods.flags &~ (ParamAccessor | Inline)).isEmpty)
+                syntaxError("`val' or `var' expected")
+              if (firstClauseOfCaseClass) mods else mods | PrivateLocal
+            }
+          }
+      }
+      else {
+        if (in.token == INLINE) mods = addModifier(mods)
+        mods = atPos(start) { mods | Param }
+      }
+      atPos(start, nameStart) {
+        val name = ident()
+        val tpt =
+          if (ctx.settings.YmethodInfer.value && owner.isTermName && in.token != COLON) {
+            TypeTree() // XX-METHOD-INFER
+          } else {
+            accept(COLON)
+            if (in.token == ARROW) {
+              // By-name (`=> T`) is illegal for val/var class params and for implicits.
+              if (owner.isTypeName && !(mods is Local))
+                syntaxError(s"${if (mods is Mutable) "`var'" else "`val'"} parameters may not be call-by-name")
+              else if (implicitMod != null)
+                syntaxError("implicit parameters may not be call-by-name")
+            }
+            paramType()
+          }
+        val default =
+          if (in.token == EQUALS) { in.nextToken(); expr() }
+          else EmptyTree
+        if (implicitOffset >= 0) {
+          // Extend the first implicit param's mods position back to `implicit`.
+          mods = mods.withPos(mods.pos.union(Position(implicitOffset, implicitOffset)))
+          implicitOffset = -1
+        }
+        if (implicitMod != null) mods = addMod(mods, implicitMod)
+        ValDef(name, tpt, default).withMods(mods)
+      }
+    }
+    def paramClause(): List[ValDef] = inParens {
+      if (in.token == RPAREN) Nil
+      else {
+        if (in.token == IMPLICIT) {
+          implicitOffset = in.offset
+          implicitMod = atPos(in.skipToken()) { Mod.Implicit(Implicit) }
+        }
+        commaSeparated(param)
+      }
+    }
+    def clauses(): List[List[ValDef]] = {
+      newLineOptWhenFollowedBy(LPAREN)
+      if (in.token == LPAREN)
+        paramClause() :: {
+          firstClauseOfCaseClass = false
+          // An implicit clause must be the last one.
+          if (implicitMod == null) clauses() else Nil
+        }
+      else Nil
+    }
+    val start = in.offset
+    val result = clauses()
+    // An auxiliary constructor needs at least one leading non-implicit clause.
+    if (owner == nme.CONSTRUCTOR && (result.isEmpty || (result.head take 1 exists (_.mods is Implicit)))) {
+      in.token match {
+        case LBRACKET => syntaxError("no type parameters allowed here")
+        case EOF => incompleteInputError(AuxConstructorNeedsNonImplicitParameter())
+        case _ => syntaxError(AuxConstructorNeedsNonImplicitParameter(), start)
+      }
+    }
+    result
+  }
+
+/* -------- DEFS ------------------------------------------- */
+
+  /** Import ::= import ImportExpr {`,' ImportExpr}
+   */
+  def importClause(): List[Tree] = {
+    val offset = accept(IMPORT)
+    commaSeparated(importExpr) match {
+      case t :: rest =>
+        // The first import should start at the position of the keyword.
+        t.withPos(t.pos.withStart(offset)) :: rest
+      case nil => nil
+    }
+  }
+
+  /** ImportExpr ::= StableId `.' (Id | `_' | ImportSelectors)
+   *
+   *  `handleImport` intercepts `_` and `{...}` selectors while the path is
+   *  being parsed; a plain terminal selection becomes a single-name import.
+   */
+  val importExpr = () => path(thisOK = false, handleImport) match {
+    case imp: Import =>
+      imp
+    case sel @ Select(qual, name) =>
+      val selector = atPos(sel.pos.point) { Ident(name) }
+      cpy.Import(sel)(qual, selector :: Nil)
+    case t =>
+      accept(DOT)
+      Import(t, Ident(nme.WILDCARD) :: Nil)
+  }
+
+  /** Callback invoked by `path` after each `.`: turns `prefix._` or
+   *  `prefix.{...}` into an Import, otherwise returns the tree unchanged.
+   */
+  val handleImport = { tree: Tree =>
+    if (in.token == USCORE) Import(tree, importSelector() :: Nil)
+    else if (in.token == LBRACE) Import(tree, inBraces(importSelectors()))
+    else tree
+  }
+
+  /** ImportSelectors ::= `{' {ImportSelector `,'} (ImportSelector | `_') `}'
+   *
+   *  A wildcard selector terminates the list.
+   */
+  def importSelectors(): List[Tree] =
+    if (in.token == RBRACE) Nil
+    else {
+      val sel = importSelector()
+      sel :: {
+        if (!isWildcardArg(sel) && in.token == COMMA) {
+          in.nextToken()
+          importSelectors()
+        }
+        else Nil
+      }
+    }
+
+  /** ImportSelector ::= Id [`=>' Id | `=>' `_']
+   *
+   *  A rename `a => b` is represented as a Thicket of the two idents.
+   */
+  def importSelector(): Tree = {
+    val from = termIdentOrWildcard()
+    if (from.name != nme.WILDCARD && in.token == ARROW)
+      atPos(from.pos.start, in.skipToken()) {
+        Thicket(from, termIdentOrWildcard())
+      }
+    else from
+  }
+
+  /** Positions `mods` at `start` and consumes the current (keyword) token. */
+  def posMods(start: Int, mods: Modifiers) = {
+    val mods1 = atPos(start)(mods)
+    in.nextToken()
+    mods1
+  }
+
+  /** Def ::= val PatDef
+   *         | var VarDef
+   *         | def DefDef
+   *         | type {nl} TypeDcl
+   *         | TmplDef
+   *  Dcl ::= val ValDcl
+   *        | var ValDcl
+   *        | def DefDcl
+   *        | type {nl} TypeDcl
+   *
+   *  Dispatches on the introducing keyword; anything else is a template definition.
+   */
+  def defOrDcl(start: Int, mods: Modifiers): Tree = in.token match {
+    case VAL =>
+      val mod = atPos(in.skipToken()) { Mod.Val() }
+      val mods1 = mods.withAddedMod(mod)
+      patDefOrDcl(start, mods1, in.getDocComment(start))
+    case VAR =>
+      val mod = atPos(in.skipToken()) { Mod.Var() }
+      val mod1 = addMod(mods, mod)
+      patDefOrDcl(start, mod1, in.getDocComment(start))
+    case DEF =>
+      defDefOrDcl(start, posMods(start, mods), in.getDocComment(start))
+    case TYPE =>
+      typeDefOrDcl(start, posMods(start, mods), in.getDocComment(start))
+    case _ =>
+      tmplDef(start, mods)
+  }
+
+  /** PatDef ::= Pattern2 {`,' Pattern2} [`:' Type] `=' Expr
+   *  VarDef ::= PatDef | Id {`,' Id} `:' Type `=' `_'
+   *  ValDcl ::= Id {`,' Id} `:' Type
+   *  VarDcl ::= Id {`,' Id} `:' Type
+   */
+  def patDefOrDcl(start: Offset, mods: Modifiers, docstring: Option[Comment] = None): Tree = atPos(start, nameStart) {
+    val lhs = commaSeparated(pattern2)
+    val tpt = typedOpt()
+    val rhs =
+      if (tpt.isEmpty || in.token == EQUALS) {
+        accept(EQUALS)
+        // `var x: T = _` (uninitialized var): only for typed vars bound to
+        // plain identifiers.
+        if (in.token == USCORE && !tpt.isEmpty && (mods is Mutable) &&
+            (lhs.toList forall (_.isInstanceOf[Ident]))) {
+          wildcardIdent()
+        } else {
+          expr()
+        }
+      } else EmptyTree
+    lhs match {
+      // A single plain identifier yields a ValDef; patterns/multiples yield PatDef.
+      case (id @ Ident(name: TermName)) :: Nil => {
+        ValDef(name, tpt, rhs).withMods(mods).setComment(docstring)
+      } case _ =>
+        PatDef(mods, lhs, tpt, rhs)
+    }
+  }
+
+  /** DefDef ::= DefSig (`:' Type [`=' Expr] | "=" Expr)
+   *            | this ParamClause ParamClauses `=' ConstrExpr
+   *  DefDcl ::= DefSig `:' Type
+   *  DefSig ::= id [DefTypeParamClause] ParamClauses
+   */
+  def defDefOrDcl(start: Offset, mods: Modifiers, docstring: Option[Comment] = None): Tree = atPos(start, nameStart) {
+    // Scala 2 migration: accept `def f() { ... }` / `def f()` by patching in
+    // the missing result type and `=` when -language:Scala2 allows it.
+    def scala2ProcedureSyntax(resultTypeStr: String) = {
+      val toInsert =
+        if (in.token == LBRACE) s"$resultTypeStr ="
+        else ": Unit "  // trailing space ensures that `def f()def g()` works.
+      in.testScala2Mode(s"Procedure syntax no longer supported; `$toInsert' should be inserted here") && {
+        patch(source, Position(in.lastOffset), toInsert)
+        true
+      }
+    }
+    if (in.token == THIS) {
+      // Auxiliary constructor: `def this(...) = ...`.
+      in.nextToken()
+      val vparamss = paramClauses(nme.CONSTRUCTOR)
+      val rhs = {
+        if (!(in.token == LBRACE && scala2ProcedureSyntax(""))) accept(EQUALS)
+        atPos(in.offset) { constrExpr() }
+      }
+      makeConstructor(Nil, vparamss, rhs).withMods(mods)
+    } else {
+      val mods1 = addFlag(mods, Method)
+      val name = ident()
+      val tparams = typeParamClauseOpt(ParamOwner.Def)
+      val vparamss = paramClauses(name)
+      var tpt = fromWithinReturnType(typedOpt())
+      val rhs =
+        if (in.token == EQUALS) {
+          in.nextToken()
+          // NOTE(review): `expr` without parens — relies on auto-application of
+          // the nullary method; presumably equivalent to `expr()`, confirm.
+          expr
+        }
+        else if (!tpt.isEmpty)
+          EmptyTree
+        else if (scala2ProcedureSyntax(": Unit")) {
+          tpt = scalaUnit
+          if (in.token == LBRACE) expr()
+          else EmptyTree
+        }
+        else {
+          if (!isExprIntro) syntaxError(MissingReturnType(), in.lastOffset)
+          accept(EQUALS)
+          expr()
+        }
+      DefDef(name, tparams, vparamss, tpt, rhs).withMods(mods1).setComment(docstring)
+    }
+  }
+
+  /** ConstrExpr ::= SelfInvocation
+   *               | ConstrBlock
+   */
+  def constrExpr(): Tree =
+    if (in.token == LBRACE) constrBlock()
+    else Block(selfInvocation() :: Nil, Literal(Constant(())))
+
+  /** SelfInvocation ::= this ArgumentExprs {ArgumentExprs}
+   */
+  def selfInvocation(): Tree =
+    atPos(accept(THIS)) {
+      newLineOptWhenFollowedBy(LBRACE)
+      argumentExprss(Apply(Ident(nme.CONSTRUCTOR), argumentExprs()))
+    }
+
+  /** ConstrBlock ::= `{' SelfInvocation {semi BlockStat} `}'
+   *
+   *  The block's result is the unit literal; the self invocation comes first.
+   */
+  def constrBlock(): Tree =
+    atPos(in.skipToken()) {
+      val stats = selfInvocation() :: {
+        if (isStatSep) { in.nextToken(); blockStatSeq() }
+        else Nil
+      }
+      accept(RBRACE)
+      Block(stats, Literal(Constant(())))
+    }
+
+  /** TypeDef ::= type Id [TypeParamClause] `=' Type
+   *  TypeDcl ::= type Id [TypeParamClause] TypeBounds
+   */
+  def typeDefOrDcl(start: Offset, mods: Modifiers, docstring: Option[Comment] = None): Tree = {
+    newLinesOpt()
+    atPos(start, nameStart) {
+      val name = ident().toTypeName
+      val tparams = typeParamClauseOpt(ParamOwner.Type)
+      in.token match {
+        case EQUALS =>
+          in.nextToken()
+          TypeDef(name, lambdaAbstract(tparams, typ())).withMods(mods).setComment(docstring)
+        // Tokens that may legally follow an abstract type declaration:
+        // bounds, or anything that terminates the statement.
+        case SUPERTYPE | SUBTYPE | SEMI | NEWLINE | NEWLINES | COMMA | RBRACE | EOF =>
+          TypeDef(name, lambdaAbstract(tparams, typeBounds())).withMods(mods).setComment(docstring)
+        case _ =>
+          syntaxErrorOrIncomplete("`=', `>:', or `<:' expected")
+          EmptyTree
+      }
+    }
+  }
+
+  /** TmplDef ::= ([`case'] `class' | `trait') ClassDef
+   *            | [`case'] `object' ObjectDef
+   */
+  def tmplDef(start: Int, mods: Modifiers): Tree = {
+    val docstring = in.getDocComment(start)
+    in.token match {
+      case TRAIT =>
+        classDef(start, posMods(start, addFlag(mods, Trait)), docstring)
+      case CLASS =>
+        classDef(start, posMods(start, mods), docstring)
+      case CASECLASS =>
+        classDef(start, posMods(start, mods | Case), docstring)
+      case OBJECT =>
+        objectDef(start, posMods(start, mods | Module), docstring)
+      case CASEOBJECT =>
+        objectDef(start, posMods(start, mods | Case | Module), docstring)
+      case _ =>
+        syntaxErrorOrIncomplete("expected start of definition")
+        EmptyTree
+    }
+  }
+
+  /** ClassDef ::= Id [ClsTypeParamClause]
+   *               [ConstrMods] ClsParamClauses TemplateOpt
+   */
+  def classDef(start: Offset, mods: Modifiers, docstring: Option[Comment]): TypeDef = atPos(start, nameStart) {
+    val name = ident().toTypeName
+    // The primary constructor spans type params, constructor mods and value params.
+    val constr = atPos(in.lastOffset) {
+      val tparams = typeParamClauseOpt(ParamOwner.Class)
+      val cmods = constrModsOpt()
+      val vparamss = paramClauses(name, mods is Case)
+
+      makeConstructor(tparams, vparamss).withMods(cmods)
+    }
+    val templ = templateOpt(constr)
+
+    TypeDef(name, templ).withMods(mods).setComment(docstring)
+  }
+
+  /** ConstrMods ::= AccessModifier
+   *               | Annotation {Annotation} (AccessModifier | `this')
+   */
+  def constrModsOpt(): Modifiers = {
+    val mods = modifiers(accessModifierTokens, annotsAsMods())
+    // Constructor annotations without an access modifier must be followed by `this`.
+    if (mods.hasAnnotations && !mods.hasFlags)
+      if (in.token == THIS) in.nextToken()
+      else syntaxError("`private', `protected', or `this' expected")
+    mods
+  }
+
+  /** ObjectDef ::= Id TemplateOpt
+   */
+  def objectDef(start: Offset, mods: Modifiers, docstring: Option[Comment] = None): ModuleDef = atPos(start, nameStart) {
+    val name = ident()
+    val template = templateOpt(emptyConstructor)
+
+    ModuleDef(name, template).withMods(mods).setComment(docstring)
+  }
+
+/* -------- TEMPLATES ------------------------------------------- */
+
+  /** ConstrApp ::= SimpleType {ParArgumentExprs}
+   *
+   *  A parent with arguments is wrapped as a constructor call via `wrapNew`.
+   */
+  val constrApp = () => {
+    val t = annotType()
+    if (in.token == LPAREN) parArgumentExprss(wrapNew(t))
+    else t
+  }
+
+  /** Template ::= ConstrApps [TemplateBody] | TemplateBody
+   *  ConstrApps ::= ConstrApp {`with' ConstrApp}
+   *
+   *  @return a pair consisting of the template, and a boolean which indicates
+   *          whether the template misses a body (i.e. no {...} part).
+   */
+  def template(constr: DefDef): (Template, Boolean) = {
+    newLineOptWhenFollowedBy(LBRACE)
+    if (in.token == LBRACE) (templateBodyOpt(constr, Nil), false)
+    else {
+      val parents = tokenSeparated(WITH, constrApp)
+      newLineOptWhenFollowedBy(LBRACE)
+      val missingBody = in.token != LBRACE
+      (templateBodyOpt(constr, parents), missingBody)
+    }
+  }
+
+  /** TemplateOpt = [`extends' Template | TemplateBody]
+   *
+   *  Without `extends` or a body, yields an empty template over `constr`.
+   */
+  def templateOpt(constr: DefDef): Template =
+    if (in.token == EXTENDS) { in.nextToken(); template(constr)._1 }
+    else {
+      newLineOptWhenFollowedBy(LBRACE)
+      if (in.token == LBRACE) template(constr)._1
+      else Template(constr, Nil, EmptyValDef, Nil)
+    }
+
+  /** TemplateBody ::= [nl] `{' TemplateStatSeq `}'
+   *
+   *  Assembles the Template from `constr`, the parsed parents and the
+   *  (possibly absent) body.
+   */
+  def templateBodyOpt(constr: DefDef, parents: List[Tree]) = {
+    val (self, stats) =
+      if (in.token == LBRACE) templateBody() else (EmptyValDef, Nil)
+    Template(constr, parents, self, stats)
+  }
+
+  /** Parses a brace-delimited template body; a following `with` (early
+   *  definitions) is reported as unsupported, parsed for recovery and its
+   *  result discarded.
+   */
+  def templateBody(): (ValDef, List[Tree]) = {
+    val r = inDefScopeBraces { templateStatSeq() }
+    if (in.token == WITH) {
+      syntaxError(EarlyDefinitionsNotSupported())
+      in.nextToken()
+      template(emptyConstructor)
+    }
+    r
+  }
+
+/* -------- STATSEQS ------------------------------------------- */
+
+  /** Create a tree representing a packaging */
+  def makePackaging(start: Int, pkg: Tree, stats: List[Tree]): PackageDef = pkg match {
+    // `pkg` is produced by `qualId()` and is always a RefTree.
+    case x: RefTree => atPos(start, pkg.pos.point)(PackageDef(x, stats))
+  }
+
+  /** Packaging ::= package QualId [nl] `{' TopStatSeq `}'
+   */
+  def packaging(start: Int): Tree = {
+    val pkg = qualId()
+    newLineOptWhenFollowedBy(LBRACE)
+    val stats = inDefScopeBraces(topStatSeq)
+    makePackaging(start, pkg, stats)
+  }
+
+  /** TopStatSeq ::= TopStat {semi TopStat}
+   *  TopStat ::= Annotations Modifiers TmplDef
+   *            | Packaging
+   *            | package object objectDef
+   *            | Import
+   *            |
+   */
+  def topStatSeq(): List[Tree] = {
+    val stats = new ListBuffer[Tree]
+    while (!isStatSeqEnd) {
+      setLastStatOffset()
+      if (in.token == PACKAGE) {
+        val start = in.skipToken()
+        if (in.token == OBJECT)
+          stats += objectDef(start, atPos(start, in.skipToken()) { Modifiers(Package) })
+        else stats += packaging(start)
+      }
+      else if (in.token == IMPORT)
+        stats ++= importClause()
+      else if (in.token == AT || isTemplateIntro || isModifier)
+        stats += tmplDef(in.offset, defAnnotsMods(modifierTokens))
+      else if (!isStatSep) {
+        if (in.token == CASE)
+          syntaxErrorOrIncomplete("only `case class` or `case object` allowed")
+        else
+          syntaxErrorOrIncomplete("expected class or object definition")
+        if (mustStartStat) // do parse all definitions even if they are probably local (i.e. a "}" has been forgotten)
+          defOrDcl(in.offset, defAnnotsMods(modifierTokens))
+      }
+      acceptStatSepUnlessAtEnd()
+    }
+    stats.toList
+  }
+
/** TemplateStatSeq ::= [id [`:' Type] `=>'] TemplateStat {semi TemplateStat}
 *  TemplateStat ::= Import
 *                 | Annotations Modifiers Def
 *                 | Annotations Modifiers Dcl
 *                 | Expr1
 *                 | super ArgumentExprs {ArgumentExprs}
 *                 |
 *
 *  Returns the self-type definition (EmptyValDef if none) and the
 *  statements of the template body.
 */
def templateStatSeq(): (ValDef, List[Tree]) = checkNoEscapingPlaceholders {
  var self: ValDef = EmptyValDef
  val stats = new ListBuffer[Tree]
  if (isExprIntro) {
    // The body may start with an expression that turns out to be a
    // self-type declaration if it is followed by `=>`.
    val first = expr1()
    if (in.token == ARROW) {
      first match {
        case Typed(tree @ This(EmptyTypeIdent), tpt) =>
          // `this: T =>` — a wildcard-named self type.
          self = makeSelfDef(nme.WILDCARD, tpt).withPos(first.pos)
        case _ =>
          val ValDef(name, tpt, _) = convertToParam(first, expected = "self type clause")
          if (name != nme.ERROR)
            self = makeSelfDef(name, tpt).withPos(first.pos)
      }
      in.nextToken()
    } else {
      stats += first
      acceptStatSepUnlessAtEnd()
    }
  }
  var exitOnError = false
  while (!isStatSeqEnd && !exitOnError) {
    setLastStatOffset()
    if (in.token == IMPORT)
      stats ++= importClause()
    else if (isExprIntro)
      stats += expr1()
    else if (isDefIntro(modifierTokens))
      stats += defOrDcl(in.offset, defAnnotsMods(modifierTokens))
    else if (!isStatSep) {
      exitOnError = mustStartStat
      syntaxErrorOrIncomplete("illegal start of definition")
    }
    acceptStatSepUnlessAtEnd()
  }
  // An empty body is represented by a single EmptyTree statement.
  (self, if (stats.isEmpty) List(EmptyTree) else stats.toList)
}
+
/** RefineStatSeq ::= RefineStat {semi RefineStat}
 *  RefineStat ::= Dcl
 *               |
 *  (in reality we admit Defs and filter them out afterwards)
 *
 *  Parses the statements of a refinement, recovering from illegal
 *  declaration starts with an error message.
 */
def refineStatSeq(): List[Tree] = {
  val stats = new ListBuffer[Tree]
  while (!isStatSeqEnd) {
    if (isDclIntro) {
      stats += defOrDcl(in.offset, Modifiers())
    } else if (!isStatSep) {
      syntaxErrorOrIncomplete(
        "illegal start of declaration" +
        (if (inFunReturnType) " (possible cause: missing `=' in front of current method body)"
         else ""))
    }
    acceptStatSepUnlessAtEnd()
  }
  stats.toList
}
+
/** Parse a local definition or declaration starting at `start`, with
 *  `implicitFlag` added to its modifiers and, when given, `implicitMod`
 *  recorded as an explicit modifier.
 */
def localDef(start: Int, implicitFlag: FlagSet, implicitMod: Option[Mod] = None): Tree = {
  val base = addFlag(defAnnotsMods(localModifierTokens), implicitFlag)
  val mods = implicitMod match {
    case Some(m) => base.withAddedMod(m)
    case None    => base
  }
  defOrDcl(start, mods)
}
+
/** BlockStatSeq ::= { BlockStat semi } [ResultExpr]
 *  BlockStat ::= Import
 *              | Annotations [implicit] [lazy] Def
 *              | Annotations LocalModifiers TmplDef
 *              | Expr1
 *              |
 *
 *  Parses the statements of a block; stops at the end of the statement
 *  sequence, at a `case` token, or after an unrecoverable error.
 */
def blockStatSeq(): List[Tree] = checkNoEscapingPlaceholders {
  val stats = new ListBuffer[Tree]
  var exitOnError = false
  while (!isStatSeqEnd && in.token != CASE && !exitOnError) {
    setLastStatOffset()
    if (in.token == IMPORT)
      stats ++= importClause()
    else if (isExprIntro)
      stats += expr(Location.InBlock)
    else if (isDefIntro(localModifierTokens))
      if (in.token == IMPLICIT) {
        // `implicit` can start either an implicit closure (when followed by
        // an identifier) or an implicit local definition.
        val start = in.offset
        val mod = atPos(in.skipToken()) { Mod.Implicit(ImplicitCommon) }
        if (isIdent) stats += implicitClosure(start, Location.InBlock, Some(mod))
        else stats += localDef(start, ImplicitCommon, Some(mod))
      } else {
        stats += localDef(in.offset, EmptyFlags)
      }
    else if (!isStatSep && (in.token != CASE)) {
      exitOnError = mustStartStat
      val addendum = if (isModifier) " (no modifiers allowed here)" else ""
      syntaxErrorOrIncomplete("illegal start of statement" + addendum)
    }
    acceptStatSepUnlessAtEnd(CASE)
  }
  stats.toList
}
+
/** CompilationUnit ::= {package QualId semi} TopStatSeq
 *
 *  Parses a whole source file. Leading `package p;` clauses nest
 *  recursively; `package p { ... }` packagings and plain top-level
 *  statements may follow.
 */
def compilationUnit(): Tree = checkNoEscapingPlaceholders {
  def topstats(): List[Tree] = {
    val ts = new ListBuffer[Tree]
    while (in.token == SEMI) in.nextToken()
    val start = in.offset
    if (in.token == PACKAGE) {
      in.nextToken()
      if (in.token == OBJECT) {
        // `package object` — parsed like an object with the Package flag.
        val docstring = in.getDocComment(start)
        ts += objectDef(start, atPos(start, in.skipToken()) { Modifiers(Package) }, docstring)
        if (in.token != EOF) {
          acceptStatSep()
          ts ++= topStatSeq()
        }
      } else {
        val pkg = qualId()
        newLineOptWhenFollowedBy(LBRACE)
        if (in.token == EOF)
          ts += makePackaging(start, pkg, List())
        else if (in.token == LBRACE) {
          // `package p { ... }` followed by more top-level statements.
          ts += inDefScopeBraces(makePackaging(start, pkg, topStatSeq()))
          acceptStatSepUnlessAtEnd()
          ts ++= topStatSeq()
        }
        else {
          // `package p;` — the rest of the file nests under this package.
          acceptStatSep()
          ts += makePackaging(start, pkg, topstats())
        }
      }
    }
    else
      ts ++= topStatSeq()

    ts.toList
  }

  topstats() match {
    case List(stat @ PackageDef(_, _)) => stat
    case Nil => EmptyTree // without this case we'd get package defs without positions
    case stats => PackageDef(Ident(nme.EMPTY_PACKAGE), stats)
  }
}
+ }
+
+
/** A parser variant that skips the bodies of definitions: template bodies
 *  and block expressions are consumed without building trees, so only the
 *  outline (signatures) of the source is produced.
 */
class OutlineParser(source: SourceFile)(implicit ctx: Context) extends Parser(source) {

  /** Skip a balanced `{ ... }` region and return `body`.
   *  XML literals are scanned as whole units so braces inside them do not
   *  affect the nesting count.
   */
  def skipBraces[T](body: T): T = {
    accept(LBRACE)
    var openBraces = 1
    while (in.token != EOF && openBraces > 0) {
      if (in.token == XMLSTART) xmlLiteral()
      else {
        if (in.token == LBRACE) openBraces += 1
        else if (in.token == RBRACE) openBraces -= 1
        in.nextToken()
      }
    }
    body
  }

  // Block expressions and template bodies are skipped, yielding placeholders.
  override def blockExpr(): Tree = skipBraces(EmptyTree)

  override def templateBody() = skipBraces((EmptyValDef, List(EmptyTree)))
}
+}
diff --git a/compiler/src/dotty/tools/dotc/parsing/Scanners.scala b/compiler/src/dotty/tools/dotc/parsing/Scanners.scala
new file mode 100644
index 000000000..60003d098
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/parsing/Scanners.scala
@@ -0,0 +1,1014 @@
+package dotty.tools
+package dotc
+package parsing
+
+import core.Names._, core.Contexts._, core.Decorators._, util.Positions._
+import core.StdNames._, core.Comments._
+import util.SourceFile
+import java.lang.Character.isDigit
+import scala.reflect.internal.Chars._
+import Tokens._
+import scala.annotation.{ switch, tailrec }
+import scala.collection.mutable
+import mutable.ListBuffer
+import Utility.isNameStart
+import rewrite.Rewrites.patch
+
+object Scanners {
+
/** Offset into source character array */
type Offset = Int

/** An undefined offset */
val NoOffset: Offset = -1

/** Tokens are encoded as integers; see Tokens for the token constants. */
type Token = Int
+
/** Mutable bundle of fields describing a single scanned token.
 *  The scanner keeps several instances of this (current, lookahead,
 *  history) and copies between them with `copyFrom`.
 */
trait TokenData {

  /** the next token */
  var token: Token = EMPTY

  /** the offset of the first character of the current token */
  var offset: Offset = 0

  /** the offset of the character following the token preceding this one */
  var lastOffset: Offset = 0

  /** the name of an identifier */
  var name: TermName = null

  /** the string value of a literal */
  var strVal: String = null

  /** the base of a number */
  var base: Int = 0

  /** Copy all token fields of `td` into this instance. */
  def copyFrom(td: TokenData) = {
    this.token = td.token
    this.offset = td.offset
    this.lastOffset = td.lastOffset
    this.name = td.name
    this.strVal = td.strVal
    this.base = td.base
  }
}
+
/** Functionality shared by the Scala and Java scanners: error reporting,
 *  literal buffering, and conversion of literal strings to numeric values.
 */
abstract class ScannerCommon(source: SourceFile)(implicit ctx: Context) extends CharArrayReader with TokenData {
  val buf = source.content

  // Errors -----------------------------------------------------------------

  /** the last error offset
   */
  var errOffset: Offset = NoOffset


  /** Generate an error at the given offset; sets the current token to ERROR. */
  def error(msg: String, off: Offset = offset) = {
    ctx.error(msg, source atPos Position(off))
    token = ERROR
    errOffset = off
  }

  /** signal an error where the input ended in the middle of a token;
   *  sets the current token to EOF. */
  def incompleteInputError(msg: String): Unit = {
    ctx.incompleteInputError(msg, source atPos Position(offset))
    token = EOF
    errOffset = offset
  }

  // Setting token data ----------------------------------------------------

  /** A character buffer for literals
   */
  val litBuf = new StringBuilder

  /** append Unicode character to "litBuf" buffer
   */
  protected def putChar(c: Char): Unit = litBuf.append(c)

  /** Return buffer contents and clear */
  def flushBuf(buf: StringBuilder): String = {
    val str = buf.toString
    buf.clear()
    str
  }

  /** Clear buffer and set name and token. For plain identifiers, the token
   *  is re-mapped through `toToken` so keywords get their own token codes.
   */
  def finishNamed(idtoken: Token = IDENTIFIER, target: TokenData = this): Unit = {
    target.name = flushBuf(litBuf).toTermName
    target.token = idtoken
    if (idtoken == IDENTIFIER) {
      val idx = target.name.start
      target.token = toToken(idx)
    }
  }

  /** Map a name-table index to a token (keyword or IDENTIFIER);
   *  implemented by concrete scanners. */
  def toToken(idx: Int): Token

  /** Clear buffer and set string */
  def setStrVal() =
    strVal = flushBuf(litBuf)

  /** Convert current strVal to char value
   */
  def charVal: Char = if (strVal.length > 0) strVal.charAt(0) else 0

  /** Convert current strVal, base to long value
   *  This is tricky because of max negative value.
   */
  def intVal(negated: Boolean): Long = {
    if (token == CHARLIT && !negated) {
      charVal
    } else {
      var value: Long = 0
      val divider = if (base == 10) 1 else 2
      val limit: Long =
        if (token == LONGLIT) Long.MaxValue else Int.MaxValue
      var i = 0
      val len = strVal.length
      while (i < len) {
        val d = digit2int(strVal charAt i, base)
        if (d < 0) {
          error("malformed integer number")
          return 0
        }
        // Overflow check before accumulating the next digit; the negated
        // case allows one extra unit to admit the most-negative value.
        if (value < 0 ||
            limit / (base / divider) < value ||
            limit - (d / divider) < value * (base / divider) &&
            !(negated && limit == value * base - 1 + d)) {
          error("integer number too large")
          return 0
        }
        value = value * base + d
        i += 1
      }
      if (negated) -value else value
    }
  }

  def intVal: Long = intVal(false)

  /** Convert current strVal, base to double value
   */
  def floatVal(negated: Boolean): Double = {
    val limit: Double =
      if (token == DOUBLELIT) Double.MaxValue else Float.MaxValue
    try {
      val value: Double = java.lang.Double.valueOf(strVal).doubleValue()
      if (value > limit)
        error("floating point number too large")
      if (negated) -value else value
    } catch {
      case _: NumberFormatException =>
        error("malformed floating point number")
        0.0
    }
  }

  def floatVal: Double = floatVal(false)

}
+
+ class Scanner(source: SourceFile, override val startFrom: Offset = 0)(implicit ctx: Context) extends ScannerCommon(source)(ctx) {
// When set, comment text is buffered and doc comments are collected
// (see skipComment / finishComment below).
val keepComments = ctx.settings.YkeepComments.value

/** All doc comments as encountered, each list contains doc comments from
 *  the same block level. Starting with the deepest level and going upward
 */
private[this] var docsPerBlockStack: List[List[Comment]] = List(Nil)

/** Adds level of nesting to docstrings */
def enterBlock(): Unit =
  docsPerBlockStack = List(Nil) ::: docsPerBlockStack

/** Removes level of nesting for docstrings; the stack always keeps at
 *  least one (possibly empty) level. */
def exitBlock(): Unit = docsPerBlockStack = docsPerBlockStack match {
  case x :: Nil => List(Nil)
  case _ => docsPerBlockStack.tail
}
+
/** Returns the closest docstring preceding the position supplied */
def getDocComment(pos: Int): Option[Comment] = {
  /* Walk the candidates, keeping the comment that ends latest while still
   * ending at or before `pos`. The previous version had no case for a
   * candidate that fails the guard, so any such element raised a
   * MatchError; now such candidates are skipped and scanning continues.
   */
  def closest(c: Comment, docstrings: List[Comment]): Comment = docstrings match {
    case x :: xs if (c.pos.end < x.pos.end && x.pos.end <= pos) => closest(x, xs)
    case _ :: xs => closest(c, xs)
    case Nil => c
  }

  docsPerBlockStack match {
    case (list @ (x :: xs)) :: _ => {
      val c = closest(x, xs)
      // Consume the chosen comment (and any older ones) from the current level.
      docsPerBlockStack = list.dropWhile(_ != c).tail :: docsPerBlockStack.tail
      Some(c)
    }
    case _ => None
  }
}
+
/** A buffer for comments */
val commentBuf = new StringBuilder

/** Under Scala-2 compatibility mode, keywords that are new in dotty
 *  (currently only `inline`) are demoted back to identifiers. */
private def handleMigration(keyword: Token): Token =
  if (!isScala2Mode) keyword
  else if (keyword == INLINE) treatAsIdent()
  else keyword


/** Emit a migration warning and patch the source to wrap the name in
 *  backquotes, then scan it as a plain identifier. */
private def treatAsIdent() = {
  testScala2Mode(i"$name is now a keyword, write `$name` instead of $name to keep it as an identifier")
  patch(source, Position(offset), "`")
  patch(source, Position(offset + name.length), "`")
  IDENTIFIER
}

/** Name-table indices up to `lastKeywordStart` are keywords (looked up in
 *  `kwArray`, subject to migration handling); everything else is IDENTIFIER. */
def toToken(idx: Int): Token =
  if (idx >= 0 && idx <= lastKeywordStart) handleMigration(kwArray(idx))
  else IDENTIFIER
+
private class TokenData0 extends TokenData

/** we need one token lookahead and one token history
 */
val next : TokenData = new TokenData0
private val prev : TokenData = new TokenData0

/** a stack of tokens which indicates whether line-ends can be statement separators
 *  also used for keeping track of nesting levels.
 *  We keep track of the closing symbol of a region. This can be
 *  RPAREN    if region starts with '('
 *  RBRACKET  if region starts with '['
 *  RBRACE    if region starts with '{'
 *  ARROW     if region starts with `case'
 *  STRINGLIT if region is a string interpolation expression starting with '${'
 *            (the STRINGLIT appears twice in succession on the stack iff the
 *             expression is a multiline string literal).
 */
var sepRegions: List[Token] = List()

// Scala 2 compatibility

val isScala2Mode = ctx.settings.language.value.contains(nme.Scala2.toString)

/** Cannot use ctx.featureEnabled because accessing the context would force too much.
 *  Emits a migration warning when in Scala-2 mode; returns whether that mode is on.
 */
def testScala2Mode(msg: String, pos: Position = Position(offset)) = {
  if (isScala2Mode) ctx.migrationWarning(msg, source atPos pos)
  isScala2Mode
}
+
// Get next token ------------------------------------------------------------

/** Are we directly in a string interpolation expression?
 */
private def inStringInterpolation =
  sepRegions.nonEmpty && sepRegions.head == STRINGLIT

/** Are we directly in a multiline string interpolation expression?
 *  @pre inStringInterpolation
 */
private def inMultiLineInterpolation =
  inStringInterpolation && sepRegions.tail.nonEmpty && sepRegions.tail.head == STRINGPART

/** read next token and return last offset
 */
def skipToken(): Offset = {
  val off = offset
  nextToken()
  off
}
+
/** Push or pop `sepRegions` according to the token just consumed:
 *  opening tokens push their closing counterpart; closing tokens pop.
 */
def adjustSepRegions(lastToken: Token): Unit = (lastToken: @switch) match {
  case LPAREN =>
    sepRegions = RPAREN :: sepRegions
  case LBRACKET =>
    sepRegions = RBRACKET :: sepRegions
  case LBRACE =>
    sepRegions = RBRACE :: sepRegions
  case CASE =>
    sepRegions = ARROW :: sepRegions
  case RBRACE =>
    // A '}' closes everything up to and including the nearest open brace
    // region; this tolerates unclosed '(' or '[' regions inside it.
    while (!sepRegions.isEmpty && sepRegions.head != RBRACE)
      sepRegions = sepRegions.tail
    if (!sepRegions.isEmpty) sepRegions = sepRegions.tail
  case RBRACKET | RPAREN =>
    if (!sepRegions.isEmpty && sepRegions.head == lastToken)
      sepRegions = sepRegions.tail
  case ARROW =>
    if (!sepRegions.isEmpty && sepRegions.head == lastToken)
      sepRegions = sepRegions.tail
  case STRINGLIT =>
    // End of an interpolated string: pop one region, or two for the
    // multi-line case (see the doc of sepRegions).
    if (inMultiLineInterpolation)
      sepRegions = sepRegions.tail.tail
    else if (inStringInterpolation)
      sepRegions = sepRegions.tail
  case _ =>
}
+
/** Produce next token, filling TokenData fields of Scanner.
 */
def nextToken(): Unit = {
  val lastToken = token
  adjustSepRegions(lastToken)

  // Read a token or copy it from `next` tokenData
  if (next.token == EMPTY) {
    lastOffset = lastCharOffset
    if (inStringInterpolation) fetchStringPart()
    else fetchToken()
    // Treat an ERROR like a closed string literal so that any open
    // interpolation regions are popped and scanning can resume.
    if (token == ERROR) adjustSepRegions(STRINGLIT)
  } else {
    this copyFrom next
    next.token = EMPTY
  }

  /** Insert NEWLINE or NEWLINES if
   *  - we are after a newline
   *  - we are within a { ... } or on toplevel (wrt sepRegions)
   *  - the current token can start a statement and the one before can end it
   *  insert NEWLINES if we are past a blank line, NEWLINE otherwise
   */
  if (isAfterLineEnd() &&
      (canEndStatTokens contains lastToken) &&
      (canStartStatTokens contains token) &&
      (sepRegions.isEmpty || sepRegions.head == RBRACE)) {
    next copyFrom this
    // todo: make offset line-end of previous line?
    offset = if (lineStartOffset <= offset) lineStartOffset else lastLineStartOffset
    token = if (pastBlankLine()) NEWLINES else NEWLINE
  }

  postProcessToken()
  // print("[" + this +"]")
}

/** Fuse token pairs that form a single logical token, using one token of
 *  lookahead (saved in `prev`/`next`).
 */
def postProcessToken() = {
  // Join CASE + CLASS => CASECLASS, CASE + OBJECT => CASEOBJECT, SEMI + ELSE => ELSE
  def lookahead() = {
    prev copyFrom this
    fetchToken()
  }
  def reset(nextLastOffset: Offset) = {
    lastOffset = nextLastOffset
    next copyFrom this
    this copyFrom prev
  }
  def fuse(tok: Int) = {
    token = tok
    offset = prev.offset
    lastOffset = prev.lastOffset
  }
  if (token == CASE) {
    val nextLastOffset = lastCharOffset
    lookahead()
    if (token == CLASS) fuse(CASECLASS)
    else if (token == OBJECT) fuse(CASEOBJECT)
    else reset(nextLastOffset)
  } else if (token == SEMI) {
    val nextLastOffset = lastCharOffset
    lookahead()
    if (token != ELSE) reset(nextLastOffset)
  }
}

/** Is current token first one after a newline? */
def isAfterLineEnd(): Boolean =
  lastOffset < lineStartOffset &&
  (lineStartOffset <= offset ||
   lastOffset < lastLineStartOffset && lastLineStartOffset <= offset)

/** Is there a blank line between the current token and the last one?
 *  @pre afterLineEnd().
 */
private def pastBlankLine(): Boolean = {
  val end = offset
  def recur(idx: Offset, isBlank: Boolean): Boolean =
    idx < end && {
      val ch = buf(idx)
      if (ch == LF || ch == FF) isBlank || recur(idx + 1, true)
      else recur(idx + 1, isBlank && ch <= ' ')
    }
  recur(lastOffset, false)
}
+
/** read next token, filling TokenData fields of Scanner.
 *  Dispatches on the first character of the token; the nested fetchXYZ
 *  helpers keep the enclosing @switch match compact.
 */
protected final def fetchToken(): Unit = {
  offset = charOffset - 1
  (ch: @switch) match {
    case ' ' | '\t' | CR | LF | FF =>
      // Whitespace: skip and retry.
      nextChar()
      fetchToken()
    case 'A' | 'B' | 'C' | 'D' | 'E' |
         'F' | 'G' | 'H' | 'I' | 'J' |
         'K' | 'L' | 'M' | 'N' | 'O' |
         'P' | 'Q' | 'R' | 'S' | 'T' |
         'U' | 'V' | 'W' | 'X' | 'Y' |
         'Z' | '$' | '_' |
         'a' | 'b' | 'c' | 'd' | 'e' |
         'f' | 'g' | 'h' | 'i' | 'j' |
         'k' | 'l' | 'm' | 'n' | 'o' |
         'p' | 'q' | 'r' | 's' | 't' |
         'u' | 'v' | 'w' | 'x' | 'y' |
         'z' =>
      putChar(ch)
      nextChar()
      getIdentRest()
      // An identifier directly followed by '"' starts an interpolated string.
      if (ch == '"' && token == IDENTIFIER)
        token = INTERPOLATIONID
    case '<' => // is XMLSTART?
      def fetchLT() = {
        val last = if (charOffset >= 2) buf(charOffset - 2) else ' '
        nextChar()
        last match {
          case ' ' | '\t' | '\n' | '{' | '(' | '>' if isNameStart(ch) || ch == '!' || ch == '?' =>
            token = XMLSTART
          case _ =>
            // Console.println("found '<', but last is '" + in.last +"'"); // DEBUG
            putChar('<')
            getOperatorRest()
        }
      }
      fetchLT
    case '~' | '!' | '@' | '#' | '%' |
         '^' | '*' | '+' | '-' | /*'<' | */
         '>' | '?' | ':' | '=' | '&' |
         '|' | '\\' =>
      putChar(ch)
      nextChar()
      getOperatorRest()
    case '/' =>
      // Either a comment (skipped) or an operator starting with '/'.
      if (skipComment()) {
        fetchToken()
      } else {
        putChar('/')
        getOperatorRest()
      }
    case '0' =>
      def fetchZero() = {
        putChar(ch)
        nextChar()
        if (ch == 'x' || ch == 'X') {
          nextChar()
          base = 16
        } else {
          /**
           * What should leading 0 be in the future? It is potentially dangerous
           * to let it be base-10 because of history. Should it be an error? Is
           * there a realistic situation where one would need it?
           */
          if (isDigit(ch))
            error("Non-zero numbers may not have a leading zero.")
          base = 10
        }
        getNumber()
      }
      fetchZero
    case '1' | '2' | '3' | '4' | '5' | '6' | '7' | '8' | '9' =>
      base = 10
      getNumber()
    case '`' =>
      getBackquotedIdent()
    case '\"' =>
      def fetchDoubleQuote() = {
        if (token == INTERPOLATIONID) {
          nextRawChar()
          if (ch == '\"') {
            nextRawChar()
            if (ch == '\"') {
              nextRawChar()
              getStringPart(multiLine = true)
              sepRegions = STRINGPART :: sepRegions // indicate string part
              sepRegions = STRINGLIT :: sepRegions // once more to indicate multi line string part
            } else {
              token = STRINGLIT
              strVal = ""
            }
          } else {
            getStringPart(multiLine = false)
            sepRegions = STRINGLIT :: sepRegions // indicate single line string part
          }
        } else {
          nextChar()
          if (ch == '\"') {
            nextChar()
            if (ch == '\"') {
              nextRawChar()
              getRawStringLit()
            } else {
              // Empty string literal "".
              token = STRINGLIT
              strVal = ""
            }
          } else {
            getStringLit()
          }
        }
      }
      fetchDoubleQuote
    case '\'' =>
      def fetchSingleQuote() = {
        nextChar()
        if (isIdentifierStart(ch))
          charLitOr(getIdentRest)
        else if (isOperatorPart(ch) && (ch != '\\'))
          charLitOr(getOperatorRest)
        else {
          getLitChar()
          if (ch == '\'') {
            nextChar()
            token = CHARLIT
            setStrVal()
          } else {
            error("unclosed character literal")
          }
        }
      }
      fetchSingleQuote
    case '.' =>
      nextChar()
      if ('0' <= ch && ch <= '9') {
        putChar('.'); getFraction(); setStrVal()
      } else {
        token = DOT
      }
    case ';' =>
      nextChar(); token = SEMI
    case ',' =>
      nextChar(); token = COMMA
    case '(' =>
      // '(' and '{' open a new doc-comment nesting level; ')' and '}' close it.
      enterBlock(); nextChar(); token = LPAREN
    case '{' =>
      enterBlock(); nextChar(); token = LBRACE
    case ')' =>
      exitBlock(); nextChar(); token = RPAREN
    case '}' =>
      exitBlock(); nextChar(); token = RBRACE
    case '[' =>
      nextChar(); token = LBRACKET
    case ']' =>
      nextChar(); token = RBRACKET
    case SU =>
      if (isAtEnd) token = EOF
      else {
        error("illegal character")
        nextChar()
      }
    case _ =>
      def fetchOther() = {
        if (ch == '\u21D2') {
          nextChar(); token = ARROW
        } else if (ch == '\u2190') {
          nextChar(); token = LARROW
        } else if (Character.isUnicodeIdentifierStart(ch)) {
          putChar(ch)
          nextChar()
          getIdentRest()
        } else if (isSpecial(ch)) {
          putChar(ch)
          nextChar()
          getOperatorRest()
        } else {
          error(f"illegal character '\\u${ch: Int}%04x'")
          nextChar()
        }
      }
      fetchOther
  }
}
+
/** If the '/' just read starts a comment, skip it and return true;
 *  otherwise return false. When `keepComments` is set, comment text is
 *  buffered and doc comments are pushed onto `docsPerBlockStack`.
 */
private def skipComment(): Boolean = {
  def appendToComment(ch: Char) =
    if (keepComments) commentBuf.append(ch)
  // Local shadow of nextChar that also buffers the character read over.
  def nextChar() = {
    appendToComment(ch)
    Scanner.this.nextChar()
  }
  def skipLine(): Unit = {
    nextChar()
    if ((ch != CR) && (ch != LF) && (ch != SU)) skipLine()
  }
  @tailrec
  def skipComment(): Unit = {
    if (ch == '/') {
      nextChar()
      if (ch == '*') nestedComment()
      skipComment()
    }
    else if (ch == '*') {
      do nextChar() while (ch == '*')
      if (ch == '/') nextChar()
      else skipComment()
    }
    else if (ch == SU) incompleteInputError("unclosed comment")
    else { nextChar(); skipComment() }
  }
  def nestedComment() = { nextChar(); skipComment() }
  val start = lastCharOffset
  def finishComment(): Boolean = {
    if (keepComments) {
      val pos = Position(start, charOffset, start)
      val comment = Comment(pos, flushBuf(commentBuf))

      if (comment.isDocComment)
        docsPerBlockStack = (docsPerBlockStack.head :+ comment) :: docsPerBlockStack.tail
    }

    true
  }
  nextChar()
  if (ch == '/') { skipLine(); finishComment() }
  else if (ch == '*') { nextChar(); skipComment(); finishComment() }
  else false
}
+
// Identifiers ---------------------------------------------------------------

/** Scan a backquoted identifier after the opening '`'; empty names and
 *  the wildcard `_` are rejected. */
private def getBackquotedIdent(): Unit = {
  nextChar()
  getLitChars('`')
  if (ch == '`') {
    nextChar()
    finishNamed(BACKQUOTED_IDENT)
    if (name.length == 0)
      error("empty quoted identifier")
    else if (name == nme.WILDCARD)
      error("wildcard invalid as backquoted identifier")
  }
  else error("unclosed quoted identifier")
}

/** Scan the remainder of an identifier whose first character is already
 *  in litBuf. A '_' may switch into operator characters. */
private def getIdentRest(): Unit = (ch: @switch) match {
  case 'A' | 'B' | 'C' | 'D' | 'E' |
       'F' | 'G' | 'H' | 'I' | 'J' |
       'K' | 'L' | 'M' | 'N' | 'O' |
       'P' | 'Q' | 'R' | 'S' | 'T' |
       'U' | 'V' | 'W' | 'X' | 'Y' |
       'Z' | '$' |
       'a' | 'b' | 'c' | 'd' | 'e' |
       'f' | 'g' | 'h' | 'i' | 'j' |
       'k' | 'l' | 'm' | 'n' | 'o' |
       'p' | 'q' | 'r' | 's' | 't' |
       'u' | 'v' | 'w' | 'x' | 'y' |
       'z' |
       '0' | '1' | '2' | '3' | '4' |
       '5' | '6' | '7' | '8' | '9' =>
    putChar(ch)
    nextChar()
    getIdentRest()
  case '_' =>
    putChar(ch)
    nextChar()
    getIdentOrOperatorRest()
  case SU => // strangely enough, Character.isUnicodeIdentifierPart(SU) returns true!
    finishNamed()
  case _ =>
    if (Character.isUnicodeIdentifierPart(ch)) {
      putChar(ch)
      nextChar()
      getIdentRest()
    } else {
      finishNamed()
    }
}

/** Scan the remainder of a symbolic operator name. */
private def getOperatorRest(): Unit = (ch: @switch) match {
  case '~' | '!' | '@' | '#' | '%' |
       '^' | '*' | '+' | '-' | '<' |
       '>' | '?' | ':' | '=' | '&' |
       '|' | '\\' =>
    putChar(ch); nextChar(); getOperatorRest()
  case '/' =>
    // '/' may start a comment, which terminates the operator.
    if (skipComment()) finishNamed()
    else { putChar('/'); getOperatorRest() }
  case _ =>
    if (isSpecial(ch)) { putChar(ch); nextChar(); getOperatorRest() }
    else finishNamed()
}

/** After a '_': continue as an identifier or as an operator, depending on
 *  the next character. */
private def getIdentOrOperatorRest(): Unit = {
  if (isIdentifierPart(ch))
    getIdentRest()
  else ch match {
    case '~' | '!' | '@' | '#' | '%' |
         '^' | '*' | '+' | '-' | '<' |
         '>' | '?' | ':' | '=' | '&' |
         '|' | '\\' | '/' =>
      getOperatorRest()
    case _ =>
      if (isSpecial(ch)) getOperatorRest()
      else finishNamed()
  }
}
+
+
// Literals -----------------------------------------------------------------

/** Scan a single-line string literal after the opening quote. */
private def getStringLit() = {
  getLitChars('"')
  if (ch == '"') {
    setStrVal()
    nextChar()
    token = STRINGLIT
  } else error("unclosed string literal")
}

/** Scan the body of a raw (triple-quoted) string literal. */
private def getRawStringLit(): Unit = {
  if (ch == '\"') {
    nextRawChar()
    if (isTripleQuote()) {
      setStrVal()
      token = STRINGLIT
    } else
      getRawStringLit()
  } else if (ch == SU) {
    incompleteInputError("unclosed multi-line string literal")
  } else {
    putChar(ch)
    nextRawChar()
    getRawStringLit()
  }
}

/** Scan one part of an interpolated string: up to the closing quote(s),
 *  a `${`, or a `$ident` splice. At a splice the accumulated text is
 *  emitted as STRINGPART and the splice token is staged in `next`.
 */
@annotation.tailrec private def getStringPart(multiLine: Boolean): Unit = {
  def finishStringPart() = {
    setStrVal()
    token = STRINGPART
    next.lastOffset = charOffset - 1
    next.offset = charOffset - 1
  }
  if (ch == '"') {
    if (multiLine) {
      nextRawChar()
      if (isTripleQuote()) {
        setStrVal()
        token = STRINGLIT
      } else
        getStringPart(multiLine)
    } else {
      nextChar()
      setStrVal()
      token = STRINGLIT
    }
  } else if (ch == '$') {
    nextRawChar()
    if (ch == '$') {
      // "$$" is an escaped literal dollar sign.
      putChar(ch)
      nextRawChar()
      getStringPart(multiLine)
    } else if (ch == '{') {
      finishStringPart()
      nextRawChar()
      next.token = LBRACE
    } else if (Character.isUnicodeIdentifierStart(ch)) {
      finishStringPart()
      do {
        putChar(ch)
        nextRawChar()
      } while (ch != SU && Character.isUnicodeIdentifierPart(ch))
      finishNamed(target = next)
    } else {
      error("invalid string interpolation: `$$', `$'ident or `$'BlockExpr expected")
    }
  } else {
    val isUnclosedLiteral = !isUnicodeEscape && (ch == SU || (!multiLine && (ch == CR || ch == LF)))
    if (isUnclosedLiteral) {
      if (multiLine)
        incompleteInputError("unclosed multi-line string literal")
      else
        error("unclosed string literal")
    }
    else {
      putChar(ch)
      nextRawChar()
      getStringPart(multiLine)
    }
  }
}

/** Resume scanning inside an interpolated string (called from nextToken
 *  when the current sepRegion is a string interpolation). */
private def fetchStringPart() = {
  offset = charOffset - 1
  getStringPart(multiLine = inMultiLineInterpolation)
}

/** After one '"' was consumed inside a triple-quoted string: check for the
 *  two remaining quotes. Extra quotes beyond the third belong to the
 *  literal; on failure the consumed quotes are put back into litBuf. */
private def isTripleQuote(): Boolean =
  if (ch == '"') {
    nextRawChar()
    if (ch == '"') {
      nextChar()
      while (ch == '"') {
        putChar('"')
        nextChar()
      }
      true
    } else {
      putChar('"')
      putChar('"')
      false
    }
  } else {
    putChar('"')
    false
  }
+
/** copy current character into litBuf, interpreting any escape sequences,
 *  and advance to next character.
 *  Handles octal escapes (up to three digits, leading digit <= '3') and
 *  the standard single-character escapes.
 */
protected def getLitChar(): Unit =
  if (ch == '\\') {
    nextChar()
    if ('0' <= ch && ch <= '7') {
      val leadch: Char = ch
      var oct: Int = digit2int(ch, 8)
      nextChar()
      if ('0' <= ch && ch <= '7') {
        oct = oct * 8 + digit2int(ch, 8)
        nextChar()
        if (leadch <= '3' && '0' <= ch && ch <= '7') {
          oct = oct * 8 + digit2int(ch, 8)
          nextChar()
        }
      }
      putChar(oct.toChar)
    } else {
      ch match {
        case 'b' => putChar('\b')
        case 't' => putChar('\t')
        case 'n' => putChar('\n')
        case 'f' => putChar('\f')
        case 'r' => putChar('\r')
        case '\"' => putChar('\"')
        case '\'' => putChar('\'')
        case '\\' => putChar('\\')
        case _ => invalidEscape()
      }
      nextChar()
    }
  } else {
    putChar(ch)
    nextChar()
  }

/** Report an invalid escape; the offending character is kept verbatim. */
protected def invalidEscape(): Unit = {
  error("invalid escape character", charOffset - 1)
  putChar(ch)
}

/** Buffer characters up to (but excluding) `delimiter`, end of input, or an
 *  unescaped line end. */
private def getLitChars(delimiter: Char) = {
  while (ch != delimiter && !isAtEnd && (ch != SU && ch != CR && ch != LF || isUnicodeEscape))
    getLitChar()
}
+
/** read fractional part and exponent of floating point number
 *  if one is present.
 *  Sets token to DOUBLELIT, or FLOATLIT when an 'f'/'F' suffix follows.
 */
protected def getFraction(): Unit = {
  token = DOUBLELIT
  while ('0' <= ch && ch <= '9') {
    putChar(ch)
    nextChar()
  }
  if (ch == 'e' || ch == 'E') {
    // Only treat 'e' as an exponent if a digit (optionally signed) follows;
    // otherwise it could be a method call like 1.0e.
    val lookahead = lookaheadReader
    lookahead.nextChar()
    if (lookahead.ch == '+' || lookahead.ch == '-') {
      lookahead.nextChar()
    }
    if ('0' <= lookahead.ch && lookahead.ch <= '9') {
      putChar(ch)
      nextChar()
      if (ch == '+' || ch == '-') {
        putChar(ch)
        nextChar()
      }
      while ('0' <= ch && ch <= '9') {
        putChar(ch)
        nextChar()
      }
    }
    token = DOUBLELIT
  }
  if (ch == 'd' || ch == 'D') {
    putChar(ch)
    nextChar()
    token = DOUBLELIT
  } else if (ch == 'f' || ch == 'F') {
    putChar(ch)
    nextChar()
    token = FLOATLIT
  }
  checkNoLetter()
}

/** Report an error if an identifier character directly follows a number. */
def checkNoLetter(): Unit = {
  if (isIdentifierPart(ch) && ch >= ' ')
    error("Invalid literal number")
}
+
/** Read a number into strVal and set base
 *  A '.' after the digits only continues the number if it clearly starts a
 *  fraction (one token of character lookahead disambiguates from method
 *  selection like `1.toString`).
 */
protected def getNumber(): Unit = {
  while (digit2int(ch, base) >= 0) {
    putChar(ch)
    nextChar()
  }
  token = INTLIT
  if (base == 10 && ch == '.') {
    val isDefinitelyNumber = {
      val lookahead = lookaheadReader
      val c = lookahead.getc()
      (c: @switch) match {
        /** Another digit is a giveaway. */
        case '0' | '1' | '2' | '3' | '4' | '5' | '6' | '7' | '8' | '9' =>
          true

        /** Backquoted idents like 22.`foo`. */
        case '`' =>
          false

        /** These letters may be part of a literal, or a method invocation on an Int.
         */
        case 'd' | 'D' | 'f' | 'F' =>
          !isIdentifierPart(lookahead.getc())

        /** A little more special handling for e.g. 5e7 */
        case 'e' | 'E' =>
          val ch = lookahead.getc()
          !isIdentifierPart(ch) || (isDigit(ch) || ch == '+' || ch == '-')

        case x =>
          !isIdentifierStart(x)
      }
    }
    if (isDefinitelyNumber) {
      putChar(ch)
      nextChar()
      getFraction()
    }
  } else (ch: @switch) match {
    case 'e' | 'E' | 'f' | 'F' | 'd' | 'D' =>
      if (base == 10) getFraction()
    case 'l' | 'L' =>
      nextChar()
      token = LONGLIT
    case _ =>
  }
  setStrVal()
}
+
/** Parse character literal if current character is followed by \',
 *  or follow with given op and return a symbol literal token
 */
def charLitOr(op: () => Unit): Unit = {
  putChar(ch)
  nextChar()
  if (ch == '\'') {
    nextChar()
    token = CHARLIT
    setStrVal()
  } else {
    // Not a char literal after all: scan the rest with `op` and emit a
    // symbol literal such as 'foo.
    op()
    token = SYMBOLLIT
    strVal = name.toString
  }
}

override def toString =
  showTokenDetailed(token) + {
    if ((identifierTokens contains token) || (literalTokens contains token)) " " + name
    else ""
  }

/** Compact, human-readable rendering of the current token (for debugging). */
def show: String = token match {
  case IDENTIFIER | BACKQUOTED_IDENT => s"id($name)"
  case CHARLIT => s"char($intVal)"
  case INTLIT => s"int($intVal)"
  case LONGLIT => s"long($intVal)"
  case FLOATLIT => s"float($floatVal)"
  case DOUBLELIT => s"double($floatVal)"
  case STRINGLIT => s"string($strVal)"
  case STRINGPART => s"stringpart($strVal)"
  case INTERPOLATIONID => s"interpolationid($name)"
  case SEMI => ";"
  case NEWLINE => ";"
  case NEWLINES => ";;"
  case COMMA => ","
  case _ => showToken(token)
}

// (does not seem to be needed) def flush = { charOffset = offset; nextChar(); this }

/* Resume normal scanning after XML */
def resume(lastToken: Token) = {
  token = lastToken
  if (next.token != EMPTY && !ctx.reporter.hasErrors)
    error("unexpected end of input: possible missing '}' in XML block")

  nextToken()
}

/* Initialization: read first char, then first token */
nextChar()
nextToken()
+ } // end Scanner
+
// ------------- keyword configuration -----------------------------------

/** Keyword lookup table: `kwArray` maps name-table indices up to
 *  `lastKeywordStart` to keyword tokens (used by Scanner.toToken). */
val (lastKeywordStart, kwArray) = buildKeywordArray(keywords)
+}
diff --git a/compiler/src/dotty/tools/dotc/parsing/ScriptParsers.scala b/compiler/src/dotty/tools/dotc/parsing/ScriptParsers.scala
new file mode 100644
index 000000000..afa7fefab
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/parsing/ScriptParsers.scala
@@ -0,0 +1,145 @@
+package dotty.tools
+package dotc
+package parsing
+
+import util.SourceFile
+import core._
+import Contexts._
+import Parsers._
+
+
+/** <p>Performs the following context-free rewritings:</p>
+ * <ol>
+ * <li>
+ * Places all pattern variables in Bind nodes. In a pattern, for
+ * identifiers <code>x</code>:<pre>
+ * x => x @ _
+ * x:T => x @ (_ : T)</pre>
+ * </li>
+ * <li>Removes pattern definitions (PatDef's) as follows:
+ * If pattern is a simple (typed) identifier:<pre>
+ * <b>val</b> x = e ==> <b>val</b> x = e
+ * <b>val</b> x: T = e ==> <b>val</b> x: T = e</pre>
+ *
+ * if there are no variables in pattern<pre>
+ * <b>val</b> p = e ==> e match (case p => ())</pre>
+ *
+ * if there is exactly one variable in pattern<pre>
+ * <b>val</b> x_1 = e <b>match</b> (case p => (x_1))</pre>
+ *
+ * if there is more than one variable in pattern<pre>
+ * <b>val</b> p = e ==> <b>private synthetic val</b> t$ = e <b>match</b> (case p => (x_1, ..., x_N))
+ * <b>val</b> x_1 = t$._1
+ * ...
+ * <b>val</b> x_N = t$._N</pre>
+ * </li>
+ * <li>
+ * Removes function types as follows:<pre>
+ * (argtpes) => restpe ==> scala.Function_n[argtpes, restpe]</pre>
+ * </li>
+ * <li>
+ * Wraps naked case definitions in a match as follows:<pre>
+ * { cases } ==> (x => x.match {cases})<span style="font-family:normal;">, except when already argument to match</span></pre>
+ * </li>
+ * </ol>
+ */
+object ScriptParsers {
+
+ import ast.untpd._
+
+ /** Parser for script files: a series of template statements rather than a
+ * full compilation unit. Currently a stub — `parse` is unsupported; the
+ * pre-dotty implementation is kept below, commented out, pending porting.
+ */
+ class ScriptParser(source: SourceFile)(implicit ctx: Context) extends Parser(source) {
+
+ /** This is the parse entry point for code which is not self-contained, e.g.
+ * a script which is a series of template statements. They will be
+ * swaddled in Trees until the AST is equivalent to the one returned
+ * by compilationUnit().
+ */
+ override def parse(): Tree = unsupported("parse")
+ /* TODO: reinstantiate
+ val stmts = templateStatSeq(false)._2
+ accept(EOF)
+
+ def mainModuleName = ctx.settings.script.value
+
+ /** If there is only a single object template in the file and it has a
+ * suitable main method, we will use it rather than building another object
+ * around it. Since objects are loaded lazily the whole script would have
+ * been a no-op, so we're not taking much liberty.
+ */
+ def searchForMain(): Option[Tree] = {
+ /** Have to be fairly liberal about what constitutes a main method since
+ * nothing has been typed yet - for instance we can't assume the parameter
+ * type will look exactly like "Array[String]" as it could have been renamed
+ * via import, etc.
+ */
+ def isMainMethod(t: Tree) = t match {
+ case DefDef(_, nme.main, Nil, List(_), _, _) => true
+ case _ => false
+ }
+ /** For now we require there only be one top level object. */
+ var seenModule = false
+ val newStmts = stmts collect {
+ case t @ Import(_, _) => t
+ case md @ ModuleDef(mods, name, template)
+ if !seenModule && (template.body exists isMainMethod) =>
+ seenModule = true
+ /** This slightly hacky situation arises because we have no way to communicate
+ * back to the scriptrunner what the name of the program is. Even if we were
+ * willing to take the sketchy route of settings.script.value = progName, that
+ * does not work when using fsc. And to find out in advance would impose a
+ * whole additional parse. So instead, if the actual object's name differs from
+ * what the script is expecting, we transform it to match.
+ */
+ md.derivedModuleDef(mods, mainModuleName.toTermName, template)
+ case _ =>
+ /** If we see anything but the above, fail. */
+ return None
+ }
+ Some(makePackaging(0, emptyPkg, newStmts))
+ }
+
+ if (mainModuleName == ScriptRunner.defaultScriptMain)
+ searchForMain() foreach { return _ }
+
+ /** Here we are building an AST representing the following source fiction,
+ * where <moduleName> is from -Xscript (defaults to "Main") and <stmts> are
+ * the result of parsing the script file.
+ *
+ * object <moduleName> {
+ * def main(argv: Array[String]): Unit = {
+ * val args = argv
+ * new AnyRef {
+ * <stmts>
+ * }
+ * }
+ * }
+ */
+ import definitions._
+
+ def emptyPkg = atPos(0, 0, 0) { Ident(nme.EMPTY_PACKAGE_NAME) }
+ def emptyInit = DefDef(
+ Modifiers(),
+ nme.CONSTRUCTOR,
+ Nil,
+ List(Nil),
+ TypeTree(),
+ Block(List(Apply(Select(Super(This(tpnme.EMPTY), tpnme.EMPTY), nme.CONSTRUCTOR), Nil)), Literal(Constant(())))
+ )
+
+ // def main
+ def mainParamType = AppliedTypeTree(Ident(tpnme.Array), List(Ident(tpnme.String)))
+ def mainParameter = List(ValDef(Modifiers(Param), "argv", mainParamType, EmptyTree))
+ def mainSetArgv = List(ValDef(Modifiers(), "args", TypeTree(), Ident("argv")))
+ def mainNew = makeNew(Nil, emptyValDef, stmts, List(Nil), NoPosition, NoPosition)
+ def mainDef = DefDef(Modifiers(), nme.main, Nil, List(mainParameter), scalaDot(tpnme.Unit), Block(mainSetArgv, mainNew))
+
+ // object Main
+ def moduleName = ScriptRunner scriptMain settings
+ def moduleBody = Template(List(scalaScalaObjectConstr), emptyValDef, List(emptyInit, mainDef))
+ def moduleDef = ModuleDef(Modifiers(), moduleName, moduleBody)
+
+ // package <empty> { ... }
+ makePackaging(0, emptyPkg, List(moduleDef))
+ }*/
+ }
+}
diff --git a/compiler/src/dotty/tools/dotc/parsing/SymbolicXMLBuilder.scala b/compiler/src/dotty/tools/dotc/parsing/SymbolicXMLBuilder.scala
new file mode 100644
index 000000000..20b655a19
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/parsing/SymbolicXMLBuilder.scala
@@ -0,0 +1,264 @@
+package dotty.tools
+package dotc
+package parsing
+
+import scala.collection.mutable
+import scala.xml.{ EntityRef, Text }
+import core._
+import Flags.Mutable
+import Names._, StdNames._, ast.Trees._, ast.{tpd, untpd}
+import Symbols._, Contexts._
+import util.Positions._
+import Parsers.Parser
+import scala.reflect.internal.util.StringOps.splitWhere
+import scala.language.implicitConversions
+
+/** This class builds instance of `Tree` that represent XML.
+ *
+ * Note from martin: This needs to have its position info reworked. I don't
+ * understand exactly what's done here. To make validation pass, I set many
+ * positions to be transparent. Not sure this is a good idea for navigating
+ * XML trees in the IDE but it's the best I can do right now. If someone
+ * who understands this part better wants to give it a shot, please do!
+ *
+ * @author Burak Emir
+ * @version 1.0
+ */
+class SymbolicXMLBuilder(parser: Parser, preserveWS: Boolean)(implicit ctx: Context) {
+
+ import Constants.Constant
+ import untpd._
+
+ import parser.atPos
+
+ // Whether the XML currently being built occurs in pattern position;
+ // consulted by text() (and mkXML takes its own flag for explicit control).
+ private[parsing] var isPattern: Boolean = _
+
+ // Type names of the scala.xml classes referenced by the generated trees.
+ private object xmltypes extends ScalaTypeNames {
+ val _Comment: TypeName = "Comment"
+ val _Elem: TypeName = "Elem"
+ val _EntityRef: TypeName = "EntityRef"
+ val _Group: TypeName = "Group"
+ val _MetaData: TypeName = "MetaData"
+ val _NamespaceBinding: TypeName = "NamespaceBinding"
+ val _NodeBuffer: TypeName = "NodeBuffer"
+ val _PrefixedAttribute: TypeName = "PrefixedAttribute"
+ val _ProcInstr: TypeName = "ProcInstr"
+ val _Text: TypeName = "Text"
+ val _Unparsed: TypeName = "Unparsed"
+ val _UnprefixedAttribute: TypeName = "UnprefixedAttribute"
+ }
+
+ // Term names (including synthetic $-prefixed locals) used in the trees.
+ private object xmlterms extends ScalaTermNames {
+ val _Null: TermName = "Null"
+ val __Elem: TermName = "Elem"
+ val __Text: TermName = "Text"
+ val _buf: TermName = "$buf"
+ val _md: TermName = "$md"
+ val _plus: TermName = "$amp$plus"
+ val _scope: TermName = "$scope"
+ val _tmpscope: TermName = "$tmpscope"
+ val _xml: TermName = "xml"
+ }
+
+ import xmltypes.{_Comment, _Elem, _EntityRef, _Group, _MetaData, _NamespaceBinding, _NodeBuffer,
+ _PrefixedAttribute, _ProcInstr, _Text, _Unparsed, _UnprefixedAttribute}
+
+ import xmlterms.{_Null, __Elem, __Text, _buf, _md, _plus, _scope, _tmpscope, _xml}
+
+ // convenience methods
+ private def LL[A](x: A*): List[List[A]] = List(List(x:_*)) // single-arg-list wrapper for New/Apply
+ private def const(x: Any) = Literal(Constant(x))
+ private def wild = Ident(nme.WILDCARD)
+ private def wildStar = Ident(tpnme.WILDCARD_STAR)
+ private def _scala(name: Name) = scalaDot(name)
+ private def _scala_xml(name: Name) = Select(_scala(_xml), name)
+
+ // Selections of the form scala.xml.<N>, one per class used above.
+ private def _scala_xml_Comment = _scala_xml(_Comment)
+ private def _scala_xml_Elem = _scala_xml(_Elem)
+ private def _scala_xml_EntityRef = _scala_xml(_EntityRef)
+ private def _scala_xml_Group = _scala_xml(_Group)
+ private def _scala_xml_MetaData = _scala_xml(_MetaData)
+ private def _scala_xml_NamespaceBinding = _scala_xml(_NamespaceBinding)
+ private def _scala_xml_NodeBuffer = _scala_xml(_NodeBuffer)
+ private def _scala_xml_Null = _scala_xml(_Null)
+ private def _scala_xml_PrefixedAttribute = _scala_xml(_PrefixedAttribute)
+ private def _scala_xml_ProcInstr = _scala_xml(_ProcInstr)
+ private def _scala_xml_Text = _scala_xml(_Text)
+ private def _scala_xml_Unparsed = _scala_xml(_Unparsed)
+ private def _scala_xml_UnprefixedAttribute= _scala_xml(_UnprefixedAttribute)
+ private def _scala_xml__Elem = _scala_xml(__Elem)
+ private def _scala_xml__Text = _scala_xml(__Text)
+
+ /** Wildly wrong documentation deleted in favor of "self-documenting code." */
+ // Builds either an Elem extractor pattern (isPattern) or a `new Elem(...)`
+ // expression; non-empty children are passed as a vararg sequence argument.
+ protected def mkXML(
+ pos: Position,
+ isPattern: Boolean,
+ pre: Tree,
+ label: Tree,
+ attrs: Tree,
+ scope: Tree,
+ empty: Boolean,
+ children: Seq[Tree]): Tree =
+ {
+ def starArgs =
+ if (children.isEmpty) Nil
+ else List(Typed(makeXMLseq(pos, children), wildStar))
+
+ def pat = Apply(_scala_xml__Elem, List(pre, label, wild, wild) ::: convertToTextPat(children))
+ def nonpat = New(_scala_xml_Elem, List(List(pre, label, attrs, scope, if (empty) Literal(Constant(true)) else Literal(Constant(false))) ::: starArgs))
+
+ atPos(pos) { if (isPattern) pat else nonpat }
+ }
+
+ final def entityRef(pos: Position, n: String) =
+ atPos(pos)( New(_scala_xml_EntityRef, LL(const(n))) )
+
+ // create scala.xml.Text here <: scala.xml.Node
+ final def text(pos: Position, txt: String): Tree = atPos(pos) {
+ if (isPattern) makeTextPat(const(txt))
+ else makeText1(const(txt))
+ }
+
+ def makeTextPat(txt: Tree) = Apply(_scala_xml__Text, List(txt))
+ def makeText1(txt: Tree) = New(_scala_xml_Text, LL(txt))
+ def comment(pos: Position, text: String) = atPos(pos)( Comment(const(text)) )
+ def charData(pos: Position, txt: String) = atPos(pos)( makeText1(const(txt)) )
+
+ def procInstr(pos: Position, target: String, txt: String) =
+ atPos(pos)( ProcInstr(const(target), const(txt)) )
+
+ protected def Comment(txt: Tree) = New(_scala_xml_Comment, LL(txt))
+ protected def ProcInstr(target: Tree, txt: Tree) = New(_scala_xml_ProcInstr, LL(target, txt))
+
+ /** @todo: attributes */
+ def makeXMLpat(pos: Position, n: String, args: Seq[Tree]): Tree = {
+ val (prepat, labpat) = splitPrefix(n) match {
+ case (Some(pre), rest) => (const(pre), const(rest))
+ case _ => (wild, const(n))
+ }
+ mkXML(pos, true, prepat, labpat, null, null, false, args)
+ }
+
+ // Literals in pattern position become Text extractor patterns.
+ protected def convertToTextPat(t: Tree): Tree = t match {
+ case _: Literal => makeTextPat(t)
+ case _ => t
+ }
+ protected def convertToTextPat(buf: Seq[Tree]): List[Tree] =
+ (buf map convertToTextPat).toList
+
+ /** Parse an attribute value into trees: Nil for the empty value, a single
+ * node, or a sequence built by makeXMLseq. */
+ def parseAttribute(pos: Position, s: String): Tree = {
+ val ts = scala.xml.Utility.parseAttributeValue(s) map {
+ case Text(s) => text(pos, s)
+ case EntityRef(s) => entityRef(pos, s)
+ }
+ ts.length match {
+ case 0 => TypedSplice(tpd.ref(defn.NilModule) withPos pos)
+ case 1 => ts.head
+ case _ => makeXMLseq(pos, ts.toList)
+ }
+ }
+
+ def isEmptyText(t: Tree) = t match {
+ case Literal(Constant("")) => true
+ case _ => false
+ }
+
+ /** could optimize if args.length == 0, args.length == 1 AND args(0) is <: Node. */
+ // Emits: { val $buf = new NodeBuffer; $buf &+ arg1; ...; $buf }
+ def makeXMLseq(pos: Position, args: Seq[Tree]) = {
+ val buffer = ValDef(_buf, TypeTree(), New(_scala_xml_NodeBuffer, ListOfNil))
+ val applies = args filterNot isEmptyText map (t => Apply(Select(Ident(_buf), _plus), List(t)))
+
+ atPos(pos)( Block(buffer :: applies.toList, Ident(_buf)) )
+ }
+
+ /** Returns (Some(prefix) | None, rest) based on position of ':' */
+ def splitPrefix(name: String): (Option[String], String) = splitWhere(name, _ == ':', true) match {
+ case Some((pre, rest)) => (Some(pre), rest)
+ case _ => (None, name)
+ }
+
+ /** Various node constructions. */
+ def group(pos: Position, args: Seq[Tree]): Tree =
+ atPos(pos)( New(_scala_xml_Group, LL(makeXMLseq(pos, args))) )
+
+ def unparsed(pos: Position, str: String): Tree =
+ atPos(pos)( New(_scala_xml_Unparsed, LL(const(str))) )
+
+ /** Build the tree for an XML element: xmlns attributes become namespace
+ * bindings ($tmpscope/$scope), remaining attributes become MetaData
+ * assignments ($md), and both wrap the Elem construction in nested Blocks.
+ * NOTE: handleNamespaceBinding mutates attrMap (removes consumed keys).
+ */
+ def element(pos: Position, qname: String, attrMap: mutable.Map[String, Tree], empty: Boolean, args: Seq[Tree]): Tree = {
+ def handleNamespaceBinding(pre: String, z: String): Tree = {
+ def mkAssign(t: Tree): Tree = Assign(
+ Ident(_tmpscope),
+ New(_scala_xml_NamespaceBinding, LL(const(pre), t, Ident(_tmpscope)))
+ )
+
+ val uri1 = attrMap(z) match {
+ case Apply(_, List(uri @ Literal(Constant(_)))) => mkAssign(uri)
+ case Select(_, nme.Nil) => mkAssign(const(null)) // allow for xmlns="" -- bug #1626
+ case x => mkAssign(x)
+ }
+ attrMap -= z
+ uri1
+ }
+
+ /** Extract all the namespaces from the attribute map. */
+ val namespaces: List[Tree] =
+ for (z <- attrMap.keys.toList ; if z startsWith "xmlns") yield {
+ val ns = splitPrefix(z) match {
+ case (Some(_), rest) => rest
+ case _ => null
+ }
+ handleNamespaceBinding(ns, z)
+ }
+
+ val (pre, newlabel) = splitPrefix(qname) match {
+ case (Some(p), x) => (p, x)
+ case (None, x) => (null, x)
+ }
+
+ def mkAttributeTree(pre: String, key: String, value: Tree) = atPos(pos.toSynthetic) {
+ // XXX this is where we'd like to put Select(value, nme.toString_) for #1787
+ // after we resolve the Some(foo) situation.
+ val baseArgs = List(const(key), value, Ident(_md))
+ val (clazz, attrArgs) =
+ if (pre == null) (_scala_xml_UnprefixedAttribute, baseArgs)
+ else (_scala_xml_PrefixedAttribute , const(pre) :: baseArgs)
+
+ Assign(Ident(_md), New(clazz, LL(attrArgs: _*)))
+ }
+
+ def handlePrefixedAttribute(pre: String, key: String, value: Tree) = mkAttributeTree(pre, key, value)
+ def handleUnprefixedAttribute(key: String, value: Tree) = mkAttributeTree(null, key, value)
+
+ val attributes: List[Tree] =
+ for ((k, v) <- attrMap.toList.reverse) yield splitPrefix(k) match {
+ case (Some(pre), rest) => handlePrefixedAttribute(pre, rest, v)
+ case _ => handleUnprefixedAttribute(k, v)
+ }
+
+ lazy val scopeDef = ValDef(_scope, _scala_xml_NamespaceBinding, Ident(_tmpscope))
+ lazy val tmpScopeDef = ValDef(_tmpscope, _scala_xml_NamespaceBinding, Ident(_scope)).withFlags(Mutable)
+ lazy val metadataDef = ValDef(_md, _scala_xml_MetaData, _scala_xml_Null).withFlags(Mutable)
+ val makeSymbolicAttrs = if (!attributes.isEmpty) Ident(_md) else _scala_xml_Null
+
+ // Only emit the $scope/$md prologue definitions that are actually needed.
+ val (attrResult, nsResult) =
+ (attributes.isEmpty, namespaces.isEmpty) match {
+ case (true , true) => (Nil, Nil)
+ case (true , false) => (scopeDef :: Nil, tmpScopeDef :: namespaces)
+ case (false, true) => (metadataDef :: attributes, Nil)
+ case (false, false) => (scopeDef :: metadataDef :: attributes, tmpScopeDef :: namespaces)
+ }
+
+ val body = mkXML(
+ pos.toSynthetic,
+ false,
+ const(pre),
+ const(newlabel),
+ makeSymbolicAttrs,
+ Ident(_scope),
+ empty,
+ args
+ )
+
+ atPos(pos.toSynthetic)( Block(nsResult, Block(attrResult, body)) )
+ }
+}
diff --git a/compiler/src/dotty/tools/dotc/parsing/Tokens.scala b/compiler/src/dotty/tools/dotc/parsing/Tokens.scala
new file mode 100644
index 000000000..5324207db
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/parsing/Tokens.scala
@@ -0,0 +1,238 @@
+package dotty.tools
+package dotc
+package parsing
+
+import collection.immutable.BitSet
+import core.Decorators._
+
+abstract class TokensCommon {
+ // Largest token value; must be defined by subclasses (used to size arrays below).
+ val maxToken: Int
+
+ type Token = Int
+ type TokenSet = BitSet
+
+ def tokenRange(lo: Int, hi: Int): TokenSet = BitSet(lo to hi: _*)
+
+ def showTokenDetailed(token: Int) = debugString(token)
+
+ /** User-facing name of a token; keyword names are quoted. */
+ def showToken(token: Int) = {
+ val str = tokenString(token)
+ if (keywords contains token) s"'$str'" else str
+ }
+
+ // Per-token display name and debug name, indexed by token value.
+ // NOTE(review): sizing reads the abstract `maxToken` during construction;
+ // subclasses define it as a constant final val — confirm for new subclasses.
+ val tokenString, debugString = new Array[String](maxToken + 1)
+
+ /** Register a token's display name (and optional distinct debug name);
+ * asserts each token value is entered at most once. */
+ def enter(token: Int, str: String, debugStr: String = ""): Unit = {
+ assert(tokenString(token) == null)
+ tokenString(token) = str
+ debugString(token) = if (debugStr.isEmpty) str else debugStr
+ }
+
+ /** special tokens */
+ final val EMPTY = 0; enter(EMPTY, "<empty>") // a missing token, used in lookahead
+ final val ERROR = 1; enter(ERROR, "erroneous token") // an erroneous token
+ final val EOF = 2; enter(EOF, "eof")
+
+ /** literals */
+ final val CHARLIT = 3; enter(CHARLIT, "character literal")
+ final val INTLIT = 4; enter(INTLIT, "integer literal")
+ final val LONGLIT = 5; enter(LONGLIT, "long literal")
+ final val FLOATLIT = 6; enter(FLOATLIT, "float literal")
+ final val DOUBLELIT = 7; enter(DOUBLELIT, "double literal")
+ final val STRINGLIT = 8; enter(STRINGLIT, "string literal")
+ final val STRINGPART = 9; enter(STRINGPART, "string literal", "string literal part")
+ //final val INTERPOLATIONID = 10; enter(INTERPOLATIONID, "string interpolator")
+ //final val SYMBOLLIT = 11; enter(SYMBOLLIT, "symbol literal") // TODO: deprecate
+
+ /** identifiers */
+ final val IDENTIFIER = 12; enter(IDENTIFIER, "identifier")
+ //final val BACKQUOTED_IDENT = 13; enter(BACKQUOTED_IDENT, "identifier", "backquoted ident")
+
+ /** alphabetic keywords */
+ // Commented-out entries are Scala-only tokens, defined in object Tokens;
+ // the numbering here reserves their values so the two stay in sync.
+ final val IF = 20; enter(IF, "if")
+ final val FOR = 21; enter(FOR, "for")
+ final val ELSE = 22; enter(ELSE, "else")
+ final val THIS = 23; enter(THIS, "this")
+ final val NULL = 24; enter(NULL, "null")
+ final val NEW = 25; enter(NEW, "new")
+ //final val WITH = 26; enter(WITH, "with")
+ final val SUPER = 27; enter(SUPER, "super")
+ //final val CASE = 28; enter(CASE, "case")
+ //final val CASECLASS = 29; enter(CASECLASS, "case class")
+ //final val CASEOBJECT = 30; enter(CASEOBJECT, "case object")
+ //final val VAL = 31; enter(VAL, "val")
+ final val ABSTRACT = 32; enter(ABSTRACT, "abstract")
+ final val FINAL = 33; enter(FINAL, "final")
+ final val PRIVATE = 34; enter(PRIVATE, "private")
+ final val PROTECTED = 35; enter(PROTECTED, "protected")
+ final val OVERRIDE = 36; enter(OVERRIDE, "override")
+ //final val IMPLICIT = 37; enter(IMPLICIT, "implicit")
+ //final val VAR = 38; enter(VAR, "var")
+ //final val DEF = 39; enter(DEF, "def")
+ //final val TYPE = 40; enter(TYPE, "type")
+ final val EXTENDS = 41; enter(EXTENDS, "extends")
+ final val TRUE = 42; enter(TRUE, "true")
+ final val FALSE = 43; enter(FALSE, "false")
+ //final val OBJECT = 44; enter(OBJECT, "object")
+ final val CLASS = 45; enter(CLASS, "class")
+ final val IMPORT = 46; enter(IMPORT, "import")
+ final val PACKAGE = 47; enter(PACKAGE, "package")
+ //final val YIELD = 48; enter(YIELD, "yield")
+ final val DO = 49; enter(DO, "do")
+ //final val TRAIT = 50; enter(TRAIT, "trait")
+ //final val SEALED = 51; enter(SEALED, "sealed")
+ final val THROW = 52; enter(THROW, "throw")
+ final val TRY = 53; enter(TRY, "try")
+ final val CATCH = 54; enter(CATCH, "catch")
+ final val FINALLY = 55; enter(FINALLY, "finally")
+ final val WHILE = 56; enter(WHILE, "while")
+ final val RETURN = 57; enter(RETURN, "return")
+ //final val MATCH = 58; enter(MATCH, "match")
+ //final val LAZY = 59; enter(LAZY, "lazy")
+ //final val THEN = 60; enter(THEN, "then")
+ //final val FORSOME = 61; enter(FORSOME, "forSome") // TODO: deprecate
+ //final val INLINE = 62; enter(INLINE, "inline")
+
+ /** special symbols */
+ final val COMMA = 70; enter(COMMA, "','")
+ final val SEMI = 71; enter(SEMI, "';'")
+ final val DOT = 72; enter(DOT, "'.'")
+ //final val NEWLINE = 78; enter(NEWLINE, "end of statement", "new line")
+ //final val NEWLINES = 79; enter(NEWLINES, "end of statement", "new lines")
+
+ /** special keywords */
+ //final val USCORE = 73; enter(USCORE, "_")
+ final val COLON = 74; enter(COLON, ":")
+ final val EQUALS = 75; enter(EQUALS, "=")
+ //final val LARROW = 76; enter(LARROW, "<-")
+ //final val ARROW = 77; enter(ARROW, "=>")
+ //final val SUBTYPE = 80; enter(SUBTYPE, "<:")
+ //final val SUPERTYPE = 81; enter(SUPERTYPE, ">:")
+ //final val HASH = 82; enter(HASH, "#")
+ final val AT = 83; enter(AT, "@")
+ //final val VIEWBOUND = 84; enter(VIEWBOUND, "<%") // TODO: deprecate
+
+ // Keyword token set; defined by each concrete token table.
+ val keywords: TokenSet
+
+ /** parentheses */
+ final val LPAREN = 90; enter(LPAREN, "'('")
+ final val RPAREN = 91; enter(RPAREN, "')'")
+ final val LBRACKET = 92; enter(LBRACKET, "'['")
+ final val RBRACKET = 93; enter(RBRACKET, "']'")
+ final val LBRACE = 94; enter(LBRACE, "'{'")
+ final val RBRACE = 95; enter(RBRACE, "'}'")
+
+ final val firstParen = LPAREN
+ final val lastParen = RBRACE
+
+ /** Build a lookup array mapping the name-table start index of each keyword
+ * (single-word keywords with a display name only) to its token; all other
+ * slots hold IDENTIFIER. Returns (largest start index, the array). */
+ def buildKeywordArray(keywords: TokenSet) = {
+ def start(tok: Token) = tokenString(tok).toTermName.start
+ def sourceKeywords = keywords.toList.filter { (kw: Token) =>
+ val ts = tokenString(kw)
+ (ts != null) && !ts.contains(' ')
+ }
+
+ val lastKeywordStart = sourceKeywords.map(start).max
+
+ val arr = Array.fill(lastKeywordStart + 1)(IDENTIFIER)
+ for (kw <- sourceKeywords) arr(start(kw)) = kw
+ (lastKeywordStart, arr)
+ }
+}
+
+/** The full Scala token table: adds the Scala-only tokens whose values are
+ * reserved (commented out) in TokensCommon, plus the token-class sets used
+ * by the parser. */
+object Tokens extends TokensCommon {
+ final val minToken = EMPTY
+ final val maxToken = XMLSTART
+
+ final val INTERPOLATIONID = 10; enter(INTERPOLATIONID, "string interpolator")
+ final val SYMBOLLIT = 11; enter(SYMBOLLIT, "symbol literal") // TODO: deprecate
+
+ final val BACKQUOTED_IDENT = 13; enter(BACKQUOTED_IDENT, "identifier", "backquoted ident")
+
+ final val identifierTokens = BitSet(IDENTIFIER, BACKQUOTED_IDENT)
+
+ def isIdentifier(token : Int) =
+ token >= IDENTIFIER && token <= BACKQUOTED_IDENT
+
+ /** alphabetic keywords */
+ final val WITH = 26; enter(WITH, "with")
+ final val CASE = 28; enter(CASE, "case")
+ final val CASECLASS = 29; enter(CASECLASS, "case class")
+ final val CASEOBJECT = 30; enter(CASEOBJECT, "case object")
+ final val VAL = 31; enter(VAL, "val")
+ final val IMPLICIT = 37; enter(IMPLICIT, "implicit")
+ final val VAR = 38; enter(VAR, "var")
+ final val DEF = 39; enter(DEF, "def")
+ final val TYPE = 40; enter(TYPE, "type")
+ final val OBJECT = 44; enter(OBJECT, "object")
+ final val YIELD = 48; enter(YIELD, "yield")
+ final val TRAIT = 50; enter(TRAIT, "trait")
+ final val SEALED = 51; enter(SEALED, "sealed")
+ final val MATCH = 58; enter(MATCH, "match")
+ final val LAZY = 59; enter(LAZY, "lazy")
+ final val THEN = 60; enter(THEN, "then")
+ final val FORSOME = 61; enter(FORSOME, "forSome") // TODO: deprecate
+ final val INLINE = 62; enter(INLINE, "inline")
+
+ /** special symbols */
+ final val NEWLINE = 78; enter(NEWLINE, "end of statement", "new line")
+ final val NEWLINES = 79; enter(NEWLINES, "end of statement", "new lines")
+
+ /** special keywords */
+ final val USCORE = 73; enter(USCORE, "_")
+ final val LARROW = 76; enter(LARROW, "<-")
+ final val ARROW = 77; enter(ARROW, "=>")
+ final val SUBTYPE = 80; enter(SUBTYPE, "<:")
+ final val SUPERTYPE = 81; enter(SUPERTYPE, ">:")
+ final val HASH = 82; enter(HASH, "#")
+ final val VIEWBOUND = 84; enter(VIEWBOUND, "<%") // TODO: deprecate
+
+ /** XML mode */
+ final val XMLSTART = 96; enter(XMLSTART, "$XMLSTART$<") // TODO: deprecate
+
+ // Token-class sets; the ranges below depend on the numeric layout above.
+ final val alphaKeywords = tokenRange(IF, INLINE)
+ final val symbolicKeywords = tokenRange(USCORE, VIEWBOUND)
+ final val symbolicTokens = tokenRange(COMMA, VIEWBOUND)
+ final val keywords = alphaKeywords | symbolicKeywords
+
+ final val allTokens = tokenRange(minToken, maxToken)
+
+ final val simpleLiteralTokens = tokenRange(CHARLIT, STRINGLIT) | BitSet(TRUE, FALSE)
+ final val literalTokens = simpleLiteralTokens | BitSet(INTERPOLATIONID, SYMBOLLIT, NULL)
+
+ final val atomicExprTokens = literalTokens | identifierTokens | BitSet(
+ USCORE, NULL, THIS, SUPER, TRUE, FALSE, RETURN, XMLSTART)
+
+ final val canStartExpressionTokens = atomicExprTokens | BitSet(
+ LBRACE, LPAREN, IF, DO, WHILE, FOR, NEW, TRY, THROW)
+
+ final val canStartTypeTokens = literalTokens | identifierTokens | BitSet(
+ THIS, SUPER, USCORE, LPAREN, AT)
+
+ final val templateIntroTokens = BitSet(CLASS, TRAIT, OBJECT, CASECLASS, CASEOBJECT)
+
+ final val dclIntroTokens = BitSet(DEF, VAL, VAR, TYPE)
+
+ final val defIntroTokens = templateIntroTokens | dclIntroTokens
+
+ final val localModifierTokens = BitSet(
+ ABSTRACT, FINAL, SEALED, IMPLICIT, INLINE, LAZY)
+
+ final val accessModifierTokens = BitSet(
+ PRIVATE, PROTECTED)
+
+ final val modifierTokens = localModifierTokens | accessModifierTokens | BitSet(
+ OVERRIDE)
+
+ /** Is token only legal as start of statement (eof also included)? */
+ final val mustStartStatTokens = defIntroTokens | modifierTokens | BitSet(
+ IMPORT, PACKAGE)
+
+ final val canStartStatTokens = canStartExpressionTokens | mustStartStatTokens | BitSet(
+ AT, CASE)
+
+ final val canEndStatTokens = atomicExprTokens | BitSet(
+ TYPE, RPAREN, RBRACE, RBRACKET)
+
+ final val numericLitTokens = BitSet(INTLIT, LONGLIT, FLOATLIT, DOUBLELIT)
+}
diff --git a/compiler/src/dotty/tools/dotc/parsing/TreeBuilder.scala.unused b/compiler/src/dotty/tools/dotc/parsing/TreeBuilder.scala.unused
new file mode 100644
index 000000000..672c85179
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/parsing/TreeBuilder.scala.unused
@@ -0,0 +1,535 @@
+package dotty.tools
+package dotc
+package parsing
+
+import core._
+import Flags._, Trees._, TypedTrees._, UntypedTrees._, Names._, StdNames._, NameOps._, Contexts._
+import scala.collection.mutable.ListBuffer
+import util.Positions._, Symbols._, Decorators._, Flags._, Constants._
+import TreeInfo._
+
+/** Methods for building trees, used in the parser. All the trees
+ * returned by this class must be untyped.
+ * Note: currently unused
+ */
+class TreeBuilder(implicit ctx: Context) {
+
+ import untpd._
+
+ def scalaDot(name: Name): Select =
+ Select(new TypedSplice(tpd.Ident(defn.ScalaPackageVal.termRef)), name)
+
+ def scalaAnyRefConstr = scalaDot(tpnme.AnyRef)
+ def scalaAnyValConstr = scalaDot(tpnme.AnyVal)
+ def scalaAnyConstr = scalaDot(tpnme.Any)
+ def scalaUnitConstr = scalaDot(tpnme.Unit)
+ def productConstr = scalaDot(tpnme.Product)
+ def productConstrN(n: Int) = scalaDot(("Product" + n).toTypeName)
+ def serializableConstr = scalaDot(tpnme.Serializable)
+
+ def convertToTypeName(t: Tree): Tree = ???
+
+ private implicit val cpos = NoPosition
+
+ /** Convert all occurrences of (lower-case) variables in a pattern as follows:
+ * x becomes x @ _
+ * x: T becomes x @ (_: T)
+ * Also covert all toplevel lower-case type arguments as follows:
+ * t becomes t @ _
+ */
+ private object patvarTransformer extends TreeTransformer {
+ override def transform(tree: Tree): Tree = tree match {
+ case Ident(name) if isVarPattern(tree) && name != nme.WILDCARD =>
+ Bind(
+ name, Ident(nme.WILDCARD).withPos(tree.pos.focus)
+ ).withPos(tree.pos)
+ case Typed(id @ Ident(name), tpt) if isVarPattern(id) && name != nme.WILDCARD =>
+ Bind(
+ name,
+ Typed(
+ Ident(nme.WILDCARD).withPos(tree.pos.focus),
+ transform(tpt)
+ ).withPos(tree.pos.withStart(tree.pos.point))
+ ).withPos(tree.pos.withPoint(id.pos.point))
+ case Apply(fn @ Apply(_, _), args) =>
+ tree.derivedApply(transform(fn), transform(args))
+ case Apply(fn, args) =>
+ tree.derivedApply(fn, transform(args))
+ case Typed(expr, tpt) =>
+ tree.derivedTyped(transform(expr), transform(tpt))
+ case Bind(name, body) =>
+ tree.derivedBind(name, transform(body))
+ case AppliedTypeTree(tycon, args) =>
+ tree.derivedAppliedTypeTree(tycon, args map transform)
+ case Alternative(_) | Typed(_, _) | AndTypeTree(_, _) | Annotated(_, _) =>
+ super.transform(tree)
+ case Parens(_) =>
+ stripParens(tree)
+ case _ =>
+ tree
+ }
+ }
+
+ case class VariableInfo(name: Name, tree: Tree, pos: Position)
+
+ /** Traverse pattern and collect all variable names with their types in buffer
+ * The variables keep their positions; whereas the pattern is converted to be
+ * synthetic for all nodes that contain a variable position.
+ */
+ object getVars extends TreeAccumulator[ListBuffer[VariableInfo]] {
+
+ def namePos(tree: Tree, name: Name): Position =
+ if (name contains '$') tree.pos.focus
+ else {
+ val start = tree.pos.start
+ val end = start + name.decode.length
+ Position(start, end)
+ }
+
+ override def apply(buf: ListBuffer[VariableInfo], tree: Tree): ListBuffer[VariableInfo] = {
+ def seenName(name: Name) = buf exists (_.name == name)
+ def add(name: Name, t: Tree): ListBuffer[VariableInfo] =
+ if (seenName(name)) buf else buf += VariableInfo(name, t, namePos(tree, name))
+
+ tree match {
+ case Bind(nme.WILDCARD, _) =>
+ foldOver(buf, tree)
+ case Bind(name, Typed(tree1, tpt)) if !mayBeTypePat(tpt) =>
+ apply(add(name, tpt), tree1)
+ case Bind(name, tree1) =>
+ apply(add(name, TypeTree()), tree1)
+ case _ =>
+ foldOver(buf, tree)
+ }
+ }
+ }
+
+ /** Returns list of all pattern variables, possibly with their types,
+ * without duplicates
+ */
+ private def getVariables(tree: Tree): List[VariableInfo] =
+ getVars(new ListBuffer[VariableInfo], tree).toList
+
+ def byNameApplication(tpe: Tree): Tree =
+ AppliedTypeTree(scalaDot(tpnme.BYNAME_PARAM_CLASS), List(tpe))
+ def repeatedApplication(tpe: Tree): Tree =
+ AppliedTypeTree(scalaDot(tpnme.REPEATED_PARAM_CLASS), List(tpe))
+
+ def makeTuple(trees: List[Tree])(implicit cpos: Position): Tree = {
+ def mkPair(t1: Tree, t2: Tree) = {
+ if (t1.isType) AppliedTypeTree(scalaDot(tpnme.Pair), List(t1, t2))
+ else Pair(t1, t2)
+ }
+ trees reduce mkPair
+ }
+
+ def stripParens(t: Tree) = t match {
+ case Parens(t) => t
+ case _ => t
+ }
+
+ def makeSelfDef(name: TermName, tpt: Tree): ValDef =
+ ValDef(Modifiers(Private), name, tpt, EmptyTree())
+
+ /** If tree is a variable pattern, return its variable info.
+ * Otherwise return none.
+ */
+ private def matchVarPattern(tree: Tree): Option[VariableInfo] = {
+ def wildType(t: Tree): Option[Tree] = t match {
+ case Ident(x) if x.toTermName == nme.WILDCARD => Some(TypeTree())
+ case Typed(Ident(x), tpt) if x.toTermName == nme.WILDCARD => Some(tpt)
+ case _ => None
+ }
+ tree match {
+ case Ident(name) => Some(VariableInfo(name, TypeTree(), tree.pos))
+ case Bind(name, body) => wildType(body) map (x => VariableInfo(name, x, tree.pos))
+ case Typed(id @ Ident(name), tpt) => Some(VariableInfo(name, tpt, id.pos))
+ case _ => None
+ }
+ }
+
+ /** Create tree representing (unencoded) binary operation expression or pattern. */
+ def makeBinop(isExpr: Boolean, left: Tree, op: TermName, right: Tree, opPos: Position): Tree = {
+ def mkNamed(args: List[Tree]) =
+ if (isExpr) args map {
+ case arg @ Assign(Ident(name), rhs) => NamedArg(name, rhs).withPos(arg.pos)
+ case arg => arg
+ } else args
+ val arguments = right match {
+ case Parens(arg) => mkNamed(arg :: Nil)
+ case _ => right :: Nil
+ }
+ if (isExpr) {
+ if (isLeftAssoc(op)) {
+ Apply(Select(stripParens(left), op.encode).withPos(opPos), arguments)
+ } else {
+ val x = ctx.freshName().toTermName
+ Block(
+ List(ValDef(Modifiers(Synthetic), x, TypeTree(), stripParens(left))),
+ Apply(Select(stripParens(right), op.encode).withPos(opPos), List(Ident(x).withPos(left.pos))))
+ }
+ } else {
+ Apply(Ident(op.encode).withPos(opPos), stripParens(left) :: arguments)
+ }
+ }
+
+ /** tpt.<init> */
+ def SelectConstructor(tpt: Tree): Tree =
+ Select(tpt, nme.CONSTRUCTOR)
+
+ private def splitArgss(constr: Tree, outerArgss: List[List[Tree]]): (Tree, List[List[Tree]]) = constr match {
+ case Apply(tree, args) => splitArgss(tree, args :: outerArgss)
+ case _ => (constr, if (outerArgss.isEmpty) ListOfNil else outerArgss)
+ }
+
+ /** new tpt(argss_1)...(argss_n)
+ * @param npos the position spanning <new tpt>, without any arguments
+ */
+ def makeNew(parentConstr: Tree) = {
+ val (tpt, argss) = splitArgss(parentConstr, Nil)
+ New(tpt, argss)
+ }
+
+ /** Create positioned tree representing an object creation <new parents { self => stats }
+ */
+ def makeNew(templ: Template): Tree = {
+ val x = tpnme.ANON_CLASS
+ val nu = makeNew(Ident(x))
+ val clsDef = {
+ implicit val cpos = NoPosition
+ ClassDef(Modifiers(Final), x, Nil, templ)
+ }
+ Block(clsDef, nu)
+ }
+
+ /** Create positioned tree representing an object creation <new parents { self => stats }
+ * @param cpos the position of the new, focus should be the first parent's start.
+ */
+ def makeNew(parents: List[Tree], self: ValDef, stats: List[Tree]): Tree = {
+ val newPos = Position(cpos.start, cpos.point)
+ val clsPos = Position(cpos.point, cpos.end)
+ if (parents.isEmpty)
+ makeNew(List(scalaAnyRefConstr.withPos(newPos.endPos)), self, stats)
+ else if (parents.tail.isEmpty && stats.isEmpty)
+ makeNew(parents.head)
+ else {
+ val x = tpnme.ANON_CLASS
+ val nu = makeNew(Ident(x).withPos(newPos)).withPos(newPos)
+ val clsDef = {
+ implicit val cpos = clsPos
+ ClassDef(Modifiers(Final), x, Nil, Template(???, parents, self, stats))
+ }
+ Block(clsDef, nu)
+ }
+ }
+
+ /** Create a tree representing an assignment <lhs = rhs> */
+ def makeAssign(lhs: Tree, rhs: Tree): Tree = lhs match {
+ case Apply(fn, args) =>
+ Apply(Select(fn, nme.update), args :+ rhs)
+ case _ =>
+ Assign(lhs, rhs)
+ }
+
+ /** A type tree corresponding to (possibly unary) intersection type
+ def makeIntersectionTypeTree(tps: List[Tree]): Tree =
+ if (tps.tail.isEmpty) tps.head
+ else CompoundTypeTree(Template(tps, emptyValDef, Nil))*/
+
+ private def labelDefAndCall(lname: TermName, rhs: Tree, call: Tree) = {
+ val ldef = DefDef(Modifiers(Label).withPos(cpos.startPos), lname, Nil, ListOfNil, TypeTree(), rhs)
+ Block(ldef, call)
+ }
+
+ private def labelCall(lname: TermName): Apply =
+ Apply(Ident(lname), Nil)
+
+ /** Create tree representing a while loop */
+ def makeWhile(lname: TermName, cond: Tree, body: Tree): Tree = {
+ val continu = labelCall(lname).withPos((cond.pos union body.pos).endPos)
+ val rhs = {
+ implicit val cpos = NoPosition
+ If(cond, Block(body, continu), Literal(Constant()).withPos(continu.pos))
+ }
+ labelDefAndCall(lname, rhs, continu)
+ }
+
+ /** Create tree representing a do-while loop */
+ def makeDoWhile(lname: TermName, body: Tree, cond: Tree): Tree = {
+ val continu = labelCall(lname).withPos((cond.pos union body.pos).endPos)
+ val rhs = Block(body, If(cond, continu, Literal(Constant()).withPos(continu.pos)))
+ labelDefAndCall(lname, rhs, continu)
+ }
+
+ /** Create block of statements `stats` */
+ def makeBlock(stats: List[Tree]): Tree =
+ if (stats.isEmpty) Literal(Constant())
+ else if (!stats.last.isTerm) Block(stats, Literal(Constant()).withPos(cpos.endPos))
+ else if (stats.length == 1) stats.head
+ else Block(stats.init, stats.last)
+
+ def makePatFilter(tree: Tree, condition: Tree, canDrop: Boolean): Tree = {
+ val cases = List(
+ CaseDef(condition, EmptyTree(), Literal(Constant(true))),
+ CaseDef(Ident(nme.WILDCARD), EmptyTree(), Literal(Constant(false)))
+ )
+ val matchTree = makeVisitor(cases, checkExhaustive = false, canDrop)
+ locally {
+ implicit val cpos = tree.pos
+ Apply(Select(tree, nme.withFilter), matchTree :: Nil)
+ }
+ }
+
+ /** Create tree for for-comprehension generator <pat <- rhs> or <pat = rhs> */
+ def makeGenerator(pat: Tree, valeq: Boolean, rhs: Tree): Enumerator = {
+ val pat1 = patvarTransformer.transform(pat)
+ if (valeq) ValEq(pat1, rhs)
+ else ValFrom(pat1, makePatFilter(rhs, pat1, canDrop = true))
+ }
+
+/*
+ def makeSyntheticTypeParam(pname: TypeName, bounds: Tree) =
+ TypeDef(Modifiers(DEFERRED | SYNTHETIC), pname, Nil, bounds)
+*/
+ abstract class Enumerator { def pos: Position }
+ case class ValFrom(pat: Tree, rhs: Tree) extends Enumerator {
+ val pos = cpos union pat.pos union rhs.pos
+ }
+ case class ValEq(pat: Tree, rhs: Tree) extends Enumerator {
+ val pos = cpos union pat.pos union rhs.pos
+ }
+ case class Filter(test: Tree) extends Enumerator {
+ val pos = cpos union test.pos
+ }
+
+ /** Create tree for for-comprehension <for (enums) do body> or
+ * <for (enums) yield body> where mapName and flatMapName are chosen
+ * corresponding to whether this is a for-do or a for-yield.
+ * The creation performs the following rewrite rules:
+ *
+ * 1.
+ *
+ * for (P <- G) E ==> G.foreach (P => E)
+ *
+ * Here and in the following (P => E) is interpreted as the function (P => E)
+ * if P is a variable pattern and as the partial function { case P => E } otherwise.
+ *
+ * 2.
+ *
+ * for (P <- G) yield E ==> G.map (P => E)
+ *
+ * 3.
+ *
+ * for (P_1 <- G_1; P_2 <- G_2; ...) ...
+ * ==>
+ * G_1.flatMap (P_1 => for (P_2 <- G_2; ...) ...)
+ *
+ * 4.
+ *
+ * for (P <- G; E; ...) ...
+ * =>
+ * for (P <- G.filter (P => E); ...) ...
+ *
+ * 5. For any N:
+ *
+ * for (P_1 <- G; P_2 = E_2; val P_N = E_N; ...)
+ * ==>
+ * for (TupleN(P_1, P_2, ... P_N) <-
+ * for (x_1 @ P_1 <- G) yield {
+ * val x_2 @ P_2 = E_2
+ * ...
+ * val x_N & P_N = E_N
+ * TupleN(x_1, ..., x_N)
+ * } ...)
+ *
+ * If any of the P_i are variable patterns, the corresponding `x_i @ P_i' is not generated
+ * and the variable constituting P_i is used instead of x_i
+ *
+ * @param mapName The name to be used for maps (either map or foreach)
+ * @param flatMapName The name to be used for flatMaps (either flatMap or foreach)
+ * @param enums The enumerators in the for expression
+ * @param body The body of the for expression
+ */
+ private def makeFor(mapName: TermName, flatMapName: TermName, enums: List[Enumerator], body: Tree): Tree = {
+
+ /** make a closure pat => body.
+ * The closure is assigned a transparent position with the point at pos.point and
+ * the limits given by pat and body.
+ */
+ def makeClosure(pat: Tree, body: Tree): Tree =
+ matchVarPattern(pat) match {
+ case Some(VariableInfo(name, tpt, pos)) =>
+ Function(ValDef(Modifiers(Param).withPos(cpos.startPos), name.toTermName, tpt, EmptyTree()).withPos(pos) :: Nil, body)
+ case None =>
+ makeVisitor(List(CaseDef(pat, EmptyTree(), body)), checkExhaustive = false)
+ }
+
+ /** Make an application qual.meth(pat => body) positioned at `pos`.
+ */
+ def makeCombination(meth: TermName, qual: Tree, pat: Tree, body: Tree): Tree =
+ Apply(Select(qual, meth).withPos(NoPosition), makeClosure(pat, body))
+
+ /** Optionally, if pattern is a `Bind`, the bound name, otherwise None.
+ */
+ def patternVar(pat: Tree): Option[Name] = pat match {
+ case Bind(name, _) => Some(name)
+ case _ => None
+ }
+
+ /** If `pat` is not yet a `Bind` wrap it in one with a fresh name
+ */
+ def makeBind(pat: Tree): Tree = pat match {
+ case Bind(_, _) => pat
+ case _ => Bind(ctx.freshName().toTermName, pat)
+ }
+
+ /** A reference to the name bound in Bind `pat`.
+ */
+ def makeValue(pat: Tree): Tree = pat match {
+ case Bind(name, _) => Ident(name).withPos(pat.pos.focus)
+ }
+
+ enums match {
+ case (enum @ ValFrom(pat, rhs)) :: Nil =>
+ makeCombination(mapName, rhs, pat, body).withPos(enum.pos)
+ case ValFrom(pat, rhs) :: (rest @ (ValFrom( _, _) :: _)) =>
+ makeCombination(flatMapName, rhs, pat,
+ makeFor(mapName, flatMapName, rest, body))
+ case (enum @ ValFrom(pat, rhs)) :: Filter(test) :: rest =>
+ makeFor(mapName, flatMapName,
+ ValFrom(pat, makeCombination(nme.withFilter, rhs, pat, test)) :: rest,
+ body)
+ case (enum @ ValFrom(pat, rhs)) :: rest =>
+ val (valeqs, rest1) = rest.span(_.isInstanceOf[ValEq])
+ assert(!valeqs.isEmpty)
+ val pats = valeqs map { case ValEq(pat, _) => pat }
+ val rhss = valeqs map { case ValEq(_, rhs) => rhs }
+ val defpat1 = makeBind(pat)
+ val defpats = pats map makeBind
+ val pdefs = (defpats, rhss).zipped flatMap (makePatDef)
+ val ids = (defpat1 :: defpats) map makeValue
+ val rhs1 = makeForYield(ValFrom(defpat1, rhs) :: Nil, Block(pdefs, makeTuple(ids)))
+ val allpats = pat :: pats
+ val vfrom1 = ValFrom(makeTuple(allpats), rhs1)
+ makeFor(mapName, flatMapName, vfrom1 :: rest1, body)
+ case _ =>
+ EmptyTree() //may happen for erroneous input
+ }
+ }
+
+ /** Create tree for for-do comprehension <for (enums) body> */
+ def makeFor(enums: List[Enumerator], body: Tree): Tree =
+ makeFor(nme.foreach, nme.foreach, enums, body)
+
+ /** Create tree for for-yield comprehension <for (enums) yield body> */
+ def makeForYield(enums: List[Enumerator], body: Tree): Tree =
+ makeFor(nme.map, nme.flatMap, enums, body)
+
+ /** Create tree for a pattern alternative */
+ def makeAlternative(ts: List[Tree]): Tree = Alternative(ts flatMap alternatives)
+
+ def alternatives(t: Tree): List[Tree] = t match {
+ case Alternative(ts) => ts
+ case _ => List(t)
+ }
+
+ def mkAnnotated(cls: Symbol, tree: Tree) =
+ Annotated(TypedSplice(tpd.New(cls.typeRef)), tree)
+
+ /** Create visitor <x => x match cases> */
+ def makeVisitor(cases: List[CaseDef], checkExhaustive: Boolean, canDrop: Boolean = false): Tree = {
+ val x = ctx.freshName().toTermName
+ val id = Ident(x)
+ val sel =
+ if (canDrop) mkAnnotated(???, id)
+ else if (!checkExhaustive) mkAnnotated(defn.UncheckedAnnot, id)
+ else id
+ Function(List(ugen.syntheticParameter(x)), Match(sel, cases))
+ }
+
+ /** Create tree for case definition <case pat if guard => rhs> */
+ def makeCaseDef(pat: Tree, guard: Tree, rhs: Tree): CaseDef =
+ CaseDef(patvarTransformer.transform(pat), guard, rhs)
+
+ /** Create tree for pattern definition <val pat0 = rhs> */
+ def makePatDef(pat: Tree, rhs: Tree): List[Tree] =
+ makePatDef(Modifiers(), pat, rhs)
+
+ /** Create tree for pattern definition <mods val pat0 = rhs> */
+ def makePatDef(mods: Modifiers, pat: Tree, rhs: Tree, varsArePatterns: Boolean = false): List[Tree] = matchVarPattern(pat) match {
+ case Some(VariableInfo(name, tpt, pos)) if varsArePatterns =>
+ ValDef(mods, name.toTermName, tpt, rhs).withPos(pos) :: Nil // point comes from pat.pos
+
+ case _ =>
+ // in case there is exactly one variable x_1 in pattern
+ // val/var p = e ==> val/var x_1 = e.match (case p => (x_1))
+ //
+ // in case there are zero or more than one variables in pattern
+ // val/var p = e ==> private synthetic val t$ = e.match (case p => (x_1, ..., x_N))
+ // val/var x_1 = t$._1
+ // ...
+ // val/var x_N = t$._N
+
+ val rhsUnchecked = mkAnnotated(defn.UncheckedAnnot, rhs)
+
+ // TODO: clean this up -- there is too much information packed into makePatDef's `pat` argument
+ // when it's a simple identifier (case Some((name, tpt)) -- above),
+ // pat should have the type ascription that was specified by the user
+ // however, in `case None` (here), we must be careful not to generate illegal pattern trees (such as `(a, b): Tuple2[Int, String]`)
+ // i.e., this must hold: pat1 match { case Typed(expr, tp) => assert(expr.isInstanceOf[Ident]) case _ => }
+ // if we encounter such an erroneous pattern, we strip off the type ascription from pat and propagate the type information to rhs
+ val (pat1, rhs1) = patvarTransformer.transform(pat) match {
+ // move the Typed ascription to the rhs
+ case Typed(expr, tpt) if !expr.isInstanceOf[Ident] =>
+ val rhsTypedUnchecked =
+ if (tpt.isEmpty) rhsUnchecked else Typed(rhsUnchecked, tpt)
+ (expr, rhsTypedUnchecked)
+ case ok =>
+ (ok, rhsUnchecked)
+ }
+ val vars = getVariables(pat1)
+ val ids = vars map (v => Ident(v.name).withPos(v.pos))
+ val caseDef = CaseDef(pat1, EmptyTree(), makeTuple(ids))
+ val matchExpr = Match(rhs1, caseDef :: Nil)
+ vars match {
+ case List(VariableInfo(vname, tpt, pos)) =>
+ ValDef(mods, vname.toTermName, tpt, matchExpr) :: Nil
+ case _ =>
+ val tmpName = ctx.freshName().toTermName
+ val patMods = Modifiers(PrivateLocal | Synthetic | (mods.flags & Lazy))
+ val firstDef = ValDef(patMods, tmpName, TypeTree(), matchExpr)
+ val restDefs = for {
+ (VariableInfo(vname, tpt, pos), n) <- vars.zipWithIndex
+ } yield {
+ val rhs = {
+ implicit val cpos = pos.focus
+ Select(Ident(tmpName), ("_" + n).toTermName)
+ }
+ ValDef(mods, vname.toTermName, tpt, rhs).withPos(pos)
+ }
+ firstDef :: restDefs
+ }
+ }
+
+ /** Create a tree representing the function type (argtpes) => restpe */
+ def makeFunctionTypeTree(argtpes: List[Tree], restpe: Tree): Tree =
+ AppliedTypeTree(scalaDot(("Function" + argtpes.length).toTypeName), argtpes ::: List(restpe))
+
+ /** Append implicit parameter section if `contextBounds` nonempty */
+ def addEvidenceParams(owner: Name, vparamss: List[List[ValDef]], contextBounds: List[Tree]): List[List[ValDef]] = {
+ if (contextBounds.isEmpty) vparamss
+ else {
+ val mods = Modifiers(if (owner.isTypeName) PrivateLocal | ParamAccessor else Param)
+ val evidenceParams = for (tpt <- contextBounds) yield {
+ val pname = ctx.freshName(nme.EVIDENCE_PARAM_PREFIX).toTermName
+ ValDef(mods | Implicit | Synthetic, pname, tpt, EmptyTree())
+ }
+ vparamss.reverse match {
+ case (vparams @ (vparam :: _)) :: _ if vparam.mods is Implicit =>
+ vparamss.init :+ (evidenceParams ++ vparams)
+ case _ =>
+ vparamss :+ evidenceParams
+ }
+ }
+ }
+}
diff --git a/compiler/src/dotty/tools/dotc/parsing/Utility.scala b/compiler/src/dotty/tools/dotc/parsing/Utility.scala
new file mode 100644
index 000000000..f522492f8
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/parsing/Utility.scala
@@ -0,0 +1,170 @@
+package dotty.tools.dotc.parsing
+
+import scala.collection.mutable
+
+
+/**
+ * The `Utility` object provides utility functions for processing instances
+ * of bound and not bound XML classes, as well as escaping text nodes.
+ *
+ * @author Burak Emir
+ */
+object Utility {
+ import scala.reflect.internal.Chars.SU
+
+ private val unescMap = Map(
+ "lt" -> '<',
+ "gt" -> '>',
+ "amp" -> '&',
+ "quot" -> '"',
+ "apos" -> '\''
+ )
+
+ /**
+ * Appends unescaped string to `s`, `amp` becomes `&amp;`,
+ * `lt` becomes `&lt;` etc..
+ *
+ * @return `'''null'''` if `ref` was not a predefined entity.
+ */
+ private final def unescape(ref: String, s: StringBuilder): StringBuilder =
+ ((unescMap get ref) map (s append _)).orNull
+
+ def parseAttributeValue[T](value: String, text: String => T, entityRef: String => T): List[T] = {
+ val sb = new StringBuilder
+ var rfb: StringBuilder = null
+ val nb = new mutable.ListBuffer[T]()
+
+ val it = value.iterator
+ while (it.hasNext) {
+ var c = it.next()
+ // entity! flush buffer into text node
+ if (c == '&') {
+ c = it.next()
+ if (c == '#') {
+ c = it.next()
+ val theChar = parseCharRef ({ ()=> c },{ () => c = it.next() },{s => throw new RuntimeException(s)}, {s => throw new RuntimeException(s)})
+ sb.append(theChar)
+ }
+ else {
+ if (rfb eq null) rfb = new StringBuilder()
+ rfb append c
+ c = it.next()
+ while (c != ';') {
+ rfb.append(c)
+ c = it.next()
+ }
+ val ref = rfb.toString()
+ rfb.clear()
+ unescape(ref,sb) match {
+ case null =>
+ if (!sb.isEmpty) { // flush buffer
+ nb += text(sb.toString())
+ sb.clear()
+ }
+ nb += entityRef(ref) // add entityref
+ case _ =>
+ }
+ }
+ }
+ else sb append c
+ }
+
+ if (!sb.isEmpty) // flush buffer
+ nb += text(sb.toString())
+
+ nb.toList
+ }
+
+ /**
+ * {{{
+ * CharRef ::= "&amp;#" '0'..'9' {'0'..'9'} ";"
+ * | "&amp;#x" '0'..'9'|'A'..'F'|'a'..'f' { hexdigit } ";"
+ * }}}
+ * See [66]
+ */
+ def parseCharRef(ch: () => Char, nextch: () => Unit, reportSyntaxError: String => Unit, reportTruncatedError: String => Unit): String = {
+ val hex = ch() == 'x'
+ if (hex) nextch()
+ val base = if (hex) 16 else 10
+ var i = 0
+ while (ch() != ';') {
+ ch() match {
+ case '0' | '1' | '2' | '3' | '4' | '5' | '6' | '7' | '8' | '9' =>
+ i = i * base + ch().asDigit
+ case 'a' | 'b' | 'c' | 'd' | 'e' | 'f'
+ | 'A' | 'B' | 'C' | 'D' | 'E' | 'F' =>
+ if (! hex)
+ reportSyntaxError("hex char not allowed in decimal char ref\n" +
+ "Did you mean to write &#x ?")
+ else
+ i = i * base + ch().asDigit
+ case SU =>
+ reportTruncatedError("")
+ case _ =>
+ reportSyntaxError("character '" + ch() + "' not allowed in char ref\n")
+ }
+ nextch()
+ }
+ new String(Array(i), 0, 1)
+ }
+
+ /** {{{
+ * (#x20 | #x9 | #xD | #xA)
+ * }}} */
+ final def isSpace(ch: Char): Boolean = ch match {
+ case '\u0009' | '\u000A' | '\u000D' | '\u0020' => true
+ case _ => false
+ }
+ /** {{{
+ * (#x20 | #x9 | #xD | #xA)+
+ * }}} */
+ final def isSpace(cs: Seq[Char]): Boolean = cs.nonEmpty && (cs forall isSpace)
+
+ /** {{{
+ * NameChar ::= Letter | Digit | '.' | '-' | '_' | ':'
+ * | CombiningChar | Extender
+ * }}}
+ * See [4] and Appendix B of XML 1.0 specification.
+ */
+ def isNameChar(ch: Char) = {
+ import java.lang.Character._
+ // The constants represent groups Mc, Me, Mn, Lm, and Nd.
+
+ isNameStart(ch) || (getType(ch).toByte match {
+ case COMBINING_SPACING_MARK |
+ ENCLOSING_MARK | NON_SPACING_MARK |
+ MODIFIER_LETTER | DECIMAL_DIGIT_NUMBER => true
+ case _ => ".-:" contains ch
+ })
+ }
+
+ /** {{{
+ * NameStart ::= ( Letter | '_' )
+ * }}}
+ * where Letter means in one of the Unicode general
+ * categories `{ Ll, Lu, Lo, Lt, Nl }`.
+ *
+ * We do not allow a name to start with `:`.
+ * See [3] and Appendix B of XML 1.0 specification
+ */
+ def isNameStart(ch: Char) = {
+ import java.lang.Character._
+
+ getType(ch).toByte match {
+ case LOWERCASE_LETTER |
+ UPPERCASE_LETTER | OTHER_LETTER |
+ TITLECASE_LETTER | LETTER_NUMBER => true
+ case _ => ch == '_'
+ }
+ }
+
+ /** {{{
+ * Name ::= ( Letter | '_' ) (NameChar)*
+ * }}}
+ * See [5] of XML 1.0 specification.
+ */
+ def isName(s: String) =
+ s.nonEmpty && isNameStart(s.head) && (s.tail forall isNameChar)
+
+}
+
diff --git a/compiler/src/dotty/tools/dotc/parsing/package.scala b/compiler/src/dotty/tools/dotc/parsing/package.scala
new file mode 100644
index 000000000..8b113ed96
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/parsing/package.scala
@@ -0,0 +1,33 @@
+package dotty.tools.dotc
+
+import util.Chars._
+import core.Names.Name
+import core.StdNames.nme
+import core.NameOps._
+
+package object parsing {
+
+ def precedence(operator: Name): Int =
+ if (operator eq nme.ERROR) -1
+ else {
+ val firstCh = operator(0)
+ if (isScalaLetter(firstCh)) 1
+ else if (operator.isOpAssignmentName) 0
+ else firstCh match {
+ case '|' => 2
+ case '^' => 3
+ case '&' => 4
+ case '=' | '!' => 5
+ case '<' | '>' => 6
+ case ':' => 7
+ case '+' | '-' => 8
+ case '*' | '/' | '%' => 9
+ case _ => 10
+ }
+ }
+
+ def minPrec = 0
+ def minInfixPrec = 1
+ def maxPrec = 11
+
+}