author     Martin Odersky <odersky@gmail.com>   2013-05-09 19:37:15 +0200
committer  Martin Odersky <odersky@gmail.com>   2013-05-09 19:37:15 +0200
commit     cf4c428cc58ed330faa236bf54d06c1fad902c8a (patch)
tree       e56522e356c8ba7c84e7509c618d545e16ec19a8 /src/dotty/tools/dotc
parent     bfa03db8ea8897f51316cd77a7c71b2ca25ba531 (diff)
Some parser revisions
(1) Added markup parsers.
(2) Syntax change relating to modifiers and annotations of the primary constructor.
(3) Review of parsing, with bug fixes and simplifications.
Diffstat (limited to 'src/dotty/tools/dotc')
-rw-r--r--  src/dotty/tools/dotc/core/Trees.scala                 |   6
-rw-r--r--  src/dotty/tools/dotc/parsing/MarkupParserCommon.scala  | 262
-rw-r--r--  src/dotty/tools/dotc/parsing/MarkupParsers.scala       | 460
-rw-r--r--  src/dotty/tools/dotc/parsing/Parsers.scala             | 976
-rw-r--r--  src/dotty/tools/dotc/parsing/ScriptParsers.scala       | 158
-rw-r--r--  src/dotty/tools/dotc/parsing/SymbolicXMLBuilder.scala  | 243
6 files changed, 1459 insertions, 646 deletions
diff --git a/src/dotty/tools/dotc/core/Trees.scala b/src/dotty/tools/dotc/core/Trees.scala
index 3083dc807..96533afb7 100644
--- a/src/dotty/tools/dotc/core/Trees.scala
+++ b/src/dotty/tools/dotc/core/Trees.scala
@@ -88,7 +88,7 @@ object Trees {
* It should have as end the end of the opening keyword(s).
* If there is no opening keyword, point should equal end.
*/
- case class Modifiers[T >: Untyped](
+ case class Modifiers[T >: Untyped] (
flags: FlagSet = EmptyFlags,
privateWithin: TypeName = tpnme.EMPTY,
annotations: List[Tree[T]] = Nil) extends Positioned {
@@ -108,6 +108,10 @@ object Trees {
if (pw.isEmpty) this
else copy(privateWithin = pw)
+ def hasFlags = flags != EmptyFlags
+ def hasAnnotations = annotations.nonEmpty
+ def hasPrivateWithin = privateWithin != tpnme.EMPTY
+
def tokenPos: Seq[(Token, Position)] = ???
}
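The three predicates added above are trivial on their own but, taken together, they let callers test whether a Modifiers value carries any information at all. A minimal sketch (not part of the commit; it assumes the surrounding Trees definitions are in scope and the helper name is invented):

    // Hypothetical helper: does this primary constructor need an explicit
    // modifier/annotation section at all?
    def constructorNeedsPrefix(mods: Modifiers[Untyped]): Boolean =
      mods.hasFlags || mods.hasAnnotations || mods.hasPrivateWithin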
diff --git a/src/dotty/tools/dotc/parsing/MarkupParserCommon.scala b/src/dotty/tools/dotc/parsing/MarkupParserCommon.scala
new file mode 100644
index 000000000..db2fe569b
--- /dev/null
+++ b/src/dotty/tools/dotc/parsing/MarkupParserCommon.scala
@@ -0,0 +1,262 @@
+/*                     __                                               *\
+**     ________ ___   / /  ___     Scala API                            **
+**    / __/ __// _ | / /  / _ |    (c) 2003-2013, LAMP/EPFL             **
+**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
+** /____/\___/_/ |_/____/_/ | |                                         **
+**                          |/                                          **
+\*                                                                      */
+package dotty.tools.dotc
+package parsing
+
+import scala.xml._
+import scala.xml.parsing._
+
+import scala.io.Source
+import scala.xml.dtd._
+import scala.annotation.switch
+import Utility.Escapes.{ pairs => unescape }
+
+import Utility.SU
+
+/** This is not a public trait - it contains common code shared
+ * between the library level XML parser and the compiler's.
+ * All members should be accessed through those.
+ */
+private[dotty] trait MarkupParserCommon extends TokenTests {
+ protected def unreachable = scala.sys.error("Cannot be reached.")
+
+ // type HandleType // MarkupHandler, SymbolicXMLBuilder
+ type InputType // Source, CharArrayReader
+ type PositionType // Int, Position
+ type ElementType // NodeSeq, Tree
+ type NamespaceType // NamespaceBinding, Any
+ type AttributesType // (MetaData, NamespaceBinding), mutable.Map[String, Tree]
+
+ def mkAttributes(name: String, pscope: NamespaceType): AttributesType
+ def mkProcInstr(position: PositionType, name: String, text: String): ElementType
+
+ /** parse a start or empty tag.
+ * [40] STag ::= '<' Name { S Attribute } [S]
+ * [44] EmptyElemTag ::= '<' Name { S Attribute } [S]
+ */
+ protected def xTag(pscope: NamespaceType): (String, AttributesType) = {
+ val name = xName
+ xSpaceOpt
+
+ (name, mkAttributes(name, pscope))
+ }
+
+ /** '<?' ProcInstr ::= Name [S ({Char} - ({Char}'>?' {Char})]'?>'
+ *
+ * see [15]
+ */
+ def xProcInstr: ElementType = {
+ val n = xName
+ xSpaceOpt
+ xTakeUntil(mkProcInstr(_, n, _), () => tmppos, "?>")
+ }
+
+ /** attribute value, terminated by either `'` or `"`. value may not contain `<`.
+ @param endCh either `'` or `"`
+ */
+ def xAttributeValue(endCh: Char): String = {
+ val buf = new StringBuilder
+ while (ch != endCh) {
+ // well-formedness constraint
+ if (ch == '<') return errorAndResult("'<' not allowed in attrib value", "")
+ else if (ch == SU) truncatedError("")
+ else buf append ch_returning_nextch
+ }
+ ch_returning_nextch
+ // @todo: normalize attribute value
+ buf.toString
+ }
+
+ def xAttributeValue(): String = {
+ val str = xAttributeValue(ch_returning_nextch)
+ // well-formedness constraint
+ normalizeAttributeValue(str)
+ }
+
+ private def takeUntilChar(it: Iterator[Char], end: Char): String = {
+ val buf = new StringBuilder
+ while (it.hasNext) it.next match {
+ case `end` => return buf.toString
+ case ch => buf append ch
+ }
+ scala.sys.error("Expected '%s'".format(end))
+ }
+
+ /** [42] '<' xmlEndTag ::= '<' '/' Name S? '>'
+ */
+ def xEndTag(startName: String) {
+ xToken('/')
+ if (xName != startName)
+ errorNoEnd(startName)
+
+ xSpaceOpt
+ xToken('>')
+ }
+
+ /** actually, Name ::= (Letter | '_' | ':') (NameChar)* but starting with ':' cannot happen
+ * Name ::= (Letter | '_') (NameChar)*
+ *
+ * see [5] of XML 1.0 specification
+ *
+ * pre-condition: ch != ':' // assured by definition of XMLSTART token
+ * post-condition: name neither starts nor ends in ':'
+ */
+ def xName: String = {
+ if (ch == SU)
+ truncatedError("")
+ else if (!isNameStart(ch))
+ return errorAndResult("name expected, but char '%s' cannot start a name" format ch, "")
+
+ val buf = new StringBuilder
+
+ do buf append ch_returning_nextch
+ while (isNameChar(ch))
+
+ if (buf.last == ':') {
+ reportSyntaxError( "name cannot end in ':'" )
+ buf.toString dropRight 1
+ }
+ else buf.toString
+ }
+
+ private def attr_unescape(s: String) = s match {
+ case "lt" => "<"
+ case "gt" => ">"
+ case "amp" => "&"
+ case "apos" => "'"
+ case "quot" => "\""
+ case "quote" => "\""
+ case _ => "&" + s + ";"
+ }
+
+ /** Replaces only character references right now.
+ * see spec 3.3.3
+ */
+ private def normalizeAttributeValue(attval: String): String = {
+ val buf = new StringBuilder
+ val it = attval.iterator.buffered
+
+ while (it.hasNext) buf append (it.next match {
+ case ' ' | '\t' | '\n' | '\r' => " "
+ case '&' if it.head == '#' => it.next ; xCharRef(it)
+ case '&' => attr_unescape(takeUntilChar(it, ';'))
+ case c => c
+ })
+
+ buf.toString
+ }
+
+ /** CharRef ::= "&#" '0'..'9' {'0'..'9'} ";"
+ * | "&#x" '0'..'9'|'A'..'F'|'a'..'f' { hexdigit } ";"
+ *
+ * see [66]
+ */
+ def xCharRef(ch: () => Char, nextch: () => Unit): String =
+ Utility.parseCharRef(ch, nextch, reportSyntaxError _, truncatedError _)
+
+ def xCharRef(it: Iterator[Char]): String = {
+ var c = it.next
+ Utility.parseCharRef(() => c, () => { c = it.next }, reportSyntaxError _, truncatedError _)
+ }
+
+ def xCharRef: String = xCharRef(() => ch, () => nextch)
+
+ /** Create a lookahead reader which does not influence the input */
+ def lookahead(): BufferedIterator[Char]
+
+ /** The library and compiler parsers had the interesting distinction of
+ * different behavior for nextch (a function for which there are a total
+ * of two plausible behaviors, so we know the design space was fully
+ * explored.) One of them returned the value of nextch before the increment
+ * and one of them the new value. So to unify code we have to at least
+ * temporarily abstract over the nextchs.
+ */
+ def ch: Char
+ def nextch(): Unit
+ protected def ch_returning_nextch: Char
+ def eof: Boolean
+
+ // def handle: HandleType
+ var tmppos: PositionType
+
+ def xHandleError(that: Char, msg: String): Unit
+ def reportSyntaxError(str: String): Unit
+ def reportSyntaxError(pos: Int, str: String): Unit
+
+ def truncatedError(msg: String): Nothing
+ def errorNoEnd(tag: String): Nothing
+
+ protected def errorAndResult[T](msg: String, x: T): T = {
+ reportSyntaxError(msg)
+ x
+ }
+
+ def xToken(that: Char) {
+ if (ch == that) nextch
+ else xHandleError(that, "'%s' expected instead of '%s'".format(that, ch))
+ }
+ def xToken(that: Seq[Char]) { that foreach xToken }
+
+ /** scan [S] '=' [S]*/
+ def xEQ() = { xSpaceOpt; xToken('='); xSpaceOpt }
+
+ /** skip optional space S? */
+ def xSpaceOpt() = while (isSpace(ch) && !eof) nextch
+
+ /** scan [3] S ::= (#x20 | #x9 | #xD | #xA)+ */
+ def xSpace() =
+ if (isSpace(ch)) { nextch; xSpaceOpt }
+ else xHandleError(ch, "whitespace expected")
+
+ /** Apply a function and return the passed value */
+ def returning[T](x: T)(f: T => Unit): T = { f(x); x }
+
+ /** Execute body with a variable saved and restored after execution */
+ def saving[A, B](getter: A, setter: A => Unit)(body: => B): B = {
+ val saved = getter
+ try body
+ finally setter(saved)
+ }
+
+ /** Take characters from input stream until given String "until"
+ * is seen. Once seen, the accumulated characters are passed
+ * along with the current Position to the supplied handler function.
+ */
+ protected def xTakeUntil[T](
+ handler: (PositionType, String) => T,
+ positioner: () => PositionType,
+ until: String): T =
+ {
+ val sb = new StringBuilder
+ val head = until.head
+ val rest = until.tail
+
+ while (true) {
+ if (ch == head && peek(rest))
+ return handler(positioner(), sb.toString)
+ else if (ch == SU)
+ truncatedError("") // throws TruncatedXMLControl in compiler
+
+ sb append ch
+ nextch
+ }
+ unreachable
+ }
+
+ /** Create a non-destructive lookahead reader and see if the head
+ * of the input would match the given String. If yes, return true
+ * and drop the entire String from input; if no, return false
+ * and leave input unchanged.
+ */
+ private def peek(lookingFor: String): Boolean =
+ (lookahead() take lookingFor.length sameElements lookingFor.iterator) && {
+ // drop the chars from the real reader (all lookahead + orig)
+ (0 to lookingFor.length) foreach (_ => nextch)
+ true
+ }
+}
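One pattern worth calling out from the new trait is the `saving` combinator, which temporarily overrides a mutable setting and restores it even if the body throws. A self-contained sketch (all names below are invented for illustration, not taken from the diff):

    object SavingDemo {
      def saving[A, B](getter: A, setter: A => Unit)(body: => B): B = {
        val saved = getter
        try body
        finally setter(saved)
      }

      var verbose = false
      // Run `body` with verbose output enabled, then restore the old setting.
      def verbosely[T](body: => T): T =
        saving[Boolean, T](verbose, v => verbose = v) { verbose = true; body }
    }

MarkupParsers below uses exactly this shape for `handle.isPattern` and `parser.in.sepRegions`.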
diff --git a/src/dotty/tools/dotc/parsing/MarkupParsers.scala b/src/dotty/tools/dotc/parsing/MarkupParsers.scala
new file mode 100644
index 000000000..1422c83b3
--- /dev/null
+++ b/src/dotty/tools/dotc/parsing/MarkupParsers.scala
@@ -0,0 +1,460 @@
+package dotty.tools
+package dotc
+package parsing
+
+import scala.collection.mutable
+import mutable.{ Buffer, ArrayBuffer, ListBuffer }
+import scala.util.control.ControlThrowable
+import util.SourceFile
+import scala.xml.{ Text, TextBuffer }
+import scala.xml.parsing.MarkupParserCommon
+import scala.xml.Utility.{ isNameStart, isNameChar, isSpace }
+import scala.reflect.internal.Chars.{ SU, LF }
+import Parsers._
+import util.Positions._
+import core._
+import Trees._
+import Constants._
+
+
+// XXX/Note: many/most of the functions in here are almost direct cut and pastes
+// from another file - scala.xml.parsing.MarkupParser, it looks like.
+// (It was like that when I got here.) They used to be commented "[Duplicate]" but
+// since approximately all of them were, I snipped it as noise. As far as I can
+// tell this wasn't for any particularly good reason, but slightly different
+// compiler and library parser interfaces meant it would take some setup.
+//
+// I rewrote most of these, but not as yet the library versions: so if you are
+// tempted to touch any of these, please be aware of that situation and try not
+// to let it get any worse. -- paulp
+
+/** This trait ...
+ *
+ * @author Burak Emir
+ * @version 1.0
+ */
+object MarkupParsers {
+
+ import UntypedTrees.{untpd, ugen}
+ import untpd._
+
+ case object MissingEndTagControl extends ControlThrowable {
+ override def getMessage = "start tag was here: "
+ }
+
+ case object ConfusedAboutBracesControl extends ControlThrowable {
+ override def getMessage = " I encountered a '}' where I didn't expect one, maybe this tag isn't closed <"
+ }
+
+ case object TruncatedXMLControl extends ControlThrowable {
+ override def getMessage = "input ended while parsing XML"
+ }
+
+ class MarkupParser(parser: Parser, final val preserveWS: Boolean) extends MarkupParserCommon {
+
+ import Tokens.{ EMPTY, LBRACE, RBRACE }
+
+ type PositionType = Position
+ type InputType = CharArrayReader
+ type ElementType = Tree
+ type AttributesType = mutable.Map[String, Tree]
+ type NamespaceType = Any // namespaces ignored
+
+ def mkAttributes(name: String, other: NamespaceType): AttributesType = xAttributes
+
+ val eof = false
+
+ def truncatedError(msg: String): Nothing = throw TruncatedXMLControl
+ def xHandleError(that: Char, msg: String) =
+ if (ch == SU) throw TruncatedXMLControl
+ else reportSyntaxError(msg)
+
+ var input : CharArrayReader = _
+ def lookahead(): BufferedIterator[Char] =
+ (input.buf drop input.charOffset).iterator.buffered
+
+ import parser.{ symbXMLBuilder => handle }
+
+ def curOffset : Int = input.charOffset - 1
+ var tmppos : Position = NoPosition
+ def ch = input.ch
+ /** this method assigns the next character to ch and advances in the input */
+ def nextch() { input.nextChar() }
+
+ protected def ch_returning_nextch: Char = {
+ val result = ch; input.nextChar(); result
+ }
+
+ def mkProcInstr(position: Position, name: String, text: String): ElementType =
+ parser.symbXMLBuilder.procInstr(position, name, text)
+
+ var xEmbeddedBlock = false
+
+ private var debugLastStartElement = new mutable.Stack[(Int, String)]
+ private def debugLastPos = debugLastStartElement.top._1
+ private def debugLastElem = debugLastStartElement.top._2
+
+ private def errorBraces() = {
+ reportSyntaxError("in XML content, please use '}}' to express '}'")
+ throw ConfusedAboutBracesControl
+ }
+ def errorNoEnd(tag: String) = {
+ reportSyntaxError("expected closing tag of " + tag)
+ throw MissingEndTagControl
+ }
+
+ /** checks whether the next character starts a Scala block; if so, skips it.
+ * @return true if next character starts a scala block
+ */
+ def xCheckEmbeddedBlock: Boolean = {
+ // attention: side effect, used in xText
+ xEmbeddedBlock = (ch == '{') && { nextch; (ch != '{') }
+ xEmbeddedBlock
+ }
+
+ /** parse attribute and add it to listmap
+ * [41] Attributes ::= { S Name Eq AttValue }
+ * AttValue ::= `'` { _ } `'`
+ * | `"` { _ } `"`
+ * | `{` scalablock `}`
+ */
+ def xAttributes = {
+ val aMap = mutable.LinkedHashMap[String, Tree]()
+
+ while (isNameStart(ch)) {
+ val start = curOffset
+ val key = xName
+ xEQ
+ val delim = ch
+ val mid = curOffset
+ val value: Tree = ch match {
+ case '"' | '\'' =>
+ val tmp = xAttributeValue(ch_returning_nextch)
+
+ try handle.parseAttribute(Position(start, curOffset, mid), tmp)
+ catch {
+ case e: RuntimeException =>
+ errorAndResult("error parsing attribute value", parser.errorTermTree)
+ }
+
+ case '{' =>
+ nextch
+ xEmbeddedExpr
+ case SU =>
+ throw TruncatedXMLControl
+ case _ =>
+ errorAndResult("' or \" delimited attribute value or '{' scala-expr '}' expected", Literal(Constant("<syntax-error>")))
+ }
+ // well-formedness constraint: unique attribute names
+ if (aMap contains key)
+ reportSyntaxError("attribute %s may only be defined once" format key)
+
+ aMap(key) = value
+ if (ch != '/' && ch != '>')
+ xSpace
+ }
+ aMap
+ }
+
+ /** '<! CharData ::= [CDATA[ ( {char} - {char}"]]>"{char} ) ']]>'
+ *
+ * see [15]
+ */
+ def xCharData: Tree = {
+ val start = curOffset
+ xToken("[CDATA[")
+ val mid = curOffset
+ xTakeUntil(handle.charData, () => Position(start, curOffset, mid), "]]>")
+ }
+
+ def xUnparsed: Tree = {
+ val start = curOffset
+ xTakeUntil(handle.unparsed, () => Position(start, curOffset, start), "</xml:unparsed>")
+ }
+
+ /** Comment ::= '<!--' ((Char - '-') | ('-' (Char - '-')))* '-->'
+ *
+ * see [15]
+ */
+ def xComment: Tree = {
+ val start = curOffset - 2 // Rewinding to include "<!"
+ xToken("--")
+ xTakeUntil(handle.comment, () => Position(start, curOffset, start), "-->")
+ }
+
+ def appendText(pos: Position, ts: Buffer[Tree], txt: String) {
+ val toAppend =
+ if (preserveWS) Seq(txt)
+ else TextBuffer.fromString(txt).toText map (_.text)
+
+ toAppend foreach (t => ts append handle.text(pos, t))
+ }
+
+ /** adds entity/character to ts as side-effect
+ * @precond ch == '&'
+ */
+ def content_AMP(ts: ArrayBuffer[Tree]) {
+ nextch
+ val toAppend = ch match {
+ case '#' => // CharacterRef
+ nextch
+ val theChar = handle.text(tmppos, xCharRef)
+ xToken(';')
+ theChar
+ case _ => // EntityRef
+ val n = xName
+ xToken(';')
+ handle.entityRef(tmppos, n)
+ }
+
+ ts append toAppend
+ }
+
+ /**
+ * @precond ch == '{'
+ * @postcond: xEmbeddedBlock == false!
+ */
+ def content_BRACE(p: Position, ts: ArrayBuffer[Tree]): Unit =
+ if (xCheckEmbeddedBlock) ts append xEmbeddedExpr
+ else appendText(p, ts, xText)
+
+ /** Returns true if it encounters an end tag (without consuming it),
+ * appends trees to ts as side-effect.
+ *
+ * @param ts ...
+ * @return ...
+ */
+ private def content_LT(ts: ArrayBuffer[Tree]): Boolean = {
+ if (ch == '/')
+ return true // end tag
+
+ val toAppend = ch match {
+ case '!' => nextch ; if (ch =='[') xCharData else xComment // CDATA or Comment
+ case '?' => nextch ; xProcInstr // PI
+ case _ => element // child node
+ }
+
+ ts append toAppend
+ false
+ }
+
+ def content: Buffer[Tree] = {
+ val ts = new ArrayBuffer[Tree]
+ while (true) {
+ if (xEmbeddedBlock)
+ ts append xEmbeddedExpr
+ else {
+ tmppos = Position(curOffset)
+ ch match {
+ // end tag, cdata, comment, pi or child node
+ case '<' => nextch ; if (content_LT(ts)) return ts
+ // either the character '{' or an embedded scala block }
+ case '{' => content_BRACE(tmppos, ts) // }
+ // EntityRef or CharRef
+ case '&' => content_AMP(ts)
+ case SU => return ts
+ // text content - here xEmbeddedBlock might be true
+ case _ => appendText(tmppos, ts, xText)
+ }
+ }
+ }
+ unreachable
+ }
+
+ /** '<' element ::= xmlTag1 '>' { xmlExpr | '{' simpleExpr '}' } ETag
+ * | xmlTag1 '/' '>'
+ */
+ def element: Tree = {
+ val start = curOffset
+ val (qname, attrMap) = xTag(())
+ if (ch == '/') { // empty element
+ xToken("/>")
+ handle.element(Position(start, curOffset, start), qname, attrMap, true, new ListBuffer[Tree])
+ }
+ else { // handle content
+ xToken('>')
+ if (qname == "xml:unparsed")
+ return xUnparsed
+
+ debugLastStartElement.push((start, qname))
+ val ts = content
+ xEndTag(qname)
+ debugLastStartElement.pop
+ val pos = Position(start, curOffset, start)
+ qname match {
+ case "xml:group" => handle.group(pos, ts)
+ case _ => handle.element(pos, qname, attrMap, false, ts)
+ }
+ }
+ }
+
+ /** parse character data.
+ * precondition: xEmbeddedBlock == false (we are not in a scala block)
+ */
+ private def xText: String = {
+ assert(!xEmbeddedBlock, "internal error: encountered embedded block")
+ val buf = new StringBuilder
+ def done = buf.toString
+
+ while (ch != SU) {
+ if (ch == '}') {
+ if (charComingAfter(nextch) == '}') nextch
+ else errorBraces()
+ }
+
+ buf append ch
+ nextch
+ if (xCheckEmbeddedBlock || ch == '<' || ch == '&')
+ return done
+ }
+ done
+ }
+
+ /** Some try/catch/finally logic used by xLiteral and xLiteralPattern. */
+ private def xLiteralCommon(f: () => Tree, ifTruncated: String => Unit): Tree = {
+ try return f()
+ catch {
+ case c @ TruncatedXMLControl =>
+ ifTruncated(c.getMessage)
+ case c @ (MissingEndTagControl | ConfusedAboutBracesControl) =>
+ parser.syntaxError(c.getMessage + debugLastElem + ">", debugLastPos)
+ case _: ArrayIndexOutOfBoundsException =>
+ parser.syntaxError("missing end tag in XML literal for <%s>" format debugLastElem, debugLastPos)
+ }
+ finally parser.in resume Tokens.XMLSTART
+
+ parser.errorTermTree
+ }
+
+ /** Use a lookahead parser to run speculative body, and return the first char afterward. */
+ private def charComingAfter(body: => Unit): Char = {
+ try {
+ input = input.lookaheadReader
+ body
+ ch
+ }
+ finally input = parser.in
+ }
+
+ /** xLiteral = element { element }
+ * @return Scala representation of this xml literal
+ */
+ def xLiteral: Tree = xLiteralCommon(
+ () => {
+ input = parser.in
+ handle.isPattern = false
+
+ val ts = new ArrayBuffer[Tree]
+ val start = curOffset
+ tmppos = Position(curOffset) // Iuli: added this line, as it seems content_LT uses tmppos when creating trees
+ content_LT(ts)
+
+ // parse more XML ?
+ if (charComingAfter(xSpaceOpt) == '<') {
+ xSpaceOpt
+ while (ch == '<') {
+ nextch
+ ts append element
+ xSpaceOpt
+ }
+ handle.makeXMLseq(Position(start, curOffset, start), ts)
+ }
+ else {
+ assert(ts.length == 1)
+ ts(0)
+ }
+ },
+ msg => parser.incompleteInputError(msg)
+ )
+
+ /** @see xmlPattern. resynchronizes after successful parse
+ * @return this xml pattern
+ */
+ def xLiteralPattern: Tree = xLiteralCommon(
+ () => {
+ input = parser.in
+ saving[Boolean, Tree](handle.isPattern, handle.isPattern = _) {
+ handle.isPattern = true
+ val tree = xPattern
+ xSpaceOpt
+ tree
+ }
+ },
+ msg => parser.syntaxError(msg, curOffset)
+ )
+
+ def escapeToScala[A](op: => A, kind: String) = {
+ xEmbeddedBlock = false
+ val res = saving[List[Int], A](parser.in.sepRegions, parser.in.sepRegions = _) {
+ parser.in resume LBRACE
+ op
+ }
+ if (parser.in.token != RBRACE)
+ reportSyntaxError(" expected end of Scala "+kind)
+
+ res
+ }
+
+ def xEmbeddedExpr: Tree = escapeToScala(parser.block(), "block")
+
+ /** xScalaPatterns ::= patterns
+ */
+ def xScalaPatterns: List[Tree] = escapeToScala(parser.patterns(), "pattern")
+
+ def reportSyntaxError(pos: Int, str: String) = parser.syntaxError(str, pos)
+ def reportSyntaxError(str: String) {
+ reportSyntaxError(curOffset, "in XML literal: " + str)
+ nextch()
+ }
+
+ /** '<' xPattern ::= Name [S] { xmlPattern | '{' pattern3 '}' } ETag
+ * | Name [S] '/' '>'
+ */
+ def xPattern: Tree = {
+ var start = curOffset
+ val qname = xName
+ debugLastStartElement.push((start, qname))
+ xSpaceOpt
+
+ val ts = new ArrayBuffer[Tree]
+ val isEmptyTag = (ch == '/') && { nextch ; true }
+ xToken('>')
+
+ if (!isEmptyTag) {
+ // recurses until it hits a termination condition, then returns
+ def doPattern: Boolean = {
+ val start1 = curOffset
+ if (xEmbeddedBlock) ts ++= xScalaPatterns
+ else ch match {
+ case '<' => // tag
+ nextch
+ if (ch != '/') ts append xPattern // child
+ else return false // terminate
+
+ case '{' => // embedded Scala patterns
+ while (ch == '{') {
+ nextch
+ ts ++= xScalaPatterns
+ }
+ assert(!xEmbeddedBlock, "problem with embedded block")
+
+ case SU =>
+ throw TruncatedXMLControl
+
+ case _ => // text
+ appendText(Position(start1, curOffset, start1), ts, xText)
+ // here xEmbeddedBlock might be true:
+ // if (xEmbeddedBlock) throw new ApplicationError("after:"+text); // assert
+ }
+ true
+ }
+
+ while (doPattern) { } // call until false
+ xEndTag(qname)
+ debugLastStartElement.pop
+ }
+
+ handle.makeXMLpat(Position(start, curOffset, start), qname, ts)
+ }
+ } /* class MarkupParser */
+}
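The error handling above is built on ControlThrowable case objects: failures deep inside the XML parser unwind to a single recovery point (xLiteralCommon) where they are turned into ordinary syntax errors. A stripped-down sketch of the same pattern (names invented, not the compiler's API):

    import scala.util.control.ControlThrowable

    case object DemoTruncated extends ControlThrowable {
      override def getMessage = "input ended while parsing XML"
    }

    // Run a parsing action and convert the control-flow exception into a
    // caller-supplied error result, mirroring xLiteralCommon above.
    def recovering[T](parse: => T)(onError: String => T): T =
      try parse
      catch { case c @ DemoTruncated => onError(c.getMessage) }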
diff --git a/src/dotty/tools/dotc/parsing/Parsers.scala b/src/dotty/tools/dotc/parsing/Parsers.scala
index 12a828300..d88d9afab 100644
--- a/src/dotty/tools/dotc/parsing/Parsers.scala
+++ b/src/dotty/tools/dotc/parsing/Parsers.scala
@@ -7,6 +7,7 @@ import scala.collection.immutable.BitSet
import util.{ SourceFile, FreshNameCreator }
import Tokens._
import Scanners._
+import MarkupParsers._
import core._
import Flags._
import Contexts._
@@ -20,44 +21,9 @@ import Types._
import Constants._
import NameOps._
import scala.reflect.internal.Chars._
+import ScriptParsers._
import annotation.switch
-
-/** <p>Performs the following context-free rewritings:</p>
- * <ol>
- * <li>
- * Places all pattern variables in Bind nodes. In a pattern, for
- * identifiers <code>x</code>:<pre>
- * x => x @ _
- * x:T => x @ (_ : T)</pre>
- * </li>
- * <li>Removes pattern definitions (PatDef's) as follows:
- * If pattern is a simple (typed) identifier:<pre>
- * <b>val</b> x = e ==> <b>val</b> x = e
- * <b>val</b> x: T = e ==> <b>val</b> x: T = e</pre>
- *
- * if there are no variables in pattern<pre>
- * <b>val</b> p = e ==> e match (case p => ())</pre>
- *
- * if there is exactly one variable in pattern<pre>
- * <b>val</b> x_1 = e <b>match</b> (case p => (x_1))</pre>
- *
- * if there is more than one variable in pattern<pre>
- * <b>val</b> p = e ==> <b>private synthetic val</b> t$ = e <b>match</b> (case p => (x_1, ..., x_N))
- * <b>val</b> x_1 = t$._1
- * ...
- * <b>val</b> x_N = t$._N</pre>
- * </li>
- * <li>
- * Removes function types as follows:<pre>
- * (argtpes) => restpe ==> scala.Function_n[argtpes, restpe]</pre>
- * </li>
- * <li>
- * Wraps naked case definitions in a match as follows:<pre>
- * { cases } ==> (x => x.match {cases})<span style="font-family:normal;">, except when already argument to match</span></pre>
- * </li>
- * </ol>
- */
object Parsers {
import UntypedTrees.{untpd, ugen}
@@ -73,147 +39,56 @@ object Parsers {
def nonePositive: Boolean = parCounts forall (_ <= 0)
}
+ object Location extends Enumeration {
+ val InParens, InBlock, ElseWhere = Value
+ }
- class Parser(val source: SourceFile)(implicit ctx: Context) extends DotClass {
-
-// def this(unit: CompilationUnit) = this(unit, List())
-
- val in = new Scanner(source)
+ object ParamOwner extends Enumeration {
+ val Class, Type, TypeParam, Def = Value
+ }
- /** The parse starting point depends on whether the source file is self-contained:
- * if not, the AST will be supplemented.
- */
- def parseStartRule =
- if (source.isSelfContained) () => compilationUnit()
- else () => scriptBody()
+ /** The parse starting point depends on whether the source file is self-contained:
+ * if not, the AST will be supplemented.
+ */
+ def parser(source: SourceFile)(implicit ctx: Context) =
+ if (source.isSelfContained) new ScriptParser(source)
+ else new Parser(source)
- def sourcePos(off: Int = in.offset): SourcePosition =
- source atPos Position(off)
+ class Parser(val source: SourceFile)(implicit ctx: Context) extends DotClass {
- /** the markup parser
- lazy val xmlp = new MarkupParser(this, true)
+ val in = new Scanner(source)
- object symbXMLBuilder extends SymbolicXMLBuilder(this, true) { // DEBUG choices
- val global: self.global.type = self.global
- def freshName(prefix: String): Name = SourceFileParser.this.freshName(prefix)
- }
- */
val openParens = new ParensCounters
- def xmlLiteral() : Tree = ??? // xmlp.xLiteral
- def xmlLiteralPattern() : Tree = ??? // xmlp.xLiteralPattern
-
/** This is the general parse entry point.
+ * Overridden by ScriptParser
*/
def parse(): Tree = {
- val t = parseStartRule()
+ val t = compilationUnit()
accept(EOF)
t
}
- /** This is the parse entry point for code which is not self-contained, e.g.
- * a script which is a series of template statements. They will be
- * swaddled in Trees until the AST is equivalent to the one returned
- * by compilationUnit().
- */
- def scriptBody(): Tree = unsupported("scriptBody")
- /* TODO: reinstantiate
- val stmts = templateStatSeq(false)._2
- accept(EOF)
-
- def mainModuleName = ctx.settings.script.value
-
- /** If there is only a single object template in the file and it has a
- * suitable main method, we will use it rather than building another object
- * around it. Since objects are loaded lazily the whole script would have
- * been a no-op, so we're not taking much liberty.
- */
- def searchForMain(): Option[Tree] = {
- /** Have to be fairly liberal about what constitutes a main method since
- * nothing has been typed yet - for instance we can't assume the parameter
- * type will look exactly like "Array[String]" as it could have been renamed
- * via import, etc.
- */
- def isMainMethod(t: Tree) = t match {
- case DefDef(_, nme.main, Nil, List(_), _, _) => true
- case _ => false
- }
- /** For now we require there only be one top level object. */
- var seenModule = false
- val newStmts = stmts collect {
- case t @ Import(_, _) => t
- case md @ ModuleDef(mods, name, template)
- if !seenModule && (template.body exists isMainMethod) =>
- seenModule = true
- /** This slightly hacky situation arises because we have no way to communicate
- * back to the scriptrunner what the name of the program is. Even if we were
- * willing to take the sketchy route of settings.script.value = progName, that
- * does not work when using fsc. And to find out in advance would impose a
- * whole additional parse. So instead, if the actual object's name differs from
- * what the script is expecting, we transform it to match.
- */
- md.derivedModuleDef(mods, mainModuleName.toTermName, template)
- case _ =>
- /** If we see anything but the above, fail. */
- return None
- }
- Some(makePackaging(0, emptyPkg, newStmts))
- }
-
- if (mainModuleName == ScriptRunner.defaultScriptMain)
- searchForMain() foreach { return _ }
-
- /** Here we are building an AST representing the following source fiction,
- * where <moduleName> is from -Xscript (defaults to "Main") and <stmts> are
- * the result of parsing the script file.
- *
- * object <moduleName> {
- * def main(argv: Array[String]): Unit = {
- * val args = argv
- * new AnyRef {
- * <stmts>
- * }
- * }
- * }
- */
- import definitions._
-
- def emptyPkg = atPos(0, 0, 0) { Ident(nme.EMPTY_PACKAGE_NAME) }
- def emptyInit = DefDef(
- Modifiers(),
- nme.CONSTRUCTOR,
- Nil,
- List(Nil),
- TypeTree(),
- Block(List(Apply(Select(Super(This(tpnme.EMPTY), tpnme.EMPTY), nme.CONSTRUCTOR), Nil)), Literal(Constant(())))
- )
-
- // def main
- def mainParamType = AppliedTypeTree(Ident(tpnme.Array), List(Ident(tpnme.String)))
- def mainParameter = List(ValDef(Modifiers(Param), "argv", mainParamType, EmptyTree))
- def mainSetArgv = List(ValDef(Modifiers(), "args", TypeTree(), Ident("argv")))
- def mainNew = makeNew(Nil, emptyValDef, stmts, List(Nil), NoPosition, NoPosition)
- def mainDef = DefDef(Modifiers(), nme.main, Nil, List(mainParameter), scalaDot(tpnme.Unit), Block(mainSetArgv, mainNew))
-
- // object Main
- def moduleName = ScriptRunner scriptMain settings
- def moduleBody = Template(List(scalaScalaObjectConstr), emptyValDef, List(emptyInit, mainDef))
- def moduleDef = ModuleDef(Modifiers(), moduleName, moduleBody)
+/* -------------- TOKEN CLASSES ------------------------------------------- */
- // package <empty> { ... }
- makePackaging(0, emptyPkg, List(moduleDef))
- }*/
+ def isIdent = in.token == IDENTIFIER || in.token == BACKQUOTED_IDENT
+ def isIdent(name: Name) = in.token == IDENTIFIER && in.name == name
+ def isLiteral = literalTokens contains in.token
+ def isNumericLit = numericLitTokens contains in.token
+ def isModifier = modifierTokens contains in.token
+ def isExprIntro = canStartExpressionTokens contains in.token
+ def isTemplateIntro = templateIntroTokens contains in.token
+ def isDclIntro = dclIntroTokens contains in.token
+ def isStatSeqEnd = in.token == RBRACE || in.token == EOF
+ def mustStartStat = mustStartStatTokens contains in.token
- /* --------------- PLACEHOLDERS ------------------------------------------- */
+ def isDefIntro(allowedMods: BitSet) =
+ in.token == AT || (allowedMods contains in.token) || (defIntroTokens contains in.token)
- def isWildcard(t: Tree): Boolean = t match {
- case Ident(nme.WILDCARD) => true
- case Typed(t1, _) => isWildcard(t1)
- case Annotated(t1, _) => isWildcard(t1)
- case _ => false
- }
+ def isStatSep: Boolean =
+ in.token == NEWLINE || in.token == NEWLINES || in.token == SEMI
- /* ------------- POSITIONS ------------------------------------------- */
+/* ------------- POSITIONS ------------------------------------------- */
def atPos[T <: Positioned](start: Offset, point: Offset, end: Offset)(t: T): T =
atPos(Position(start, end, point))(t)
@@ -227,30 +102,26 @@ object Parsers {
def atPos[T <: Positioned](pos: Position)(t: T): T =
if (t.pos.isSourceDerived) t else t.withPos(pos)
- def here[T <: Positioned]: T => T = {
- val start = in.offset
- t => atPos(start, start)(t)
- }
-
def tokenRange = Position(in.offset, in.lastCharOffset)
-/* ------------- ERROR HANDLING ------------------------------------------- */
+ def sourcePos(off: Int = in.offset): SourcePosition =
+ source atPos Position(off)
- private var inFunReturnType = false
- private def fromWithinReturnType[T](body: => T): T = {
- val saved = inFunReturnType
- try {
- inFunReturnType = true
- body
- } finally inFunReturnType = saved
- }
+/* ------------- ERROR HANDLING ------------------------------------------- */
- private var lastDefOffset = -1
+ /** The offset of the last time when a statement on a new line was definitely
+ * encountered in the current scope or an outer scope.
+ */
+ private var lastStatOffset = -1
- def setLastDefOffset() =
- if ((mustStartStatTokens contains in.token) && in.isAfterLineEnd)
- lastDefOffset = in.offset
+ def setLastStatOffset() =
+ if (mustStartStat && in.isAfterLineEnd)
+ lastStatOffset = in.offset
+ /** Is offset1 less or equally indented than offset2?
+ * This is the case if the characters between the preceding end-of-line and offset1
+ * are a prefix of the characters between the preceding end-of-line and offset2.
+ */
def isLeqIndented(offset1: Int, offset2: Int): Boolean = {
def lineStart(off: Int): Int =
if (off > 0 && source(off) != LF) lineStart(off - 1)
@@ -261,7 +132,16 @@ object Parsers {
recur(lineStart(offset1), lineStart(offset2))
}
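To make the indentation test concrete, here is a worked illustration based on the comment above (source text invented): offset1 counts as less or equally indented than offset2 exactly when the characters before offset1 on its line form a prefix of the characters before offset2 on its line.

    //   "  val x = 1"     <- offset1 at the `v' (preceded by "  ")
    //   "    x + 1"       <- offset2 at the `x' (preceded by "    ")
    //   isLeqIndented(offset1, offset2) == true, since "  " is a prefix of "    "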
- protected def skip(targetToken: Int = EOF) {
+ /** Skip on error to next safe point.
+ * Safe points are:
+ * - Closing braces, provided they match an opening brace before the error point.
+ * - Closing parens and brackets, provided they match an opening paren or bracket
+ * before the error point and there are no intervening other kinds of parens.
+ * - Semicolons and newlines, provided there are no intervening braces.
+ * - Definite statement starts on new lines, provided they are not more indented
+ * than the last known statement start before the error point.
+ */
+ protected def skip() {
val skippedParens = new ParensCounters
while (true) {
(in.token: @switch) match {
@@ -288,13 +168,11 @@ object Parsers {
case LBRACKET=>
skippedParens.change(LBRACKET, +1)
case _ =>
+ if (mustStartStat &&
+ in.isAfterLineEnd() &&
+ isLeqIndented(in.offset, lastStatOffset))
+ return
}
- if ( (mustStartStatTokens contains in.token) &&
- in.isAfterLineEnd() &&
- isLeqIndented(in.offset, lastDefOffset)
- || targetToken == in.token &&
- skippedParens.nonePositive)
- return
in.nextToken()
}
}
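As a rough illustration of the recovery rules listed above (example source invented), this is where the new skip() would resynchronize after an error inside a block:

    //   def f = {
    //     val x = )        // syntax error reported here
    //     println(x)       // not a definite statement start; still skipping
    //     val y = 1        // `val' on a new line, no more indented than the
    //   }                  // last statement start: parsing resumes here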
@@ -305,26 +183,38 @@ object Parsers {
def deprecationWarning(msg: String, offset: Int = in.offset) =
ctx.deprecationWarning(msg, source atPos Position(offset))
- /** whether a non-continuable syntax error has been seen */
+ /** The offset where the last syntax error was reported, or if a skip to a
+ * safepoint occurred afterwards, the offset of the safe point.
+ */
private var lastErrorOffset : Int = -1
+ /** Issue an error at given offset if beyond last error offset
+ * and update lastErrorOffset.
+ */
def syntaxError(msg: String, offset: Int = in.offset): Unit =
if (offset > lastErrorOffset) {
syntaxError(msg, Position(offset))
lastErrorOffset = in.offset
}
+ /** Unconditionally issue an error at given position, without
+ * updating lastErrorOffset.
+ */
def syntaxError(msg: String, pos: Position): Unit =
ctx.error(msg, source atPos pos)
+ /** Issue an error at current offset that input is incomplete */
def incompleteInputError(msg: String) =
ctx.reporter.incompleteInputError(msg, source atPos Position(in.offset))
+ /** If at end of file, issue an incompleteInputError.
+ * Otherwise issue a syntax error and skip to next safe point.
+ */
def syntaxErrorOrIncomplete(msg: String) =
if (in.token == EOF) incompleteInputError(msg)
- else { syntaxError(msg); skip() }
+ else { syntaxError(msg); skip(); lastErrorOffset = in.offset }
- def expectedMsg(token: Int): String =
+ private def expectedMsg(token: Int): String =
showToken(token) + " expected but " + showToken(in.token) + " found."
/** Consume one token of the specified type, or
@@ -347,33 +237,19 @@ object Parsers {
case _ => accept(SEMI)
}
- def acceptStatSepOpt(altEnd: Token = EOF) =
- if (!isStatSeqEnd) acceptStatSep()
-
- def errorTermTree = here(Literal(Constant(null)))
-
- /** Check that type parameter is not by name or repeated */
- def checkNotByNameArgs(tpt: Tree) =
- if (TreeInfo.isByNameParamType(tpt))
- syntaxError("no by-name parameter type allowed here", tpt.pos)
-
-/* -------------- TOKEN CLASSES ------------------------------------------- */
-
- def isUnaryOp = isIdent && nme.raw.isUnary(in.name)
- def isRawStar = isIdent && in.name == nme.raw.STAR
- def isRawBar = isIdent && in.name == nme.raw.BAR
- def isIdent = in.token == IDENTIFIER || in.token == BACKQUOTED_IDENT
- def isIdent(name: Name) = in.token == IDENTIFIER && in.name == name
- def isLiteral = literalTokens contains in.token
- def isExprIntro = canStartExpressionTokens contains in.token
- def isTypeIntro: Boolean = canStartTypeTokens contains in.token
- def isStatSeqEnd = in.token == RBRACE || in.token == EOF
+ def acceptStatSepUnlessAtEnd(altEnd: Token = EOF) =
+ if (!isStatSeqEnd && in.token != altEnd) acceptStatSep()
- def isDefIntro(allowedMods: BitSet) =
- in.token == AT || (allowedMods contains in.token) || (defIntroTokens contains in.token)
+ def errorTermTree = atPos(in.offset) { Literal(Constant(null)) }
- def isStatSep: Boolean =
- in.token == NEWLINE || in.token == NEWLINES || in.token == SEMI
+ private var inFunReturnType = false
+ private def fromWithinReturnType[T](body: => T): T = {
+ val saved = inFunReturnType
+ try {
+ inFunReturnType = true
+ body
+ } finally inFunReturnType = saved
+ }
/* ---------- TREE CONSTRUCTION ------------------------------------------- */
@@ -387,13 +263,13 @@ object Parsers {
/** Convert tree to formal parameter
*/
- def convertToParam(tree: Tree, mods: Modifiers = Modifiers()): ValDef = tree match {
+ def convertToParam(tree: Tree, mods: Modifiers = Modifiers(), expected: String = "formal parameter"): ValDef = tree match {
case Ident(name) =>
Parameter(name.asTermName, TypeTree(), mods) withPos tree.pos
case Typed(Ident(name), tpt) if tpt.isType =>
Parameter(name.asTermName, tpt, mods) withPos tree.pos
case _ =>
- syntaxError("not a legal formal parameter", tree.pos)
+ syntaxError(s"not a legal $expected", tree.pos)
Parameter(nme.ERROR, tree, mods)
}
@@ -409,13 +285,54 @@ object Parsers {
tree
}
-/* --------- OPERAND/OPERATOR STACK --------------------------------------- */
+/* -------------- XML ---------------------------------------------------- */
+
+ /** the markup parser */
+ lazy val xmlp = new MarkupParser(this, true)
- object Location extends Enumeration {
- val InParens, InBlock, ElseWhere = Value
+ object symbXMLBuilder extends SymbolicXMLBuilder(this, true) // DEBUG choices
+
+ def xmlLiteral() : Tree = xmlp.xLiteral
+ def xmlLiteralPattern() : Tree = xmlp.xLiteralPattern
+
+/* -------- COMBINATORS -------------------------------------------------------- */
+
+ def enclosed[T](tok: Token, body: => T): T = {
+ accept(tok)
+ openParens.change(tok, 1)
+ try body
+ finally {
+ accept(tok + 1)
+ openParens.change(tok, -1)
+ }
+ }
+
+ def inParens[T](body: => T): T = enclosed(LPAREN, body)
+ def inBraces[T](body: => T): T = enclosed(LBRACE, body)
+ def inBrackets[T](body: => T): T = enclosed(LBRACKET, body)
+
+ def inDefScopeBraces[T](body: => T): T = {
+ val saved = lastStatOffset
+ try inBraces(body)
+ finally lastStatOffset = saved
+ }
+
+ /** part { `separator` part }
+ */
+ def tokenSeparated[T](separator: Int, part: () => T): List[T] = {
+ val ts = new ListBuffer[T]
+ while (in.token == separator) {
+ in.nextToken()
+ ts += part()
+ }
+ ts.toList
}
- var opstack: List[OpInfo] = Nil
+ def commaSeparated[T](part: () => T): List[T] = tokenSeparated(COMMA, part)
+
+/* --------- OPERAND/OPERATOR STACK --------------------------------------- */
+
+ var opStack: List[OpInfo] = Nil
def precedence(operator: Name): Int =
if (operator eq nme.ERROR) -1
@@ -436,98 +353,67 @@ object Parsers {
}
}
- def checkSize(kind: String, size: Int, max: Int) {
- if (size > max) syntaxError("too many "+kind+", maximum = "+max)
- }
-
def checkAssoc(offset: Int, op: Name, leftAssoc: Boolean) =
if (TreeInfo.isLeftAssoc(op) != leftAssoc)
syntaxError(
"left- and right-associative operators with same precedence may not be mixed", offset)
- def reduceStack(base: List[OpInfo], top0: Tree, prec: Int, leftAssoc: Boolean): Tree = {
- var top = top0
- if (opstack != base && precedence(opstack.head.operator) == prec)
- checkAssoc(opstack.head.offset, opstack.head.operator, leftAssoc)
- while (opstack != base &&
- (prec < precedence(opstack.head.operator) ||
- leftAssoc && prec == precedence(opstack.head.operator))) {
- val opinfo = opstack.head
- opstack = opstack.tail
- val opPos = Position(opinfo.offset, opinfo.offset + opinfo.operator.length)
- val lPos = opinfo.operand.pos
- val start = if (lPos.exists) lPos.start else opPos.start
- val rPos = top.pos
- val end = if (rPos.exists) rPos.end else opPos.end
- top = atPos(start, opinfo.offset, end) {
- InfixOp(opinfo.operand, opinfo.operator, top)
+ def reduceStack(base: List[OpInfo], top: Tree, prec: Int, leftAssoc: Boolean): Tree = {
+ if (opStack != base && precedence(opStack.head.operator) == prec)
+ checkAssoc(opStack.head.offset, opStack.head.operator, leftAssoc)
+ def recur(top: Tree): Tree = {
+ if (opStack == base) top
+ else {
+ val opInfo = opStack.head
+ val opPrec = precedence(opInfo.operator)
+ if (prec < opPrec || leftAssoc && prec == opPrec) {
+ opStack = opStack.tail
+ recur {
+ val opPos = Position(opInfo.offset, opInfo.offset + opInfo.operator.length)
+ atPos(opPos union opInfo.operand.pos union top.pos) {
+ InfixOp(opInfo.operand, opInfo.operator, top)
+ }
+ }
+ }
+ else top
}
}
- top
+ recur(top)
}
+ /** operand { infixop operand} [postfixop],
+ * respecting rules of associativity and precedence.
+ * @param notAnOperator a name that does not count as an operator.
+ * @param maybePostfix postfix operators are allowed.
+ */
def infixOps(
- first: Tree, canContinue: Token => Boolean, continue: () => Tree,
- noOp: Name = nme.EMPTY,
+ first: Tree, canStartOperand: Token => Boolean, operand: () => Tree,
+ notAnOperator: Name = nme.EMPTY,
maybePostfix: Boolean = false): Tree = {
- val base = opstack
+ val base = opStack
var top = first
- while (isIdent && in.name != noOp) {
+ while (isIdent && in.name != notAnOperator) {
val op = in.name
top = reduceStack(base, top, precedence(op), TreeInfo.isLeftAssoc(op))
- opstack = OpInfo(top, op, in.offset) :: opstack
+ opStack = OpInfo(top, op, in.offset) :: opStack
ident()
- newLineOptWhenFollowing(canContinue)
- if (maybePostfix && !canContinue(in.token)) {
- val topinfo = opstack.head
- opstack = opstack.tail
- val od = reduceStack(base, topinfo.operand, 0, true)
- return atPos(od.pos.start, topinfo.offset) {
- PostfixOp(od, topinfo.operator)
+ newLineOptWhenFollowing(canStartOperand)
+ if (maybePostfix && !canStartOperand(in.token)) {
+ val topInfo = opStack.head
+ opStack = opStack.tail
+ val od = reduceStack(base, topInfo.operand, 0, true)
+ return atPos(od.pos.start, topInfo.offset) {
+ PostfixOp(od, topInfo.operator)
}
}
- top = continue()
+ top = operand()
}
reduceStack(base, top, 0, true)
}
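The rewritten reduceStack/infixOps keep the same stack-based reduction as before, just expressed recursively. The following self-contained sketch (simplified data types, with only `*` binding tighter than everything else) shows the behaviour being preserved:

    object PrecedenceDemo {
      case class Op(lhs: Any, name: String, rhs: Any)
      private def prec(op: String) = if (op == "*") 2 else 1

      // Fold `first op1 x1 op2 x2 ...` into a tree, honouring precedence and
      // left-associativity, the way infixOps/reduceStack do above.
      def parse(first: Any, rest: List[(String, Any)]): Any = {
        var stack = List.empty[(Any, String)]
        var top = first
        def reduce(limit: Int): Unit =
          while (stack.nonEmpty && prec(stack.head._2) >= limit) {
            val (lhs, op) = stack.head; stack = stack.tail
            top = Op(lhs, op, top)
          }
        for ((op, operand) <- rest) {
          reduce(prec(op))        // left-assoc: reduce equal/higher precedence first
          stack = (top, op) :: stack
          top = operand
        }
        reduce(0)
        top
      }
    }
    // PrecedenceDemo.parse("a", List(("+", "b"), ("*", "c"), ("+", "d")))
    //   == Op(Op("a", "+", Op("b", "*", "c")), "+", "d")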
-/* -------- COMBINATORS -------------------------------------------------------- */
-
- def enclosed[T](tok: Token, body: => T): T = {
- accept(tok)
- openParens.change(tok, 1)
- try body
- finally {
- accept(tok + 1)
- openParens.change(tok, -1)
- }
- }
-
- def inParens[T](body: => T): T = enclosed(LPAREN, body)
- def inBraces[T](body: => T): T = enclosed(LBRACE, body)
- def inBrackets[T](body: => T): T = enclosed(LBRACKET, body)
-
- def inDefScopeBraces[T](body: => T): T = {
- val saved = lastDefOffset
- try inBraces(body)
- finally lastDefOffset = saved
- }
-
- /** part { `sep` part }
- * Or if sepFirst is true, { `sep` part }
- */
- def tokenSeparated[T](separator: Int, part: () => T): List[T] = {
- val ts = new ListBuffer[T]
- while (in.token == separator) {
- in.nextToken()
- ts += part()
- }
- ts.toList
- }
- def commaSeparated[T](part: () => T): List[T] = tokenSeparated(COMMA, part)
-
/* -------- IDENTIFIERS AND LITERALS ------------------------------------------- */
+ /** Accept identifier and return its name as a term name. */
def ident(): TermName =
if (isIdent) {
val name = in.name
@@ -538,10 +424,12 @@ object Parsers {
nme.ERROR
}
+ /** Accept identifier and return Ident with its name as a term name. */
def termIdent(): Ident = atPos(in.offset) {
makeIdent(in.token, ident())
}
+ /** Accept identifier and return Ident with its name as a type name. */
def typeIdent(): Ident = atPos(in.offset) {
makeIdent(in.token, ident().toTypeName)
}
@@ -550,6 +438,7 @@ object Parsers {
if (tok == BACKQUOTED_IDENT) new BackquotedIdent(name)
else Ident(name)
+ /** IdentOrWildcard ::= id | `_' */
def identOrWildcard(): Name =
if (in.token == USCORE) { in.nextToken(); nme.WILDCARD } else ident()
@@ -559,15 +448,28 @@ object Parsers {
def termIdentOrWildcard(): Ident =
if (in.token == USCORE) wildcardIdent() else termIdent()
+ /** Accept identifier acting as a selector on given tree `t`. */
def selector(t: Tree): Tree =
atPos(t.pos.start, in.offset) { Select(t, ident()) }
+ /** Selectors ::= ident { `.' ident()
+ *
+ * Accept `.' separated identifiers acting as selectors on given tree `t`.
+ * @param finish An alternative parse in case the next token is not an identifier.
+ * If the alternative does not apply, its tree argument is returned unchanged.
+ */
def selectors(t: Tree, finish: Tree => Tree): Tree = {
val t1 = finish(t)
if (t1 ne t) t1 else dotSelectors(selector(t), finish)
}
- def dotSelectors(t: Tree, finish: Tree => Tree = id) =
+ /** DotSelectors ::= { `.' ident()
+ *
+ * Accept `.' separated identifiers acting as selectors on given tree `t`.
+ * @param finish An alternative parse in case the token following a `.' is not an identifier.
+ * If the alternative does not apply, its tree argument is returned unchanged.
+ */
+ def dotSelectors(t: Tree, finish: Tree => Tree = id) =
if (in.token == DOT) { in.nextToken(); selectors(t, finish) }
else t
@@ -575,14 +477,18 @@ object Parsers {
/** Path ::= StableId
* | [Ident `.'] this
- * SimpleType ::= Path [`.' type]
+ *
+ * @param thisOK If true, [Ident `.'] this is acceptable as the path.
+ * If false, another selection is required after the `this`.
+ * @param finish An alternative parse in case the token following a `.' is not an identifier.
+ * If the alternative does not apply, its tree argument is returned unchanged.
*/
def path(thisOK: Boolean, finish: Tree => Tree = id): Tree = {
val start = in.offset
def handleThis(name: TypeName) = {
in.nextToken()
val t = atPos(start) { This(name) }
- if (in.token != DOT && !thisOK) syntaxError("'.' expected")
+ if (!thisOK && in.token != DOT) syntaxError("`.' expected")
dotSelectors(t, finish)
}
def handleSuper(name: TypeName) = {
@@ -601,7 +507,8 @@ object Parsers {
if (in.token == THIS) handleThis(t.name.toTypeName)
else if (in.token == SUPER) handleSuper(t.name.toTypeName)
else selectors(t, finish)
- } else t
+ }
+ else t
}
}
@@ -626,29 +533,33 @@ object Parsers {
/** SimpleExpr ::= literal
* | symbol
* | null
- * @note The returned tree does not yet have a position
+ * @param negOffset The offset of a preceding `-' sign, if any.
+ * If the literal is not negated, negOffset = in.offset.
*/
- def literal(isNegated: Boolean = false): Tree = {
+ def literal(negOffset: Int = in.offset): Tree = {
def finish(value: Any): Tree = {
- val t = Literal(Constant(value))
+ val t = atPos(negOffset) { Literal(Constant(value)) }
in.nextToken()
t
}
- if (in.token == SYMBOLLIT) SymbolLit(in.strVal)
- else finish(in.token match {
- case CHARLIT => in.charVal
- case INTLIT => in.intVal(isNegated).toInt
- case LONGLIT => in.intVal(isNegated)
- case FLOATLIT => in.floatVal(isNegated).toFloat
- case DOUBLELIT => in.floatVal(isNegated)
- case STRINGLIT => in.strVal
- case TRUE => true
- case FALSE => false
- case NULL => null
- case _ =>
- syntaxErrorOrIncomplete("illegal literal")
- null
- })
+ val isNegated = negOffset < in.offset
+ atPos(negOffset) {
+ if (in.token == SYMBOLLIT) SymbolLit(in.strVal)
+ else finish(in.token match {
+ case CHARLIT => in.charVal
+ case INTLIT => in.intVal(isNegated).toInt
+ case LONGLIT => in.intVal(isNegated)
+ case FLOATLIT => in.floatVal(isNegated).toFloat
+ case DOUBLELIT => in.floatVal(isNegated)
+ case STRINGLIT => in.strVal
+ case TRUE => true
+ case FALSE => false
+ case NULL => null
+ case _ =>
+ syntaxErrorOrIncomplete("illegal literal")
+ null
+ })
+ }
}
/* ------------- NEW LINES ------------------------------------------------- */
@@ -672,10 +583,12 @@ object Parsers {
if (in.token == NEWLINE && p(in.next.token)) newLineOpt()
}
- /* ------------- TYPES ---------------------------------------------------- */
+/* ------------- TYPES ------------------------------------------------------ */
- /** Type ::= FunArgTypes `=>' Type
- * | InfixType
+ /** Type ::= FunArgTypes `=>' Type
+ * | InfixType
+ * FunArgTypes ::= InfixType
+ * | `(' [ FunArgType {`,' FunArgType } ] `)'
*/
def typ(): Tree = {
val start = in.offset
@@ -685,18 +598,24 @@ object Parsers {
if (in.token == RPAREN) {
in.nextToken()
atPos(start, accept(ARROW)) { Function(Nil, typ()) }
- } else {
+ }
+ else {
+ openParens.change(LPAREN, 1)
val ts = commaSeparated(funArgType)
+ openParens.change(LPAREN, -1)
accept(RPAREN)
if (in.token == ARROW)
atPos(start, in.skipToken()) { Function(ts, typ()) }
else {
- ts foreach checkNotByNameArgs
+ for (t <- ts)
+ if (TreeInfo.isByNameParamType(t))
+ syntaxError("no by-name parameter type allowed here", t.pos)
val tuple = atPos(start) { ugen.mkTuple(ts) }
infixTypeRest(refinedTypeRest(simpleTypeRest(tuple)))
}
}
- } else infixType()
+ }
+ else infixType()
in.token match {
case ARROW => atPos(start, in.skipToken()) { Function(List(t), typ()) }
@@ -732,7 +651,7 @@ object Parsers {
*/
def simpleType(): Tree = simpleTypeRest {
if (in.token == LPAREN)
- atPos(in.offset) { inParens(ugen.mkTuple(argTypes)) }
+ atPos(in.offset) { ugen.mkTuple(inParens(argTypes())) }
else path(thisOK = false, handleSingletonType) match {
case r @ SingletonTypeTree(_) => r
case r => convertToTypeId(r)
@@ -759,7 +678,7 @@ object Parsers {
/** ArgType ::= Type | `_' TypeBounds
*/
- def argType(): Tree =
+ val argType = () =>
if (in.token == USCORE) {
val start = in.skipToken()
typeBounds().withPos(Position(start, in.offset))
@@ -772,7 +691,7 @@ object Parsers {
/** FunArgType ::= ArgType | `=>' ArgType
*/
- def funArgType(): Tree =
+ val funArgType = () =>
if (in.token == ARROW) atPos(in.skipToken()) { PrefixOp(nme.ARROWkw, argType()) }
else argType()
@@ -784,7 +703,7 @@ object Parsers {
val t = argType()
if (isIdent(nme.raw.STAR)) {
in.nextToken()
- atPos(t.pos.start) { PostfixOp(typ(), nme.raw.STAR) }
+ atPos(t.pos.start) { PostfixOp(t, nme.raw.STAR) }
} else t
}
@@ -842,7 +761,7 @@ object Parsers {
def condExpr(altToken: Token): Tree = {
if (in.token == LPAREN) {
- val t = atPos(in.offset) { inParens(Parens(exprInParens)) }
+ val t = atPos(in.offset) { Parens(inParens(exprInParens())) }
if (in.token == altToken) in.nextToken()
t
} else {
@@ -868,11 +787,9 @@ object Parsers {
* | `do' Expr [semi] `while' Expr
* | `try' Expr Catches [`finally' Expr]
* | `try' Expr [`finally' Expr]
- * | `for' (`(' Enumerators `)' | `{' Enumerators `}')
- * {nl} [`yield'] Expr
- * | `for' Enumerators (`do' Expr | `yield' Expr)
* | `throw' Expr
* | `return' [Expr]
+ * | ForExpr
* | [SimpleExpr `.'] Id `=' Expr
* | SimpleExpr1 ArgumentExprs `=' Expr
* | PostfixExpr [Ascription]
@@ -883,8 +800,8 @@ object Parsers {
* | `:' Annotation {Annotation}
* | `:' `_' `*'
*/
- def exprInParens(): Tree = expr(Location.InParens)
- def exprInBlock(): Tree = expr(Location.InBlock)
+ val exprInParens = () => expr(Location.InParens)
+
def expr(): Tree = expr(Location.ElseWhere)
def expr(location: Location.Value): Tree = {
@@ -893,7 +810,7 @@ object Parsers {
else t
}
- def expr1(location: Location.Value): Tree = in.token match {
+ def expr1(location: Location.Value = Location.ElseWhere): Tree = in.token match {
case IF =>
atPos(in.skipToken()) {
val cond = condExpr(THEN)
@@ -931,40 +848,12 @@ object Parsers {
else EmptyTree()
Try(body, catches, finalizer)
}
- case FOR =>
- atPos(in.skipToken()) {
- var wrappedEnums = true
- val enums =
- if (in.token == LBRACE) inBraces(enumerators())
- else if (in.token == LPAREN) {
- val lparenOffset = in.skipToken()
- val pats = patternsOpt()
- val pat =
- if (in.token == RPAREN) {
- wrappedEnums = false
- in.nextToken()
- atPos(lparenOffset) { ugen.mkTuple(pats) }
- }
- else if (pats.length == 1) pats.head
- else errorTermTree
- val res = generatorRest(pat) :: enumeratorsRest()
- if (wrappedEnums) accept(RPAREN)
- res
- } else {
- wrappedEnums = false
- enumerators()
- }
- if (in.token == YIELD) ForYield(enums, expr())
- else if (in.token == DO) ForDo(enums, expr())
- else {
- if (!wrappedEnums) syntaxErrorOrIncomplete("`yield' or `do' expected")
- ForDo(enums, expr())
- }
- }
case THROW =>
atPos(in.skipToken()) { Throw(expr()) }
case RETURN =>
atPos(in.skipToken()) { Return(if (isExprIntro) expr() else EmptyTree(), EmptyTree()) }
+ case FOR =>
+ forExpr()
case IMPLICIT =>
atPos(in.skipToken()) { implicitClosure(in.skipToken(), location) }
case _ =>
@@ -992,16 +881,16 @@ object Parsers {
def ascription(t: Tree, location: Location.Value) = atPos(t.pos.start, in.skipToken()) {
in.token match {
case USCORE =>
- val uscorePos = in.skipToken()
+ val uscoreStart = in.skipToken()
if (isIdent(nme.raw.STAR)) {
in.nextToken()
if (in.token != RPAREN) syntaxError("`_*' can be used only for last argument")
- Typed(t, atPos(uscorePos) { Ident(tpnme.WILDCARD_STAR) })
+ Typed(t, atPos(uscoreStart) { Ident(tpnme.WILDCARD_STAR) })
} else {
syntaxErrorOrIncomplete("`*' expected"); t
}
case AT =>
- (t /: annotations(skipNewLines = false)) ((t, annot) => Annotated(annot, t))
+ (t /: annotations()) ((t, annot) => Annotated(annot, t))
case _ =>
Typed(t, typeOrInfixType(location))
}
@@ -1011,13 +900,14 @@ object Parsers {
* BlockResult ::= implicit Id [`:' InfixType] `=>' Block
*/
def implicitClosure(start: Int, location: Location.Value): Tree = {
+ val mods = atPos(start) { Modifiers(Implicit) }
val id = termIdent()
val paramExpr =
if (location == Location.InBlock && in.token == COLON)
atPos(id.pos.start, in.skipToken()) { Typed(id, infixType()) }
else
id
- val param = convertToParam(paramExpr, Modifiers(Implicit))
+ val param = convertToParam(paramExpr, mods)
closureRest(start, location, param :: Nil)
}
@@ -1037,14 +927,14 @@ object Parsers {
/** PrefixExpr ::= [`-' | `+' | `~' | `!'] SimpleExpr
*/
val prefixExpr = () =>
- if (isIdent && nme.raw.isUnary(in.name))
- atPos(in.offset) {
- val name = ident()
- if (name == nme.raw.MINUS && (numericLitTokens contains in.token))
- simpleExprRest(literal(isNegated = true), canApply = true)
- else
- PrefixOp(name, simpleExpr())
- }
+ if (isIdent && nme.raw.isUnary(in.name)) {
+ val start = in.offset
+ val name = ident()
+ if (name == nme.raw.MINUS && isNumericLit)
+ simpleExprRest(literal(start), canApply = true)
+ else
+ atPos(start) { PrefixOp(name, simpleExpr()) }
+ }
else simpleExpr()
/** SimpleExpr ::= new Template
@@ -1066,9 +956,9 @@ object Parsers {
case IDENTIFIER | BACKQUOTED_IDENT | THIS | SUPER =>
path(thisOK = true)
case USCORE =>
- atPos(in.skipToken()) { Ident(nme.WILDCARD) }
+ wildcardIdent()
case LPAREN =>
- atPos(in.offset) { inParens(ugen.mkTuple(commaSeparated(exprInParens))) }
+ atPos(in.offset) { ugen.mkTuple(inParens(exprsInParensOpt())) }
case LBRACE =>
canApply = false
blockExpr()
@@ -1076,8 +966,7 @@ object Parsers {
canApply = false
atPos(in.skipToken()) { New(template()) }
case _ =>
- if (literalTokens contains in.token)
- atPos(in.offset) { literal() }
+ if (isLiteral) literal()
else {
syntaxErrorOrIncomplete("illegal start of simple expression")
errorTermTree
@@ -1105,38 +994,43 @@ object Parsers {
}
}
+ /** ExprsInParens ::= ExprInParens {`,' ExprInParens}
+ */
+ def exprsInParensOpt(): List[Tree] =
+ if (in.token == RPAREN) Nil else commaSeparated(exprInParens)
+
/** ArgumentExprs ::= `(' [ExprsInParens] `)'
* | `(' [ExprsInParens `,'] PostfixExpr `:' `_' `*' ')'
* | [nl] BlockExpr
*/
- def argumentExprs(): List[Tree] = {
- def arg() = exprInParens() match {
- case a @ Assign(Ident(id), rhs) => a.derivedNamedArg(id, rhs)
- case e => e
- }
- in.token match {
- case LBRACE => List(blockExpr())
- case LPAREN => inParens(if (in.token == RPAREN) Nil else commaSeparated(arg))
- case _ => Nil
- }
+ def argumentExprs(): List[Tree] =
+ if (in.token == LBRACE) blockExpr() :: Nil
+ else inParens(if (in.token == RPAREN) Nil else commaSeparated(argumentExpr))
+
+ val argumentExpr = () => exprInParens() match {
+ case a @ Assign(Ident(id), rhs) => a.derivedNamedArg(id, rhs)
+ case e => e
}
/** ArgumentExprss ::= {ArgumentExprs}
*/
- def argumentExprss(): List[List[Tree]] = {
+ def argumentExprss(fn: Tree): Tree = {
newLineOptWhenFollowedBy(LBRACE)
- if (in.token == LPAREN || in.token == LBRACE) argumentExprs() :: argumentExprss() else Nil
+ if (in.token == LPAREN || in.token == LBRACE) argumentExprss(Apply(fn, argumentExprs()))
+ else fn
}
/** BlockExpr ::= `{' (CaseClauses | Block) `}'
*/
- def blockExpr(): Tree = inDefScopeBraces {
- if (in.token == CASE) Match(EmptyTree(), caseClauses())
- else block()
+ def blockExpr(): Tree = atPos(in.offset) {
+ inDefScopeBraces {
+ if (in.token == CASE) Match(EmptyTree(), caseClauses())
+ else block()
+ }
}
/** Block ::= BlockStatSeq
- * @note Return tree does not carry position.
+ * @note Return tree does not carry source position.
*/
def block(): Tree = {
val stats = blockStatSeq()
@@ -1167,7 +1061,7 @@ object Parsers {
if (in.token == IF) guard()
else {
val pat = pattern1()
- if (in.token == EQUALS) atPos(pat.pos.start, in.skipToken()) { GenAlias(pat, expr) }
+ if (in.token == EQUALS) atPos(pat.pos.start, in.skipToken()) { GenAlias(pat, expr()) }
else generatorRest(pat)
}
@@ -1178,10 +1072,52 @@ object Parsers {
def generatorRest(pat: Tree) =
atPos(pat.pos.start, accept(LARROW)) { GenFrom(pat, expr()) }
- /** CaseClauses ::= CaseClause {CaseClause}
+ /** ForExpr ::= `for' (`(' Enumerators `)' | `{' Enumerators `}')
+ * {nl} [`yield'] Expr
+ * | `for' Enumerators (`do' Expr | `yield' Expr)
+ */
+ def forExpr(): Tree = atPos(in.skipToken()) {
+ var wrappedEnums = true
+ val enums =
+ if (in.token == LBRACE) inBraces(enumerators())
+ else if (in.token == LPAREN) {
+ val lparenOffset = in.skipToken()
+ openParens.change(LPAREN, 1)
+ val pats = patternsOpt()
+ val pat =
+ if (in.token == RPAREN || pats.length > 1) {
+ wrappedEnums = false
+ accept(RPAREN)
+ openParens.change(LPAREN, -1)
+ atPos(lparenOffset) { ugen.mkTuple(pats) } // note: alternatives `|' need to be weeded out by typer.
+ }
+ else pats.head
+ val res = generatorRest(pat) :: enumeratorsRest()
+ if (wrappedEnums) {
+ accept(RPAREN)
+ openParens.change(LPAREN, -1)
+ }
+ res
+ } else {
+ wrappedEnums = false
+ enumerators()
+ }
+ if (in.token == YIELD) ForYield(enums, expr())
+ else if (in.token == DO) ForDo(enums, expr())
+ else {
+ if (!wrappedEnums) syntaxErrorOrIncomplete("`yield' or `do' expected")
+ ForDo(enums, expr())
+ }
+ }
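Concretely, the ForExpr production above admits both the wrapped and the keyword-terminated forms. A hedged sketch of inputs this grammar describes (the wrapped form may omit `do'/`yield'; the unwrapped form requires one of them):

    val ys = for (x <- List(1, 2, 3)) yield x * x   // wrapped enumerators, ForYield
    for (x <- List(1, 2, 3)) println(x)             // wrapped enumerators, no keyword, ForDo
    for x <- List(1, 2, 3) do println(x)            // unwrapped enumerators need `do' or `yield'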
+
+ /** CaseClauses ::= CaseClause {CaseClause}
*/
- def caseClauses(): List[CaseDef] =
- caseClause() :: (if (in.token == CASE) caseClauses() else Nil)
+ def caseClauses(): List[CaseDef] = {
+ val buf = new ListBuffer[CaseDef]
+ buf += caseClause()
+ while (in.token == CASE) buf += caseClause()
+ buf.toList
+ }
/** CaseClause ::= case Pattern [Guard] `=>' Block
*/
@@ -1194,7 +1130,7 @@ object Parsers {
/** Pattern ::= Pattern1 { `|' Pattern1 }
*/
- def pattern(): Tree = {
+ val pattern = () => {
val pat = pattern1()
if (isIdent(nme.raw.BAR))
atPos(pat.pos.start) { Alternative(pat :: patternAlts()) }
@@ -1218,7 +1154,7 @@ object Parsers {
*/
val pattern2 = () => infixPattern() match {
case p @ Ident(name) if TreeInfo.isVarPattern(p) && in.token == AT =>
- atPos(p.pos.start, in.skipToken()) { Bind(name, infixType()) }
+ atPos(p.pos.start, in.skipToken()) { Bind(name, infixPattern()) }
case p =>
p
}
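In pattern2 the tree bound after `@' is now parsed with infixPattern() rather than infixType(): the right-hand side is a pattern, not a type. A minimal example of the shape this affects:

    List(1, 2, 3) match {
      case xs @ (h :: t) => println((xs, h, t))   // `h :: t' after `@' is an infix pattern
      case _             => ()
    }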
@@ -1226,7 +1162,7 @@ object Parsers {
/** InfixPattern ::= SimplePattern {Id [nl] SimplePattern}
*/
def infixPattern(): Tree =
- infixOps(simplePattern(), canStartExpressionTokens, simplePattern, noOp = nme.raw.BAR)
+ infixOps(simplePattern(), canStartExpressionTokens, simplePattern, notAnOperator = nme.raw.BAR)
/** SimplePattern ::= PatVar
* | Literal
@@ -1242,22 +1178,19 @@ object Parsers {
val simplePattern = () => in.token match {
case IDENTIFIER | BACKQUOTED_IDENT | THIS =>
path(thisOK = true) match {
- case id @ Ident(nme.raw.MINUS) if (numericLitTokens contains in.token) =>
- atPos(id.pos.start) { literal(isNegated = true) }
- case t =>
- simplePatternRest(t)
+ case id @ Ident(nme.raw.MINUS) if isNumericLit => literal(id.pos.start)
+ case t => simplePatternRest(t)
}
case USCORE =>
- atPos(in.skipToken()) { Ident(nme.WILDCARD) }
+ wildcardIdent()
case LPAREN =>
- atPos(in.offset) { inParens(ugen.mkTuple(commaSeparated(pattern))) }
+ atPos(in.offset) { ugen.mkTuple(inParens(patternsOpt())) }
case LBRACE =>
dotSelectors(blockExpr())
case XMLSTART =>
xmlLiteralPattern()
case _ =>
- if (literalTokens contains in.token)
- atPos(in.offset) { literal() }
+ if (isLiteral) literal()
else {
syntaxErrorOrIncomplete("illegal start of simple pattern")
errorTermTree
@@ -1267,7 +1200,7 @@ object Parsers {
def simplePatternRest(t: Tree): Tree = {
var p = t
if (in.token == LBRACKET)
- p = atPos(t.pos.start, in.offset) { TypeApply(convertToTypeId(t), typeArgs()) }
+ p = atPos(t.pos.start, in.offset) { TypeApply(p, typeArgs()) }
if (in.token == LPAREN)
p = atPos(t.pos.start, in.offset) { Apply(p, argumentPatterns()) }
p
@@ -1275,8 +1208,11 @@ object Parsers {
/** Patterns ::= Pattern [`,' Pattern]
*/
+ def patterns() = commaSeparated(pattern)
+
def patternsOpt(): List[Tree] =
- if (in.token == RPAREN) Nil else commaSeparated(pattern)
+ if (in.token == RPAREN) Nil else patterns()
+
/** ArgumentPatterns ::= `(' [Patterns] `)'
* | `(' [Patterns `,'] Pattern2 `:' `_' `*' ')
@@ -1286,22 +1222,22 @@ object Parsers {
/* -------- MODIFIERS and ANNOTATIONS ------------------------------------------- */
- private val flagTokens: Map[Int, FlagSet] = Map(
- ABSTRACT -> Abstract,
- FINAL -> Final,
- IMPLICIT -> Implicit,
- LAZY -> Lazy,
- OVERRIDE -> Override,
- PRIVATE -> Private,
- PROTECTED -> Protected,
- SEALED -> Sealed
- )
+ private def flagOfToken(tok: Int): FlagSet = tok match {
+ case ABSTRACT => Abstract
+ case FINAL => Final
+ case IMPLICIT => Implicit
+ case LAZY => Lazy
+ case OVERRIDE => Override
+ case PRIVATE => Private
+ case PROTECTED => Protected
+ case SEALED => Sealed
+ }
/** Drop `private' modifier when followed by a qualifier.
* Contract `abstract' and `override' to ABSOVERRIDE
*/
private def normalize(mods: Modifiers): Modifiers =
- if ((mods is Private) && mods.privateWithin != tpnme.EMPTY)
+ if ((mods is Private) && mods.hasPrivateWithin)
normalize(mods &~ Private)
else if (mods is AbstractAndOverride)
normalize(mods &~ (Abstract | Override) | AbsOverride)
@@ -1309,7 +1245,7 @@ object Parsers {
mods
private def addModifier(mods: Modifiers): Modifiers = {
- val flag = flagTokens(in.token)
+ val flag = flagOfToken(in.token)
if (mods is flag) syntaxError("repeated modifier")
in.nextToken()
mods | flag
@@ -1319,7 +1255,7 @@ object Parsers {
*/
def accessQualifierOpt(mods: Modifiers): Modifiers =
if (in.token == LBRACKET) {
- if (mods.privateWithin != tpnme.EMPTY)
+ if ((mods is Local) || mods.hasPrivateWithin)
syntaxError("duplicate private/protected qualifier")
inBrackets {
if (in.token == THIS) { in.nextToken(); mods | Local }
@@ -1327,7 +1263,8 @@ object Parsers {
}
} else mods
- /** Modifiers ::= {Modifier}
+ /** {Annotation} {Modifier}
+ * Modifiers ::= {Modifier}
* LocalModifiers ::= {LocalModifier}
* AccessModifier ::= (private | protected) [AccessQualifier]
* Modifier ::= LocalModifier
@@ -1335,13 +1272,14 @@ object Parsers {
* | override
* LocalModifier ::= abstract | final | sealed | implicit | lazy
*/
- def modifiers(allowed: BitSet = modifierTokens): Modifiers = normalize {
- def loop(mods: Modifiers): Modifiers =
- if (allowed contains in.token) {
+ def modifiers(allowed: BitSet = modifierTokens, start: Modifiers = Modifiers()): Modifiers = {
+ def loop(mods: Modifiers): Modifiers = {
+ val tok = in.token
+ if (allowed contains tok) {
val mods1 = addModifier(mods)
loop {
in.token match {
- case PRIVATE | PROTECTED => in.nextToken() ; accessQualifierOpt(mods1)
+ case PRIVATE | PROTECTED => accessQualifierOpt(mods1)
case _ => mods1
}
}
@@ -1351,33 +1289,30 @@ object Parsers {
} else {
mods
}
- loop(Modifiers())
+ }
+ normalize(loop(start))
}
/** Annotation ::= `@' SimpleType {ArgumentExprs}
- * ConstrAnnotation ::= `@' SimpleType ArgumentExprs
*/
- def annot(forConstructor: Boolean = false) = {
+ def annot() = {
accept(AT)
- def args =
- if (forConstructor) {
- newLineOptWhenFollowedBy(LBRACE)
- argumentExprs() :: Nil
- } else argumentExprss()
- (simpleType() /: args) (Apply(_, _))
+ argumentExprss(simpleType())
}
- def annotations(skipNewLines: Boolean, forConstructor: Boolean = false): List[Tree] = {
+ def annotations(skipNewLines: Boolean = false): List[Tree] = {
if (skipNewLines) newLineOptWhenFollowedBy(AT)
- if (in.token == AT) annot(forConstructor) :: annotations(skipNewLines, forConstructor)
+ if (in.token == AT) annot() :: annotations(skipNewLines)
else Nil
}
-/* -------- PARAMETERS ------------------------------------------- */
+ def annotsAsMods(skipNewLines: Boolean = false): Modifiers =
+ Modifiers() withAnnotations annotations(skipNewLines)
- object ParamOwner extends Enumeration {
- val Class, Type, TypeParam, Def = Value
- }
+ def defAnnotsMods(allowed: BitSet): Modifiers =
+ modifiers(allowed, annotsAsMods(skipNewLines = true))
+
+ /* -------- PARAMETERS ------------------------------------------- */
/** ClsTypeParamClause::= `[' ClsTypeParam {`,' ClsTypeParam} `]'
* ClsTypeParam ::= {Annotation} [{Modifier} type] [`+' | `-']
@@ -1395,21 +1330,22 @@ object Parsers {
def typeParamClause(ownerKind: ParamOwner.Value): List[TypeDef] = inBrackets {
def typeParam(): TypeDef = {
val isConcreteOwner = ownerKind == ParamOwner.Class || ownerKind == ParamOwner.Def
- val start = in.offset
- val annots = annotations(skipNewLines = false)
- var mods =
- if (ownerKind == ParamOwner.Class) {
- val mods1 = modifiers() withAnnotations annots
- atPos(start, in.offset) {
+ val modStart = in.offset
+ var mods = annotsAsMods()
+ if (ownerKind == ParamOwner.Class) {
+ mods = modifiers(start = mods)
+ mods =
+ atPos(modStart, in.offset) {
if (in.token == TYPE) {
in.nextToken()
- mods1 | TypeParam
+ mods | TypeParam
} else {
- if (mods1.flags != EmptyFlags) syntaxError("`type' expected")
- mods1 | TypeParam | PrivateLocal
+ if (mods.hasFlags) syntaxError("`type' expected")
+ mods | TypeParam | PrivateLocal
}
}
- } else atPos(start) { Modifiers(TypeParam) withAnnotations annots }
+ }
+ else mods = atPos(modStart) (mods | TypeParam)
if (ownerKind != ParamOwner.Def) {
if (isIdent(nme.raw.PLUS)) mods |= Covariant
else if (isIdent(nme.raw.MINUS)) mods |= Contravariant
@@ -1443,26 +1379,27 @@ object Parsers {
*/
def paramClauses(owner: Name): List[List[ValDef]] = {
var implicitFlag = EmptyFlags
+ var implicitOffset = -1 // use once
def param(): ValDef = {
- val start = in.offset
- val annots = annotations(skipNewLines = false)
- val mods =
- if (owner.isTypeName) {
- val mods1 = modifiers() withAnnotations annots
- if (mods1 is Lazy) syntaxError("lazy modifier not allowed here. Use call-by-name parameters instead")
- atPos(start, in.offset) {
+ val modStart = in.offset
+ var mods = annotsAsMods()
+ if (owner.isTypeName) {
+ mods = modifiers(start = mods)
+ mods =
+ atPos(modStart, in.offset) {
if (in.token == VAL) {
in.nextToken()
- mods1 | Param
+ mods | Param
} else if (in.token == VAR) {
in.nextToken()
- mods1 | Param | Mutable
+ mods | Param | Mutable
} else {
- if (mods1.flags != EmptyFlags) syntaxError("`val' or `var' expected")
- mods1 | Param | PrivateLocal
+ if (mods.hasFlags) syntaxError("`val' or `var' expected")
+ mods | Param | PrivateLocal
}
}
- } else atPos(start) { Modifiers(Param) withAnnotations annots }
+ }
+ else mods = atPos(modStart) { mods | Param }
atPos(tokenRange) {
val name = ident()
val tpt =
@@ -1481,6 +1418,10 @@ object Parsers {
val default =
if (in.token == EQUALS) { in.nextToken(); expr() }
else EmptyTree()
+ if (implicitOffset >= 0) {
+ mods = mods.withPos(mods.pos.withStart(implicitOffset))
+ implicitOffset = -1
+ }
ValDef(mods | implicitFlag, name, tpt, default)
}
}
@@ -1488,7 +1429,7 @@ object Parsers {
if (in.token == RPAREN) Nil
else {
if (in.token == IMPLICIT) {
- in.nextToken()
+ implicitOffset = in.skipToken()
implicitFlag = Implicit
}
commaSeparated(param)
@@ -1512,7 +1453,6 @@ object Parsers {
/* -------- DEFS ------------------------------------------- */
-
/** Import ::= import ImportExpr {`,' ImportExpr}
*/
def importClause(): List[Tree] = {
@@ -1527,10 +1467,10 @@ object Parsers {
/** ImportExpr ::= StableId `.' (Id | `_' | ImportSelectors)
*/
- def importExpr(): Tree = path(thisOK = false, handleImport) match {
- case sel @ Select(qual, name) => sel.derivedImport(qual, Ident(name) :: Nil)
+ val importExpr = () => path(thisOK = false, handleImport) match {
case imp: Import => imp
- case _ => accept(DOT); EmptyTree()
+ case sel @ Select(qual, name) => sel.derivedImport(qual, Ident(name) :: Nil)
+ case t => accept(DOT); Import(t, Ident(nme.WILDCARD) :: Nil)
}
val handleImport = { tree: Tree =>
@@ -1546,10 +1486,11 @@ object Parsers {
else {
val sel = importSelector()
sel :: {
- if (!isWildcard(sel) && in.token == COMMA) {
+ if (!TreeInfo.isWildcardArg(sel) && in.token == COMMA) {
in.nextToken()
importSelectors()
- } else Nil
+ }
+ else Nil
}
}
@@ -1582,18 +1523,13 @@ object Parsers {
case VAR =>
patDefOrDcl(posMods(start, mods | Mutable))
case DEF =>
- funDefOrDcl(posMods(start, mods))
+ defDefOrDcl(posMods(start, mods))
case TYPE =>
typeDefOrDcl(posMods(start, mods))
case _ =>
tmplDef(start, mods)
}
- def defAnnotsMods(allowed: BitSet): Modifiers = {
- val annots = annotations(skipNewLines = true)
- modifiers(allowed) withAnnotations annots
- }
-
/** PatDef ::= Pattern2 {`,' Pattern2} [`:' Type] `=' Expr
* VarDef ::= PatDef | Id {`,' Id} `:' Type `=' `_'
* ValDcl ::= Id {`,' Id} `:' Type
@@ -1624,7 +1560,7 @@ object Parsers {
* DefDcl ::= DefSig [`:' Type]
* DefSig ::= id [DefTypeParamClause] ParamClauses
*/
- def funDefOrDcl(mods: Modifiers): Tree = atPos(tokenRange) {
+ def defDefOrDcl(mods: Modifiers): Tree = atPos(tokenRange) {
if (in.token == THIS) {
val vparamss = paramClauses(nme.CONSTRUCTOR)
newLineOptWhenFollowedBy(LBRACE)
@@ -1640,8 +1576,8 @@ object Parsers {
val name = ident()
val tparams = typeParamClauseOpt(ParamOwner.Def)
val vparamss = paramClauses(name)
- newLineOptWhenFollowedBy(LBRACE)
var restype = fromWithinReturnType(typedOpt())
+ newLineOptWhenFollowedBy(LBRACE)
val rhs =
if (isStatSep || in.token == RBRACE) EmptyTree()
else if (restype.isEmpty && in.token == LBRACE) {
@@ -1666,8 +1602,7 @@ object Parsers {
def selfInvocation(): Tree =
atPos(accept(THIS)) {
newLineOptWhenFollowedBy(LBRACE)
- var t = Apply(Ident(nme.CONSTRUCTOR), argumentExprs())
- (t /: argumentExprss())(Apply(_, _))
+ argumentExprss(Apply(Ident(nme.CONSTRUCTOR), argumentExprs()))
}
/** ConstrBlock ::= `{' SelfInvocation {semi BlockStat} `}'
@@ -1694,7 +1629,7 @@ object Parsers {
case EQUALS =>
in.nextToken()
TypeDef(mods, name, tparams, typ())
- case SUPERTYPE | SUBTYPE | SEMI | NEWLINE | NEWLINES | COMMA | RBRACE =>
+ case SUPERTYPE | SUBTYPE | SEMI | NEWLINE | NEWLINES | COMMA | RBRACE | EOF =>
TypeDef(mods, name, tparams, typeBounds())
case _ =>
syntaxErrorOrIncomplete("`=', `>:', or `<:' expected")
@@ -1703,14 +1638,6 @@ object Parsers {
}
}
- /** Hook for IDE, for top-level classes/objects
- def topLevelTmplDef: Tree = {
- val annots = annotations(true)
- val pos = caseAwareTokenOffset
- val mods = modifiers() withAnnotations annots
- tmplDef(pos, mods)
- }*/
-
/** TmplDef ::= ([`case'] `class' | `trait') ClassDef
* | [`case'] `object' ObjectDef
*/
@@ -1730,24 +1657,28 @@ object Parsers {
EmptyTree()
}
- /** ClassDef ::= Id [ClsTypeParamClause] {ConstrAnnotation}
- [AccessModifier] ClsParamClauses TemplateOpt
+ /** ClassDef ::= Id [ClsTypeParamClause]
+ * [ConstrMods] ClsParamClauses TemplateOpt
*/
def classDef(mods: Modifiers): ClassDef = atPos(tokenRange) {
val name = ident().toTypeName
val tparams = typeParamClauseOpt(ParamOwner.Class)
- val constr = atPos(in.offset) {
- val constrMods = atPos(in.offset) {
- val constrAnnots = annotations(skipNewLines = false, forConstructor = true)
- modifiers(accessModifierTokens) withAnnotations constrAnnots
- }
- val vparamss = paramClauses(name)
- ugen.constructor(constrMods, vparamss)
- }
+ val cmods = constrModsOpt()
+ val vparamss = paramClauses(name)
+ val constr = ugen.constructor(cmods, vparamss)
val templ = templateOpt(constr)
ClassDef(mods, name, tparams, templ)
}
+ /** ConstrMods ::= AccessModifier
+ * | Annotation {Annotation} (AccessModifier | `this')
+ */
+ def constrModsOpt(): Modifiers = {
+ val mods = modifiers(accessModifierTokens, annotsAsMods())
+ if (mods.hasAnnotations && !mods.hasFlags) accept(THIS)
+ mods
+ }
+
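Under the new ConstrMods rule, annotations on the primary constructor must be followed by an access modifier or by `this' (constrModsOpt calls accept(THIS) when there are annotations but no modifier flags). A sketch of class headers this shape admits; `ann' is a hypothetical annotation class, not part of this patch:

    class ann extends scala.annotation.StaticAnnotation

    class A private (x: Int)         // AccessModifier alone
    class B @ann protected (x: Int)  // annotations followed by an access modifier
    class C @ann this (x: Int)       // annotations alone: `this' must follow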
/** ObjectDef ::= Id TemplateOpt
*/
def objectDef(mods: Modifiers): ModuleDef = {
@@ -1761,9 +1692,8 @@ object Parsers {
/** ConstrApp ::= RefinedType {ArgumentExprs}
*/
- def constrApp() =
- (refinedType() /: argumentExprss()) (Apply(_, _))
-
+ val constrApp = () =>
+ argumentExprss(refinedType())
/** Template ::= ConstrApps [TemplateBody] | TemplateBody
* ConstrApps ::= ConstrApp {`with' ConstrApp}
@@ -1826,22 +1756,22 @@ object Parsers {
def topStatSeq(): List[Tree] = {
val stats = new ListBuffer[Tree]
while (!isStatSeqEnd) {
- setLastDefOffset()
+ setLastStatOffset()
if (in.token == PACKAGE) {
val start = in.skipToken()
- if (in.token == OBJECT) stats += objectDef(atPos(start) { Modifiers(Package) })
+ if (in.token == OBJECT) stats += objectDef(atPos(start, in.offset) { Modifiers(Package) })
else stats += packaging(start)
}
else if (in.token == IMPORT)
stats ++= importClause()
- else if (in.token == AT || (templateIntroTokens contains in.token) || (modifierTokens contains in.token))
+ else if (in.token == AT || isTemplateIntro || isModifier)
stats += tmplDef(in.offset, defAnnotsMods(modifierTokens))
else if (!isStatSep) {
syntaxErrorOrIncomplete("expected class or object definition")
- if (mustStartStatTokens contains in.token) // do parse all definitions even if they are probably local (i.e. a "}" has been forgotten)
+ if (mustStartStat) // do parse all definitions even if they are probably local (i.e. a "}" has been forgotten)
defOrDcl(in.offset, defAnnotsMods(modifierTokens))
}
- acceptStatSepOpt()
+ acceptStatSepUnlessAtEnd()
}
stats.toList
}
@@ -1858,38 +1788,35 @@ object Parsers {
var self: ValDef = EmptyValDef()
val stats = new ListBuffer[Tree]
if (isExprIntro) {
- val first = expr1(Location.ElseWhere) // @S: first statement is potentially converted so cannot be stubbed.
+ val first = expr1()
if (in.token == ARROW) {
first match {
case Typed(tree @ This(tpnme.EMPTY), tpt) =>
self = ugen.selfDef(nme.WILDCARD, tpt).withPos(first.pos)
case _ =>
- convertToParam(first) match {
- case tree @ ValDef(_, name, tpt, _) if (name != nme.ERROR) =>
- self = ugen.selfDef(name, tpt).withPos(first.pos)
- case _ =>
- }
+ val ValDef(_, name, tpt, _) = convertToParam(first, expected = "self type clause")
+ self = ugen.selfDef(name, tpt).withPos(first.pos)
}
in.nextToken()
} else {
stats += first
- acceptStatSepOpt()
+ acceptStatSepUnlessAtEnd()
}
}
var exitOnError = false
while (!isStatSeqEnd && !exitOnError) {
- setLastDefOffset()
+ setLastStatOffset()
if (in.token == IMPORT)
stats ++= importClause()
else if (isExprIntro)
- stats += expr()
+ stats += expr1()
else if (isDefIntro(modifierTokens))
defOrDcl(in.offset, defAnnotsMods(modifierTokens))
else if (!isStatSep) {
- exitOnError = mustStartStatTokens contains in.token
+ exitOnError = mustStartStat
syntaxErrorOrIncomplete("illegal start of definition")
}
- acceptStatSepOpt()
+ acceptStatSepUnlessAtEnd()
}
(self, if (stats.isEmpty) List(EmptyTree()) else stats.toList)
}
@@ -1902,7 +1829,7 @@ object Parsers {
def refineStatSeq(): List[Tree] = {
val stats = new ListBuffer[Tree]
while (!isStatSeqEnd) {
- if (dclIntroTokens contains in.token) {
+ if (isDclIntro) {
stats += defOrDcl(in.offset, Modifiers())
} else if (!isStatSep) {
syntaxErrorOrIncomplete(
@@ -1910,7 +1837,7 @@ object Parsers {
(if (inFunReturnType) " (possible cause: missing `=' in front of current method body)"
else ""))
}
- if (in.token != RBRACE) acceptStatSep()
+ acceptStatSepUnlessAtEnd()
}
stats.toList
}
@@ -1929,7 +1856,7 @@ object Parsers {
val stats = new ListBuffer[Tree]
var exitOnError = false
while (!isStatSeqEnd && in.token != CASE && !exitOnError) {
- setLastDefOffset()
+ setLastStatOffset()
if (in.token == IMPORT) {
stats ++= importClause()
}
@@ -1950,11 +1877,11 @@ object Parsers {
stats += localDef(in.offset, EmptyFlags)
}
else if (!isStatSep && (in.token != CASE)) {
- exitOnError = mustStartStatTokens contains in.token
- val addendum = if (modifierTokens contains in.token) " (no modifiers allowed here)" else ""
- syntaxErrorOrIncomplete("illegal start of statement"+addendum)
+ exitOnError = mustStartStat
+ val addendum = if (isModifier) " (no modifiers allowed here)" else ""
+ syntaxErrorOrIncomplete("illegal start of statement" + addendum)
}
- acceptStatSepOpt(CASE)
+ acceptStatSepUnlessAtEnd(CASE)
}
stats.toList
}
@@ -1969,7 +1896,7 @@ object Parsers {
if (in.token == PACKAGE) {
in.nextToken()
if (in.token == OBJECT) {
- ts += objectDef(atPos(start) { Modifiers(Package) })
+ ts += objectDef(atPos(start, in.offset) { Modifiers(Package) })
if (in.token != EOF) {
acceptStatSep()
ts ++= topStatSeq()
@@ -1981,7 +1908,7 @@ object Parsers {
ts += makePackaging(start, pkg, List())
else if (in.token == LBRACE) {
ts += inDefScopeBraces(makePackaging(start, pkg, topStatSeq()))
- acceptStatSepOpt()
+ acceptStatSepUnlessAtEnd()
ts ++= topStatSeq()
}
else {
@@ -2024,5 +1951,4 @@ object Parsers {
override def templateBody() = skipBraces((EmptyValDef(), List(EmptyTree())))
}
-
}
diff --git a/src/dotty/tools/dotc/parsing/ScriptParsers.scala b/src/dotty/tools/dotc/parsing/ScriptParsers.scala
new file mode 100644
index 000000000..daf8dc9e2
--- /dev/null
+++ b/src/dotty/tools/dotc/parsing/ScriptParsers.scala
@@ -0,0 +1,158 @@
+package dotty.tools
+package dotc
+package parsing
+
+import util.{ SourceFile, FreshNameCreator }
+import core._
+import Flags._
+import Contexts._
+import Names._
+import Trees._
+import Decorators._
+import StdNames._
+import util.Chars.isScalaLetter
+import util.Positions._
+import Types._
+import Constants._
+import NameOps._
+import scala.reflect.internal.Chars._
+import annotation.switch
+import Parsers._
+
+
+/** <p>Performs the following context-free rewritings:</p>
+ * <ol>
+ * <li>
+ * Places all pattern variables in Bind nodes. In a pattern, for
+ * identifiers <code>x</code>:<pre>
+ * x => x @ _
+ * x:T => x @ (_ : T)</pre>
+ * </li>
+ * <li>Removes pattern definitions (PatDef's) as follows:
+ * If pattern is a simple (typed) identifier:<pre>
+ * <b>val</b> x = e ==> <b>val</b> x = e
+ * <b>val</b> x: T = e ==> <b>val</b> x: T = e</pre>
+ *
+ * if there are no variables in pattern<pre>
+ * <b>val</b> p = e ==> e match (case p => ())</pre>
+ *
+ * if there is exactly one variable in pattern<pre>
+ * <b>val</b> x_1 = e <b>match</b> (case p => (x_1))</pre>
+ *
+ * if there is more than one variable in pattern<pre>
+ * <b>val</b> p = e ==> <b>private synthetic val</b> t$ = e <b>match</b> (case p => (x_1, ..., x_N))
+ * <b>val</b> x_1 = t$._1
+ * ...
+ * <b>val</b> x_N = t$._N</pre>
+ * </li>
+ * <li>
+ * Removes function types as follows:<pre>
+ * (argtpes) => restpe ==> scala.Function_n[argtpes, restpe]</pre>
+ * </li>
+ * <li>
+ * Wraps naked case definitions in a match as follows:<pre>
+ * { cases } ==> (x => x.match {cases})<span style="font-family:normal;">, except when already argument to match</span></pre>
+ * </li>
+ * </ol>
+ */
+object ScriptParsers {
+
+ import UntypedTrees.{untpd, ugen}
+ import untpd._
+
+ class ScriptParser(source: SourceFile)(implicit ctx: Context) extends Parser(source) {
+
+ /** This is the parse entry point for code which is not self-contained, e.g.
+ * a script which is a series of template statements. They will be
+ * swaddled in Trees until the AST is equivalent to the one returned
+ * by compilationUnit().
+ */
+ override def parse(): Tree = unsupported("parse")
+ /* TODO: reinstantiate
+ val stmts = templateStatSeq(false)._2
+ accept(EOF)
+
+ def mainModuleName = ctx.settings.script.value
+
+ /** If there is only a single object template in the file and it has a
+ * suitable main method, we will use it rather than building another object
+ * around it. Since objects are loaded lazily the whole script would have
+ * been a no-op, so we're not taking much liberty.
+ */
+ def searchForMain(): Option[Tree] = {
+ /** Have to be fairly liberal about what constitutes a main method since
+ * nothing has been typed yet - for instance we can't assume the parameter
+ * type will look exactly like "Array[String]" as it could have been renamed
+ * via import, etc.
+ */
+ def isMainMethod(t: Tree) = t match {
+ case DefDef(_, nme.main, Nil, List(_), _, _) => true
+ case _ => false
+ }
+ /** For now we require there only be one top level object. */
+ var seenModule = false
+ val newStmts = stmts collect {
+ case t @ Import(_, _) => t
+ case md @ ModuleDef(mods, name, template)
+ if !seenModule && (template.body exists isMainMethod) =>
+ seenModule = true
+ /** This slightly hacky situation arises because we have no way to communicate
+ * back to the scriptrunner what the name of the program is. Even if we were
+ * willing to take the sketchy route of settings.script.value = progName, that
+ * does not work when using fsc. And to find out in advance would impose a
+ * whole additional parse. So instead, if the actual object's name differs from
+ * what the script is expecting, we transform it to match.
+ */
+ md.derivedModuleDef(mods, mainModuleName.toTermName, template)
+ case _ =>
+ /** If we see anything but the above, fail. */
+ return None
+ }
+ Some(makePackaging(0, emptyPkg, newStmts))
+ }
+
+ if (mainModuleName == ScriptRunner.defaultScriptMain)
+ searchForMain() foreach { return _ }
+
+ /** Here we are building an AST representing the following source fiction,
+ * where <moduleName> is from -Xscript (defaults to "Main") and <stmts> are
+ * the result of parsing the script file.
+ *
+ * object <moduleName> {
+ * def main(argv: Array[String]): Unit = {
+ * val args = argv
+ * new AnyRef {
+ * <stmts>
+ * }
+ * }
+ * }
+ */
+ import definitions._
+
+ def emptyPkg = atPos(0, 0, 0) { Ident(nme.EMPTY_PACKAGE_NAME) }
+ def emptyInit = DefDef(
+ Modifiers(),
+ nme.CONSTRUCTOR,
+ Nil,
+ List(Nil),
+ TypeTree(),
+ Block(List(Apply(Select(Super(This(tpnme.EMPTY), tpnme.EMPTY), nme.CONSTRUCTOR), Nil)), Literal(Constant(())))
+ )
+
+ // def main
+ def mainParamType = AppliedTypeTree(Ident(tpnme.Array), List(Ident(tpnme.String)))
+ def mainParameter = List(ValDef(Modifiers(Param), "argv", mainParamType, EmptyTree))
+ def mainSetArgv = List(ValDef(Modifiers(), "args", TypeTree(), Ident("argv")))
+ def mainNew = makeNew(Nil, emptyValDef, stmts, List(Nil), NoPosition, NoPosition)
+ def mainDef = DefDef(Modifiers(), nme.main, Nil, List(mainParameter), scalaDot(tpnme.Unit), Block(mainSetArgv, mainNew))
+
+ // object Main
+ def moduleName = ScriptRunner scriptMain settings
+ def moduleBody = Template(List(scalaScalaObjectConstr), emptyValDef, List(emptyInit, mainDef))
+ def moduleDef = ModuleDef(Modifiers(), moduleName, moduleBody)
+
+ // package <empty> { ... }
+ makePackaging(0, emptyPkg, List(moduleDef))
+ }*/
+ }
+}
\ No newline at end of file
diff --git a/src/dotty/tools/dotc/parsing/SymbolicXMLBuilder.scala b/src/dotty/tools/dotc/parsing/SymbolicXMLBuilder.scala
index 2073b8a3f..e59e7975f 100644
--- a/src/dotty/tools/dotc/parsing/SymbolicXMLBuilder.scala
+++ b/src/dotty/tools/dotc/parsing/SymbolicXMLBuilder.scala
@@ -10,6 +10,7 @@ import Flags.Mutable
import Names._, NameOps._, StdNames._, Decorators._, Trees._, TypedTrees._, UntypedTrees._, Constants._
import Symbols._, Contexts._
import util.Positions._
+import Parsers.Parser
import scala.reflect.internal.util.StringOps.splitWhere
import scala.language.implicitConversions
@@ -24,11 +25,13 @@ import scala.language.implicitConversions
* @author Burak Emir
* @version 1.0
*/
-class SymbolicXMLBuilder(preserveWS: Boolean)(implicit ctx: Context) {
+class SymbolicXMLBuilder(parser: Parser, preserveWS: Boolean)(implicit ctx: Context) {
import Constants.Constant
import untpd._
+ import parser.atPos
+
private[parsing] var isPattern: Boolean = _
private object xmltypes extends ScalaTypeNames {
@@ -64,93 +67,95 @@ class SymbolicXMLBuilder(preserveWS: Boolean)(implicit ctx: Context) {
import xmlterms.{_Null, __Elem, __Text, _buf, _md, _plus, _scope, _tmpscope, _xml}
// convenience methods
- private def LL[A](x: A*): List[List[A]] = List(List(x: _*))
- private def const(x: Any)(implicit cpos: Position) = Literal(Constants.Constant(x))
- private def wild(implicit cpos: Position) = Ident(nme.WILDCARD)
- private def wildStar(implicit cpos: Position) = Ident(tpnme.WILDCARD_STAR)
- private def _scala(name: Name)(implicit cpos: Position) = Select(Select(Ident(nme.ROOTPKG), nme.scala_), name)
- private def _scala_xml(name: Name)(implicit cpos: Position) = Select(_scala(_xml), name)
-
- private def _scala_xml_Comment(implicit cpos: Position) = _scala_xml(_Comment)
- private def _scala_xml_Elem(implicit cpos: Position) = _scala_xml(_Elem)
- private def _scala_xml_EntityRef(implicit cpos: Position) = _scala_xml(_EntityRef)
- private def _scala_xml_Group(implicit cpos: Position) = _scala_xml(_Group)
- private def _scala_xml_MetaData(implicit cpos: Position) = _scala_xml(_MetaData)
- private def _scala_xml_NamespaceBinding(implicit cpos: Position) = _scala_xml(_NamespaceBinding)
- private def _scala_xml_NodeBuffer(implicit cpos: Position) = _scala_xml(_NodeBuffer)
- private def _scala_xml_Null(implicit cpos: Position) = _scala_xml(_Null)
- private def _scala_xml_PrefixedAttribute(implicit cpos: Position) = _scala_xml(_PrefixedAttribute)
- private def _scala_xml_ProcInstr(implicit cpos: Position) = _scala_xml(_ProcInstr)
- private def _scala_xml_Text(implicit cpos: Position) = _scala_xml(_Text)
- private def _scala_xml_Unparsed (implicit cpos: Position) = _scala_xml(_Unparsed)
- private def _scala_xml_UnprefixedAttribute(implicit cpos: Position)= _scala_xml(_UnprefixedAttribute)
- private def _scala_xml__Elem(implicit cpos: Position) = _scala_xml(__Elem)
- private def _scala_xml__Text(implicit cpos: Position) = _scala_xml(__Text)
+ private def LL[A](x: A*): List[List[A]] = List(List(x:_*))
+ private def const(x: Any) = Literal(Constant(x))
+ private def wild = Ident(nme.WILDCARD)
+ private def wildStar = Ident(tpnme.WILDCARD_STAR)
+ private def _scala(name: Name) = Select(Select(Ident(nme.ROOTPKG), nme.scala_), name)
+ private def _scala_xml(name: Name) = Select(_scala(_xml), name)
+
+ private def _scala_xml_Comment = _scala_xml(_Comment)
+ private def _scala_xml_Elem = _scala_xml(_Elem)
+ private def _scala_xml_EntityRef = _scala_xml(_EntityRef)
+ private def _scala_xml_Group = _scala_xml(_Group)
+ private def _scala_xml_MetaData = _scala_xml(_MetaData)
+ private def _scala_xml_NamespaceBinding = _scala_xml(_NamespaceBinding)
+ private def _scala_xml_NodeBuffer = _scala_xml(_NodeBuffer)
+ private def _scala_xml_Null = _scala_xml(_Null)
+ private def _scala_xml_PrefixedAttribute = _scala_xml(_PrefixedAttribute)
+ private def _scala_xml_ProcInstr = _scala_xml(_ProcInstr)
+ private def _scala_xml_Text = _scala_xml(_Text)
+ private def _scala_xml_Unparsed = _scala_xml(_Unparsed)
+ private def _scala_xml_UnprefixedAttribute= _scala_xml(_UnprefixedAttribute)
+ private def _scala_xml__Elem = _scala_xml(__Elem)
+ private def _scala_xml__Text = _scala_xml(__Text)
/** Wildly wrong documentation deleted in favor of "self-documenting code." */
protected def mkXML(
+ pos: Position,
isPattern: Boolean,
pre: Tree,
label: Tree,
attrs: Tree,
scope: Tree,
empty: Boolean,
- children: Seq[Tree])(implicit cpos: Position): Tree =
+ children: Seq[Tree]): Tree =
{
def starArgs =
if (children.isEmpty) Nil
- else List(Typed(makeXMLseq(children), wildStar))
+ else List(Typed(makeXMLseq(pos, children), wildStar))
def pat = Apply(_scala_xml__Elem, List(pre, label, wild, wild) ::: convertToTextPat(children))
def nonpat = New(_scala_xml_Elem, List(List(pre, label, attrs, scope, if (empty) Literal(Constant(true)) else Literal(Constant(false))) ::: starArgs))
- if (isPattern) pat else nonpat
+ atPos(pos) { if (isPattern) pat else nonpat }
}
- final def entityRef(n: String)(implicit cpos: Position) =
- New(_scala_xml_EntityRef, LL(const(n)))
+ final def entityRef(pos: Position, n: String) =
+ atPos(pos)( New(_scala_xml_EntityRef, LL(const(n))) )
// create scala.xml.Text here <: scala.xml.Node
- final def text(txt: String)(implicit cpos: Position): Tree =
+ final def text(pos: Position, txt: String): Tree = atPos(pos) {
if (isPattern) makeTextPat(const(txt))
else makeText1(const(txt))
+ }
- def makeTextPat(txt: Tree)(implicit cpos: Position) = Apply(_scala_xml__Text, List(txt))
- def makeText1(txt: Tree)(implicit cpos: Position) = New(_scala_xml_Text, LL(txt))
- def comment(text: String)(implicit cpos: Position) = Comment(const(text))
- def charData(txt: String)(implicit cpos: Position) = makeText1(const(txt))
+ def makeTextPat(txt: Tree) = Apply(_scala_xml__Text, List(txt))
+ def makeText1(txt: Tree) = New(_scala_xml_Text, LL(txt))
+ def comment(pos: Position, text: String) = atPos(pos)( Comment(const(text)) )
+ def charData(pos: Position, txt: String) = atPos(pos)( makeText1(const(txt)) )
- def procInstr(target: String, txt: String)(implicit cpos: Position) =
- ProcInstr(const(target), const(txt))
+ def procInstr(pos: Position, target: String, txt: String) =
+ atPos(pos)( ProcInstr(const(target), const(txt)) )
- protected def Comment(txt: Tree)(implicit cpos: Position) = New(_scala_xml_Comment, LL(txt))
- protected def ProcInstr(target: Tree, txt: Tree)(implicit cpos: Position) = New(_scala_xml_ProcInstr, LL(target, txt))
+ protected def Comment(txt: Tree) = New(_scala_xml_Comment, LL(txt))
+ protected def ProcInstr(target: Tree, txt: Tree) = New(_scala_xml_ProcInstr, LL(target, txt))
/** @todo: attributes */
- def makeXMLpat(n: String, args: Seq[Tree])(implicit cpos: Position): Tree = {
+ def makeXMLpat(pos: Position, n: String, args: Seq[Tree]): Tree = {
val (prepat, labpat) = splitPrefix(n) match {
case (Some(pre), rest) => (const(pre), const(rest))
case _ => (wild, const(n))
}
- mkXML(true, prepat, labpat, null, null, false, args)
+ mkXML(pos, true, prepat, labpat, null, null, false, args)
}
- protected def convertToTextPat(t: Tree)(implicit cpos: Position): Tree = t match {
+ protected def convertToTextPat(t: Tree): Tree = t match {
case _: Literal => makeTextPat(t)
case _ => t
}
- protected def convertToTextPat(buf: Seq[Tree])(implicit cpos: Position): List[Tree] =
+ protected def convertToTextPat(buf: Seq[Tree]): List[Tree] =
(buf map convertToTextPat).toList
- def parseAttribute(s: String)(implicit cpos: Position): Tree = {
+ def parseAttribute(pos: Position, s: String): Tree = {
val ts = scala.xml.Utility.parseAttributeValue(s) map {
- case Text(s) => text(s)
- case EntityRef(s) => entityRef(s)
+ case Text(s) => text(pos, s)
+ case EntityRef(s) => entityRef(pos, s)
}
ts.length match {
case 0 => TypedSplice(tpd.ref(defn.NilModule))
case 1 => ts.head
- case _ => makeXMLseq(ts.toList)
+ case _ => makeXMLseq(pos, ts.toList)
}
}
@@ -160,11 +165,11 @@ class SymbolicXMLBuilder(preserveWS: Boolean)(implicit ctx: Context) {
}
/** could optimize if args.length == 0, args.length == 1 AND args(0) is <: Node. */
- def makeXMLseq(args: Seq[Tree])(implicit cpos: Position) = {
+ def makeXMLseq(pos: Position, args: Seq[Tree]) = {
val buffer = ValDef(Modifiers(), _buf, TypeTree(), New(_scala_xml_NodeBuffer, ListOfNil))
val applies = args filterNot isEmptyText map (t => Apply(Select(Ident(_buf), _plus), List(t)))
- Block(buffer :: applies.toList, Ident(_buf))
+ atPos(pos)( Block(buffer :: applies.toList, Ident(_buf)) )
}
/** Returns (Some(prefix) | None, rest) based on position of ':' */
@@ -174,89 +179,87 @@ class SymbolicXMLBuilder(preserveWS: Boolean)(implicit ctx: Context) {
}
/** Various node constructions. */
- def group(args: Seq[Tree])(implicit cpos: Position): Tree =
- New(_scala_xml_Group, LL(makeXMLseq(args)))
-
- def unparsed(str: String)(implicit cpos: Position): Tree =
- New(_scala_xml_Unparsed, LL(const(str)))
-
- def element(qname: String, attrMap: mutable.Map[String, Tree], empty: Boolean, args: Seq[Tree])(implicit cpos: Position): Tree = {
- val tpos = cpos.toSynthetic
- locally {
- implicit val cpos: Position = tpos
-
- def handleNamespaceBinding(pre: String, z: String): Tree = {
- def mkAssign(t: Tree): Tree = Assign(
- Ident(_tmpscope),
- New(_scala_xml_NamespaceBinding, LL(const(pre), t, Ident(_tmpscope))))
-
- val uri1 = attrMap(z) match {
- case Apply(_, List(uri @ Literal(Constant(_)))) => mkAssign(uri)
- case Select(_, nme.Nil) => mkAssign(const(null)) // allow for xmlns="" -- bug #1626
- case x => mkAssign(x)
- }
- attrMap -= z
- uri1
+ def group(pos: Position, args: Seq[Tree]): Tree =
+ atPos(pos)( New(_scala_xml_Group, LL(makeXMLseq(pos, args))) )
+
+ def unparsed(pos: Position, str: String): Tree =
+ atPos(pos)( New(_scala_xml_Unparsed, LL(const(str))) )
+
+ def element(pos: Position, qname: String, attrMap: mutable.Map[String, Tree], empty: Boolean, args: Seq[Tree]): Tree = {
+ def handleNamespaceBinding(pre: String, z: String): Tree = {
+ def mkAssign(t: Tree): Tree = Assign(
+ Ident(_tmpscope),
+ New(_scala_xml_NamespaceBinding, LL(const(pre), t, Ident(_tmpscope)))
+ )
+
+ val uri1 = attrMap(z) match {
+ case Apply(_, List(uri @ Literal(Constant(_)))) => mkAssign(uri)
+ case Select(_, nme.Nil) => mkAssign(const(null)) // allow for xmlns="" -- bug #1626
+ case x => mkAssign(x)
}
+ attrMap -= z
+ uri1
+ }
- /** Extract all the namespaces from the attribute map. */
- val namespaces: List[Tree] =
- for (z <- attrMap.keys.toList; if z startsWith xmlns) yield {
- val ns = splitPrefix(z) match {
- case (Some(_), rest) => rest
- case _ => null
- }
- handleNamespaceBinding(ns, z)
+ /** Extract all the namespaces from the attribute map. */
+ val namespaces: List[Tree] =
+ for (z <- attrMap.keys.toList ; if z startsWith xmlns) yield {
+ val ns = splitPrefix(z) match {
+ case (Some(_), rest) => rest
+ case _ => null
}
-
- val (pre, newlabel) = splitPrefix(qname) match {
- case (Some(p), x) => (p, x)
- case (None, x) => (null, x)
+ handleNamespaceBinding(ns, z)
}
- def mkAttributeTree(pre: String, key: String, value: Tree) = {
- // XXX this is where we'd like to put Select(value, nme.toString_) for #1787
- // after we resolve the Some(foo) situation.
- val baseArgs = List(const(key), value, Ident(_md))
- val (clazz, attrArgs) =
- if (pre == null) (_scala_xml_UnprefixedAttribute, baseArgs)
- else (_scala_xml_PrefixedAttribute, const(pre) :: baseArgs)
+ val (pre, newlabel) = splitPrefix(qname) match {
+ case (Some(p), x) => (p, x)
+ case (None, x) => (null, x)
+ }
- Assign(Ident(_md), New(clazz, LL(attrArgs: _*)))
- }
+ def mkAttributeTree(pre: String, key: String, value: Tree) = atPos(pos.toSynthetic) {
+ // XXX this is where we'd like to put Select(value, nme.toString_) for #1787
+ // after we resolve the Some(foo) situation.
+ val baseArgs = List(const(key), value, Ident(_md))
+ val (clazz, attrArgs) =
+ if (pre == null) (_scala_xml_UnprefixedAttribute, baseArgs)
+ else (_scala_xml_PrefixedAttribute , const(pre) :: baseArgs)
- def handlePrefixedAttribute(pre: String, key: String, value: Tree) = mkAttributeTree(pre, key, value)
- def handleUnprefixedAttribute(key: String, value: Tree) = mkAttributeTree(null, key, value)
+ Assign(Ident(_md), New(clazz, LL(attrArgs: _*)))
+ }
- val attributes: List[Tree] =
- for ((k, v) <- attrMap.toList.reverse) yield splitPrefix(k) match {
- case (Some(pre), rest) => handlePrefixedAttribute(pre, rest, v)
- case _ => handleUnprefixedAttribute(k, v)
- }
+ def handlePrefixedAttribute(pre: String, key: String, value: Tree) = mkAttributeTree(pre, key, value)
+ def handleUnprefixedAttribute(key: String, value: Tree) = mkAttributeTree(null, key, value)
- lazy val scopeDef = ValDef(Modifiers(), _scope, _scala_xml_NamespaceBinding, Ident(_tmpscope))
- lazy val tmpScopeDef = ValDef(Modifiers(Mutable), _tmpscope, _scala_xml_NamespaceBinding, Ident(_scope))
- lazy val metadataDef = ValDef(Modifiers(Mutable), _md, _scala_xml_MetaData, _scala_xml_Null)
- val makeSymbolicAttrs = if (!attributes.isEmpty) Ident(_md) else _scala_xml_Null
-
- val (attrResult, nsResult) =
- (attributes.isEmpty, namespaces.isEmpty) match {
- case (true, true) => (Nil, Nil)
- case (true, false) => (scopeDef :: Nil, tmpScopeDef :: namespaces)
- case (false, true) => (metadataDef :: attributes, Nil)
- case (false, false) => (scopeDef :: metadataDef :: attributes, tmpScopeDef :: namespaces)
- }
+ val attributes: List[Tree] =
+ for ((k, v) <- attrMap.toList.reverse) yield splitPrefix(k) match {
+ case (Some(pre), rest) => handlePrefixedAttribute(pre, rest, v)
+ case _ => handleUnprefixedAttribute(k, v)
+ }
- val body = mkXML(
- false,
- const(pre),
- const(newlabel),
- makeSymbolicAttrs,
- Ident(_scope),
- empty,
- args)
+ lazy val scopeDef = ValDef(Modifiers(), _scope, _scala_xml_NamespaceBinding, Ident(_tmpscope))
+ lazy val tmpScopeDef = ValDef(Modifiers(Mutable), _tmpscope, _scala_xml_NamespaceBinding, Ident(_scope))
+ lazy val metadataDef = ValDef(Modifiers(Mutable), _md, _scala_xml_MetaData, _scala_xml_Null)
+ val makeSymbolicAttrs = if (!attributes.isEmpty) Ident(_md) else _scala_xml_Null
+
+ val (attrResult, nsResult) =
+ (attributes.isEmpty, namespaces.isEmpty) match {
+ case (true , true) => (Nil, Nil)
+ case (true , false) => (scopeDef :: Nil, tmpScopeDef :: namespaces)
+ case (false, true) => (metadataDef :: attributes, Nil)
+ case (false, false) => (scopeDef :: metadataDef :: attributes, tmpScopeDef :: namespaces)
+ }
- Block(nsResult, Block(attrResult, body))
- }
+ val body = mkXML(
+ pos.toSynthetic,
+ false,
+ const(pre),
+ const(newlabel),
+ makeSymbolicAttrs,
+ Ident(_scope),
+ empty,
+ args
+ )
+
+ atPos(pos.toSynthetic)( Block(nsResult, Block(attrResult, body)) )
}
}