author     Martin Odersky <odersky@gmail.com>   2009-05-08 16:33:15 +0000
committer  Martin Odersky <odersky@gmail.com>   2009-05-08 16:33:15 +0000
commit     14a631a5fec42d04d0723355a0b93e482b5e4662 (patch)
tree       f639c2a22e89e193b9abea391993ecfd4d5326ee /src/compiler/scala/tools
parent     2379eb4ebbd28c8892b50a1d9fa8a687099eea4d (diff)
download   scala-14a631a5fec42d04d0723355a0b93e482b5e4662.tar.gz
           scala-14a631a5fec42d04d0723355a0b93e482b5e4662.tar.bz2
           scala-14a631a5fec42d04d0723355a0b93e482b5e4662.zip
massive new collections checkin.
Diffstat (limited to 'src/compiler/scala/tools')
-rw-r--r--  src/compiler/scala/tools/ant/ScalaBazaar.scala | 8
-rw-r--r--  src/compiler/scala/tools/ant/sabbus/Compilers.scala | 5
-rw-r--r--  src/compiler/scala/tools/nsc/CompileServer.scala | 1
-rw-r--r--  src/compiler/scala/tools/nsc/Global.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/Interpreter.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/Main.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/ast/TreeGen.scala | 14
-rw-r--r--  src/compiler/scala/tools/nsc/ast/parser/MarkupParsers1.scala | 705
-rw-r--r--  src/compiler/scala/tools/nsc/ast/parser/NewScanners.scala | 21
-rw-r--r--  src/compiler/scala/tools/nsc/ast/parser/Parsers.scala | 6
-rwxr-xr-x  src/compiler/scala/tools/nsc/ast/parser/Parsers1.scala | 2607
-rwxr-xr-x  src/compiler/scala/tools/nsc/ast/parser/Scanners1.scala | 971
-rw-r--r--  src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder1.scala | 368
-rw-r--r--  src/compiler/scala/tools/nsc/ast/parser/SyntaxAnalyzer1.scala | 29
-rw-r--r--  src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/backend/icode/Linearizers.scala | 7
-rw-r--r--  src/compiler/scala/tools/nsc/backend/icode/analysis/DataFlowAnalysis.scala | 3
-rw-r--r--  src/compiler/scala/tools/nsc/backend/icode/analysis/ReachingDefinitions.scala | 6
-rw-r--r--  src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala | 6
-rw-r--r--  src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/backend/opt/Inliners.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/dependencies/Files.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/doc/DefaultDocDriver.scala | 31
-rw-r--r--  src/compiler/scala/tools/nsc/doc/DocUtil.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/doc/ModelAdditions.scala | 3
-rw-r--r--  src/compiler/scala/tools/nsc/doc/ModelExtractor.scala | 29
-rw-r--r--  src/compiler/scala/tools/nsc/doc/ModelFrames.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/doc/ModelToXML.scala | 7
-rw-r--r--  src/compiler/scala/tools/nsc/matching/ParallelMatching.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/models/SemanticTokens.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/plugins/Plugin.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/symtab/IdeSupport.scala | 89
-rw-r--r--  src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala | 1
-rw-r--r--  src/compiler/scala/tools/nsc/symtab/SymbolWalker.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/symtab/Symbols.scala | 9
-rw-r--r--  src/compiler/scala/tools/nsc/symtab/Types.scala | 24
-rw-r--r--  src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/symtab/classfile/UnPickler.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/transform/Erasure.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/transform/LiftCode.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/transform/TailCalls.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/transform/UnCurry.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/IdeSupport.scala | 8
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Implicits.scala | 5
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Infer.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Typers.scala | 3
-rwxr-xr-x  src/compiler/scala/tools/nsc/util/CharArrayReader1.scala | 107
-rw-r--r--  src/compiler/scala/tools/nsc/util/NewCharArrayReader.scala | 2
-rw-r--r--  src/compiler/scala/tools/util/AbstractTimer.scala | 2
50 files changed, 4981 insertions, 144 deletions
diff --git a/src/compiler/scala/tools/ant/ScalaBazaar.scala b/src/compiler/scala/tools/ant/ScalaBazaar.scala
index 6ec96a5773..74f311a376 100644
--- a/src/compiler/scala/tools/ant/ScalaBazaar.scala
+++ b/src/compiler/scala/tools/ant/ScalaBazaar.scala
@@ -11,7 +11,7 @@
package scala.tools.ant {
- import scala.collection.Map
+ import scala.collection.DefaultMap
import scala.collection.mutable.HashMap
import java.io.{File, FileInputStream, FileOutputStream,
FileWriter, StringReader}
@@ -78,11 +78,11 @@ package scala.tools.ant {
private var link: Option[String] = None
/** The sets of files to include in the package */
- private object fileSetsMap extends Map[String, List[FileSet]] {
+ private object fileSetsMap extends DefaultMap[String, List[FileSet]] {
private var content = new HashMap[String, List[FileSet]]()
def get(key: String): Option[List[FileSet]] = content.get(key)
- def size: Int = content.size
- def update(key: String, value: FileSet) = {
+ override def size: Int = content.size
+ def update(key: String, value: FileSet) {
if (content.contains(key) && content(key) != Nil)
content.update(key, value :: content(key))
else content.update(key, List(value))
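
Illustrative sketch (not from this commit): with the new collections, read-only map-like objects such as fileSetsMap above switch from scala.collection.Map to DefaultMap, which, as far as the new API goes, leaves only get and iterator abstract and gives size a default implementation, hence the added override. A minimal standalone version of the same pattern, assuming the 2.8-style API; the Registry name is made up:

    import scala.collection.DefaultMap
    import scala.collection.mutable.HashMap

    // Read-only map view over a mutable HashMap, mirroring fileSetsMap above.
    class Registry[A, B] extends DefaultMap[A, B] {
      private val content = new HashMap[A, B]()
      def get(key: A): Option[B] = content.get(key)        // abstract in DefaultMap
      def iterator: Iterator[(A, B)] = content.iterator    // abstract in DefaultMap
      override def size: Int = content.size                // has a default, so `override`
      def update(key: A, value: B) { content.update(key, value) }
    }
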
diff --git a/src/compiler/scala/tools/ant/sabbus/Compilers.scala b/src/compiler/scala/tools/ant/sabbus/Compilers.scala
index ba90614f80..b1566fd12c 100644
--- a/src/compiler/scala/tools/ant/sabbus/Compilers.scala
+++ b/src/compiler/scala/tools/ant/sabbus/Compilers.scala
@@ -12,7 +12,7 @@ package scala.tools.ant.sabbus
import java.net.URL
-object Compilers extends collection.Map[String, Compiler] {
+object Compilers extends collection.DefaultMap[String, Compiler] {
val debug = false
@@ -22,7 +22,7 @@ object Compilers extends collection.Map[String, Compiler] {
def get(id: String) = container.get(id)
- def size = container.size
+ override def size = container.size
def make(id: String, classpath: Array[URL], settings: Settings): Compiler = {
val runtime = Runtime.getRuntime
@@ -43,5 +43,4 @@ object Compilers extends collection.Map[String, Compiler] {
if (debug) println(" memory after: " + (runtime.freeMemory/1048576.).formatted("%10.2f") + " MB")
null
}
-
}
diff --git a/src/compiler/scala/tools/nsc/CompileServer.scala b/src/compiler/scala/tools/nsc/CompileServer.scala
index dd3133acdb..5d14064728 100644
--- a/src/compiler/scala/tools/nsc/CompileServer.scala
+++ b/src/compiler/scala/tools/nsc/CompileServer.scala
@@ -10,7 +10,6 @@ import java.io.{BufferedOutputStream, File, FileOutputStream, PrintStream}
import java.lang.{Runtime, System, Thread}
import scala.concurrent.ops.spawn
-import scala.tools.nsc.doc.{DocDriver => DocGenerator}
import scala.tools.nsc.reporters.{Reporter, ConsoleReporter}
import scala.tools.nsc.util.FakePos //Position
import scala.tools.util.SocketServer
diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala
index 73b2f6aff7..3ebe015fd5 100644
--- a/src/compiler/scala/tools/nsc/Global.scala
+++ b/src/compiler/scala/tools/nsc/Global.scala
@@ -277,7 +277,7 @@ class Global(var settings: Settings, var reporter: Reporter) extends SymbolTable
val global: Global.this.type = Global.this
val runsAfter = List[String]()
val runsRightAfter = None
- } with SyntaxAnalyzer
+ } with SyntaxAnalyzer1
// factory method for
// phaseName = "namer"
diff --git a/src/compiler/scala/tools/nsc/Interpreter.scala b/src/compiler/scala/tools/nsc/Interpreter.scala
index eefcc05b35..2973444be6 100644
--- a/src/compiler/scala/tools/nsc/Interpreter.scala
+++ b/src/compiler/scala/tools/nsc/Interpreter.scala
@@ -550,7 +550,7 @@ class Interpreter(val settings: Settings, out: PrintWriter)
lazy val extractor = """
| {
| val s = scala.runtime.ScalaRunTime.stringOf(%s)
- | val nl = if (s.toSeq.contains('\n')) "\n" else ""
+ | val nl = if (s.contains('\n')) "\n" else ""
| nl + s + "\n"
| }
""".stripMargin.format(req fullPath vname)
diff --git a/src/compiler/scala/tools/nsc/Main.scala b/src/compiler/scala/tools/nsc/Main.scala
index 68c6d1bede..bfc6681b5b 100644
--- a/src/compiler/scala/tools/nsc/Main.scala
+++ b/src/compiler/scala/tools/nsc/Main.scala
@@ -76,7 +76,7 @@ object Main extends AnyRef with EvalLoop {
}
} catch {
case ex @ FatalError(msg) =>
- if (command.settings.debug.value)
+ if (true || command.settings.debug.value) // !!!
ex.printStackTrace();
reporter.error(null, "fatal error: " + msg)
}
diff --git a/src/compiler/scala/tools/nsc/ast/TreeGen.scala b/src/compiler/scala/tools/nsc/ast/TreeGen.scala
index 4350288fbc..622d229e45 100644
--- a/src/compiler/scala/tools/nsc/ast/TreeGen.scala
+++ b/src/compiler/scala/tools/nsc/ast/TreeGen.scala
@@ -133,6 +133,8 @@ abstract class TreeGen {
def mkAttributedCastUntyped(tree: Tree, pt: Type): Tree = {
if (settings.debug.value) log("casting " + tree + ":" + tree.tpe + " to " + pt)
assert(!tree.tpe.isInstanceOf[MethodType], tree)
+ assert(!pt.typeSymbol.isPackageClass)
+ assert(!pt.typeSymbol.isPackageObjectClass)
assert(pt eq pt.normalize) //@MAT only called during erasure, which already takes care of that
atPos(tree.pos) {
Apply(TypeApply(mkAttributedSelect(tree, Object_asInstanceOf), List(TypeTree(pt))), List())
@@ -159,8 +161,16 @@ abstract class TreeGen {
qual.symbol.name.toTermName == nme.EMPTY_PACKAGE_NAME)) {
mkAttributedIdent(sym)
} else {
- val result = Select(qual, sym.name) setSymbol sym
- if (qual.tpe ne null) result setType qual.tpe.memberType(sym)
+ val qual1 =
+ if ((qual.tpe ne null) &&
+ sym.owner.isPackageObjectClass &&
+ sym.owner.owner == qual.tpe.typeSymbol) {
+ //println("insert package for "+qual+"/"+sym)
+ val pkgobj = sym.owner.sourceModule
+ Select(qual, nme.PACKAGEkw) setSymbol pkgobj setType singleType(qual.tpe, pkgobj)
+ } else qual
+ val result = Select(qual1, sym.name) setSymbol sym
+ if (qual1.tpe ne null) result setType qual.tpe.memberType(sym)
result
}
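
Illustrative sketch (not from this commit): the extra Select built above appears to handle members defined in a package object, whose real owner is the synthetic `package` module. For a hypothetical package p:

    // A member of a package object p is really a member of the module p.`package`,
    // so a reference written `p.member` is compiled roughly as `p.`package`.member`,
    // which is what the inserted Select(qual, nme.PACKAGEkw) models.
    package object p {
      def member: Int = 42
    }
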
diff --git a/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers1.scala b/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers1.scala
new file mode 100644
index 0000000000..bba76c748c
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers1.scala
@@ -0,0 +1,705 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2009 LAMP/EPFL
+ * @author Burak Emir
+ */
+// $Id: MarkupParsers.scala 17315 2009-03-16 17:46:58Z extempore $
+
+package scala.tools.nsc.ast.parser
+
+import scala.collection.mutable
+import scala.tools.nsc.util.{Position,NoPosition,SourceFile,CharArrayReader1}
+import scala.xml.{Text, TextBuffer}
+import SourceFile.{SU,LF}
+import scala.annotation.switch
+
+/** This trait ...
+ *
+ * @author Burak Emir
+ * @version 1.0
+ */
+trait MarkupParsers1 {self: Parsers1 =>
+ case object MissingEndTagException extends RuntimeException {
+ override def getMessage = "start tag was here: "
+ }
+
+ case object ConfusedAboutBracesException extends RuntimeException {
+ override def getMessage = " I encountered a '}' where I didn't expect one, maybe this tag isn't closed <"
+ }
+
+ case object TruncatedXML extends RuntimeException {
+ override def getMessage = "input ended while parsing XML"
+ }
+
+ import global._
+ //import posAssigner.atPos
+
+ class MarkupParser(parser: UnitParser, presWS: Boolean) /*with scala.xml.parsing.MarkupParser[Tree,Tree] */{
+
+ import Tokens.{EMPTY, LBRACE, RBRACE}
+
+ final val preserveWS = presWS
+ var input : CharArrayReader1 = _
+
+ import parser.{symbXMLBuilder => handle, i2p}
+
+ def pos : Int = input.charOffset
+ var tmppos : Position = NoPosition
+ def ch = input.ch
+ /** this method assigns the next character to ch and advances the input */
+ def nextch = { val result = input.ch; input.nextChar(); result } // { s.in.next; /*s.xNext;*/ ch = s.in.ch ; pos = s.in.cpos }
+
+ var xEmbeddedBlock = false
+
+ /** munch expected XML token, report syntax error for unexpected.
+ *
+ * @param that ...
+ */
+ /*[Duplicate]*/ def xToken(that: Char) {
+ if (ch == that) nextch
+ else if (ch == SU)
+ throw TruncatedXML
+ else reportSyntaxError("'" + that + "' expected instead of '" + ch + "'")
+ }
+
+ var debugLastStartElement = new mutable.Stack[(Int, String)]
+
+ /** checks whether next character starts a Scala block, if yes, skip it.
+ * @return true if next character starts a scala block
+ */
+ /*[Duplicate]*/ def xCheckEmbeddedBlock: Boolean = {
+ // attention: side effect, used in xText
+ xEmbeddedBlock = (ch == '{') && { nextch; (ch != '{') }
+ //Console.println("pos = "+pos+" xEmbeddedBlock returns "+xEmbeddedBlock)
+ xEmbeddedBlock
+ }
+
+ /** parse attribute and add it to listmap
+ * [41] Attributes ::= { S Name Eq AttValue }
+ * AttValue ::= `'` { _ } `'`
+ * | `"` { _ } `"`
+ * | `{` scalablock `}`
+ */
+ /*[Duplicate]*/ def xAttributes = {
+ var aMap = new mutable.HashMap[String, Tree]()
+ while (xml.Parsing.isNameStart(ch)) {
+ val key = xName
+ xEQ
+ val delim = ch
+ val pos1 = pos
+ val value: /* AttribValue[*/Tree/*]*/ = ch match {
+ case '"' | '\'' =>
+ nextch
+ val tmp = xAttributeValue(delim)
+ nextch
+ try {
+ handle.parseAttribute(pos1, tmp)
+ } catch {
+ case e =>
+ reportSyntaxError("error parsing attribute value")
+ parser.errorTermTree
+ }
+
+ case '{' =>
+ nextch
+ xEmbeddedExpr
+ case SU =>
+ throw TruncatedXML
+ case _ =>
+ reportSyntaxError("' or \" delimited attribute value" +
+ " or '{' scala-expr '}' expected" )
+ Literal(Constant("<syntax-error>"))
+ }
+ // well-formedness constraint: unique attribute names
+ if (aMap.contains(key)) {
+ reportSyntaxError( "attribute "+key+" may only be defined once" )
+ }
+ aMap.update(key, value)
+ if ((ch != '/') && (ch != '>')) {
+ xSpace
+ }
+ }
+ aMap
+ }
+
+ /** attribute value, terminated by either ' or ". value may not contain <.
+ * @param endch either ' or "
+ */
+ /*[Duplicate]*/ def xAttributeValue(endCh: Char): String = {
+ val buf = new StringBuilder
+ while (ch != endCh) {
+ if (ch == SU)
+ throw TruncatedXML
+ buf append ch
+ nextch
+ }
+ val str = buf.toString()
+
+ // @todo: normalize attribute value
+ // well-formedness constraint
+ if (str.indexOf('<') != -1) {
+ reportSyntaxError( "'<' not allowed in attrib value" ); ""
+ } else {
+ str
+ }
+ }
+
+ /** parse a start or empty tag.
+ * [40] STag ::= '<' Name { S Attribute } [S]
+ * [44] EmptyElemTag ::= '<' Name { S Attribute } [S]
+ */
+ /*[Duplicate]*/ def xTag: (String, mutable.Map[String, Tree]) = {
+ val elemName = xName
+ xSpaceOpt
+ val aMap =
+ if (xml.Parsing.isNameStart(ch)) xAttributes
+ else new mutable.HashMap[String, Tree]()
+ (elemName, aMap)
+ }
+
+ /** [42] '<' xmlEndTag ::= '<' '/' Name S? '>'
+ */
+ /*[Duplicate]*/ def xEndTag(startName: String) {
+ xToken('/')
+ val endName = xName
+ if (endName != startName) {
+ reportSyntaxError("expected closing tag of " + startName)
+ throw MissingEndTagException
+ }
+ xSpaceOpt
+ xToken('>')
+ }
+
+ /** '<! CharData ::= [CDATA[ ( {char} - {char}"]]>"{char} ) ']]>'
+ *
+ * see [15]
+ */
+ /*[Duplicate]*/ def xCharData: Tree = {
+ xToken('[')
+ xToken('C')
+ xToken('D')
+ xToken('A')
+ xToken('T')
+ xToken('A')
+ xToken('[')
+ val pos1 = pos
+ val sb: StringBuilder = new StringBuilder()
+ while (true) {
+ if (ch==']' &&
+ { sb.append(ch); nextch; ch == ']' } &&
+ { sb.append(ch); nextch; ch == '>' }) {
+ sb.length = sb.length - 2
+ nextch
+ return handle.charData(pos1, sb.toString())
+ } else if (ch == SU)
+ throw TruncatedXML
+ else
+ sb.append(ch)
+ nextch
+ }
+ Predef.error("this cannot happen")
+ }
+
+ def xUnparsed: Tree = {
+ val pos1 = pos
+ val sb: StringBuilder = new StringBuilder()
+ while (true) {
+ if (ch=='<' &&
+ { sb.append(ch); nextch; ch == '/' } &&
+ { sb.append(ch); nextch; ch == 'x' } &&
+ { sb.append(ch); nextch; ch == 'm' } &&
+ { sb.append(ch); nextch; ch == 'l' } &&
+ { sb.append(ch); nextch; ch == ':' } &&
+ { sb.append(ch); nextch; ch == 'u' } &&
+ { sb.append(ch); nextch; ch == 'n' } &&
+ { sb.append(ch); nextch; ch == 'p' } &&
+ { sb.append(ch); nextch; ch == 'a' } &&
+ { sb.append(ch); nextch; ch == 'r' } &&
+ { sb.append(ch); nextch; ch == 's' } &&
+ { sb.append(ch); nextch; ch == 'e' } &&
+ { sb.append(ch); nextch; ch == 'd' } &&
+ { sb.append(ch); nextch; ch == '>' }) {
+ sb.length = sb.length - "</xml:unparsed".length
+ nextch
+ return handle.unparsed(pos1, sb.toString())
+ } else if (ch == SU) {
+ throw TruncatedXML
+ } else sb.append(ch)
+ nextch
+ }
+ Predef.error("this cannot happen")
+ }
+
+ /** CharRef ::= "&#" '0'..'9' {'0'..'9'} ";"
+ * | "&#x" '0'..'9'|'A'..'F'|'a'..'f' { hexdigit } ";"
+ *
+ * see [66]
+ */
+ /*[Duplicate]*/ def xCharRef: String = {
+ val hex = (ch == 'x') && { nextch; true }
+ val base = if (hex) 16 else 10
+ var i = 0
+ while (ch != ';') {
+ (ch: @switch) match {
+ case '0' | '1' | '2' | '3' | '4' | '5' | '6' | '7' | '8' | '9' =>
+ i = i * base + ch.asDigit
+ case 'a' | 'b' | 'c' | 'd' | 'e' | 'f'
+ | 'A' | 'B' | 'C' | 'D' | 'E' | 'F' =>
+ if (!hex)
+ reportSyntaxError("hex char not allowed in decimal char ref\n"
+ +"Did you mean to write &#x ?");
+ else
+ i = i * base + ch.asDigit
+ case SU =>
+ throw TruncatedXML
+ case _ =>
+ reportSyntaxError("character '"+ch+"' not allowed in char ref")
+ }
+ nextch
+ }
+ new String(Array(i.asInstanceOf[Char]))
+ }
+
+ /** Comment ::= '<!--' ((Char - '-') | ('-' (Char - '-')))* '-->'
+ *
+ * see [15]
+ */
+ /*[Duplicate]*/ def xComment: Tree = {
+ val sb: StringBuilder = new StringBuilder()
+ xToken('-')
+ xToken('-')
+ while (true) {
+ if (ch=='-' && { sb.append(ch); nextch; ch == '-' }) {
+ sb.length = sb.length - 1
+ nextch
+ xToken('>')
+ return handle.comment(pos, sb.toString())
+ } else if (ch == SU) {
+ throw TruncatedXML
+ } else sb.append(ch)
+ nextch
+ }
+ Predef.error("this cannot happen")
+ }
+
+ /**
+ * @param pos ...
+ * @param ts ...
+ * @param txt ...
+ */
+ /*[Duplicate]*/ def appendText(pos: Position, ts: mutable.Buffer[Tree],
+ txt: String) {
+ if (!preserveWS) {
+ for (t <- TextBuffer.fromString(txt).toText) {
+ ts.append(handle.text(pos, t.text))
+ }
+ }
+ else
+ ts.append( handle.text(pos, txt))
+ }
+
+ /** adds entity/character to ts as a side-effect
+ * @precond ch == '&amp;'
+ */
+ def content_AMP(ts: mutable.ArrayBuffer[Tree]) {
+ nextch
+ ch match {
+ case '#' => // CharacterRef
+ nextch
+ val theChar = handle.text(tmppos, xCharRef)
+ xToken(';')
+ ts.append(theChar)
+ case _ => // EntityRef
+ val n = xName
+ xToken(';')
+ ts.append(handle.entityRef(tmppos, n))
+ }
+ }
+
+ /**
+ * @precond ch == '{'
+ * @postcond: xEmbeddedBlock == false!
+ */
+ def content_BRACE(p: Position, ts:mutable.ArrayBuffer[Tree]) {
+ if (xCheckEmbeddedBlock)
+ ts.append(xEmbeddedExpr)
+ else {
+ appendText(p, ts, xText)/*
+ val str = new StringBuilder("{")
+ str.append(xText)
+ nextch
+ appendText(p, ts, str.toString())*/
+ }
+ }
+
+ /** Returns true if it encounters an end tag (without consuming it),
+ * appends trees to ts as side-effect.
+ *
+ * @param ts ...
+ * @return ...
+ */
+ private def content_LT(ts: mutable.ArrayBuffer[Tree]): Boolean = {
+ ch match {
+ case '/' =>
+ return true // end tag
+ case '!' =>
+ nextch // CDATA or Comment
+ ts.append(if ('[' == ch) xCharData else xComment)
+ case '?' => // PI
+ nextch
+ ts.append(xProcInstr)
+ case _ =>
+ ts.append(element) // child node
+ }
+ false
+ }
+
+ /*[Duplicate]*/ def content: mutable.Buffer[Tree] = {
+ var ts = new mutable.ArrayBuffer[Tree]
+ var exit = false
+ while (!exit) {
+ if (xEmbeddedBlock)
+ ts.append(xEmbeddedExpr)
+ else {
+ tmppos = pos
+ ch match {
+ case '<' => // end tag, cdata, comment, pi or child node
+ nextch
+ exit = content_LT(ts)
+ case '{' => // either the character '{' or an embedded scala block
+ content_BRACE(tmppos, ts)
+ case '&' => // EntityRef or CharRef
+ content_AMP(ts)
+ case SU =>
+ exit = true
+ case _ => // text content
+ appendText(tmppos, ts, xText)
+ // here xEmbeddedBlock might be true
+ }
+ }
+ }
+ ts
+ }
+
+ /** '<' element ::= xmlTag1 '>' { xmlExpr | '{' simpleExpr '}' } ETag
+ * | xmlTag1 '/' '>'
+ */
+ /*[Duplicate]*/ def element: Tree = {
+ val pos1 = pos
+ val (qname, attrMap) = xTag
+ if (ch == '/') { // empty element
+ xToken('/')
+ xToken('>')
+ handle.element(pos1, qname, attrMap, new mutable.ListBuffer[Tree])
+ }
+ else { // handle content
+ xToken('>')
+ if (qname == "xml:unparsed")
+ return xUnparsed
+
+ debugLastStartElement.push((pos1, qname))
+ val ts = content
+ xEndTag(qname)
+ debugLastStartElement.pop
+ qname match {
+ case "xml:group" => handle.group(pos1, ts)
+ case _ => handle.element(pos1, qname, attrMap, ts)
+ }
+ }
+ }
+
+ /** actually, Name ::= (Letter | '_' | ':') (NameChar)* but starting with ':' cannot happen
+ * Name ::= (Letter | '_') (NameChar)*
+ *
+ * see [5] of XML 1.0 specification
+ *
+ * pre-condition: ch != ':' // assured by definition of XMLSTART token
+ * post-condition: name neither starts nor ends in ':'
+ */
+ /*[Duplicate]*/ def xName: String = {
+ if (ch == SU) {
+ throw TruncatedXML
+ } else if ( !xml.Parsing.isNameStart(ch)) {
+ reportSyntaxError("name expected, but char '"+ch+"' cannot start a name")
+ return ""
+ }
+ val buf = new StringBuilder
+ do {
+ buf append ch; nextch
+ } while (xml.Parsing.isNameChar(ch))
+ if (':' == buf.last) {
+ reportSyntaxError( "name cannot end in ':'" )
+ buf.setLength(buf.length - 1)
+ }
+ val n = buf.toString().intern()
+ //cbuf.length = 0
+ n
+ }
+
+ /** scan [S] '=' [S]*/
+ /*[Duplicate]*/ def xEQ = { xSpaceOpt; xToken('='); xSpaceOpt }
+
+ /** skip optional space S? */
+ /*[Duplicate]*/ def xSpaceOpt = { while (xml.Parsing.isSpace(ch)) { nextch }}
+
+ /** scan [3] S ::= (#x20 | #x9 | #xD | #xA)+ */
+ /*[Duplicate]*/ def xSpace =
+ if (xml.Parsing.isSpace(ch)) { nextch; xSpaceOpt }
+ else if (ch == SU)
+ throw TruncatedXML
+ else reportSyntaxError("whitespace expected")
+
+ /** '<?' ProcInstr ::= Name [S ({Char} - ({Char}'>?' {Char})]'?>'
+ *
+ * see [15]
+ */
+ /*[Duplicate]*/ def xProcInstr: Tree = {
+ val sb: StringBuilder = new StringBuilder()
+ val n = xName
+ if (xml.Parsing.isSpace(ch)) {
+ xSpace
+ while (true) {
+ if (ch == '?' && { sb.append(ch); nextch; ch == '>' }) {
+ sb.length = sb.length - 1
+ nextch
+ return handle.procInstr(tmppos, n, sb.toString)
+ } else
+ sb.append(ch);
+ nextch
+ }
+ }
+ xToken('?')
+ xToken('>')
+ handle.procInstr(tmppos, n, sb.toString)
+ }
+
+ /** parse character data.
+ * precondition: xEmbeddedBlock == false (we are not in a scala block)
+ */
+ /*[Duplicate]*/ def xText: String = {
+ if (xEmbeddedBlock) Predef.error("internal error: encountered embedded block"); // assert
+ //Console.println("xText ch now "+ch)
+ //if( xCheckEmbeddedBlock ) {
+ // return ""
+ //} else {
+ var exit = false
+ val buf = new StringBuilder
+ while (!exit && (ch!=SU)) {
+ buf append ch
+ val expectRBRACE = ch == '}'
+ // TODO check for "}}"
+ nextch
+ if (expectRBRACE) {
+ if (ch == '}')
+ nextch
+ else {
+ reportSyntaxError("in XML content, please use '}}' to express '}'")
+ throw ConfusedAboutBracesException
+ }
+ }
+ exit = xCheckEmbeddedBlock ||(ch == '<') || (ch == '&')
+ }
+ val str = buf.toString()
+ //cbuf.length = 0
+ str
+ //}
+ }
+
+ //val cbuf = new StringBuilder()
+
+ /** append Unicode character to name buffer*/
+ //private def putChar(c: char) = cbuf.append(c)
+
+ /** xLiteral = element { element }
+ * @return Scala representation of this xml literal
+ * precondition: s.xStartsXML == true
+ */
+ def xLiteral: Tree = try {
+ input = parser.in
+ handle.isPattern = false
+
+ //val pos = s.currentPos
+ var tree:Tree = null
+ val ts = new mutable.ArrayBuffer[Tree]()
+ tmppos = (pos) // Iuli: added this line, as it seems content_LT uses tmppos when creating trees
+// assert(ch == '<')
+// nextch
+ content_LT(ts)
+ //Console.println("xLiteral:ts = "+ts.toList)
+ //lastend = s.in.bp
+ //lastch = s.in.ch
+ //if (settings.debug.value) {
+ // Console.println("DEBUG 1: I am getting char '"+ch+"' at lastend "+lastend+" pos = "+pos); // DEBUG
+ //}
+ input = input.lookaheadReader
+ xSpaceOpt
+ // parse more XML ?
+ if (ch == '<') {
+ input = parser.in
+ xSpaceOpt
+ while (ch == '<') {
+ nextch
+ ts.append(element)
+ xSpaceOpt
+ }
+ tree = handle.makeXMLseq((pos), ts)
+ } else {
+ input = parser.in
+ assert(ts.length == 1)
+ tree = ts(0)
+ }
+ tree
+ } catch {
+ case c @ TruncatedXML =>
+ parser.incompleteInputError(c.getMessage)
+ EmptyTree
+
+ case c @ (MissingEndTagException | ConfusedAboutBracesException) =>
+ parser.syntaxError((debugLastStartElement.top._1):Int,
+ c.getMessage + debugLastStartElement.top._2+">")
+ EmptyTree
+
+ case _:ArrayIndexOutOfBoundsException =>
+ parser.syntaxError((debugLastStartElement.top._1),
+ "missing end tag in XML literal for <"
+ +debugLastStartElement.top._2+">");
+ EmptyTree
+ } finally {
+ parser.in.resume(Tokens.XMLSTART)
+ }
+
+ /** @see xmlPattern. resynchronizes after successful parse
+ * @return this xml pattern
+ * precondition: s.xStartsXML == true
+ */
+ def xLiteralPattern: Tree = try {
+ input = parser.in
+ val oldMode = handle.isPattern;
+ handle.isPattern = true
+// assert(ch == '<')
+// nextch
+ var tree = xPattern; xSpaceOpt;
+ handle.isPattern = oldMode;
+ tree
+ } catch {
+ case c @ TruncatedXML =>
+ parser.syntaxError(pos - 1, c.getMessage)
+ EmptyTree
+
+ case c @ (MissingEndTagException | ConfusedAboutBracesException) =>
+ parser.syntaxError((debugLastStartElement.top._1),
+ c.getMessage + debugLastStartElement.top._2+">")
+ EmptyTree
+
+ case _:ArrayIndexOutOfBoundsException =>
+ parser.syntaxError((debugLastStartElement.top._1),
+ "missing end tag in XML literal for <"
+ +debugLastStartElement.top._2+">")
+ EmptyTree
+ } finally {
+ parser.in.resume(Tokens.XMLSTART)
+ }
+
+ def escapeToScala[A](op: => A, kind: String) = {
+ xEmbeddedBlock = false
+ val savedSepRegions = parser.in.sepRegions
+ parser.in.resume(LBRACE)
+ try {
+ op //p.expr(true,false);
+ } finally {
+ parser.in.sepRegions = savedSepRegions // parser.in.sepRegions.tail
+ if (parser.in.token != RBRACE) {
+ reportSyntaxError(" expected end of Scala "+kind)
+ }
+ }
+ }
+
+ def xEmbeddedExpr: Tree = escapeToScala(parser.block(), "block")
+
+ /** xScalaPatterns ::= patterns
+ */
+ def xScalaPatterns: List[Tree] = escapeToScala(parser.patterns(true), "pattern")
+
+ var scannerState: List[List[Int]] = Nil
+
+/*
+ private def pushScannerState {
+ scannerState = s.sepRegions :: scannerState
+ s.sepRegions = Nil
+ }
+ private def popScannerState {
+ s.sepRegions = scannerState.head
+ scannerState = scannerState.tail
+ }
+ */
+/*
+ private def init {
+ ch = s.in.ch
+ pos = s.in.cpos
+ }
+ */
+
+ def reportSyntaxError(str: String) = {
+ parser.syntaxError(pos - 1, "in XML literal: " + str)
+ nextch
+ }
+/*
+ private def sync {
+ xEmbeddedBlock = false
+ s.xSync
+ }
+*/
+ /** '<' xPattern ::= Name [S] { xmlPattern | '{' pattern3 '}' } ETag
+ * | Name [S] '/' '>'
+ */
+ def xPattern: Tree = {
+ val pos1 = pos
+ val qname = xName
+ debugLastStartElement.push((pos1, qname))
+ xSpaceOpt
+ if (ch == '/') { // empty tag
+ nextch
+ xToken('>')
+ return handle.makeXMLpat(pos1, qname, new mutable.ArrayBuffer[Tree]())
+ }
+
+ // else: tag with content
+ xToken('>')
+ var ts = new mutable.ArrayBuffer[Tree]
+ var exit = false
+ while (! exit) {
+ val pos2 = pos
+ if (xEmbeddedBlock) {
+ ts ++= xScalaPatterns
+ } else
+ ch match {
+ case '<' => // tag
+ nextch
+ if (ch != '/') { //child
+ ts.append(xPattern)
+ } else {
+ exit = true
+ }
+ case '{' => // embedded Scala patterns
+ while (ch == '{') {
+ nextch
+ ts ++= xScalaPatterns
+ }
+ // postcond: xEmbeddedBlock = false;
+ if (xEmbeddedBlock) Predef.error("problem with embedded block"); // assert
+
+ case SU =>
+ throw TruncatedXML
+
+ case _ => // text
+ appendText(pos2, ts, xText)
+ // here xEmbeddedBlock might be true;
+ //if( xEmbeddedBlock ) throw new ApplicationError("after:"+text); // assert
+ }
+ }
+ xEndTag(qname)
+ debugLastStartElement.pop
+ handle.makeXMLpat(pos1, qname, ts)
+ }
+
+ } /* class MarkupParser */
+}
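
Illustrative sketch (not from this commit): the surface syntax the markup parser above handles, i.e. XML literals of that era with attribute values and element content given as embedded Scala blocks in braces, and `}}` as the escape for a literal `}` in XML text (see the error message in xText). A small hedged example:

    val who = "world"
    // attribute value from a Scala block, embedded expression in element content,
    // and `}}` escaping a literal `}` in the text:
    val greeting = <greeting to={who}>hello {who.toUpperCase}, closing brace: }}</greeting>
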
diff --git a/src/compiler/scala/tools/nsc/ast/parser/NewScanners.scala b/src/compiler/scala/tools/nsc/ast/parser/NewScanners.scala
index db547b81db..8696e194ba 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/NewScanners.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/NewScanners.scala
@@ -16,6 +16,27 @@ trait NewScanners {
import Tokens._
trait CoreScannerInput extends BufferedIterator[Char] {
private[NewScanners] val scratch = new StringBuilder
+
+ /** iterates over and applies <code>f</code> to the next element
+ * if this iterator has a next element that <code>f</code> is defined for.
+ */
+ def readIf[T](f : PartialFunction[Char,T]) : Option[T] =
+ if (hasNext && f.isDefinedAt(head))
+ Some(f(next))
+ else None
+
+ /** iterates over elements as long as <code>f</code> is true
+ * for each element, returns whether anything was read
+ */
+ def readWhile(f : Char => Boolean) : Boolean = {
+ var read = false
+ while (hasNext && f(head)) {
+ next
+ read = true
+ }
+ read
+ }
+
def readIfStartsWith(c : Char) : Boolean =
if (head == c) { next; true } else false
def readIfStartsWith(c0 : Char, c1 : Char) : Boolean =
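
Illustrative REPL sketch (not from this commit): the two combinators added above are ordinary helpers over BufferedIterator[Char]; the readWhile/readIf below are local stand-ins with the same behavior, not the compiler's CoreScannerInput:

    val input: BufferedIterator[Char] = "42abc".iterator.buffered

    // consume chars while the predicate holds, report whether anything was consumed
    def readWhile(f: Char => Boolean): Boolean = {
      var read = false
      while (input.hasNext && f(input.head)) { input.next(); read = true }
      read
    }

    // consume one char only if the partial function is defined for it
    def readIf[T](f: PartialFunction[Char, T]): Option[T] =
      if (input.hasNext && f.isDefinedAt(input.head)) Some(f(input.next())) else None

    val sawDigits = readWhile(_.isDigit)                  // true, consumed "42"
    val letter    = readIf { case c if c.isLetter => c }  // Some('a')
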
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
index cfa91371fc..ec739fa6e0 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
@@ -1919,17 +1919,17 @@ trait Parsers extends NewScanners with MarkupParsers {
*/
def importSelector(names: ListBuffer[(Name, Name)]): Boolean =
if (inToken == USCORE) {
- inNextToken; names += (nme.WILDCARD, null); true
+ inNextToken; names += ((nme.WILDCARD, null)); true
} else {
val name = ident()
- names += (
+ names += ((
name,
if (inToken == ARROW) {
inNextToken
if (inToken == USCORE) { inNextToken; nme.WILDCARD } else ident()
} else {
name
- })
+ }))
false
}
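
Illustrative sketch (not from this commit): the doubled parentheses above look redundant but matter with the new collections. Buffer.+= now also has an overload taking several elements at once, so `names += (a, b)` no longer reliably means "append one pair"; wrapping the tuple in its own parentheses forces the single-element form. Roughly:

    import scala.collection.mutable.ListBuffer

    val names = new ListBuffer[(String, String)]
    // the inner parens build the Tuple2, the outer ones are the method call
    names += (("wild", "card"))
    // without the extra parens the two arguments would be matched against the
    // multi-element overload +=(elem1, elem2, elems*), which expects pairs, not Strings
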
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers1.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers1.scala
new file mode 100755
index 0000000000..8e181d8f4a
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers1.scala
@@ -0,0 +1,2607 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2009 LAMP/EPFL
+ * @author Martin Odersky
+ */
+// $Id: Parsers.scala 17415 2009-03-31 13:38:18Z imaier $
+//todo: allow infix type patterns
+
+
+package scala.tools.nsc.ast.parser
+
+import scala.collection.mutable.ListBuffer
+import scala.tools.nsc.util.{Position, OffsetPosition, NoPosition, BatchSourceFile}
+import symtab.Flags
+import Tokens._
+
+//todo verify when stableId's should be just plain qualified type ids
+
+/** <p>Performs the following context-free rewritings:</p>
+ * <ol>
+ * <li>
+ * Places all pattern variables in Bind nodes. In a pattern, for
+ * identifiers <code>x</code>:<pre>
+ * x => x @ _
+ * x:T => x @ (_ : T)</pre>
+ * </li>
+ * <li>Removes pattern definitions (PatDef's) as follows:
+ * If pattern is a simple (typed) identifier:<pre>
+ * <b>val</b> x = e ==> <b>val</b> x = e
+ * <b>val</b> x: T = e ==> <b>val</b> x: T = e</pre>
+ *
+ * if there are no variables in pattern<pre>
+ * <b>val</b> p = e ==> e match (case p => ())</pre>
+ *
+ * if there is exactly one variable in pattern<pre>
+ * <b>val</b> x_1 = e <b>match</b> (case p => (x_1))</pre>
+ *
+ * if there is more than one variable in pattern<pre>
+ * <b>val</b> p = e ==> <b>private synthetic val</b> t$ = e <b>match</b> (case p => (x_1, ..., x_N))
+ * <b>val</b> x_1 = t$._1
+ * ...
+ * <b>val</b> x_N = t$._N</pre>
+ * </li>
+ * <li>
+ * Removes function types as follows:<pre>
+ * (argtpes) => restpe ==> scala.Function_n[argtpes, restpe]</pre>
+ * </li>
+ * <li>
+ * Wraps naked case definitions in a match as follows:<pre>
+ * { cases } ==> (x => x.match {cases})<span style="font-family:normal;">, except when already argument to match</span></pre>
+ * </li>
+ * </ol>
+ */
+trait Parsers1 extends Scanners1 with MarkupParsers1 {
+self =>
+ val global : Global
+ import global._
+
+ private val glob: global.type = global
+ import global.posAssigner.atPos
+
+ case class OpInfo(operand: Tree, operator: Name, pos: Int)
+
+ class UnitParser(val unit: global.CompilationUnit) extends Parser {
+ val in = new UnitScanner(unit)
+ in.init()
+
+ def freshName(pos: Position, prefix: String): Name =
+ unit.fresh.newName(pos, prefix)
+
+ implicit def i2p(offset: Int): Position = new OffsetPosition(unit.source,offset)
+
+ def warning(pos: Int, msg: String) { unit.warning(pos, msg) }
+
+ def incompleteInputError(msg: String) {
+ unit.incompleteInputError(unit.source.asInstanceOf[BatchSourceFile].content.length - 1, msg)
+ }
+ def deprecationWarning(pos: Int, msg: String) {
+ unit.deprecationWarning(pos, msg)
+ }
+ def syntaxError(pos: Int, msg: String) { unit.error(pos, msg) }
+
+ /** the markup parser */
+ lazy val xmlp = new MarkupParser(this, true)
+
+ object symbXMLBuilder extends SymbolicXMLBuilder1(treeBuilder, this, true) { // DEBUG choices
+ val global: self.global.type = self.global
+ def freshName(prefix: String): Name = UnitParser.this.freshName(NoPosition, prefix)
+ }
+ def xmlLiteral : Tree = xmlp.xLiteral
+ def xmlLiteralPattern : Tree = xmlp.xLiteralPattern
+ }
+
+ class ScanOnly(unit: global.CompilationUnit) extends UnitParser(unit) {
+ override def parse(): Tree = {
+ while (in.token != EOF) in.nextToken
+ null
+ }
+ }
+
+ // parser constants, here so they don't pollute parser debug listing
+ private object ParserConfiguration {
+ final val Local = 0
+ final val InBlock = 1
+ final val InTemplate = 2
+ final val MINUS: Name = "-"
+ final val PLUS : Name = "+"
+ final val BANG : Name = "!"
+ final val TILDE: Name = "~"
+ final val AMP : Name = "&"
+ final val SLASH: Name = "/"
+ final val STAR : Name = "*"
+ final val BAR : Name = "|"
+ final val LT : Name = "<"
+ }
+
+ abstract class Parser {
+ ParserConfiguration.hashCode
+ import ParserConfiguration._
+ val in: Scanner
+ //val unit : CompilationUnit
+ //import in.ScanPosition
+ protected def freshName(pos: Position, prefix: String): Name
+ protected def posToReport: Int = in.offset
+
+ protected implicit def i2p(offset: Int): Position
+ //private implicit def p2i(pos: Position) = pos.offset.get
+
+ private def inToken = in.token
+ private def inSkipToken = in.skipToken()
+ private def inNextToken = in.nextToken()
+ private def inCurrentPos = in.offset
+ private def inNextTokenCode : Int = in.next.token
+ private def inName = in.name
+ private def charVal = in.charVal
+ private def intVal(isNegated: Boolean) = in.intVal(isNegated).asInstanceOf[Int]
+ private def longVal(isNegated: Boolean) = in.intVal(isNegated)
+ private def floatVal(isNegated: Boolean) = in.floatVal(isNegated).asInstanceOf[Float]
+ private def doubleVal(isNegated: Boolean) = in.floatVal(isNegated)
+ private def stringVal = in.strVal
+
+ /** whether a non-continuable syntax error has been seen */
+ //private var syntaxErrorSeen = false
+ private var lastErrorPos : Int = -1
+
+ object treeBuilder extends TreeBuilder {
+ val global: self.global.type = self.global
+ def freshName(pos : Position, prefix: String): Name =
+ Parser.this.freshName(pos, prefix)
+ }
+ import treeBuilder._
+
+ /** The implicit view parameters of the surrounding class */
+ var implicitClassViews: List[Tree] = Nil
+
+ /** this is the general parse method
+ */
+ def parse(): Tree = {
+ val t = compilationUnit()
+ accept(EOF)
+ t
+ }
+
+/* --------------- PLACEHOLDERS ------------------------------------------- */
+
+ /** The implicit parameters introduced by `_' in the current expression.
+ * Parameters appear in reverse order
+ */
+ var placeholderParams: List[ValDef] = Nil
+
+ /** The placeholderTypes introduced by `_' in the current type.
+ * Parameters appear in reverse order
+ */
+ var placeholderTypes: List[TypeDef] = Nil
+
+ def checkNoEscapingPlaceholders[T](op: => T): T = {
+ val savedPlaceholderParams = placeholderParams
+ val savedPlaceholderTypes = placeholderTypes
+ placeholderParams = List()
+ placeholderTypes = List()
+
+ val res = op
+
+ placeholderParams match {
+ case vd :: _ =>
+ syntaxError(vd.pos, "unbound placeholder parameter", false)
+ placeholderParams = List()
+ case _ =>
+ }
+ placeholderTypes match {
+ case td :: _ =>
+ syntaxError(td.pos, "unbound wildcard type", false)
+ placeholderTypes = List()
+ case _ =>
+ }
+ placeholderParams = savedPlaceholderParams
+ placeholderTypes = savedPlaceholderTypes
+
+ res
+ }
+
+ def placeholderTypeBoundary(op: => Tree): Tree = {
+ val savedPlaceholderTypes = placeholderTypes
+ placeholderTypes = List()
+ var t = op
+ if (!placeholderTypes.isEmpty && t.isInstanceOf[AppliedTypeTree]) {
+ t = ExistentialTypeTree(t, placeholderTypes.reverse)
+ placeholderTypes = List()
+ }
+ placeholderTypes = placeholderTypes ::: savedPlaceholderTypes
+ t
+ }
+
+/* ------------- ERROR HANDLING ------------------------------------------- */
+
+ protected def skip() {
+ var nparens = 0
+ var nbraces = 0
+ while (true) {
+ inToken match {
+ case EOF =>
+ return
+ case SEMI =>
+ if (nparens == 0 && nbraces == 0) return
+ case NEWLINE =>
+ if (nparens == 0 && nbraces == 0) return
+ case NEWLINES =>
+ if (nparens == 0 && nbraces == 0) return
+ case RPAREN =>
+ nparens -= 1
+ case RBRACE =>
+ if (nbraces == 0) return
+ nbraces -= 1
+ case LPAREN =>
+ nparens += 1
+ case LBRACE =>
+ nbraces += 1
+ case _ =>
+ }
+ inNextToken
+ }
+ }
+ def warning(pos: Int, msg: String): Unit
+ def incompleteInputError(msg: String): Unit
+ def deprecationWarning(pos: Int, msg: String): Unit
+ private def syntaxError(pos: Position, msg: String, skipIt: Boolean) {
+ pos.offset match {
+ case None => syntaxError(msg,skipIt)
+ case Some(offset) => syntaxError(offset, msg, skipIt)
+ }
+ }
+ def syntaxError(pos: Int, msg: String): Unit
+ def syntaxError(msg: String, skipIt: Boolean) {
+ syntaxError(inCurrentPos, msg, skipIt)
+ }
+
+ def syntaxError(pos: Int, msg: String, skipIt: Boolean) {
+ if (pos > lastErrorPos) {
+ syntaxError(pos, msg)
+ // no more errors on this token.
+ lastErrorPos = inCurrentPos
+ }
+ if (skipIt)
+ skip()
+ }
+
+ def warning(msg: String) { warning(inCurrentPos, msg) }
+
+ def syntaxErrorOrIncomplete(msg: String, skipIt: Boolean) {
+ val inToken = this.inToken
+ if (inToken == EOF)
+ incompleteInputError(msg)
+ else
+ syntaxError(inCurrentPos, msg, skipIt)
+ }
+ // unused.
+ /* Commented out because the comment says it is unused.
+ Probably eliminate eventually. GAW 2008.05.01
+ def mismatch(expected: Int, found: Int) {
+ val posToReport = this.posToReport
+ val msg =
+ ScannerConfiguration.token2string(expected) + " expected but " +
+ ScannerConfiguration.token2string(found) + " found."
+
+ if (found == EOF)
+ incompleteInputError(msg)
+ else
+ syntaxError(posToReport, msg, true)
+ }
+ */
+
+ /** Consume one token of the specified type, or
+ * signal an error if it is not there.
+ */
+ def accept(token: Int): Int = {
+ val pos = inCurrentPos
+ if (inToken != token) {
+ val posToReport =
+ //if (inCurrentPos.line(unit.source).get(0) > in.lastPos.line(unit.source).get(0))
+ // in.lastPos
+ //else
+ inCurrentPos
+ val msg =
+ token2string(token) + " expected but " +token2string(inToken) + " found."
+
+ if (inToken == EOF)
+ incompleteInputError(msg)
+ else
+ syntaxError(posToReport, msg, true)
+ }
+ if (inToken == token) inNextToken
+ pos
+ }
+ def surround[T](open: Int, close: Int)(f: => T, orElse: T): T = {
+ val wasOpened = inToken == open
+ accept(open)
+ if (wasOpened) {
+ val ret = f
+ accept(close)
+ ret
+ } else orElse
+ }
+
+ /** semi = nl {nl} | `;'
+ * nl = `\n' // where allowed
+ */
+ def acceptStatSep(): Boolean =
+ if (inToken == NEWLINE || inToken == NEWLINES) { inNextToken; true }
+ else {
+ val ret = inToken == SEMI
+ accept(SEMI)
+ ret
+ }
+
+ def errorTypeTree = TypeTree().setType(ErrorType).setPos((inCurrentPos))
+ def errorTermTree = Literal(Constant(null)).setPos((inCurrentPos))
+ def errorPatternTree = Ident(nme.WILDCARD).setPos((inCurrentPos))
+
+/* -------------- TOKEN CLASSES ------------------------------------------- */
+
+ def isModifier: Boolean = inToken match {
+ case ABSTRACT | FINAL | SEALED | PRIVATE |
+ PROTECTED | OVERRIDE | IMPLICIT | LAZY => true
+ case _ => false
+ }
+
+ def isLocalModifier: Boolean = inToken match {
+ case ABSTRACT | FINAL | SEALED | IMPLICIT | LAZY => true
+ case _ => false
+ }
+
+ def isDefIntro: Boolean = inToken match {
+ case VAL | VAR | DEF | TYPE | OBJECT |
+ CASEOBJECT | CLASS | CASECLASS | TRAIT => true
+ case _ => false
+ }
+
+ def isDclIntro: Boolean = inToken match {
+ case VAL | VAR | DEF | TYPE => true
+ case _ => false
+ }
+
+ def isIdent = inToken == IDENTIFIER || inToken == BACKQUOTED_IDENT
+
+ def isExprIntroToken(token: Int): Boolean = token match {
+ case CHARLIT | INTLIT | LONGLIT | FLOATLIT | DOUBLELIT |
+ STRINGLIT | SYMBOLLIT | TRUE | FALSE | NULL | IDENTIFIER | BACKQUOTED_IDENT |
+ THIS | SUPER | IF | FOR | NEW | USCORE | TRY | WHILE |
+ DO | RETURN | THROW | LPAREN | LBRACE | XMLSTART => true
+ case _ => false
+ }
+
+ def isExprIntro: Boolean = isExprIntroToken(inToken)
+
+ def isTypeIntroToken(token: Int): Boolean = token match {
+ case IDENTIFIER | BACKQUOTED_IDENT | THIS |
+ SUPER | USCORE | LPAREN | AT => true
+ case _ => false
+ }
+
+ def isTypeIntro: Boolean = isTypeIntroToken(inToken)
+
+ def isStatSep(token: Int): Boolean =
+ token == NEWLINE || token == NEWLINES || token == SEMI
+
+ def isStatSep: Boolean = isStatSep(inToken)
+
+
+/* --------- COMMENT AND ATTRIBUTE COLLECTION ----------------------------- */
+
+ /** Join the comment associated with a definition
+ */
+ def joinComment(trees: => List[Tree]): List[Tree] = {
+ val buf = in.flushDoc
+ if ((buf ne null) && buf.length > 0) trees map (t => DocDef(buf, t) setPos t.pos)
+ else trees
+ }
+
+/* ---------- TREE CONSTRUCTION ------------------------------------------- */
+
+ /** Convert tree to formal parameter list
+ */
+ def convertToParams(tree: Tree): List[ValDef] = tree match {
+ case Parens(ts) =>
+ ts map convertToParam
+ case _ =>
+ List(convertToParam(tree))
+ }
+
+ /** Convert tree to formal parameter
+ */
+ def convertToParam(tree: Tree): ValDef =
+ atPos(tree.pos) {
+ def removeAsPlaceholder(name: Name) {
+ placeholderParams = placeholderParams filter (_.name != name)
+ }
+ tree match {
+ case Ident(name) =>
+ removeAsPlaceholder(name)
+ ValDef(Modifiers(Flags.PARAM), name, TypeTree(), EmptyTree)
+ case Typed(tree @ Ident(name), tpe) if (tpe.isType) => // get the ident!
+ removeAsPlaceholder(name)
+ ValDef(Modifiers(Flags.PARAM), name, tpe, EmptyTree).setPos(tree.pos)
+ case _ =>
+ syntaxError(tree.pos, "not a legal formal parameter", false)
+ ValDef(Modifiers(Flags.PARAM), nme.ERROR, errorTypeTree, EmptyTree)
+ }
+ }
+
+ /** Convert (qual)ident to type identifier
+ */
+ def convertToTypeId(tree: Tree): Tree = tree match {
+ case Ident(name) =>
+ Ident(name.toTypeName).setPos(tree.pos)
+ case Select(qual, name) =>
+ Select(qual, name.toTypeName).setPos(tree.pos)
+ case _ =>
+ syntaxError(tree.pos, "identifier expected", false)
+ errorTypeTree
+ }
+
+ /** make closure from tree starting with a `.' */
+ def makeDotClosure(tree: Tree): Tree = {
+ val pname = freshName(tree.pos, "x$")
+ def insertParam(tree: Tree): Tree = atPos(tree.pos) {
+ tree match {
+ case Ident(name) =>
+ Select(Ident(pname), name)
+ case Select(qual, name) =>
+ Select(insertParam(qual), name)
+ case Apply(fn, args) =>
+ Apply(insertParam(fn), args)
+ case TypeApply(fn, args) =>
+ TypeApply(insertParam(fn), args)
+ case _ =>
+ syntaxError(tree.pos, "cannot convert to closure", false)
+ errorTermTree
+ }
+ }
+ Function(List(makeSyntheticParam(pname)), insertParam(tree))
+ }
+
+/* --------- OPERAND/OPERATOR STACK --------------------------------------- */
+
+ var opstack: List[OpInfo] = Nil
+
+ def precedence(operator: Name): Int =
+ if (operator eq nme.ERROR) -1
+ else {
+ val firstCh = operator(0)
+ if (((firstCh >= 'A') && (firstCh <= 'Z')) ||
+ ((firstCh >= 'a') && (firstCh <= 'z')))
+ 1
+ else if (nme.isOpAssignmentName(operator))
+ 0
+ else
+ firstCh match {
+ case '|' => 2
+ case '^' => 3
+ case '&' => 4
+ case '=' | '!' => 5
+ case '<' | '>' => 6
+ case ':' => 7
+ case '+' | '-' => 8
+ case '*' | '/' | '%' => 9
+ case _ => 10
+ }
+ }
+
+ def checkSize(kind: String, size: Int, max: Int) {
+ if (size > max) syntaxError("too many "+kind+", maximum = "+max, false)
+ }
+
+ def checkAssoc(pos: Int, op: Name, leftAssoc: Boolean) =
+ if (treeInfo.isLeftAssoc(op) != leftAssoc)
+ syntaxError(
+ pos, "left- and right-associative operators with same precedence may not be mixed", false)
+
+ def reduceStack(isExpr: Boolean, base: List[OpInfo], top0: Tree,
+ prec: Int, leftAssoc: Boolean): Tree = {
+ var top = top0
+ if (opstack != base && precedence(opstack.head.operator) == prec)
+ checkAssoc(opstack.head.pos, opstack.head.operator, leftAssoc)
+ while (opstack != base &&
+ (prec < precedence(opstack.head.operator) ||
+ (leftAssoc && prec == precedence(opstack.head.operator)))) {
+ top = atPos(opstack.head.pos) {
+ makeBinop(isExpr, opstack.head.operand, opstack.head.operator, top)
+ }
+ opstack = opstack.tail
+ }
+ top
+ }
+
+/* -------- IDENTIFIERS AND LITERALS ------------------------------------------- */
+
+ def ident(): Name =
+ if (inToken == IDENTIFIER || inToken == BACKQUOTED_IDENT) {
+ val name = inName.encode
+ inNextToken
+ name
+ } else {
+ accept(IDENTIFIER)
+ nme.ERROR
+ }
+
+ def selector(t: Tree): Tree =
+ atPos(inCurrentPos)(Select(t, ident()))
+
+ /** Path ::= StableId
+ * | [Ident `.'] this
+ * AnnotType ::= Path [`.' type]
+ */
+ def path(thisOK: Boolean, typeOK: Boolean): Tree = {
+ var t: Tree = null
+ if (inToken == THIS) {
+ t = atPos(inSkipToken) { This(nme.EMPTY.toTypeName) }
+ if (!thisOK || inToken == DOT) {
+ t = selectors(t, typeOK, accept(DOT))
+ }
+ } else if (inToken == SUPER) {
+ // val pos = inCurrentPos
+ val pos = inSkipToken
+ val (mix,usePos) = mixinQualifierOpt(pos)
+ t = atPos(usePos) {
+ Super(nme.EMPTY.toTypeName, mix)
+ }
+ t = atPos(accept(DOT)) { selector(t) }
+ if (inToken == DOT)
+ t = selectors(t, typeOK, inSkipToken)
+ } else {
+ val i = atPos(inCurrentPos) {
+ if (inToken == BACKQUOTED_IDENT) new BackQuotedIdent(ident())
+ else Ident(ident())
+ }
+ t = i
+ if (inToken == DOT) {
+ val pos = inSkipToken
+ if (inToken == THIS) {
+ inNextToken
+ t = atPos(i.pos) { This(i.name.toTypeName) }
+ if (!thisOK || inToken == DOT)
+ t = selectors(t, typeOK, accept(DOT))
+ } else if (inToken == SUPER) {
+ inNextToken
+ val (mix,pos) = mixinQualifierOpt(i.pos)
+ t = atPos(pos) { Super(i.name.toTypeName, mix) }
+ t = atPos(accept(DOT)) {selector(t)}
+ if (inToken == DOT)
+ t = selectors(t, typeOK, inSkipToken)
+ } else {
+ t = selectors(t, typeOK, pos)
+ }
+ }
+ }
+ t
+ }
+
+ def selectors(t: Tree, typeOK: Boolean, pos : Int): Tree =
+ if (typeOK && inToken == TYPE) {
+ inNextToken
+ atPos(pos) { SingletonTypeTree(t) }
+ } else {
+ val t1 = atPos(pos) { selector(t); }
+ if (inToken == DOT) { selectors(t1, typeOK, inSkipToken) }
+ else t1
+ }
+
+ /** MixinQualifier ::= `[' Id `]'
+ */
+ def mixinQualifierOpt(pos: Position): (Name, Position) =
+ if (inToken == LBRACKET) {
+ inNextToken
+ val pos = inCurrentPos
+ val name = ident().toTypeName
+ accept(RBRACKET)
+ (name, pos)
+ } else {
+ (nme.EMPTY.toTypeName, pos)
+ }
+
+ /** StableId ::= Id
+ * | Path `.' Id
+ * | [id '.'] super [`[' id `]']`.' id
+ */
+ def stableId(): Tree =
+ path(false, false)
+
+ /** QualId ::= Id {`.' Id}
+ */
+ def qualId(): Tree = {
+ val id = atPos(inCurrentPos) { Ident(ident()) }
+ if (inToken == DOT) { selectors(id, false, inSkipToken) }
+ else id
+ }
+
+ /** SimpleExpr ::= literal
+ * | symbol
+ * | null
+ */
+
+ def literal(isPattern: Boolean, isNegated: Boolean): Tree = {
+ def litToTree() = atPos(inCurrentPos) {
+ Literal(
+ inToken match {
+ case CHARLIT => Constant(charVal)
+ case INTLIT => Constant(intVal(isNegated))
+ case LONGLIT => Constant(longVal(isNegated))
+ case FLOATLIT => Constant(floatVal(isNegated))
+ case DOUBLELIT => Constant(doubleVal(isNegated))
+ case STRINGLIT | SYMBOLLIT => Constant(stringVal)
+ case TRUE => Constant(true)
+ case FALSE => Constant(false)
+ case NULL => Constant(null)
+ case _ =>
+ syntaxErrorOrIncomplete("illegal literal", true)
+ null
+ })
+ }
+
+ val isSymLit = inToken == SYMBOLLIT
+ val t = litToTree()
+ val pos = inSkipToken
+ if (isSymLit) {
+ atPos(pos) {
+ var symid = scalaDot(nme.Symbol)
+ Apply(symid, List(t))
+ }
+ } else {
+ t
+ }
+ }
+
+ def newLineOpt() {
+ if (inToken == NEWLINE) inNextToken
+ }
+
+ def newLinesOpt() {
+ if (inToken == NEWLINE || inToken == NEWLINES)
+ inNextToken
+ }
+
+ def newLineOptWhenFollowedBy(token: Int) {
+ // note: next is defined here because current == NEWLINE
+ if (inToken == NEWLINE && inNextTokenCode == token) newLineOpt()
+ }
+
+ def newLineOptWhenFollowing(p: Int => Boolean) {
+ // note: next is defined here because current == NEWLINE
+ if (inToken == NEWLINE && p(inNextTokenCode)) newLineOpt()
+ }
+
+/* ------------- TYPES ---------------------------------------------------- */
+
+ /** TypedOpt ::= [`:' Type]
+ */
+ def typedOpt(): Tree =
+ if (inToken == COLON) { inNextToken; typ() }
+ else TypeTree()
+
+ /** RequiresTypedOpt ::= [requires AnnotType]
+ */
+ def requiresTypeOpt(): Tree =
+ if (inToken == REQUIRES) {
+ deprecationWarning(inCurrentPos, "`requires T' has been deprecated; use `{ self: T => ...' instead")
+ inNextToken; annotType(false)
+ } else TypeTree()
+
+ /** Types ::= Type {`,' Type}
+ * (also eats trailing comma if it finds one)
+ */
+ def types(isPattern: Boolean, isTypeApply: Boolean, isFuncArg: Boolean): List[Tree] = {
+ val ts = new ListBuffer[Tree] + argType(isPattern, isTypeApply, isFuncArg)
+ while (inToken == COMMA) {
+ val pos = inCurrentPos
+ inNextToken
+ if (inToken == RPAREN) {
+ deprecationWarning(pos, "Trailing commas have been deprecated")
+ return ts.toList
+ } else {
+ ts += argType(isPattern, isTypeApply, isFuncArg)
+ }
+ }
+ ts.toList
+ }
+
+ /** modes for infix types */
+ object InfixMode extends Enumeration {
+ val FirstOp, LeftOp, RightOp = Value
+ }
+
+ /** Type ::= InfixType `=>' Type
+ * | `(' [`=>' Type] `)' `=>' Type
+ * | InfixType [ExistentialClause]
+ * ExistentialClause ::= forSome `{' ExistentialDcl {semi ExistentialDcl}} `}'
+ * ExistentialDcl ::= type TypeDcl | val ValDcl
+ */
+ def typ(): Tree = typ(false)
+
+ def typ(isPattern: Boolean): Tree = placeholderTypeBoundary {
+ val t =
+ if (inToken == LPAREN) {
+ val pos = inSkipToken
+ if (inToken == RPAREN) {
+ inNextToken
+ atPos(accept(ARROW)) { makeFunctionTypeTree(List(), typ(isPattern)) }
+ } else {
+ val ts = types(isPattern, false, true)
+ accept(RPAREN)
+ if (inToken == ARROW) atPos(inSkipToken) {
+ makeFunctionTypeTree(ts, typ(isPattern))
+ }
+ else {
+ for (t <- ts) t match {
+ case AppliedTypeTree(Select(_, n), _)
+ if (n == nme.BYNAME_PARAM_CLASS_NAME.toTypeName) =>
+ syntaxError(t.pos, "no by-name parameter type allowed here", false)
+ case _ =>
+ }
+ infixTypeRest(pos, annotTypeRest(pos, isPattern, makeTupleType(ts, true)), false, InfixMode.FirstOp)
+ }
+ }
+ } else {
+ infixType(isPattern, InfixMode.FirstOp)
+ }
+ if (inToken == ARROW)
+ atPos(inSkipToken) {
+ makeFunctionTypeTree(List(t), typ(isPattern))
+ }
+ else if (inToken == FORSOME)
+ atPos(inSkipToken) {
+ val whereClauses = refinement()
+ for (wc <- whereClauses) {
+ wc match {
+ case TypeDef(_, _, _, TypeBoundsTree(_, _)) |
+ ValDef(_, _, _, EmptyTree) | EmptyTree =>
+ ;
+ case _ =>
+ syntaxError(wc.pos, "not a legal existential clause", false)
+ }
+ }
+ ExistentialTypeTree(t, whereClauses)
+ }
+ else t
+ }
+
+ /** InfixType ::= CompoundType {id [nl] CompoundType}
+ */
+ def infixType(isPattern: Boolean, mode: InfixMode.Value): Tree = placeholderTypeBoundary {
+ infixTypeRest(inCurrentPos, infixTypeFirst(isPattern), isPattern, mode)
+ }
+
+ def infixTypeFirst(isPattern: Boolean): Tree =
+ if (inToken == LBRACE) scalaAnyRefConstr else annotType(isPattern)
+
+ def infixTypeRest(pos: Int, t0: Tree, isPattern: Boolean, mode: InfixMode.Value): Tree = {
+ val t = compoundTypeRest(pos, t0, isPattern)
+ if (isIdent && inName != nme.STAR) {
+ val opPos = inCurrentPos
+ val leftAssoc = treeInfo.isLeftAssoc(inName)
+ if (mode == InfixMode.LeftOp) checkAssoc(opPos, inName, true)
+ else if (mode == InfixMode.RightOp) checkAssoc(opPos, inName, false)
+ val op = ident()
+ newLineOptWhenFollowing(isTypeIntroToken)
+ def mkOp(t1: Tree) = atPos(opPos) { AppliedTypeTree(Ident(op.toTypeName), List(t, t1)) }
+ if (leftAssoc)
+ infixTypeRest(inCurrentPos, mkOp(compoundType(isPattern)), isPattern, InfixMode.LeftOp)
+ else
+ mkOp(infixType(isPattern, InfixMode.RightOp))
+ } else t
+ }
+
+ /** CompoundType ::= AnnotType {with AnnotType} [Refinement]
+ * | Refinement
+ */
+ def compoundType(isPattern: Boolean): Tree =
+ compoundTypeRest(inCurrentPos, infixTypeFirst(isPattern), isPattern)
+
+ def compoundTypeRest(pos: Int, t: Tree, isPattern: Boolean): Tree = {
+ var ts = new ListBuffer[Tree] + t
+ while (inToken == WITH) {
+ inNextToken; ts += annotType(isPattern)
+ }
+ newLineOptWhenFollowedBy(LBRACE)
+ atPos(pos) {
+ if (inToken == LBRACE) {
+ // Warn if they are attempting to refine Unit; we can't be certain it's
+ // scala.Unit they're refining because at this point all we have is an
+ // identifier, but at a later stage we lose the ability to tell an empty
+ // refinement from no refinement at all. See bug #284.
+ for (Ident(name) <- ts) name.toString match {
+ case "Unit" | "scala.Unit" =>
+ warning("Detected apparent refinement of Unit; are you missing an '=' sign?")
+ case _ =>
+ }
+ CompoundTypeTree(Template(ts.toList, emptyValDef, refinement()))
+ }
+ else
+ makeIntersectionTypeTree(ts.toList)
+ }
+ }
+
+ /** AnnotType ::= SimpleType {Annotation}
+ * SimpleType ::= SimpleType TypeArgs
+ * | SimpleType `#' Id
+ * | StableId
+ * | Path `.' type
+ * | `(' Types [`,'] `)'
+ * | WildcardType
+ */
+ def annotType(isPattern: Boolean): Tree = placeholderTypeBoundary {
+ val pos = inCurrentPos
+
+ val t: Tree = annotTypeRest(pos, isPattern,
+ if (inToken == LPAREN) {
+ inNextToken
+ val ts = types(isPattern, false, false)
+ accept(RPAREN)
+ atPos(pos) { makeTupleType(ts, true) }
+ } else if (inToken == USCORE) {
+ wildcardType(inSkipToken)
+ } else {
+ val r = path(false, true)
+ r match {
+ case SingletonTypeTree(_) => r
+ case _ => convertToTypeId(r)
+ }
+ })
+ (t /: annotations(false)) (makeAnnotated)
+ }
+
+ def annotTypeRest(pos: Int, isPattern: Boolean, t: Tree): Tree =
+ if (inToken == HASH) {
+ inSkipToken
+ val posId = inCurrentPos
+ val id = ident
+ annotTypeRest(pos, isPattern, atPos(posId) { SelectFromTypeTree(t, id.toTypeName) })
+ } else if (inToken == LBRACKET) {
+ val usePos = if (t.pos != NoPosition) t.pos else i2p(pos)
+ annotTypeRest(pos, isPattern, atPos(usePos) { AppliedTypeTree(t, typeArgs(isPattern, false)) })
+ }
+ else
+ t
+
+ /** WildcardType ::= `_' TypeBounds
+ */
+ def wildcardType(pos: Int) = {
+ val pname = freshName(pos, "_$").toTypeName
+ val param = atPos(pos) { makeSyntheticTypeParam(pname, typeBounds()) }
+ placeholderTypes = param :: placeholderTypes
+ Ident(pname) setPos pos
+ }
+
+ /** TypeArgs ::= `[' ArgType {`,' ArgType} `]'
+ */
+ def typeArgs(isPattern: Boolean, isTypeApply: Boolean): List[Tree] = {
+ accept(LBRACKET)
+ val ts = types(isPattern, isTypeApply, false)
+ accept(RBRACKET)
+ ts
+ }
+
+ /** ArgType ::= Type
+ */
+ def argType(isPattern: Boolean, isTypeApply: Boolean, isFuncArg: Boolean): Tree =
+ if (isPattern) {
+ if (inToken == USCORE)
+ if (inToken == SUBTYPE || inToken == SUPERTYPE) wildcardType(inSkipToken)
+ else atPos(inSkipToken) { Bind(nme.WILDCARD.toTypeName, EmptyTree) }
+ else if (inToken == IDENTIFIER && treeInfo.isVariableName(inName.toTypeName))
+ atPos(inCurrentPos) {
+ Bind(ident().toTypeName, EmptyTree)
+ }
+ else {
+ typ(true)
+ }
+ } else if (isFuncArg) {
+ // copy-paste (with change) from def paramType
+ if (inToken == ARROW)
+ atPos(inSkipToken) {
+ AppliedTypeTree(
+ rootScalaDot(nme.BYNAME_PARAM_CLASS_NAME.toTypeName), List(typ()))
+ }
+ else {
+ val t = typ()
+ if (isIdent && inName == STAR) {
+ inNextToken
+ atPos(t.pos) {
+ AppliedTypeTree(
+ rootScalaDot(nme.REPEATED_PARAM_CLASS_NAME.toTypeName), List(t))
+ }
+ } else t
+ }
+ } else if (isTypeApply) {
+ typ()
+ } else {
+ typ()
+ }
+
+/* ----------- EXPRESSIONS ------------------------------------------------ */
+
+ /** EqualsExpr ::= `=' Expr
+ */
+ def equalsExpr(): Tree = {
+ accept(EQUALS)
+ expr()
+ }
+
+ /** Exprs ::= Expr {`,' Expr}
+ *
+ * (also eats trailing comma if it finds one)
+ */
+ def exprs(): List[Tree] = {
+ val ts = new ListBuffer[Tree] + expr()
+ while (inToken == COMMA) {
+ val pos = inCurrentPos
+ inNextToken
+ if (inToken == RPAREN) {
+ deprecationWarning(pos, "Trailing commas have been deprecated")
+ return ts.toList
+ } else {
+ ts += expr()
+ }
+ }
+ ts.toList
+ }
+
+
+ /** Expr ::= (Bindings | Id | `_') `=>' Expr
+ * | Expr1
+ * ResultExpr ::= (Bindings | Id `:' CompoundType) `=>' Block
+ * | Expr1
+ * Expr1 ::= if `(' Expr `)' {nl} Expr [[semi] else Expr]
+ * | try (`{' Block `}' | Expr) [catch `{' CaseClauses `}'] [finally Expr]
+ * | while `(' Expr `)' {nl} Expr
+ * | do Expr [semi] while `(' Expr `)'
+ * | for (`(' Enumerators `)' | '{' Enumerators '}') {nl} [yield] Expr
+ * | throw Expr
+ * | return [Expr]
+ * | [SimpleExpr `.'] Id `=' Expr
+ * | SimpleExpr1 ArgumentExprs `=' Expr
+ * | PostfixExpr Ascription
+ * | PostfixExpr match `{' CaseClauses `}'
+ * Bindings ::= `(' [Binding {`,' Binding}] `)'
+ * Binding ::= (Id | `_') [`:' Type]
+ * Ascription ::= `:' CompoundType
+ * | `:' Annotation {Annotation}
+ * | `:' `_' `*'
+ */
+ def expr(): Tree = expr(Local)
+ /* hook for the IDE; unlike expr, this can be stubbed.
+ * Don't use it for any tree that is inspected in the parser!
+ */
+ def statement(location: Int): Tree = expr(location)
+ def expr(location: Int): Tree = {
+ def isWildcard(t: Tree): Boolean = t match {
+ case Ident(name1) if !placeholderParams.isEmpty && name1 == placeholderParams.head.name => true
+ case Typed(t1, _) => isWildcard(t1)
+ case Annotated(t1, _) => isWildcard(t1)
+ case _ => false
+ }
+ var savedPlaceholderParams = placeholderParams
+ placeholderParams = List()
+ var res = inToken match {
+ case IF =>
+ val pos = inSkipToken
+ val cond = surround(LPAREN,RPAREN)(expr(),Literal(true))
+ newLinesOpt()
+ val thenp = expr()
+ val elsep = if (inToken == ELSE) { inNextToken; expr() }
+ else Literal(())
+ atPos(pos) { If(cond, thenp, elsep) }
+ case TRY =>
+ atPos(inSkipToken) {
+ val body =
+ if (inToken == LBRACE) surround(LBRACE, RBRACE)(block(), Literal(()))
+ else if (inToken == LPAREN) surround(LPAREN, RPAREN)(expr(), Literal(()))
+ else expr()
+ val catches =
+ if (inToken == CATCH) {
+ inNextToken
+ val cases = surround(LBRACE,RBRACE)(caseClauses(), Nil)
+ cases
+ } else Nil
+ val finalizer =
+ if (inToken == FINALLY) { inNextToken; expr() }
+ else EmptyTree
+ Try(body, catches, finalizer)
+ }
+ case WHILE =>
+ val pos = inSkipToken
+ val lname: Name = freshName(pos, "while$")
+ val cond = surround(LPAREN,RPAREN)(expr(),Literal(true))
+ newLinesOpt()
+ val body = expr()
+ atPos(pos) { makeWhile(lname, cond, body) }
+ case DO =>
+ val pos = inSkipToken
+ val lname: Name = freshName(pos, "doWhile$")
+ val body = expr()
+ if (isStatSep) inNextToken
+ accept(WHILE)
+ val cond = surround(LPAREN,RPAREN)(expr(), Literal(true))
+ atPos(pos) { makeDoWhile(lname, body, cond) }
+ case FOR =>
+ atPos(inSkipToken) {
+ val startToken = inToken
+ val (open,close) = if (startToken == LBRACE) (LBRACE,RBRACE) else (LPAREN,RPAREN)
+ val enums = surround(open,close)(enumerators(), Nil)
+ newLinesOpt()
+ if (inToken == YIELD) {
+ inNextToken; makeForYield(enums, expr())
+ } else makeFor(enums, expr())
+ }
+ case RETURN =>
+ atPos(inSkipToken) {
+ Return(if (isExprIntro) expr() else Literal(()))
+ }
+ case THROW =>
+ atPos(inSkipToken) {
+ Throw(expr())
+ }
+ case DOT =>
+ deprecationWarning(inCurrentPos, "`.f' has been deprecated; use `_.f' instead")
+ atPos(inSkipToken) {
+ if (isIdent) {
+ makeDotClosure(stripParens(simpleExpr()))
+ } else {
+ syntaxErrorOrIncomplete("identifier expected", true)
+ errorTermTree
+ }
+ }
+ case _ =>
+ var t = postfixExpr()
+ if (inToken == EQUALS) {
+ t match {
+ case Ident(_) | Select(_, _) | Apply(_, _) =>
+ t = atPos(inSkipToken) { makeAssign(t, expr()) }
+ case _ =>
+ }
+ } else if (inToken == COLON) {
+ t = stripParens(t)
+ val pos = inSkipToken
+ if (inToken == USCORE) {
+ //todo: need to handle case where USCORE is a wildcard in a type
+ val pos1 = inSkipToken
+ if (isIdent && inName == nme.STAR) {
+ inNextToken
+ t = atPos(pos) {
+ Typed(t, atPos(pos1) { Ident(nme.WILDCARD_STAR.toTypeName) })
+ }
+ } else {
+ syntaxErrorOrIncomplete("`*' expected", true)
+ }
+ } else if (in.token == AT) {
+ t = (t /: annotations(false)) (makeAnnotated)
+ } else {
+ t = atPos(pos) {
+ val tpt =
+ if (location != Local) infixType(false, InfixMode.FirstOp)
+ else typ()
+ if (isWildcard(t))
+ (placeholderParams: @unchecked) match {
+ case (vd @ ValDef(mods, name, _, _)) :: rest =>
+ placeholderParams = copy.ValDef(vd, mods, name, tpt.duplicate, EmptyTree) :: rest
+ }
+ // this does not correspond to syntax, but is necessary to
+ // accept closures. We might restrict closures to be between {...} only!
+ Typed(t, tpt)
+ }
+ }
+ } else if (inToken == MATCH) {
+ t = atPos(inSkipToken) {
+ val cases = surround(LBRACE,RBRACE)(caseClauses(), Nil)
+ Match(stripParens(t), cases)
+ }
+ }
+ // in order to allow anonymous functions as statements (as opposed to expressions) inside
+ // templates, we have to disambiguate them from self type declarations - bug #1565
+ // The case still missed is unparenthesized single argument, like "x: Int => x + 1", which
+ // may be impossible to distinguish from a self-type and so remains an error. (See #1564)
+ def lhsIsTypedParamList() = t match {
+ case Parens(xs) if xs forall (_.isInstanceOf[Typed]) => true
+ case _ => false
+ }
+ if (inToken == ARROW && (location != InTemplate || lhsIsTypedParamList)) {
+ t = atPos(inSkipToken) {
+ Function(convertToParams(t), if (location != InBlock) expr() else block())
+ }
+ }
+ stripParens(t)
+ }
+ if (!placeholderParams.isEmpty && !isWildcard(res)) {
+ res = atPos(res.pos){ Function(placeholderParams.reverse, res) }
+ placeholderParams = List()
+ }
+ placeholderParams = placeholderParams ::: savedPlaceholderParams
+ res
+ }
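+ // e.g. inside a template body `(x: Int, y: Int) => x + y` is accepted as an
+ // anonymous-function statement because a parenthesized, fully typed parameter
+ // list cannot be a self-type annotation (lhsIsTypedParamList, bug #1565).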
+
+ /** PostfixExpr ::= InfixExpr [Id [nl]]
+ * InfixExpr ::= PrefixExpr
+ * | InfixExpr Id [nl] InfixExpr
+ */
+ def postfixExpr(): Tree = {
+ val base = opstack
+ var top = prefixExpr()
+ while (isIdent) {
+ top = reduceStack(
+ true, base, top, precedence(inName), treeInfo.isLeftAssoc(inName))
+ val op = inName
+ opstack = OpInfo(top, op, inCurrentPos) :: opstack
+ ident()
+ newLineOptWhenFollowing(isExprIntroToken)
+ if (isExprIntro) {
+ top = prefixExpr()
+ } else {
+ val topinfo = opstack.head
+ opstack = opstack.tail
+ return Select(
+ stripParens(reduceStack(true, base, topinfo.operand, 0, true)),
+ topinfo.operator.encode).setPos(topinfo.pos)
+ }
+ }
+ reduceStack(true, base, top, 0, true)
+ }
+
+ /** PrefixExpr ::= [`-' | `+' | `~' | `!' | `&'] SimpleExpr
+ */
+ def prefixExpr(): Tree = {
+ def unaryOp(): Name = "unary_" + ident()
+ if (isIdent && inName == MINUS) {
+ val name = unaryOp()
+ inToken match {
+ case INTLIT | LONGLIT | FLOATLIT | DOUBLELIT => literal(false, true)
+ case _ => atPos(inCurrentPos) { Select(stripParens(simpleExpr()), name) }
+ }
+ } else if (isIdent && (inName == PLUS || inName == TILDE || inName == BANG)) {
+ val pos = inCurrentPos
+ val name = unaryOp()
+ atPos(pos) { Select(stripParens(simpleExpr()), name) }
+ } else if (isIdent && inName == AMP) {
+ deprecationWarning(inCurrentPos, "`&f' has been deprecated; use `f _' instead")
+ val pos = inCurrentPos
+ val name = ident()
+ atPos(pos) { Typed(stripParens(simpleExpr()), Function(List(), EmptyTree)) }
+/* XX-LIFTING
+ } else if (settings.Xexperimental.value && isIdent && inName == SLASH) {
+ val pos = inSkipToken
+ val name = freshName()
+ liftedGenerators += ValFrom(pos, Bind(name, Ident(nme.WILDCARD)), simpleExpr())
+ Ident(name) setPos pos
+*/
+ } else {
+ simpleExpr()
+ }
+ }
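+ // e.g. `-1` is read as a negative literal, while `-x` becomes `x.unary_-` and
+ // `!done` becomes `done.unary_!`.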
+ def xmlLiteral(): Tree
+
+ /* SimpleExpr ::= new (ClassTemplate | TemplateBody)
+ * | BlockExpr
+ * | SimpleExpr1 [`_']
+ * SimpleExpr1 ::= literal
+ * | xLiteral
+ * | Path
+ * | `(' [Exprs [`,']] `)'
+ * | SimpleExpr `.' Id
+ * | SimpleExpr TypeArgs
+ * | SimpleExpr1 ArgumentExprs
+ */
+ def simpleExpr(): Tree = {
+ var t: Tree = null
+ var canApply = true
+ inToken match {
+ case CHARLIT | INTLIT | LONGLIT | FLOATLIT | DOUBLELIT | STRINGLIT |
+ SYMBOLLIT | TRUE | FALSE | NULL =>
+ t = literal(false, false)
+ case XMLSTART => t = xmlLiteral()
+ case IDENTIFIER | BACKQUOTED_IDENT | THIS | SUPER =>
+ t = path(true, false)
+ case USCORE =>
+ val pos = inSkipToken
+ val pname = freshName(pos, "x$")
+ val param = atPos(pos){ makeSyntheticParam(pname) }
+ placeholderParams = param :: placeholderParams
+ t = atPos(pos) { Ident(pname) }
+ case LPAREN =>
+ val pos = inSkipToken
+ val ts = if (inToken == RPAREN) List() else exprs()
+ accept(RPAREN)
+ t = Parens(ts) setPos (pos)
+ case LBRACE =>
+ t = blockExpr()
+ canApply = false
+ case NEW =>
+ t = atPos(inSkipToken) {
+ val (parents, argss, self, stats) = template(false)
+ makeNew(parents, self, stats, argss)
+ }
+ canApply = false
+ case _ =>
+ syntaxErrorOrIncomplete("illegal start of simple expression", true)
+ t = errorTermTree
+ }
+ simpleExprRest(t, canApply)
+ }
+
+ def simpleExprRest(t: Tree, canApply: Boolean): Tree = {
+ if (canApply) newLineOptWhenFollowedBy(LBRACE)
+ inToken match {
+ case DOT =>
+ simpleExprRest(atPos(inSkipToken) { selector(stripParens(t)) }, true)
+ case LBRACKET =>
+ val t1 = stripParens(t)
+ t1 match {
+ case Ident(_) | Select(_, _) =>
+ val pos = if (t1.pos == NoPosition) i2p(inCurrentPos) else t1.pos
+ simpleExprRest(atPos(pos) { TypeApply(t1, typeArgs(false, true)) }, true)
+ case _ =>
+ t1
+ }
+ case LPAREN | LBRACE if (canApply) =>
+ // again, position should be on the identifier, not the `('
+ var pos = if (t.pos == NoPosition) i2p(inCurrentPos) else t.pos
+ simpleExprRest(atPos(pos) {
+ // look for anonymous function application like (f _)(x) and
+ // translate to (f _).apply(x), bug #460
+ val sel = t match {
+ case Parens(List(Typed(_, _: Function))) =>
+ Select(stripParens(t), nme.apply)
+ case _ =>
+ stripParens(t)
+ }
+ Apply(sel, argumentExprs())
+ }, true)
+ case USCORE =>
+ atPos(inSkipToken) { Typed(stripParens(t), Function(List(), EmptyTree)) }
+ case _ =>
+ t
+ }
+ }
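+ // e.g. `(f _)(x)` is rewritten to `(f _).apply(x)` so the partially applied
+ // function is applied explicitly (bug #460).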
+
+ /** ArgumentExprs ::= `(' [Exprs [`,']] `)'
+ * | [nl] BlockExpr
+ */
+ def argumentExprs(): List[Tree] = {
+ if (inToken == LBRACE) {
+ List(blockExpr())
+ } else {
+ val ts = surround(LPAREN,RPAREN)(if (inToken == RPAREN) List() else exprs(), List())
+ ts
+ }
+ }
+
+ /** BlockExpr ::= `{' (CaseClauses | Block) `}'
+ */
+ def blockExpr(): Tree = {
+ assert(inToken == LBRACE)
+ val res = atPos(accept(LBRACE)) { // no need to surround
+ if (inToken == CASE) Match(EmptyTree, caseClauses())
+ else block()
+ }
+ accept(RBRACE)
+ res
+ }
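+ // e.g. `{ case Some(x) => x; case None => 0 }` parses to a Match with an empty
+ // selector, which the typer later treats as an anonymous (partial) function.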
+
+ /** Block ::= BlockStatSeq
+ */
+ def block(): Tree = {
+ makeBlock(blockStatSeq(new ListBuffer[Tree]))
+ }
+
+ /** CaseClauses ::= CaseClause {CaseClause}
+ */
+ def caseClauses(): List[CaseDef] = {
+ val ts = new ListBuffer[CaseDef]
+ do { ts += caseClause()
+ } while (inToken == CASE)
+ ts.toList
+ }
+
+ /** CaseClause ::= case Pattern [Guard] `=>' Block
+ */
+ def caseClause(): CaseDef =
+ atPos(accept(CASE)) {
+ val pat = pattern()
+ val gd = guard()
+ makeCaseDef(pat, gd, caseBlock())
+ }
+ // IDE HOOK (so we can memoize case blocks)
+ def caseBlock(): Tree =
+ atPos(accept(ARROW))(block())
+
+ /** Guard ::= if PostfixExpr
+ */
+ def guard(): Tree =
+ if (inToken == IF) { inNextToken; stripParens(postfixExpr()) }
+ else EmptyTree
+
+ /** Enumerators ::= Generator {semi Enumerator}
+ * Enumerator ::= Generator
+ * | Guard
+ * | val Pattern1 `=' Expr
+ */
+ def enumerators(): List[Enumerator] = {
+ val newStyle = inToken != VAL // todo: deprecate old style
+ //if (!newStyle)
+ // deprecationWarning(inCurrentPos, "for (val x <- ... ) has been deprecated; use for (x <- ... ) instead")
+ val enums = new ListBuffer[Enumerator]
+ generator(enums, false)
+ while (isStatSep) {
+ inNextToken
+ if (newStyle) {
+ if (inToken == IF) enums += Filter(guard())
+ else generator(enums, true)
+ } else {
+ if (inToken == VAL) generator(enums, true)
+ else enums += Filter(expr())
+ }
+ }
+ enums.toList
+ }
+
+ /** Generator ::= Pattern1 (`<-' | '=') Expr [Guard]
+ */
+ def generator(enums: ListBuffer[Enumerator], eqOK: Boolean) {
+ if (inToken == VAL) inNextToken
+ val pos = inCurrentPos;
+ val pat = pattern1(false)
+ val tok = inToken
+ if (tok == EQUALS && eqOK) inNextToken
+ else accept(LARROW)
+ enums += makeGenerator(pos, pat, tok == EQUALS, expr)
+ if (inToken == IF) enums += Filter(guard())
+ }
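+ // e.g. `for (x <- xs if x > 0; y = x * 2) yield y` produces a generator for `x`,
+ // a Filter for the guard, and an `=`-style generator for `y`.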
+ //def p2i(pos : ScanPosition) : Int;
+
+/* -------- PATTERNS ------------------------------------------- */
+
+ /** Patterns ::= Pattern { `,' Pattern }
+ * SeqPatterns ::= SeqPattern { `,' SeqPattern }
+ *
+ * (also eats trailing comma if it finds one)
+ */
+ def patterns(seqOK: Boolean): List[Tree] = {
+ val ts = new ListBuffer[Tree] + pattern(seqOK)
+ while (inToken == COMMA) {
+ val pos = inCurrentPos
+ inNextToken
+ if (inToken == RPAREN) {
+ deprecationWarning(pos, "Trailing commas have been deprecated")
+ return ts.toList
+ } else {
+ ts += pattern(seqOK)
+ }
+ }
+ ts.toList
+ }
+
+ /** Pattern ::= Pattern1 { `|' Pattern1 }
+ * SeqPattern ::= SeqPattern1 { `|' SeqPattern1 }
+ */
+ def pattern(seqOK: Boolean): Tree = {
+ val pos = inCurrentPos
+ val t = pattern1(seqOK)
+ if (isIdent && inName == BAR) {
+ val ts = new ListBuffer[Tree] + t
+ while (isIdent && inName == BAR) {
+ inNextToken; ts += pattern1(seqOK)
+ }
+ atPos(pos) { makeAlternative(ts.toList) }
+ } else t
+ }
+
+ def pattern(): Tree = pattern(false)
+
+ /** Pattern1 ::= varid `:' TypePat
+ * | `_' `:' TypePat
+ * | Pattern2
+ * SeqPattern1 ::= varid `:' TypePat
+ * | `_' `:' TypePat
+ * | [SeqPattern2]
+ */
+ def pattern1(seqOK: Boolean): Tree = {
+ //if (false && /*disabled, no regexp matching*/ seqOK && !isExprIntro) {
+ //atPos(inCurrentPos) { Sequence(List()) }
+ //} else {
+ val p = pattern2(seqOK)
+ p match {
+ case Ident(name) if (treeInfo.isVarPattern(p) && inToken == COLON) =>
+ atPos(inSkipToken) { Typed(p, compoundType(true)) }
+ case _ =>
+ p
+ }
+ //}
+ }
+
+ /* Pattern2 ::= varid [ @ Pattern3 ]
+ * | Pattern3
+ * SeqPattern2 ::= varid [ @ SeqPattern3 ]
+ * | SeqPattern3
+ */
+ def pattern2(seqOK: Boolean): Tree = {
+ val p = pattern3(seqOK)
+ if (inToken == AT) {
+ p match {
+ case Ident(name) =>
+ if (name == nme.WILDCARD) {
+ inNextToken; pattern3(seqOK)
+ } else if (treeInfo.isVarPattern(p)) {
+ inNextToken
+ atPos(p.pos) { Bind(name, pattern3(seqOK)) }
+ } else {
+ p
+ }
+ case _ =>
+ p
+ }
+ } else p
+ }
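+ // e.g. in `case all @ Some(x) =>` the variable `all` is bound to the whole
+ // Some(x) pattern via Bind, while `_ @ p` simply collapses to `p`.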
+
+ /* Pattern3 ::= SimplePattern
+ * | SimplePattern {Id [nl] SimplePattern}
+ * SeqPattern3 ::= SeqSimplePattern [ '*' | '?' | '+' ]
+ * | SeqSimplePattern {Id [nl] SeqSimplePattern}
+ */
+ def pattern3(seqOK: Boolean): Tree = {
+ val base = opstack
+ var top = simplePattern(seqOK)
+ if (seqOK && isIdent && inName == STAR)
+ return atPos(inSkipToken)(Star(stripParens(top)))
+
+ while (isIdent && inName != BAR) {
+ top = reduceStack(
+ false, base, top, precedence(inName), treeInfo.isLeftAssoc(inName))
+ val op = inName
+ opstack = OpInfo(top, op, inCurrentPos) :: opstack
+ ident()
+ top = simplePattern(seqOK)
+ }
+ stripParens(reduceStack(false, base, top, 0, true))
+ }
+
+ def xmlLiteralPattern(): Tree
+
+ /** SimplePattern ::= varid
+ * | `_'
+ * | literal
+ * | XmlPattern
+ * | StableId [TypeArgs] [`(' [SeqPatterns [`,']] `)']
+ * | `(' [Patterns [`,']] `)'
+ * SimpleSeqPattern ::= varid
+ * | `_'
+ * | literal
+ * | XmlPattern
+ * | `<' xLiteralPattern
+ * | StableId [TypeArgs] [`(' [SeqPatterns [`,']] `)']
+ * | `(' [SeqPatterns [`,']] `)'
+ *
+ * XXX: Hook for IDE
+ */
+ def simplePattern(seqOK: Boolean): Tree = inToken match {
+ case IDENTIFIER | BACKQUOTED_IDENT | THIS =>
+ var t = stableId()
+ inToken match {
+ case INTLIT | LONGLIT | FLOATLIT | DOUBLELIT =>
+ t match {
+ case Ident(name) if name == nme.MINUS =>
+ return literal(true, true)
+ case _ =>
+ }
+ case _ =>
+ }
+/* not yet
+ if (inToken == LBRACKET)
+ atPos(inCurrentPos) {
+ val ts = typeArgs(true, false)
+ accept(LPAREN)
+ val ps = if (inToken == RPAREN) List() else patterns(true, false)
+ accept(RPAREN)
+ Apply(TypeApply(convertToTypeId(t), ts), ps)
+ }
+ else */
+ if (inToken == LPAREN) {
+ atPos(t.pos) { Apply(t, argumentPatterns()) }
+ } else t
+ case USCORE =>
+ atPos(inSkipToken) { Ident(nme.WILDCARD) }
+ case CHARLIT | INTLIT | LONGLIT | FLOATLIT | DOUBLELIT |
+ STRINGLIT | SYMBOLLIT | TRUE | FALSE | NULL =>
+ literal(true, false)
+ case LPAREN =>
+ val pos = inSkipToken
+ val ps = if (inToken == RPAREN) List() else patterns(false)
+ accept(RPAREN)
+ Parens(ps) setPos (pos)
+ case XMLSTART =>
+ xmlLiteralPattern()
+ case _ =>
+ syntaxErrorOrIncomplete("illegal start of simple pattern", true)
+ errorPatternTree
+ }
+
+ def argumentPatterns(): List[Tree] = {
+ accept(LPAREN)
+ val ps = if (inToken == RPAREN) List() else patterns(true)
+ accept(RPAREN)
+ ps
+ }
+
+/* -------- MODIFIERS and ANNOTATIONS ------------------------------------------- */
+
+ private def normalize(mods: Modifiers): Modifiers =
+ if ((mods hasFlag Flags.PRIVATE) && mods.privateWithin != nme.EMPTY.toTypeName)
+ mods &~ Flags.PRIVATE
+ else if ((mods hasFlag Flags.ABSTRACT) && (mods hasFlag Flags.OVERRIDE))
+ mods &~ (Flags.ABSTRACT | Flags.OVERRIDE) | Flags.ABSOVERRIDE
+ else
+ mods
+
+ private def addMod(mods: Modifiers, mod: Long): Modifiers = {
+ if (mods hasFlag mod) syntaxError(inCurrentPos, "repeated modifier", false)
+ inNextToken
+ mods | mod
+ }
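+ // e.g. `final final class C` reports "repeated modifier" at the second `final`
+ // but keeps the flag and continues parsing.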
+
+ /** AccessQualifier ::= "[" (Id | this) "]"
+ */
+ def accessQualifierOpt(mods: Modifiers): Modifiers = {
+ var result = mods
+ if (inToken == LBRACKET) {
+ inNextToken
+ if (mods.privateWithin != nme.EMPTY.toTypeName)
+ syntaxError("duplicate private/protected qualifier", false)
+ result = if (inToken == THIS) { inNextToken; mods | Flags.LOCAL }
+ else Modifiers(mods.flags, ident().toTypeName)
+ accept(RBRACKET)
+ }
+ result
+ }
+
+ /** AccessModifier ::= (private | protected) [AccessQualifier]
+ */
+ def accessModifierOpt(): Modifiers = normalize {
+ inToken match {
+ case PRIVATE => inNextToken; accessQualifierOpt(Modifiers(Flags.PRIVATE))
+ case PROTECTED => inNextToken; accessQualifierOpt(Modifiers(Flags.PROTECTED))
+ case _ => NoMods
+ }
+ }
+
+ /** Modifiers ::= {Modifier}
+ * Modifier ::= LocalModifier
+ * | AccessModifier
+ * | override
+ */
+ def modifiers(): Modifiers = normalize {
+ def loop(mods: Modifiers): Modifiers = inToken match {
+ case ABSTRACT =>
+ loop(addMod(mods, Flags.ABSTRACT))
+ case FINAL =>
+ loop(addMod(mods, Flags.FINAL))
+ case SEALED =>
+ loop(addMod(mods, Flags.SEALED))
+ case PRIVATE =>
+ loop(accessQualifierOpt(addMod(mods, Flags.PRIVATE)))
+ case PROTECTED =>
+ loop(accessQualifierOpt(addMod(mods, Flags.PROTECTED)))
+ case OVERRIDE =>
+ loop(addMod(mods, Flags.OVERRIDE))
+ case IMPLICIT =>
+ loop(addMod(mods, Flags.IMPLICIT))
+ case LAZY =>
+ loop(addMod(mods, Flags.LAZY))
+ case NEWLINE =>
+ inNextToken
+ loop(mods)
+ case _ =>
+ mods
+ }
+ loop(NoMods)
+ }
+
+ /** LocalModifiers ::= {LocalModifier}
+ * LocalModifier ::= abstract | final | sealed | implicit | lazy
+ */
+ def localModifiers(): Modifiers = {
+ def loop(mods: Modifiers): Modifiers = inToken match {
+ case ABSTRACT =>
+ loop(addMod(mods, Flags.ABSTRACT))
+ case FINAL =>
+ loop(addMod(mods, Flags.FINAL))
+ case SEALED =>
+ loop(addMod(mods, Flags.SEALED))
+ case IMPLICIT =>
+ loop(addMod(mods, Flags.IMPLICIT))
+ case LAZY =>
+ loop(addMod(mods, Flags.LAZY))
+ case _ =>
+ mods
+ }
+ loop(NoMods)
+ }
+
+ /** Annotations ::= {Annotation [nl]}
+ * Annotation ::= `@' AnnotationExpr
+ */
+ def annotations(skipNewLines: Boolean): List[Annotation] = {
+ var annots = new ListBuffer[Annotation]
+ while (inToken == AT) {
+ inNextToken
+ annots += annotationExpr()
+ if (skipNewLines) newLineOpt()
+ }
+ annots.toList
+ }
+
+ /** AnnotationExpr ::= StableId [TypeArgs] [`(' [Exprs] `)'] [[nl] `{' {NameValuePair} `}']
+ * NameValuePair ::= val id `=' PrefixExpr
+ */
+ def annotationExpr(): Annotation = {
+ def nameValuePair(): Tree = {
+ var pos = inCurrentPos
+ accept(VAL)
+ val aname = ident()
+ accept(EQUALS)
+ val rhs = stripParens(prefixExpr())
+ atPos(pos) { ValDef(NoMods, aname, TypeTree(), rhs) }
+ }
+ val pos = inCurrentPos
+ var t: Tree = convertToTypeId(stableId())
+ if (inToken == LBRACKET)
+ t = atPos(inCurrentPos)(AppliedTypeTree(t, typeArgs(false, false)))
+ val args = if (inToken == LPAREN) argumentExprs() else List()
+ newLineOptWhenFollowedBy(LBRACE)
+ val nameValuePairs: List[Tree] = if (inToken == LBRACE) {
+ inNextToken
+ val nvps = new ListBuffer[Tree] + nameValuePair()
+ while (inToken == COMMA) {
+ inNextToken
+ nvps += nameValuePair()
+ }
+ accept(RBRACE)
+ nvps.toList
+ } else List()
+ val constr = atPos(pos) { New(t, List(args)) }
+ Annotation(constr, nameValuePairs) setPos pos
+ }
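+ // e.g. `@throws(classOf[IOException])` becomes a `New` of the annotation type with
+ // its argument list; an optional `{ val x = e }` suffix fills the name-value pairs.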
+
+/* -------- PARAMETERS ------------------------------------------- */
+
+ /** ParamClauses ::= {ParamClause} [[nl] `(' implicit Params `)']
+ * ParamClause ::= [nl] `(' [Params] ')'
+ * Params ::= Param {`,' Param}
+ * Param ::= {Annotation} Id [`:' ParamType]
+ * ClassParamClauses ::= {ClassParamClause} [[nl] `(' implicit ClassParams `)']
+ * ClassParamClause ::= [nl] `(' [ClassParams] ')'
+ * ClassParams ::= ClassParam {`,' ClassParam}
+ * ClassParam ::= {Annotation} [{Modifier} (`val' | `var')] Id [`:' ParamType]
+ */
+ def paramClauses(owner: Name, implicitViews: List[Tree], ofCaseClass: Boolean): List[List[ValDef]] = {
+ var implicitmod = 0
+ var caseParam = ofCaseClass
+ def param(): ValDef = {
+ var pos = inCurrentPos
+
+ {
+ val annots = annotations(false)
+ var mods = Modifiers(Flags.PARAM)
+ if (owner.isTypeName) {
+ mods = modifiers() | Flags.PARAMACCESSOR
+ if (mods.hasFlag(Flags.LAZY)) syntaxError("lazy modifier not allowed here. Use call-by-name parameters instead", false)
+ if (inToken == VAL) {
+ inNextToken
+ } else if (inToken == VAR) {
+ mods = mods | Flags.MUTABLE
+ inNextToken
+ } else {
+ if (mods.flags != Flags.PARAMACCESSOR) accept(VAL)
+ if (!(caseParam)) mods = mods | Flags.PRIVATE | Flags.LOCAL
+ }
+ if (caseParam) mods = mods | Flags.CASEACCESSOR
+ }
+ val namePos = inCurrentPos
+ val name = ident()
+ if (name != nme.ERROR) pos = namePos
+ var bynamemod = 0
+ val tpt =
+ if (settings.Xexperimental.value && !owner.isTypeName && inToken != COLON) {
+ TypeTree()
+ } else { // XX-METHOD-INFER
+ accept(COLON)
+ if (inToken == ARROW) {
+ if (owner.isTypeName && !mods.hasFlag(Flags.LOCAL))
+ syntaxError(
+ inCurrentPos,
+ (if (mods.hasFlag(Flags.MUTABLE)) "`var'" else "`val'") +
+ " parameters may not be call-by-name", false)
+ else bynamemod = Flags.BYNAMEPARAM
+ }
+ paramType()
+ }
+ atPos(pos){
+ ValDef((mods | implicitmod | bynamemod) withAnnotations annots, name, tpt, EmptyTree)
+ }
+ }
+ }
+ def paramClause(): List[ValDef] = {
+ val params = new ListBuffer[ValDef]
+ if (inToken != RPAREN) {
+ if (inToken == IMPLICIT) {
+ if (!implicitViews.isEmpty)
+ syntaxError("cannot have both view bounds `<%' and implicit parameters", false)
+ inNextToken
+ implicitmod = Flags.IMPLICIT
+ }
+ params += param()
+ while (inToken == COMMA) {
+ inNextToken; params += param()
+ }
+ }
+ params.toList
+ }
+ val vds = new ListBuffer[List[ValDef]]
+ val pos = inCurrentPos
+ newLineOptWhenFollowedBy(LPAREN)
+ if (ofCaseClass && inToken != LPAREN)
+ deprecationWarning(inCurrentPos, "case classes without a parameter list have been deprecated;\n"+
+ "use either case objects or case classes with `()' as parameter list.")
+ while (implicitmod == 0 && inToken == LPAREN) {
+ inNextToken
+ vds += paramClause()
+ accept(RPAREN)
+ caseParam = false
+ newLineOptWhenFollowedBy(LPAREN)
+ }
+ val result = vds.toList
+ if (owner == nme.CONSTRUCTOR &&
+ (result.isEmpty ||
+ (!result.head.isEmpty && result.head.head.mods.hasFlag(Flags.IMPLICIT))))
+ if (inToken == LBRACKET)
+ syntaxError(pos, "no type parameters allowed here", false)
+ else if(inToken == EOF)
+ incompleteInputError("auxiliary constructor needs non-implicit parameter list")
+ else
+ syntaxError(pos, "auxiliary constructor needs non-implicit parameter list", false)
+ addImplicitViews(owner, result, implicitViews)
+ }
+
+ /** ParamType ::= Type | `=>' Type | Type `*'
+ */
+ def paramType(): Tree =
+ if (inToken == ARROW)
+ atPos(inSkipToken) {
+ AppliedTypeTree(
+ rootScalaDot(nme.BYNAME_PARAM_CLASS_NAME.toTypeName), List(typ()))
+ }
+ else {
+ val t = typ()
+ if (isIdent && inName == STAR) {
+ inNextToken
+ atPos(t.pos) {
+ AppliedTypeTree(
+ rootScalaDot(nme.REPEATED_PARAM_CLASS_NAME.toTypeName), List(t))
+ }
+ } else t
+ }
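+ // e.g. in `def f(x: => Int, xs: String*)` the `=> Int` and `String*` written here
+ // are wrapped in the by-name and repeated parameter type constructors, respectively.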
+
+ /** TypeParamClauseOpt ::= [TypeParamClause]
+ * TypeParamClause ::= `[' VariantTypeParam {`,' VariantTypeParam} `]'
+ * VariantTypeParam ::= [`+' | `-'] TypeParam
+ * FunTypeParamClauseOpt ::= [FunTypeParamClause]
+ * FunTypeParamClause ::= `[' TypeParam {`,' TypeParam} `]'
+ * TypeParam ::= Id TypeParamClauseOpt TypeBounds [<% Type]
+ */
+ def typeParamClauseOpt(owner: Name, implicitViewBuf: ListBuffer[Tree]): List[TypeDef] = {
+ def typeParam(): TypeDef = {
+ var mods = Modifiers(Flags.PARAM)
+ if (owner.isTypeName && isIdent) {
+ if (inName == PLUS) {
+ inNextToken
+ mods = mods | Flags.COVARIANT
+ } else if (inName == MINUS) {
+ inNextToken
+ mods = mods | Flags.CONTRAVARIANT
+ }
+ }
+ val pos = inCurrentPos
+ val pname =
+ (if (inToken == USCORE) { // @M! also allow underscore
+ inNextToken
+ nme.WILDCARD
+ } else ident()).toTypeName
+
+ val tparams = typeParamClauseOpt(pname, null) // @M TODO null --> no higher-order view bounds for now
+ val param = atPos(pos) { TypeDef(mods, pname, tparams, typeBounds()) }
+ if (inToken == VIEWBOUND && (implicitViewBuf ne null))
+ implicitViewBuf += atPos(inSkipToken) {
+ makeFunctionTypeTree(List(Ident(pname)), typ())
+ }
+ param
+ }
+ val params = new ListBuffer[TypeDef]
+ newLineOptWhenFollowedBy(LBRACKET)
+ if (inToken == LBRACKET) {
+ inNextToken
+ params += typeParam()
+ while (inToken == COMMA) {
+ inNextToken
+ params += typeParam()
+ }
+ accept(RBRACKET)
+ }
+ params.toList
+ }
+
+ /** TypeBounds ::= [`>:' Type] [`<:' Type]
+ */
+ def typeBounds(): TypeBoundsTree =
+ TypeBoundsTree(
+ bound(SUPERTYPE, nme.Nothing),
+ bound(SUBTYPE, nme.Any))
+
+ def bound(tok: Int, default: Name): Tree =
+ if (inToken == tok) { inNextToken; typ() }
+ else rootScalaDot(default.toTypeName)
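+ // e.g. a bare `[T]` gets the default bounds `>: Nothing <: Any`, while
+ // `[T <: AnyRef]` overrides only the upper bound.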
+
+/* -------- DEFS ------------------------------------------- */
+
+
+ /** Import ::= import ImportExpr {`,' ImportExpr}
+ */
+ def importClause(): List[Tree] = {
+ accept(IMPORT)
+ val ts = new ListBuffer[Tree] + importExpr()
+ while (inToken == COMMA) {
+ inNextToken; ts += importExpr()
+ }
+ ts.toList
+ }
+
+ /** ImportExpr ::= StableId `.' (Id | `_' | ImportSelectors)
+ * XXX: Hook for IDE
+ */
+ def importExpr(): Tree =
+ atPos(inCurrentPos) {
+ var t: Tree = null
+ //var pos : ScanPosition = null.asInstanceOf[ScanPosition]
+ var pos : Int = -1
+ if (inToken == THIS) {
+ t = atPos(inCurrentPos) { This(nme.EMPTY.toTypeName) }
+ t = atPos(accept(DOT)) { selector(t) }
+ pos = accept(DOT)
+ } else {
+ val i = atPos(inCurrentPos) { Ident(ident()) }
+ pos = accept(DOT)
+ if (inToken == THIS) {
+ inNextToken
+ t = atPos(i.pos) { This(i.name.toTypeName) }
+ t = atPos(accept(DOT)) { selector(t) }
+ pos = accept(DOT)
+ } else {
+ t = i
+ }
+ }
+ def loop: Tree =
+ if (inToken == USCORE) {
+ inNextToken
+ Import(t, List((nme.WILDCARD, null)))
+ } else if (inToken == LBRACE) {
+ Import(t, importSelectors())
+ } else {
+ val identPos = inCurrentPos
+ val name = ident() // @S: use position of identifier, not dot!
+ pos = if (name == nme.ERROR) pos else identPos
+ if (inToken == DOT) {
+ t = atPos(pos) { Select(t, name) }
+ pos = accept(DOT)
+ loop
+ } else {
+ Import(t, List((name, name)))
+ }
+ }
+ loop
+ }
+
+ /** ImportSelectors ::= `{' {ImportSelector `,'} (ImportSelector | `_') `}'
+ */
+ def importSelectors(): List[(Name, Name)] = {
+ val names = new ListBuffer[(Name, Name)]
+ accept(LBRACE)
+ var isLast = importSelector(names)
+ while (!isLast && inToken == COMMA) {
+ inNextToken
+ isLast = importSelector(names)
+ }
+ accept(RBRACE)
+ names.toList
+ }
+
+ /** ImportSelector ::= Id [`=>' Id | `=>' `_']
+ */
+ def importSelector(names: ListBuffer[(Name, Name)]): Boolean =
+ if (inToken == USCORE) {
+ inNextToken; names += ((nme.WILDCARD, null)); true
+ } else {
+ val name = ident()
+ names += ((
+ name,
+ if (inToken == ARROW) {
+ inNextToken
+ if (inToken == USCORE) { inNextToken; nme.WILDCARD } else ident()
+ } else {
+ name
+ }))
+ false
+ }
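+ // e.g. `import scala.collection.mutable.{Map => MutMap, Set, _}` yields the
+ // selector pairs (Map, MutMap), (Set, Set) and (_, null); the `_` ends the list.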
+
+ /** Def ::= val PatDef
+ * | var VarDef
+ * | def FunDef
+ * | type [nl] TypeDef
+ * | TmplDef
+ * Dcl ::= val ValDcl
+ * | var ValDcl
+ * | def FunDcl
+ * | type [nl] TypeDcl
+ */
+ def defOrDcl(mods: Modifiers): List[Tree] = {
+ if ((mods hasFlag Flags.LAZY) && in.token != VAL)
+ syntaxError("lazy not allowed here. Only vals can be lazy", false)
+ inToken match {
+ case VAL =>
+ patDefOrDcl(mods)
+ case VAR =>
+ patDefOrDcl(mods | Flags.MUTABLE)
+ case DEF =>
+ List(funDefOrDcl(mods))
+ case TYPE =>
+ inNextToken
+ newLinesOpt()
+ List(typeDefOrDcl(mods))
+ case _ =>
+ List(tmplDef(mods))
+ }
+ }
+ /** IDE hook: for non-local defs or dcls with modifiers and annotations */
+ def nonLocalDefOrDcl : List[Tree] = {
+ val annots = annotations(true)
+ defOrDcl(modifiers() withAnnotations annots)
+ }
+ /** not hooked by the IDE, will not undergo stubbing. Used for early initialization blocks. */
+ def preNonLocalDefOrDcl : List[Tree] = {
+ val annots = annotations(true)
+ defOrDcl(modifiers() withAnnotations annots)
+ }
+
+
+ /** PatDef ::= Pattern2 {`,' Pattern2} [`:' Type] `=' Expr
+ * ValDcl ::= Id {`,' Id} `:' Type
+ * VarDef ::= PatDef | Id {`,' Id} `:' Type `=' `_'
+ */
+ def patDefOrDcl(mods: Modifiers): List[Tree] = {
+ var newmods = mods
+ val lhsBuf = new ListBuffer[Tree]
+ do {
+ inNextToken
+ val p = pattern2(false)
+ lhsBuf += stripParens(p)
+ } while (inToken == COMMA)
+ val lhs = lhsBuf.toList
+ val tp = typedOpt()
+ val rhs =
+ if (tp.isEmpty || inToken == EQUALS) {
+ accept(EQUALS)
+ if (!tp.isEmpty && newmods.hasFlag(Flags.MUTABLE) &&
+ (lhs.toList forall (_.isInstanceOf[Ident])) && inToken == USCORE) {
+ inNextToken
+ newmods = newmods | Flags.DEFAULTINIT
+ EmptyTree
+ } else {
+ expr()
+ }
+ } else {
+ newmods = newmods | Flags.DEFERRED
+ EmptyTree
+ }
+ var originalUsed = false
+ def mkDefs(p: Tree): List[Tree] = {
+ //Console.println("DEBUG: p = "+p.toString()); // DEBUG
+ val trees =
+ makePatDef(newmods,
+ if (tp.isEmpty)
+ p
+ else
+ Typed(p, tp),
+ if (inIDE && !originalUsed) {
+ // duplicates have weaker status than originals,
+ // so one original is needed here.
+ originalUsed = true
+ rhs
+ } else rhs.duplicate) map atPos(p.pos)
+ if (newmods.hasFlag(Flags.DEFERRED)) {
+ trees match {
+ case List(ValDef(_, _, _, EmptyTree)) =>
+ if (mods.hasFlag(Flags.LAZY))
+ syntaxError(p.pos, "lazy values may not be abstract", false)
+ case _ => syntaxError(p.pos, "pattern definition may not be abstract", false)
+ }
+ }
+ trees
+ }
+ for (p <- lhs.toList; d <- mkDefs(p)) yield d
+ }
+
+ /** VarDef ::= PatDef
+ * | Id {`,' Id} `:' Type `=' `_'
+ * VarDcl ::= Id {`,' Id} `:' Type
+ def varDefOrDcl(mods: Modifiers): List[Tree] = {
+ var newmods = mods | Flags.MUTABLE
+ val lhs = new ListBuffer[(Int, Name)]
+ do {
+ inNextToken
+ lhs += (inCurrentPos, ident())
+ } while (inToken == COMMA)
+ val tp = typedOpt()
+ val rhs = if (tp.isEmpty || inToken == EQUALS) {
+ accept(EQUALS)
+ if (!tp.isEmpty && inToken == USCORE) {
+ inNextToken
+ EmptyTree
+ } else {
+ expr()
+ }
+ } else {
+ newmods = newmods | Flags.DEFERRED
+ EmptyTree
+ }
+ var originalUsed = false
+ for ((pos, name) <- lhs.toList) yield atPos(pos) {
+ if (inIDE && !originalUsed) {
+ originalUsed = true
+ ValDef(newmods, name, tp, rhs)
+ } else ValDef(newmods, name, tp.duplicate, rhs.duplicate)
+ }
+ }
+ */
+
+ /** FunDef ::= FunSig `:' Type `=' Expr
+ * | FunSig [nl] `{' Block `}'
+ * | this ParamClause ParamClauses (`=' ConstrExpr | [nl] ConstrBlock)
+ * FunDcl ::= FunSig [`:' Type]
+ * FunSig ::= id [FunTypeParamClause] ParamClauses
+ */
+ def funDefOrDcl(mods: Modifiers): Tree = {
+ var pos = inSkipToken // position of `def'
+ if (inToken == THIS) {
+ atPos(inCurrentPos) {
+ inNextToken
+ val vparamss = paramClauses(nme.CONSTRUCTOR, implicitClassViews map (_.duplicate), false)
+ newLineOptWhenFollowedBy(LBRACE)
+ val rhs = if (inToken == LBRACE) constrBlock(vparamss)
+ else { accept(EQUALS); constrExpr(vparamss) }
+ DefDef(mods, nme.CONSTRUCTOR, List(), vparamss, TypeTree(), rhs)
+ }
+ } else {
+ var newmods = mods
+ val namePos = inCurrentPos
+ val name = ident()
+ if (name != nme.ERROR) pos = namePos
+ atPos(pos) {
+ // implicitViewBuf is for view bounded type parameters of the form
+ // [T <% B]; it contains the equivalent implicit parameter, i.e. (implicit p: T => B)
+ val implicitViewBuf = new ListBuffer[Tree]
+ val tparams = typeParamClauseOpt(name, implicitViewBuf)
+ val vparamss = paramClauses(name, implicitViewBuf.toList, false)
+ newLineOptWhenFollowedBy(LBRACE)
+ var restype = typedOpt()
+ val rhs =
+ if (isStatSep || inToken == RBRACE) {
+ if (restype.isEmpty) restype = scalaUnitConstr
+ newmods = newmods | Flags.DEFERRED
+ EmptyTree
+ } else if (restype.isEmpty && inToken == LBRACE) {
+ restype = scalaUnitConstr
+ blockExpr()
+ } else equalsExpr()
+ DefDef(newmods, name, tparams, vparamss, restype, rhs)
+ }
+ }
+ }
+
+
+ /** ConstrExpr ::= SelfInvocation
+ * | ConstrBlock
+ */
+ def constrExpr(vparamss: List[List[ValDef]]): Tree =
+ if (inToken == LBRACE) constrBlock(vparamss)
+ else Block(List(selfInvocation(vparamss)), Literal(()))
+
+ /** SelfInvocation ::= this ArgumentExprs {ArgumentExprs}
+ */
+ def selfInvocation(vparamss: List[List[ValDef]]): Tree =
+ atPos(accept(THIS)) {
+ newLineOptWhenFollowedBy(LBRACE)
+ var t = Apply(Ident(nme.CONSTRUCTOR), argumentExprs())
+ while (inToken == LPAREN || inToken == LBRACE) {
+ t = Apply(t, argumentExprs())
+ newLineOptWhenFollowedBy(LBRACE)
+ }
+ if (implicitClassViews.isEmpty) t
+ else Apply(t, vparamss.last.map(vp => Ident(vp.name)))
+ }
+
+ /** ConstrBlock ::= `{' SelfInvocation {semi BlockStat} `}'
+ */
+ def constrBlock(vparamss: List[List[ValDef]]): Tree =
+ atPos(inSkipToken) {
+ val statlist = new ListBuffer[Tree]
+ statlist += selfInvocation(vparamss)
+ val stats = if (isStatSep) { inNextToken; blockStatSeq(statlist) }
+ else statlist.toList
+ accept(RBRACE)
+ Block(stats, Literal(()))
+ }
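+ // e.g. the auxiliary constructor `def this(x: Int) { this(x, 0); init() }` parses
+ // to a Block whose first statement is the self invocation `this(x, 0)`.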
+
+ /** TypeDef ::= Id [TypeParamClause] `=' Type
+ * TypeDcl ::= Id [TypeParamClause] TypeBounds
+ */
+ def typeDefOrDcl(mods: Modifiers): Tree =
+ atPos(inCurrentPos) {
+ val name = ident().toTypeName
+
+ // @M! a type alias as well as an abstract type may declare type parameters
+ val tparams = inToken match {
+ case LBRACKET =>
+ typeParamClauseOpt(name, null)
+ case _ =>
+ Nil
+ }
+
+ inToken match {
+ case EQUALS =>
+ inNextToken
+ TypeDef(mods, name, tparams, typ())
+ case SUPERTYPE | SUBTYPE | SEMI | NEWLINE | NEWLINES | COMMA | RBRACE =>
+ TypeDef(mods | Flags.DEFERRED, name, tparams, typeBounds())
+ case _ =>
+ syntaxErrorOrIncomplete("`=', `>:', or `<:' expected", true)
+ EmptyTree
+ }
+ }
+
+ /** Hook for IDE, for top-level classes/objects */
+ def topLevelTmplDef: Tree = {
+ val annots = annotations(true)
+ val mods = modifiers() withAnnotations annots
+ tmplDef(mods)
+ }
+
+ /** TmplDef ::= [case] class ClassDef
+ * | [case] object ObjectDef
+ * | [override] trait TraitDef
+ */
+ def tmplDef(mods: Modifiers): Tree = {
+ if (mods.hasFlag(Flags.LAZY)) syntaxError("classes cannot be lazy", false)
+ inToken match {
+ case TRAIT =>
+ classDef(mods | Flags.TRAIT | Flags.ABSTRACT)
+ case CLASS =>
+ classDef(mods)
+ case CASECLASS =>
+ classDef(mods | Flags.CASE)
+ case OBJECT =>
+ objectDef(mods)
+ case CASEOBJECT =>
+ objectDef(mods | Flags.CASE)
+ case _ =>
+ syntaxErrorOrIncomplete("expected start of definition", true)
+ EmptyTree
+ }
+ }
+
+ /** ClassDef ::= Id [TypeParamClause] {Annotation}
+ * [AccessModifier] ClassParamClauses RequiresTypeOpt ClassTemplateOpt
+ * TraitDef ::= Id [TypeParamClause] RequiresTypeOpt TraitTemplateOpt
+ */
+ def classDef(mods: Modifiers): ClassDef = {
+ var pos = inSkipToken
+ var namePos = inCurrentPos
+ val name = ident().toTypeName
+ if (name != nme.ERROR) pos = namePos
+ atPos(pos) {
+ val savedViews = implicitClassViews
+ val implicitViewBuf = new ListBuffer[Tree]
+ val tparams = typeParamClauseOpt(name, implicitViewBuf)
+ implicitClassViews = implicitViewBuf.toList
+ if (!implicitClassViews.isEmpty && mods.hasFlag(Flags.TRAIT)) {
+ syntaxError("traits cannot have type parameters with <% bounds", false)
+ implicitClassViews = List()
+ }
+ val constrAnnots = annotations(false)
+ val (constrMods, vparamss) =
+ if (mods.hasFlag(Flags.TRAIT)) (Modifiers(Flags.TRAIT), List())
+ else (accessModifierOpt(), paramClauses(name, implicitClassViews, mods.hasFlag(Flags.CASE)))
+ val thistpe = requiresTypeOpt()
+ var mods1 =
+ if (mods hasFlag Flags.TRAIT)
+ if (inToken == SUBTYPE) mods | Flags.DEFERRED
+ else mods
+ else if (inToken == SUBTYPE) {
+ syntaxError("classes are not allowed to be virtual", false)
+ mods
+ }
+ else
+ mods
+ var template = templateOpt(mods1, name, constrMods withAnnotations constrAnnots, vparamss)
+ if (!thistpe.isEmpty) {
+ if (template.self.isEmpty) {
+ template = copy.Template(
+ template, template.parents, makeSelfDef(nme.WILDCARD, thistpe), template.body)
+ } else syntaxError("`requires' cannot be combined with explicit self type", false)
+ }
+ if (isInterface(mods1, template.body)) mods1 |= Flags.INTERFACE
+ val result = ClassDef(mods1, name, tparams, template)
+ implicitClassViews = savedViews
+ result
+ }
+ }
+
+ /** ObjectDef ::= Id ClassTemplateOpt
+ */
+ def objectDef(mods: Modifiers): ModuleDef = {
+ var pos = inSkipToken
+ var namePos = inCurrentPos
+ val name = ident().toTermName
+ if (name != nme.ERROR) pos = namePos
+ atPos(pos) {
+ val mods1 = if (inToken == SUBTYPE) mods | Flags.DEFERRED else mods
+ val template = templateOpt(mods1, name, NoMods, List())
+ ModuleDef(mods1, name, template)
+ }
+ }
+
+
+ /** ClassParents ::= AnnotType {`(' [Exprs [`,']] `)'} {with AnnotType}
+ * TraitParents ::= AnnotType {with AnnotType}
+ */
+ def templateParents(isTrait: Boolean): (List[Tree], List[List[Tree]]) = {
+ val parents = new ListBuffer[Tree] + annotType(false)
+ val argss = new ListBuffer[List[Tree]]
+ if (inToken == LPAREN && !isTrait)
+ do { argss += argumentExprs() } while (inToken == LPAREN)
+ else argss += List()
+ while (inToken == WITH) {
+ inNextToken
+ parents += annotType(false)
+ }
+ (parents.toList, argss.toList)
+ }
+
+ /** ClassTemplate ::= [EarlyDefs with] ClassParents [TemplateBody]
+ * TraitTemplate ::= [EarlyDefs with] TraitParents [TemplateBody]
+ * EarlyDefs ::= `{' [EarlyDef {semi EarlyDef}] `}'
+ * EarlyDef ::= Annotations Modifiers PatDef
+ */
+ def template(isTrait: Boolean): (List[Tree], List[List[Tree]], ValDef, List[Tree]) = {
+ newLineOptWhenFollowedBy(LBRACE)
+ if (inToken == LBRACE) {
+ // @S: pre template body cannot stub like post body can!
+ val (self, body) = templateBody(true)
+ if (inToken == WITH && self.isEmpty) {
+ val earlyDefs: List[Tree] = body flatMap {
+ case vdef @ ValDef(mods, name, tpt, rhs) if !(mods hasFlag Flags.DEFERRED) =>
+ List(copy.ValDef(vdef, mods | Flags.PRESUPER, name, tpt, rhs))
+ case tdef @ TypeDef(mods, name, tparams, rhs) =>
+ List(copy.TypeDef(tdef, mods | Flags.PRESUPER, name, tparams, rhs))
+ case stat if !stat.isEmpty =>
+ syntaxError(stat.pos, "only type definitions and concrete field definitions allowed in early object initialization section", false)
+ List()
+ case _ => List()
+ }
+ inNextToken
+ val (parents, argss) = templateParents(isTrait)
+ val (self1, body1) = templateBodyOpt(isTrait)
+ (parents, argss, self1, earlyDefs ::: body1)
+ } else {
+ (List(), List(List()), self, body)
+ }
+ } else {
+ val (parents, argss) = templateParents(isTrait)
+ val (self, body) = templateBodyOpt(isTrait)
+ (parents, argss, self, body)
+ }
+ }
+
+ def isInterface(mods: Modifiers, body: List[Tree]): Boolean =
+ (mods hasFlag Flags.TRAIT) && (body forall treeInfo.isInterfaceMember)
+
+ /** ClassTemplateOpt ::= 'extends' ClassTemplate | [['extends'] TemplateBody]
+ * TraitTemplateOpt ::= TraitExtends TraitTemplate | [['extends'] TemplateBody] | '<:' TemplateBody
+ * TraitExtends ::= 'extends' | `<:'
+ */
+ def templateOpt(mods: Modifiers, name: Name, constrMods: Modifiers,
+ vparamss: List[List[ValDef]]): Template = {
+ val pos = inCurrentPos;
+ val (parents0, argss, self, body) =
+ if (inToken == EXTENDS || settings.Xexperimental.value && (mods hasFlag Flags.TRAIT) && inToken == SUBTYPE) {
+ inNextToken
+ template(mods hasFlag Flags.TRAIT)
+ } else if ((inToken == SUBTYPE) && (mods hasFlag Flags.TRAIT)) {
+ inNextToken
+ template(true)
+ } else {
+ newLineOptWhenFollowedBy(LBRACE)
+ val (self, body) = templateBodyOpt(false)
+ (List(), List(List()), self, body)
+ }
+ var parents = parents0
+ if (name != nme.ScalaObject.toTypeName && !isInterface(mods, body))
+ parents = parents ::: List(scalaScalaObjectConstr)
+ if (parents.isEmpty)
+ parents = List(scalaAnyRefConstr)
+ if (mods.hasFlag(Flags.CASE)) parents = parents ::: List(productConstr)
+ val tree = Template(parents, self, constrMods, vparamss, argss, body)
+ // @S: if nothing parsed, don't use next position!
+ // @S: if primary constructor does not always have the same position, then the IDE gets confused.
+ // @S: since build compiler is used to generate IDE files, don't set position here!
+ tree
+ // if (pos == inCurrentPos || inIDE) tree else atPos(pos) {tree}
+ }
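+ // e.g. a plain `class C` has scala.ScalaObject appended to its parents here, a
+ // case class additionally gets Product, and an empty parent list defaults to AnyRef.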
+
+/* -------- TEMPLATES ------------------------------------------- */
+
+ /** TemplateBody ::= [nl] `{' TemplateStatSeq `}'
+ * @param isPre specifies whether in early initializer (true) or not (false)
+ */
+ def templateBody(isPre: Boolean) = {
+ accept(LBRACE)
+ val result @ (self, stats) = templateStatSeq(isPre)
+ accept(RBRACE)
+ if (stats.isEmpty) (self, List(EmptyTree)) else result
+ }
+ def templateBodyOpt(traitParentSeen: Boolean): (ValDef, List[Tree]) = {
+ newLineOptWhenFollowedBy(LBRACE)
+ if (inToken == LBRACE) {
+ templateBody(false)
+ } else {
+ if (inToken == LPAREN)
+ syntaxError((if (traitParentSeen) "parents of traits" else "traits or objects")+
+ " may not have parameters", true)
+ (emptyValDef, List())
+ }
+ }
+
+ /** Refinement ::= [nl] `{' RefineStat {semi RefineStat} `}'
+ */
+ def refinement(): List[Tree] = {
+ accept(LBRACE)
+ val body = refineStatSeq()
+ accept(RBRACE)
+ body
+ }
+
+/* -------- STATSEQS ------------------------------------------- */
+
+ /** Packaging ::= package QualId [nl] `{' TopStatSeq `}'
+ */
+ def packaging(pkgPos: Int): Tree = {
+ val pkg = qualId()
+ val pos = if (pkg.pos != NoPosition) pkg.pos else i2p(pkgPos)
+ atPos(pos) {
+ newLineOptWhenFollowedBy(LBRACE)
+ accept(LBRACE)
+ val stats = topStatSeq()
+ accept(RBRACE)
+ makePackaging(pkg, stats)
+ }
+ }
+
+ /** TopStatSeq ::= TopStat {semi TopStat}
+ * TopStat ::= Annotations Modifiers TmplDef
+ * | Packaging
+ * | package object ObjectDef
+ * | Import
+ * |
+ */
+ def topStatSeq(): List[Tree] = {
+ val stats = new ListBuffer[Tree]
+ while (inToken != RBRACE && inToken != EOF) {
+ if (inToken == PACKAGE) {
+ val pkgPos = accept(PACKAGE)
+ stats += {
+ if (inToken == OBJECT)
+ atPos(pkgPos) { makePackageObject(objectDef(NoMods)) }
+ else packaging(pkgPos)
+ }
+ } else if (inToken == IMPORT) {
+ stats ++= importClause()
+ // XXX: IDE hook this all.
+ } else if (inToken == CLASS ||
+ inToken == CASECLASS ||
+ inToken == TRAIT ||
+ inToken == OBJECT ||
+ inToken == CASEOBJECT ||
+ inToken == LBRACKET || //todo: remove
+ inToken == AT ||
+ isModifier) {
+ stats ++= joinComment(List(topLevelTmplDef))
+ } else if (!isStatSep) {
+ syntaxErrorOrIncomplete("expected class or object definition", true)
+ }
+ if (inToken != RBRACE && inToken != EOF) acceptStatSep()
+ }
+ stats.toList
+ }
+
+ /** TemplateStatSeq ::= [id [`:' Type] `=>'] TemplateStat {semi TemplateStat}
+ * TemplateStat ::= Import
+ * | Annotations Modifiers Def
+ * | Annotations Modifiers Dcl
+ * | Expr1
+ * | super ArgumentExprs {ArgumentExprs}
+ * |
+ * @param isPre specifies whether in early initializer (true) or not (false)
+ */
+ def templateStatSeq(isPre : Boolean) = checkNoEscapingPlaceholders {
+ var self: ValDef = emptyValDef
+ val stats = new ListBuffer[Tree]
+ if (isExprIntro) {
+ val first = expr(InTemplate) // @S: first statement is potentially converted so cannot be stubbed.
+ if (inToken == ARROW) {
+ first match {
+ case Typed(tree @ This(name), tpt) if (name == nme.EMPTY.toTypeName) =>
+ self = makeSelfDef(nme.WILDCARD, tpt).setPos(tree.pos)
+ case _ =>
+ convertToParam(first) match {
+ case tree @ ValDef(_, name, tpt, EmptyTree) if (name != nme.ERROR) =>
+ self = makeSelfDef(name, tpt).setPos(tree.pos)
+ case _ =>
+ }
+ }
+ inNextToken
+ } else {
+ stats += first
+ if (in.token != RBRACE && in.token != EOF/* !isStatSep(in.token)*/) acceptStatSep()
+ }
+ }
+ while (inToken != RBRACE && inToken != EOF) {
+ if (inToken == IMPORT) {
+ stats ++= importClause()
+ } else if (isExprIntro) {
+ stats += statement(InTemplate)
+ } else if (isDefIntro || isModifier || inToken == LBRACKET /*todo: remove */ || inToken == AT) {
+ if (isPre) // @S: avoid caching by calling a different method that does the same thing (except in the IDE)
+ stats ++= joinComment(preNonLocalDefOrDcl)
+ else stats ++= joinComment(nonLocalDefOrDcl)
+ } else if (!isStatSep) {
+ syntaxErrorOrIncomplete("illegal start of definition", true)
+ }
+ if (inToken != RBRACE && inToken != EOF) acceptStatSep()
+ }
+ (self, stats.toList)
+ }
+
+
+
+ /** RefineStatSeq ::= RefineStat {semi RefineStat}
+ * RefineStat ::= Dcl
+ * | type TypeDef
+ * |
+ */
+ def refineStatSeq(): List[Tree] = checkNoEscapingPlaceholders {
+ val stats = new ListBuffer[Tree]
+ while (inToken != RBRACE && inToken != EOF) {
+ if (isDclIntro) { // don't IDE hook
+ stats ++= joinComment(defOrDcl(NoMods))
+ } else if (!isStatSep) {
+ syntaxErrorOrIncomplete("illegal start of declaration", true)
+ }
+ if (inToken != RBRACE) acceptStatSep()
+ }
+ stats.toList
+ }
+
+ /** overridable IDE hook for local definitions of blockStatSeq
+ * Here's an idea how to fill in start and end positions.
+ def localDef : List[Tree] = {
+ atEndPos {
+ atStartPos(inCurrentPos) {
+ val annots = annotations(true)
+ val mods = localModifiers() withAnnotations annots
+ if (!(mods hasFlag ~(Flags.IMPLICIT | Flags.LAZY))) defOrDcl(mods)
+ else List(tmplDef(mods))
+ }
+ } (inCurrentPos)
+ }
+ */
+
+ def localDef : List[Tree] = {
+ val annots = annotations(true)
+ val mods = localModifiers() withAnnotations annots
+ if (!(mods hasFlag ~(Flags.IMPLICIT | Flags.LAZY))) defOrDcl(mods)
+ else List(tmplDef(mods))
+ }
+
+ /** BlockStatSeq ::= { BlockStat semi } [ResultExpr]
+ * BlockStat ::= Import
+ * | Annotations [implicit] [lazy] Def
+ * | Annotations LocalModifiers TmplDef
+ * | Expr1
+ * |
+ */
+ def blockStatSeq(stats: ListBuffer[Tree]): List[Tree] = checkNoEscapingPlaceholders {
+ var keepGoing = true
+ var hasError = false
+ while ((inToken != RBRACE) && (inToken != EOF) && (inToken != CASE) && keepGoing) {
+ var hasError0 = hasError
+ hasError = false
+ if (inToken == IMPORT) {
+ stats ++= importClause()
+ acceptStatSep()
+ } else if (isExprIntro) {
+ stats += statement(InBlock)
+ if (inToken != RBRACE && inToken != CASE) acceptStatSep()
+ } else if (isDefIntro || isLocalModifier || in.token == AT) {
+ stats ++= localDef
+ if (inToken == RBRACE || inToken == CASE) {
+ syntaxError("block must end in result expression, not in definition", false)
+ stats += Literal(()).setPos(inCurrentPos)
+ } else acceptStatSep()
+ } else if (isStatSep) {
+ inNextToken
+ } else {
+ syntaxErrorOrIncomplete("illegal start of statement", true)
+ if (hasError0) keepGoing = false else hasError = true
+ }
+ }
+ stats.toList
+ }
+
+ /** CompilationUnit ::= [package QualId semi] TopStatSeq
+ */
+ def compilationUnit(): Tree = checkNoEscapingPlaceholders {
+ var pos = inCurrentPos;
+ {
+ val ts = new ListBuffer[Tree]
+ // @S: the IDE can insert phantom semi-colons before package during editing
+ // @S: just eat them (doesn't really change the grammar)
+ while (inToken == SEMI) inNextToken
+ if (inToken == PACKAGE) {
+ pos = inSkipToken
+ if (in.token == OBJECT) {
+ ts += makePackageObject(objectDef(NoMods))
+ if (inToken != EOF) {
+ acceptStatSep()
+ ts ++= topStatSeq()
+ }
+ } else {
+ val pkg = qualId()
+ newLineOptWhenFollowedBy(LBRACE)
+ if (inToken == EOF) {
+ ts += makePackaging(pkg, List())
+ } else if (isStatSep) {
+ inNextToken
+ ts += makePackaging(pkg, topStatSeq())
+ } else {
+ accept(LBRACE)
+ ts += makePackaging(pkg, topStatSeq())
+ accept(RBRACE)
+ ts ++= topStatSeq()
+ }
+ }
+ } else {
+ ts ++= topStatSeq()
+ }
+ val stats = ts.toList
+ val usePos =
+ if (stats.isEmpty || stats.head.pos == NoPosition) i2p(pos)
+ else stats.head.pos
+ atPos(usePos) { stats match {
+ case List(stat @ PackageDef(_, _)) => stat
+ case _ => makePackaging(Ident(nme.EMPTY_PACKAGE_NAME), stats)
+ }}
+ }
+ }
+ }
+}
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Scanners1.scala b/src/compiler/scala/tools/nsc/ast/parser/Scanners1.scala
new file mode 100755
index 0000000000..c0e425e494
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/ast/parser/Scanners1.scala
@@ -0,0 +1,971 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2009 LAMP/EPFL
+ * @author Martin Odersky
+ */
+// $Id: Scanners.scala 17274 2009-03-10 11:39:04Z michelou $
+
+package scala.tools.nsc.ast.parser
+
+import scala.tools.nsc.util._
+import SourceFile.{LF, FF, CR, SU}
+import Tokens._
+import scala.annotation.switch
+
+trait Scanners1 {
+ val global : Global
+ import global._
+
+ /** Offset into source character array */
+ type Offset = Int
+
+ /** An undefined offset */
+ val NoOffset: Offset = -1
+
+ trait TokenData {
+
+ /** the next token */
+ var token: Int = EMPTY
+
+ /** the offset of the first character of the current token */
+ var offset: Offset = 0
+
+ /** the offset of the character following the token preceding this one */
+ var lastOffset: Offset = 0
+
+ /** the name of an identifier */
+ var name: Name = null
+
+ /** the string value of a literal */
+ var strVal: String = null
+
+ /** the base of a number */
+ var base: Int = 0
+
+ def copyFrom(td: TokenData) = {
+ this.token = td.token
+ this.offset = td.offset
+ this.lastOffset = td.lastOffset
+ this.name = td.name
+ this.strVal = td.strVal
+ this.base = td.base
+ }
+ }
+
+ abstract class Scanner extends CharArrayReader1 with TokenData {
+
+ def flush = { charOffset = offset; nextChar(); this }
+
+ def resume(lastCode: Int) = {
+ token = lastCode
+ assert(next.token == EMPTY)
+ nextToken()
+ }
+
+ // things to fill in, in addition to buf, decodeUni
+ def warning(off: Offset, msg: String): Unit
+ def error (off: Offset, msg: String): Unit
+ def incompleteInputError(off: Offset, msg: String): Unit
+ def deprecationWarning(off: Offset, msg: String): Unit
+
+ /** the last error offset
+ */
+ var errOffset: Offset = NoOffset
+
+ /** A character buffer for literals
+ */
+ val cbuf = new StringBuilder
+
+ /** append Unicode character to "cbuf" buffer
+ */
+ protected def putChar(c: Char) {
+// assert(cbuf.size < 10000, cbuf)
+ cbuf.append(c)
+ }
+
+ /** Clear buffer and set name and token */
+ private def finishNamed() {
+ name = newTermName(cbuf.toString)
+ token = name2token(name)
+ cbuf.clear()
+ }
+
+ /** Clear buffer and set string */
+ private def setStrVal() {
+ strVal = cbuf.toString
+ cbuf.clear()
+ }
+
+ /** Should doc comments be built? */
+ def buildDocs: Boolean = onlyPresentation
+
+ /** buffer for the documentation comment
+ */
+ var docBuffer: StringBuilder = null
+
+ /** Return current docBuffer and set docBuffer to null */
+ def flushDoc = {
+ val ret = if (docBuffer != null) docBuffer.toString else null
+ docBuffer = null
+ ret
+ }
+
+ /** add the given character to the documentation buffer
+ */
+ protected def putDocChar(c: Char) {
+ if (docBuffer ne null) docBuffer.append(c)
+ }
+
+ private class TokenData0 extends TokenData
+
+ /** we need one token lookahead and one token history
+ */
+ val next : TokenData = new TokenData0
+ val prev : TokenData = new TokenData0
+
+ /** a stack of tokens which indicates whether line-ends can be statement separators
+ */
+ var sepRegions: List[Int] = List()
+
+// Get next token ------------------------------------------------------------
+
+ /** read next token and return last offset
+ */
+ def skipToken(): Offset = {
+ val off = offset
+ nextToken()
+ off
+ }
+
+ /** Produce next token, filling TokenData fields of Scanner.
+ */
+ def nextToken() {
+ val lastToken = token
+ // Adapt sepRegions according to last token
+ (lastToken: @switch) match {
+ case LPAREN =>
+ sepRegions = RPAREN :: sepRegions
+ case LBRACKET =>
+ sepRegions = RBRACKET :: sepRegions
+ case LBRACE =>
+ sepRegions = RBRACE :: sepRegions
+ case CASE =>
+ sepRegions = ARROW :: sepRegions
+ case RBRACE =>
+ sepRegions = sepRegions dropWhile (_ != RBRACE)
+ if (!sepRegions.isEmpty) sepRegions = sepRegions.tail
+ case RBRACKET | RPAREN | ARROW =>
+ if (!sepRegions.isEmpty && sepRegions.head == lastToken)
+ sepRegions = sepRegions.tail
+ case _ =>
+ }
+
+ // Read a token or copy it from `next` tokenData
+ if (next.token == EMPTY) {
+ lastOffset = charOffset - 1
+ fetchToken()
+ } else {
+ this copyFrom next
+ next.token = EMPTY
+ }
+
+ /** Insert NEWLINE or NEWLINES if
+ * - we are after a newline
+ * - we are within a { ... } or on toplevel (wrt sepRegions)
+ * - the current token can start a statement and the one before can end it
+ * insert NEWLINES if we are past a blank line, NEWLINE otherwise
+ */
+ if (afterLineEnd() && inLastOfStat(lastToken) && inFirstOfStat(token) &&
+ (sepRegions.isEmpty || sepRegions.head == RBRACE)) {
+ next copyFrom this
+ offset = if (lineStartOffset <= offset) lineStartOffset else lastLineStartOffset
+ token = if (pastBlankLine()) NEWLINES else NEWLINE
+ }
+
+ // Join CASE + CLASS => CASECLASS, CASE + OBJECT => CASEOBJECT, SEMI + ELSE => ELSE
+ if (token == CASE) {
+ prev copyFrom this
+ val nextLastOffset = charOffset - 1
+ fetchToken()
+ if (token == CLASS) {
+ token = CASECLASS
+ } else if (token == OBJECT) {
+ token = CASEOBJECT
+ } else {
+ lastOffset = nextLastOffset
+ next copyFrom this
+ this copyFrom prev
+ }
+ } else if (token == SEMI) {
+ prev copyFrom this
+ fetchToken()
+ if (token != ELSE) {
+ next copyFrom this
+ this copyFrom prev
+ }
+ }
+
+// print("["+this+"]")
+ }
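+ // e.g. `case class` and `case object` are fused into the single tokens CASECLASS
+ // and CASEOBJECT, and a `;` immediately before `else` is dropped.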
+
+ /** Is current token first one after a newline? */
+ private def afterLineEnd(): Boolean =
+ lastOffset < lineStartOffset &&
+ (lineStartOffset <= offset ||
+ lastOffset < lastLineStartOffset && lastLineStartOffset <= offset)
+
+ /** Is there a blank line between the current token and the last one?
+ * @pre afterLineEnd().
+ */
+ private def pastBlankLine(): Boolean = {
+ var idx = lastOffset
+ var ch = buf(idx)
+ val end = offset
+ while (idx < end) {
+ if (ch == LF || ch == FF) {
+ do {
+ idx += 1; ch = buf(idx)
+ if (ch == LF || ch == FF) {
+// println("blank line found at "+lastOffset+":"+(lastOffset to idx).map(buf(_)).toList)
+ return true
+ }
+ } while (idx < end && ch <= ' ')
+ }
+ idx += 1; ch = buf(idx)
+ }
+ false
+ }
+
+ /** read next token, filling TokenData fields of Scanner.
+ */
+ private final def fetchToken() {
+ offset = charOffset - 1
+ (ch: @switch) match {
+ case ' ' | '\t' | CR | LF | FF =>
+ nextChar()
+ fetchToken()
+ case 'A' | 'B' | 'C' | 'D' | 'E' |
+ 'F' | 'G' | 'H' | 'I' | 'J' |
+ 'K' | 'L' | 'M' | 'N' | 'O' |
+ 'P' | 'Q' | 'R' | 'S' | 'T' |
+ 'U' | 'V' | 'W' | 'X' | 'Y' |
+ 'Z' | '$' | '_' |
+ 'a' | 'b' | 'c' | 'd' | 'e' |
+ 'f' | 'g' | 'h' | 'i' | 'j' |
+ 'k' | 'l' | 'm' | 'n' | 'o' |
+ 'p' | 'q' | 'r' | 's' | 't' |
+ 'u' | 'v' | 'w' | 'x' | 'y' | // scala-mode: need to understand multi-line case patterns
+ 'z' =>
+ putChar(ch)
+ nextChar()
+ getIdentRest() // scala-mode: wrong indent for multi-line case blocks
+ case '<' => // is XMLSTART?
+ val last = if (charOffset >= 2) buf(charOffset - 2) else ' '
+ nextChar()
+ last match {
+ case ' '|'\t'|'\n'|'{'|'('|'>' if xml.Parsing.isNameStart(ch) || ch == '!' || ch == '?' =>
+ token = XMLSTART
+ case _ =>
+ // Console.println("found '<', but last is '"+in.last+"'"); // DEBUG
+ putChar('<')
+ getOperatorRest()
+ }
+ case '~' | '!' | '@' | '#' | '%' |
+ '^' | '*' | '+' | '-' | /*'<' | */
+ '>' | '?' | ':' | '=' | '&' |
+ '|' | '\\' =>
+ putChar(ch)
+ nextChar()
+ getOperatorRest()
+ case '/' =>
+ nextChar()
+ if (skipComment()) {
+ fetchToken()
+ } else {
+ putChar('/')
+ getOperatorRest()
+ }
+ case '0' =>
+ putChar(ch)
+ nextChar()
+ if (ch == 'x' || ch == 'X') {
+ nextChar()
+ base = 16
+ } else {
+ base = 8
+ }
+ getNumber()
+ case '1' | '2' | '3' | '4' | '5' | '6' | '7' | '8' | '9' =>
+ base = 10
+ getNumber()
+ case '`' =>
+ nextChar()
+ if (getStringLit('`')) {
+ finishNamed();
+ if (name.length == 0) syntaxError("empty quoted identifier")
+ token = BACKQUOTED_IDENT
+ }
+ else syntaxError("unclosed quoted identifier")
+ case '\"' =>
+ nextChar()
+ if (ch == '\"') {
+ nextChar()
+ if (ch == '\"') {
+ nextChar()
+ val saved = lineStartOffset
+ getMultiLineStringLit()
+ if (lineStartOffset != saved) // ignore linestarts within a multi-line string
+ lastLineStartOffset = saved
+ } else {
+ token = STRINGLIT
+ strVal = ""
+ }
+ } else if (getStringLit('\"')) {
+ setStrVal()
+ token = STRINGLIT
+ } else {
+ syntaxError("unclosed string literal")
+ }
+ case '\'' =>
+ nextChar()
+ if (isIdentifierStart(ch) || '0' <= ch && ch <= '9')
+ charLitOr(getIdentRest)
+ else if (isSpecial(ch))
+ charLitOr(getOperatorRest)
+ else {
+ getLitChar()
+ if (ch == '\'') {
+ nextChar()
+ token = CHARLIT
+ setStrVal()
+ } else {
+ syntaxError("unclosed character literal")
+ }
+ }
+ case '.' =>
+ nextChar()
+ if ('0' <= ch && ch <= '9') {
+ putChar('.'); getFraction()
+ } else {
+ token = DOT
+ }
+ case ';' =>
+ nextChar(); token = SEMI
+ case ',' =>
+ nextChar(); token = COMMA
+ case '(' =>
+ nextChar(); token = LPAREN
+ case '{' =>
+ nextChar(); token = LBRACE
+ case ')' =>
+ nextChar(); token = RPAREN
+ case '}' =>
+ nextChar(); token = RBRACE
+ case '[' =>
+ nextChar(); token = LBRACKET
+ case ']' =>
+ nextChar(); token = RBRACKET
+ case SU =>
+ if (charOffset >= buf.length) token = EOF
+ else {
+ syntaxError("illegal character")
+ nextChar()
+ }
+ case _ =>
+ if (ch == '\u21D2') {
+ nextChar(); token = ARROW
+ } else if (ch == '\u2190') {
+ nextChar(); token = LARROW
+ } else if (Character.isUnicodeIdentifierStart(ch)) {
+ putChar(ch)
+ nextChar()
+ getIdentRest()
+ } else if (isSpecial(ch)) {
+ putChar(ch)
+ getOperatorRest()
+ } else {
+ syntaxError("illegal character")
+ nextChar()
+ }
+ }
+ }
+
+ private def skipComment(): Boolean = {
+ if (ch == '/') {
+ do {
+ nextChar()
+ } while ((ch != CR) && (ch != LF) && (ch != SU))
+ true
+ } else if (ch == '*') {
+ docBuffer = null
+ var openComments = 1
+ nextChar()
+ if (ch == '*' && buildDocs)
+ docBuffer = new StringBuilder("/**")
+ while (openComments > 0) {
+ do {
+ do {
+ if (ch == '/') {
+ nextChar(); putDocChar(ch)
+ if (ch == '*') {
+ nextChar(); putDocChar(ch)
+ openComments += 1
+ }
+ }
+ if (ch != '*' && ch != SU) {
+ nextChar(); putDocChar(ch)
+ }
+ } while (ch != '*' && ch != SU)
+ while (ch == '*') {
+ nextChar(); putDocChar(ch)
+ }
+ } while (ch != '/' && ch != SU)
+ if (ch == '/') nextChar()
+ else incompleteInputError("unclosed comment")
+ openComments -= 1
+ }
+ true
+ } else {
+ false
+ }
+ }
+
+ /** Can token start a statement? */
+ def inFirstOfStat(token: Int) = token match {
+ case EOF | CATCH | ELSE | EXTENDS | FINALLY | FORSOME | MATCH | WITH | YIELD |
+ COMMA | SEMI | NEWLINE | NEWLINES | DOT | COLON | EQUALS | ARROW | LARROW |
+ SUBTYPE | VIEWBOUND | SUPERTYPE | HASH | RPAREN | RBRACKET | RBRACE | LBRACKET =>
+ false
+ case _ =>
+ true
+ }
+
+ /** Can token end a statement? */
+ def inLastOfStat(token: Int) = token match {
+ case CHARLIT | INTLIT | LONGLIT | FLOATLIT | DOUBLELIT | STRINGLIT | SYMBOLLIT |
+ IDENTIFIER | BACKQUOTED_IDENT | THIS | NULL | TRUE | FALSE | RETURN | USCORE |
+ TYPE | XMLSTART | RPAREN | RBRACKET | RBRACE =>
+ true
+ case _ =>
+ false
+ }
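+
+ // Illustrative note (not part of the original patch): these two predicates gate the
+ // newline insertion above. A line ending in '=' gets no inferred semicolon because
+ // inLastOfStat(EQUALS) is false (so `def f =` may be followed by its body on the
+ // next line), and a line starting with '.' or `else` gets none because
+ // inFirstOfStat returns false for DOT and ELSE (so leading-dot call chains stay
+ // one statement).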
+
+// Identifiers ---------------------------------------------------------------
+
+ private def getIdentRest(): Unit = (ch: @switch) match {
+ case 'A' | 'B' | 'C' | 'D' | 'E' |
+ 'F' | 'G' | 'H' | 'I' | 'J' |
+ 'K' | 'L' | 'M' | 'N' | 'O' |
+ 'P' | 'Q' | 'R' | 'S' | 'T' |
+ 'U' | 'V' | 'W' | 'X' | 'Y' |
+ 'Z' | '$' |
+ 'a' | 'b' | 'c' | 'd' | 'e' |
+ 'f' | 'g' | 'h' | 'i' | 'j' |
+ 'k' | 'l' | 'm' | 'n' | 'o' |
+ 'p' | 'q' | 'r' | 's' | 't' |
+ 'u' | 'v' | 'w' | 'x' | 'y' |
+ 'z' |
+ '0' | '1' | '2' | '3' | '4' |
+ '5' | '6' | '7' | '8' | '9' =>
+ putChar(ch)
+ nextChar()
+ getIdentRest()
+ case '_' =>
+ putChar(ch)
+ nextChar()
+ getIdentOrOperatorRest()
+ case SU => // strangely enough, Character.isUnicodeIdentifierPart(SU) returns true!
+ finishNamed()
+ case _ =>
+ if (Character.isUnicodeIdentifierPart(ch)) {
+ putChar(ch)
+ nextChar()
+ getIdentRest()
+ } else {
+ finishNamed()
+ }
+ }
+
+ private def getOperatorRest(): Unit = (ch: @switch) match {
+ case '~' | '!' | '@' | '#' | '%' |
+ '^' | '*' | '+' | '-' | '<' |
+ '>' | '?' | ':' | '=' | '&' |
+ '|' | '\\' =>
+ putChar(ch); nextChar(); getOperatorRest()
+ case '/' =>
+ nextChar()
+ if (skipComment()) finishNamed()
+ else { putChar('/'); getOperatorRest() }
+ case _ =>
+ if (isSpecial(ch)) { putChar(ch); nextChar(); getOperatorRest() }
+ else finishNamed()
+ }
+
+ private def getIdentOrOperatorRest() {
+ if (isIdentifierPart(ch))
+ getIdentRest()
+ else ch match {
+ case '~' | '!' | '@' | '#' | '%' |
+ '^' | '*' | '+' | '-' | '<' |
+ '>' | '?' | ':' | '=' | '&' |
+ '|' | '\\' | '/' =>
+ getOperatorRest()
+ case _ =>
+ if (isSpecial(ch)) getOperatorRest()
+ else finishNamed()
+ }
+ }
+
+ private def getStringLit(delimiter: Char): Boolean = {
+ while (ch != delimiter && (isUnicodeEscape || ch != CR && ch != LF && ch != SU)) {
+ getLitChar()
+ }
+ if (ch == delimiter) { nextChar(); true }
+ else false
+ }
+
+ private def getMultiLineStringLit() {
+ if (ch == '\"') {
+ nextChar()
+ if (ch == '\"') {
+ nextChar()
+ if (ch == '\"') {
+ nextChar()
+ token = STRINGLIT
+ setStrVal()
+ } else {
+ putChar('\"')
+ putChar('\"')
+ getMultiLineStringLit()
+ }
+ } else {
+ putChar('\"')
+ getMultiLineStringLit()
+ }
+ } else if (ch == SU) {
+ incompleteInputError("unclosed multi-line string literal")
+ } else {
+ putChar(ch)
+ nextChar()
+ getMultiLineStringLit()
+ }
+ }
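+
+ // Illustrative note (not part of the original patch): only three consecutive double
+ // quotes terminate the literal, so scanning
+ //   """He said "hi"."""
+ // yields a STRINGLIT whose strVal is
+ //   He said "hi".
+ // with the embedded quotes kept verbatim.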
+
+// Literals -----------------------------------------------------------------
+
+ /** Read the next character of a character or string literal.
+ */
+ protected def getLitChar() =
+ if (ch == '\\') {
+ nextChar()
+ if ('0' <= ch && ch <= '7') {
+ val leadch: Char = ch
+ var oct: Int = digit2int(ch, 8)
+ nextChar()
+ if ('0' <= ch && ch <= '7') {
+ oct = oct * 8 + digit2int(ch, 8)
+ nextChar()
+ if (leadch <= '3' && '0' <= ch && ch <= '7') {
+ oct = oct * 8 + digit2int(ch, 8)
+ nextChar()
+ }
+ }
+ putChar(oct.toChar)
+ } else {
+ ch match {
+ case 'b' => putChar('\b')
+ case 't' => putChar('\t')
+ case 'n' => putChar('\n')
+ case 'f' => putChar('\f')
+ case 'r' => putChar('\r')
+ case '\"' => putChar('\"')
+ case '\'' => putChar('\'')
+ case '\\' => putChar('\\')
+ case _ =>
+ syntaxError(charOffset - 1, "invalid escape character")
+ putChar(ch)
+ }
+ nextChar()
+ }
+ } else {
+ putChar(ch)
+ nextChar()
+ }
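+
+ // Illustrative note (not part of the original patch): inside a character or string
+ // literal the escape \n is stored as a newline character, the octal escape \101 is
+ // folded to 'A' (65), and an unknown escape such as \q reports
+ // "invalid escape character" and keeps the escaped character itself.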
+
+ /** read fractional part and exponent of floating point number
+ * if one is present.
+ */
+ protected def getFraction() {
+ token = DOUBLELIT
+ while ('0' <= ch && ch <= '9') {
+ putChar(ch)
+ nextChar()
+ }
+ if (ch == 'e' || ch == 'E') {
+ val lookahead = lookaheadReader
+ lookahead.nextChar()
+ if (lookahead.ch == '+' || lookahead.ch == '-') {
+ lookahead.nextChar()
+ }
+ if ('0' <= lookahead.ch && lookahead.ch <= '9') {
+ putChar(ch)
+ nextChar()
+ if (ch == '+' || ch == '-') {
+ putChar(ch)
+ nextChar()
+ }
+ while ('0' <= ch && ch <= '9') {
+ putChar(ch)
+ nextChar()
+ }
+ }
+ token = DOUBLELIT
+ }
+ if (ch == 'd' || ch == 'D') {
+ putChar(ch)
+ nextChar()
+ token = DOUBLELIT
+ } else if (ch == 'f' || ch == 'F') {
+ putChar(ch)
+ nextChar()
+ token = FLOATLIT
+ }
+ checkNoLetter()
+ setStrVal()
+ }
+
+ /** Convert current strVal to char value
+ */
+ def charVal: Char = if (strVal.length > 0) strVal.charAt(0) else 0
+
+ /** Convert current strVal and base to a long value.
+ * This is tricky because of the maximal negative value.
+ */
+ def intVal(negated: Boolean): Long = {
+ if (token == CHARLIT && !negated) {
+ charVal
+ } else {
+ var value: Long = 0
+ val divider = if (base == 10) 1 else 2
+ val limit: Long =
+ if (token == LONGLIT) Math.MAX_LONG else Math.MAX_INT
+ var i = 0
+ val len = strVal.length
+ while (i < len) {
+ val d = digit2int(strVal charAt i, base)
+ if (d < 0) {
+ syntaxError("malformed integer number")
+ return 0
+ }
+ if (value < 0 ||
+ limit / (base / divider) < value ||
+ limit - (d / divider) < value * (base / divider) &&
+ !(negated && limit == value * base - 1 + d)) {
+ syntaxError("integer number too large")
+ return 0
+ }
+ value = value * base + d
+ i += 1
+ }
+ if (negated) -value else value
+ }
+ }
+
+ def intVal: Long = intVal(false)
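+
+ // Worked example (illustrative, not part of the original patch): for the base-10
+ // literal "2147483648" with token == INTLIT, limit = Math.MAX_INT and divider = 1.
+ // After the first nine digits value == 214748364; for the last digit d == 8 we get
+ //   limit - d == 2147483639 < value * base == 2147483640
+ // so "integer number too large" is reported, unless `negated` holds and
+ //   value * base - 1 + d == limit
+ // which is exactly the Int.MinValue case: -2147483648 is still accepted.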
+
+ /** Convert current strVal to a double value.
+ */
+ def floatVal(negated: Boolean): Double = {
+ val limit: Double =
+ if (token == DOUBLELIT) Math.MAX_DOUBLE else Math.MAX_FLOAT
+ try {
+ val value: Double = java.lang.Double.valueOf(strVal).doubleValue()
+ if (value > limit)
+ syntaxError("floating point number too large")
+ if (negated) -value else value
+ } catch {
+ case _: NumberFormatException =>
+ syntaxError("malformed floating point number")
+ 0.0
+ }
+ }
+
+ def floatVal: Double = floatVal(false)
+
+ def checkNoLetter() {
+ if (isIdentifierPart(ch) && ch >= ' ')
+ syntaxError("Invalid literal number")
+ }
+
+ /** Read a number into strVal and set base
+ */
+ protected def getNumber() {
+ val base1 = if (base < 10) 10 else base
+ // read digits 8 and 9 even if the format is octal; a malformed number error is produced later (in intVal).
+ while (digit2int(ch, base1) >= 0) {
+ putChar(ch)
+ nextChar()
+ }
+ token = INTLIT
+ if (base <= 10 && ch == '.') {
+ val lookahead = lookaheadReader
+ lookahead.nextChar()
+ lookahead.ch match {
+ case '0' | '1' | '2' | '3' | '4' | '5' | '6' | '7' |
+ '8' | '9' | 'd' | 'D' | 'e' | 'E' | 'f' | 'F' =>
+ putChar(ch)
+ nextChar()
+ return getFraction()
+ case _ =>
+ if (!isIdentifierStart(lookahead.ch)) {
+ putChar(ch)
+ nextChar()
+ return getFraction()
+ }
+ }
+ }
+ if (base <= 10 &&
+ (ch == 'e' || ch == 'E' ||
+ ch == 'f' || ch == 'F' ||
+ ch == 'd' || ch == 'D')) {
+ return getFraction()
+ }
+ setStrVal()
+ if (ch == 'l' || ch == 'L') {
+ nextChar()
+ token = LONGLIT
+ } else checkNoLetter()
+ }
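+
+ // Illustrative note (not part of the original patch): `base` is set by fetchToken
+ // before getNumber runs, so "0x1F" is scanned with base 16 into an INTLIT with
+ // value 31, "3.14e2" falls through to getFraction and becomes a DOUBLELIT, and a
+ // trailing 'L' as in "42L" turns the token into a LONGLIT.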
+
+ /** Parse a character literal if the current character is followed by \',
+ * otherwise scan on with the given op and return a symbol literal token.
+ */
+ def charLitOr(op: () => Unit) {
+ putChar(ch)
+ nextChar()
+ if (ch == '\'') {
+ nextChar()
+ token = CHARLIT
+ setStrVal()
+ } else {
+ op()
+ token = SYMBOLLIT
+ strVal = name.toString
+ }
+ }
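+
+ // Illustrative note (not part of the original patch): via the '\'' case of
+ // fetchToken, the input 'a' (with a closing quote) becomes a CHARLIT, whereas 'abc
+ // with no closing quote runs getIdentRest and becomes a SYMBOLLIT with strVal "abc".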
+
+// Errors -----------------------------------------------------------------
+
+ /** generate an error at the given offset
+ */
+ def syntaxError(off: Offset, msg: String) {
+ error(off, msg)
+ token = ERROR
+ errOffset = off
+ }
+
+ /** generate an error at the current token offset
+ */
+ def syntaxError(msg: String): Unit = syntaxError(offset, msg)
+
+ /** signal an error where the input ended in the middle of a token */
+ def incompleteInputError(msg: String) {
+ incompleteInputError(offset, msg)
+ token = EOF
+ errOffset = offset
+ }
+
+ override def toString() = token match {
+ case IDENTIFIER | BACKQUOTED_IDENT =>
+ "id(" + name + ")"
+ case CHARLIT =>
+ "char(" + intVal + ")"
+ case INTLIT =>
+ "int(" + intVal + ")"
+ case LONGLIT =>
+ "long(" + intVal + ")"
+ case FLOATLIT =>
+ "float(" + floatVal + ")"
+ case DOUBLELIT =>
+ "double(" + floatVal + ")"
+ case STRINGLIT =>
+ "string(" + strVal + ")"
+ case SEMI =>
+ ";"
+ case NEWLINE =>
+ ";"
+ case NEWLINES =>
+ ";;"
+ case COMMA =>
+ ","
+ case _ =>
+ token2string(token)
+ }
+
+ /** Initialization method: read first char, then first token
+ */
+ def init() {
+ nextChar()
+ nextToken()
+ }
+ } // end Scanner
+
+ // ------------- character classification --------------------------------
+
+ def isIdentifierStart(c: Char): Boolean = (
+ ('A' <= c && c <= 'Z') ||
+ ('a' <= c && c <= 'z') ||
+ (c == '_') || (c == '$') ||
+ Character.isUnicodeIdentifierStart(c)
+ )
+
+ def isIdentifierPart(c: Char) = (
+ isIdentifierStart(c) ||
+ ('0' <= c && c <= '9') ||
+ Character.isUnicodeIdentifierPart(c)
+ )
+
+ def isSpecial(c: Char) = {
+ val chtp = Character.getType(c)
+ chtp == Character.MATH_SYMBOL || chtp == Character.OTHER_SYMBOL
+ }
+
+ def isOperatorPart(c : Char) : Boolean = (c: @switch) match {
+ case '~' | '!' | '@' | '#' | '%' |
+ '^' | '*' | '+' | '-' | '<' |
+ '>' | '?' | ':' | '=' | '&' |
+ '|' | '/' | '\\' => true
+ case c => isSpecial(c)
+ }
+
+ // ------------- keyword configuration -----------------------------------
+
+ /** Keyword array; maps from name indices to tokens */
+ private var keyCode: Array[Byte] = _
+ /** The highest name index of a keyword token */
+ private var maxKey = 0
+ /** An array of all keyword token names */
+ private var keyName = new Array[Name](128)
+ /** The highest keyword token plus one */
+ private var tokenCount = 0
+
+ /** Enter keyword with given name and token id */
+ protected def enterKeyword(n: Name, tokenId: Int) {
+ while (tokenId >= keyName.length) {
+ val newTokName = new Array[Name](keyName.length * 2)
+ Array.copy(keyName, 0, newTokName, 0, keyName.length)
+ keyName = newTokName
+ }
+ keyName(tokenId) = n
+ if (n.start > maxKey) maxKey = n.start
+ if (tokenId >= tokenCount) tokenCount = tokenId + 1
+ }
+
+ /** Enter all keywords */
+ protected def enterKeywords() {
+ enterKeyword(nme.ABSTRACTkw, ABSTRACT)
+ enterKeyword(nme.CASEkw, CASE)
+ enterKeyword(nme.CATCHkw, CATCH)
+ enterKeyword(nme.CLASSkw, CLASS)
+ enterKeyword(nme.DEFkw, DEF)
+ enterKeyword(nme.DOkw, DO)
+ enterKeyword(nme.ELSEkw, ELSE)
+ enterKeyword(nme.EXTENDSkw, EXTENDS)
+ enterKeyword(nme.FALSEkw, FALSE)
+ enterKeyword(nme.FINALkw, FINAL)
+ enterKeyword(nme.FINALLYkw, FINALLY)
+ enterKeyword(nme.FORkw, FOR)
+ enterKeyword(nme.FORSOMEkw, FORSOME)
+ enterKeyword(nme.IFkw, IF)
+ enterKeyword(nme.IMPLICITkw, IMPLICIT)
+ enterKeyword(nme.IMPORTkw, IMPORT)
+ enterKeyword(nme.LAZYkw, LAZY)
+ enterKeyword(nme.MATCHkw, MATCH)
+ enterKeyword(nme.NEWkw, NEW)
+ enterKeyword(nme.NULLkw, NULL)
+ enterKeyword(nme.OBJECTkw, OBJECT)
+ enterKeyword(nme.OVERRIDEkw, OVERRIDE)
+ enterKeyword(nme.PACKAGEkw, PACKAGE)
+ enterKeyword(nme.PRIVATEkw, PRIVATE)
+ enterKeyword(nme.PROTECTEDkw, PROTECTED)
+ enterKeyword(nme.RETURNkw, RETURN)
+ enterKeyword(nme.SEALEDkw, SEALED)
+ enterKeyword(nme.SUPERkw, SUPER)
+ enterKeyword(nme.THISkw, THIS)
+ enterKeyword(nme.THROWkw, THROW)
+ enterKeyword(nme.TRAITkw, TRAIT)
+ enterKeyword(nme.TRUEkw, TRUE)
+ enterKeyword(nme.TRYkw, TRY)
+ enterKeyword(nme.TYPEkw, TYPE)
+ enterKeyword(nme.VALkw, VAL)
+ enterKeyword(nme.VARkw, VAR)
+ enterKeyword(nme.WHILEkw, WHILE)
+ enterKeyword(nme.WITHkw, WITH)
+ enterKeyword(nme.YIELDkw, YIELD)
+ enterKeyword(nme.DOTkw, DOT)
+ enterKeyword(nme.USCOREkw, USCORE)
+ enterKeyword(nme.COLONkw, COLON)
+ enterKeyword(nme.EQUALSkw, EQUALS)
+ enterKeyword(nme.ARROWkw, ARROW)
+ enterKeyword(nme.LARROWkw, LARROW)
+ enterKeyword(nme.SUBTYPEkw, SUBTYPE)
+ enterKeyword(nme.VIEWBOUNDkw, VIEWBOUND)
+ enterKeyword(nme.SUPERTYPEkw, SUPERTYPE)
+ enterKeyword(nme.HASHkw, HASH)
+ enterKeyword(nme.ATkw, AT)
+ }
+
+ { // initialization
+ enterKeywords()
+ // Build keyword array
+ keyCode = Array.make(maxKey + 1, IDENTIFIER)
+ for (j <- 0 until tokenCount if keyName(j) ne null)
+ keyCode(keyName(j).start) = j.toByte
+ }
+
+ /** Convert name to token */
+ def name2token(name: Name): Int =
+ if (name.start <= maxKey) keyCode(name.start) else IDENTIFIER
+
+// Token representation ----------------------------------------------------
+
+ /** Returns the string representation of given token. */
+ def token2string(token: Int): String = (token: @switch) match {
+ case IDENTIFIER | BACKQUOTED_IDENT => "identifier"
+ case CHARLIT => "character literal"
+ case INTLIT => "integer literal"
+ case LONGLIT => "long literal"
+ case FLOATLIT => "float literal"
+ case DOUBLELIT => "double literal"
+ case STRINGLIT => "string literal"
+ case SYMBOLLIT => "symbol literal"
+ case LPAREN => "'('"
+ case RPAREN => "')'"
+ case LBRACE => "'{'"
+ case RBRACE => "'}'"
+ case LBRACKET => "'['"
+ case RBRACKET => "']'"
+ case EOF => "eof"
+ case ERROR => "something"
+ case SEMI => "';'"
+ case NEWLINE => "';'"
+ case NEWLINES => "';'"
+ case COMMA => "','"
+ case CASECLASS => "case class"
+ case CASEOBJECT => "case object"
+ case XMLSTART => "$XMLSTART$<"
+ case _ =>
+ if (token <= maxKey) "'" + keyName(token) + "'"
+ else "'<" + token + ">'"
+ }
+
+ /** A scanner over a given compilation unit
+ */
+ class UnitScanner(unit: CompilationUnit) extends Scanner {
+ val buf = unit.source.asInstanceOf[BatchSourceFile].content
+ val decodeUnit = !settings.nouescape.value
+ def warning(off: Offset, msg: String) = unit.warning(unit.position(off), msg)
+ def error (off: Offset, msg: String) = unit.error(unit.position(off), msg)
+ def incompleteInputError(off: Offset, msg: String) = unit.incompleteInputError(unit.position(off), msg)
+ def deprecationWarning(off: Offset, msg: String) = unit.deprecationWarning(unit.position(off), msg)
+ }
+}
diff --git a/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder1.scala b/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder1.scala
new file mode 100644
index 0000000000..c4eef84be6
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder1.scala
@@ -0,0 +1,368 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2009 LAMP/EPFL
+ * @author Burak Emir
+ */
+// $Id: SymbolicXMLBuilder.scala 16884 2009-01-09 16:52:09Z cunei $
+
+package scala.tools.nsc.ast.parser
+
+import scala.collection.mutable.{Buffer, HashMap, ListBuffer, Map}
+import scala.tools.nsc.util.Position
+import scala.xml.{EntityRef, Text}
+import symtab.Flags.MUTABLE
+
+/** This class builds instances of <code>Tree</code> that represent XML.
+ *
+ * @author Burak Emir
+ * @version 1.0
+ */
+abstract class SymbolicXMLBuilder1(make: TreeBuilder, p: Parsers1 # Parser, preserveWS: Boolean) {
+
+ val global: Global
+ import global._
+ import global.posAssigner.atPos
+
+ var isPattern: Boolean = _
+
+ def _Attribute = global.newTypeName("Attribute")
+ def _MetaData = global.newTypeName("MetaData")
+ def _NamespaceBinding = global.newTypeName("NamespaceBinding")
+ def _NodeBuffer = global.newTypeName("NodeBuffer")
+ def _Null = global.newTermName("Null")
+
+ def _PrefixedAttribute = global.newTypeName("PrefixedAttribute")
+ def _UnprefixedAttribute = global.newTypeName("UnprefixedAttribute")
+ def _Elem = global.newTypeName("Elem")
+ def __Elem = global.newTermName("Elem")
+ def _Group = global.newTypeName("Group")
+ def _Unparsed = global.newTypeName("Unparsed")
+ def _Seq = global.newTypeName("Seq")
+ def _immutable = global.newTermName("immutable")
+ def _mutable = global.newTermName("mutable")
+ def _append = global.newTermName("append")
+ def _plus = global.newTermName("$amp$plus")
+ def _collection = global.newTermName("collection")
+ def _toList = global.newTermName("toList")
+ def _xml = global.newTermName("xml")
+ def _Comment = global.newTypeName("Comment")
+ def _Node = global.newTypeName("Node")
+ def _None = global.newTermName("None")
+ def _Some = global.newTypeName("Some")
+ def _ProcInstr = global.newTypeName("ProcInstr")
+ def _Text = global.newTypeName("Text")
+ def __Text = global.newTermName("Text")
+ def _EntityRef = global.newTypeName("EntityRef")
+
+ final def _buf = global.newTermName("$buf")
+ final def _md = global.newTermName("$md")
+ final def _scope = global.newTermName("$scope")
+ final def _tmpscope = global.newTermName("$tmpscope")
+
+ // convenience methods
+ private def LL[A](x: A*): List[List[A]] = List(List(x:_*))
+
+ private def _scala(name: Name) =
+ Select(Select(Ident(nme.ROOTPKG), nme.scala_), name)
+
+ private def _scala_Seq = _scala(_Seq)
+ private def _scala_xml(name: Name) = Select(_scala(_xml), name)
+
+ private def _scala_xml_MetaData = _scala_xml(_MetaData)
+ private def _scala_xml_NamespaceBinding = _scala_xml(_NamespaceBinding)
+ private def _scala_xml_Null = _scala_xml(_Null)
+ private def _scala_xml_PrefixedAttribute = _scala_xml(_PrefixedAttribute)
+ private def _scala_xml_UnprefixedAttribute= _scala_xml(_UnprefixedAttribute)
+ private def _scala_xml_Node = _scala_xml(_Node)
+ private def _scala_xml_NodeBuffer = _scala_xml(_NodeBuffer)
+ private def _scala_xml_EntityRef = _scala_xml(_EntityRef)
+ private def _scala_xml_Comment = _scala_xml(_Comment)
+ private def _scala_xml_ProcInstr = _scala_xml(_ProcInstr)
+ private def _scala_xml_Text = _scala_xml(_Text)
+ private def _scala_xml__Text = _scala_xml(__Text)
+ private def _scala_xml_Elem = _scala_xml(_Elem)
+ private def _scala_xml__Elem = _scala_xml(__Elem)
+ private def _scala_xml_Attribute = _scala_xml(_Attribute)
+ private def _scala_xml_Group = _scala_xml(_Group)
+ private def _scala_xml_Unparsed = _scala_xml(_Unparsed)
+
+ // create scala xml tree
+
+ /**
+ * @param pre a Tree of type defs.STRING_TYPE (the namespace prefix)
+ * @param label a Tree of type defs.STRING_TYPE
+ * @todo attrs: a map of attributes !!!
+ */
+
+ protected def mkXML(pos: Position, isPattern: Boolean, pre: Tree, label: Tree, attrs: /*Array[*/Tree/*]*/ , scope:Tree, children: Buffer[Tree]): Tree = {
+ if (isPattern) {
+ convertToTextPat(children)
+ atPos (pos) { //@todo maybe matching on attributes, scope?
+ Apply( _scala_xml__Elem, List(
+ pre, label, Ident(nme.WILDCARD) /* md */ , Ident(nme.WILDCARD)) /* scope */ ::: children.toList )
+ }
+ } else {
+ var ab = List(pre, label, attrs, scope)
+ if (children.length > 0)
+ ab = ab ::: List(Typed(makeXMLseq(pos, children), Ident(nme.WILDCARD_STAR.toTypeName)));
+ atPos(pos) { New( _scala_xml_Elem, List(ab) )}
+ }
+ }
+
+ final def entityRef(pos: Position, n: String) = {
+ atPos(pos) { New( _scala_xml_EntityRef, LL(Literal(Constant( n )))) }
+ }
+ // create scala.xml.Text here <: scala.xml.Node
+ final def text(pos: Position, txt:String): Tree = {
+ //makeText( isPattern, gen.mkStringLit( txt ))
+ val txt1 = Literal(Constant(txt))
+ atPos(pos) {
+ if (isPattern)
+ makeTextPat(txt1)
+ else
+ makeText1(txt1)
+ }
+ }
+
+ // create scala.xml.Text here <: scala.xml.Node
+ def makeTextPat(txt: Tree) = Apply(_scala_xml__Text, List(txt))
+
+ def makeText1(txt: Tree) =
+ New(_scala_xml_Text, LL(txt))
+
+ // create a scala.xml.Comment node
+ def comment(pos: Position, text: String): Tree =
+ atPos(pos) { Comment( Literal(Constant(text))) }
+
+ // create character data, represented as a Text node
+ def charData(pos: Position, txt: String): Tree =
+ atPos(pos) { makeText1(Literal(Constant(txt))) }; //{ CharData( Literal(Constant(txt))) };
+
+ // create a scala.xml.ProcInstr node
+ def procInstr( pos: Position, target: String, txt: String ) =
+ atPos(pos) { ProcInstr(Literal(Constant(target)), Literal(Constant(txt))) }
+
+ protected def Comment(txt: Tree) = New(_scala_xml_Comment, LL(txt))
+
+ protected def ProcInstr(target: Tree, txt: Tree) =
+ New(_scala_xml_ProcInstr, LL(target, txt))
+
+ /** @todo: attributes */
+ def makeXMLpat(pos: Position, n: String, args: Buffer[Tree]): Tree = {
+ val (prepat, labpat) = n.indexOf(':') match {
+ case -1 => (Ident(nme.WILDCARD), Literal(Constant(n)))
+ //case 0 => // is erroneous, but cannot happen
+ case i => //if(i+1<n.length) // we ensure i+1<n.length in method xName
+ (Literal(Constant(n.substring(0,i))), Literal(Constant(n.substring(i+1,n.length))))
+ //else { p.syntaxError(pos,"nonsensical qualified name in XML"); return Ident(nme.WILDCARD).setPos(pos)}
+ }
+ mkXML(pos,
+ true,
+ prepat, //Ident( nme.WILDCARD ),
+ labpat, //Literal(Constant(n)),
+ null, //Array[Tree](),
+ null,
+ args);
+ }
+
+ protected def convertToTextPat(t: Tree): Tree = t match {
+ case _:Literal => makeTextPat(t)
+ case _ => t
+ }
+
+ def parseAttribute(pos: Position, s: String): Tree = {
+ val ns = xml.Utility.parseAttributeValue(s)
+ val ts: ListBuffer[Tree] = new ListBuffer
+ val it = ns.elements
+ while (it.hasNext) it.next match {
+ case Text(s) => ts += text(pos, s) // makeText1(Literal(Constant(s)))
+ case EntityRef(s) => ts += entityRef(pos, s)
+ }
+ ts.length match {
+ case 0 => gen.mkNil
+ case 1 => val t = ts(0); ts.clear; t
+ case _ => makeXMLseq(pos, ts)
+ }
+ }
+
+ protected def convertToTextPat(buf: Buffer[Tree]) {
+ var i = 0; while (i < buf.length) {
+ val t1 = buf(i)
+ val t2 = convertToTextPat(t1)
+ if (!t1.eq(t2)) {
+ buf.remove(i)
+ buf.insert(i, t2)
+ }
+ i += 1
+ }
+ }
+
+ def freshName(prefix: String): Name
+
+ def isEmptyText(t: Tree) = t match {
+ case Literal(Constant("")) => true
+ case _ => false
+ }
+
+ // could optimize if args.length == 0, args.length == 1 AND args(0) is <: Node.
+ def makeXMLseq(pos: Position, args: Buffer[Tree] ) = {
+ //var _buffer = New( _scala_xml_NodeBuffer, List(Nil))
+
+ var as:List[Tree] = ValDef(NoMods, _buf, TypeTree(), New( _scala_xml_NodeBuffer, List(Nil)))::Nil
+ val it = args.elements
+ while (it.hasNext) {
+ val t = it.next
+ if (!isEmptyText(t)) {
+ //_buffer = Apply(Select(_buffer, _plus), List(t))
+ as = Apply(Select(Ident(_buf), _plus), List(t))::as
+ }
+ }
+ //atPos(pos) { Select(_buffer, _toList) }
+
+ atPos(pos) {
+ Block(as.reverse, Ident(_buf))
+ }
+ }
+ /** returns Some(prefix) if pre:name, None otherwise */
+ def getPrefix(name: String): Option[String] = {
+ val i = name.indexOf(':')
+ if (i != -1) Some(name.substring(0, i)) else None
+ }
+
+ def group(pos: Position, args: Buffer[Tree]): Tree = {
+ atPos(pos) { New( _scala_xml_Group, LL( makeXMLseq(pos, args))) }
+ }
+
+ /** code that constructs an unparsed node
+ */
+ def unparsed(pos: Position, str: String): Tree = {
+ atPos(pos) { New( _scala_xml_Unparsed, LL( Literal(Constant(str)))) }
+ }
+
+ /** makes an element */
+ def element(pos: Position, qname: String, attrMap: Map[String,Tree], args: Buffer[Tree]): Tree = {
+ //Console.println("SymbolicXMLBuilder::element("+pos+","+qname+","+attrMap+","+args+")");
+ var setNS = new HashMap[String, Tree]
+
+ var tlist: List[Tree] = List()
+
+ /* pre can be null */
+ def handleNamespaceBinding(pre: String , uri1: Tree) {
+ def mkAssign(t: Tree): Tree =
+ Assign(Ident(_tmpscope), New( _scala_xml_NamespaceBinding,
+ LL(Literal(Constant(pre)), t, Ident( _tmpscope))))
+ uri1 match {
+ case Apply(_, List(uri @ Literal(Constant(_)))) => //text
+ tlist = mkAssign(uri) :: tlist
+ case Select(_, nme.Nil) => // allow for xmlns="" -- bug #1626
+ tlist = mkAssign(Literal(Constant(null))) :: tlist
+ case _ =>
+ tlist = mkAssign(uri1) :: tlist
+ //println("SymbolicXMLBuilder::handleNamespaceBinding:")
+ //println(t.toString())
+ }
+ }
+
+ /* DEBUG */
+ val attrIt = attrMap.keys
+ while (attrIt.hasNext) {
+ val z = attrIt.next
+ if (z startsWith "xmlns") { // handle namespace
+ val i = z indexOf ':'
+ if (i == -1)
+ handleNamespaceBinding(null, attrMap(z))
+ //setNS.update("default", attrMap(z))
+ else {
+ val zz = z.substring(i+1, z.length())
+ //setNS.update( zz, attrMap( z ) );
+ handleNamespaceBinding(zz, attrMap(z))
+ }
+ attrMap -= z
+ }
+ }
+
+ val moreNamespaces = (0 < tlist.length)
+ val i = qname indexOf ':'
+ var newlabel = qname
+ val pre = getPrefix(qname) match {
+ case Some(p) =>
+ newlabel = qname.substring(p.length()+1, qname.length())
+ p
+ case None =>
+ null
+ }
+ var tlist2: List[Tree] = List()
+
+ // make attributes
+
+ def handlePrefixedAttribute(pre:String, key:String, value:Tree) {
+ val t = atPos(pos) {
+ Assign(Ident(_md), New( _scala_xml_PrefixedAttribute,
+ LL(
+ Literal(Constant(pre)),
+ Literal(Constant(key)),
+ value,
+ Ident(_md)
+ )))};
+ tlist2 = t :: tlist2;
+ // Console.println("SymbolicXMLBuilder::handlePrefixed :");
+ // Console.println(t.toString());
+ }
+
+ def handleUnprefixedAttribute(key: String, value:Tree) {
+ val t = atPos(pos) {
+ Assign(Ident(_md), New(_scala_xml_UnprefixedAttribute,
+ LL(Literal(Constant(key)),value,Ident(_md))
+ ))};
+ tlist2 = t :: tlist2
+ }
+
+ var it = attrMap.elements
+ while (it.hasNext) {
+ val ansk = it.next
+ getPrefix(ansk._1) match {
+ case Some(pre) =>
+ val key = ansk._1.substring(pre.length()+1, ansk._1.length())
+ handlePrefixedAttribute(pre, key, ansk._2)
+ case None =>
+ handleUnprefixedAttribute(ansk._1, ansk._2)
+ }
+ }
+ // attrs
+
+ val moreAttributes = (0 < tlist2.length)
+
+ var ts: List[Tree] = tlist
+ var ts2: List[Tree] = List()
+
+ if (moreAttributes) {
+ ts2 = atPos(pos) {ValDef(Modifiers(MUTABLE),
+ _md,
+ _scala_xml_MetaData,
+ _scala_xml_Null)} :: tlist2;
+ }
+ if (moreNamespaces) {
+ ts = atPos(pos) {
+ ValDef(Modifiers(MUTABLE),
+ _tmpscope,
+ _scala_xml_NamespaceBinding,
+ Ident(_scope))} :: ts;
+
+ ts2 = ValDef(NoMods, _scope, _scala_xml_NamespaceBinding, Ident(_tmpscope)) :: ts2
+ }
+
+ val makeSymbolicAttrs =
+ if (moreAttributes) Ident(_md) else _scala_xml_Null
+
+ var t = mkXML(pos,
+ false,
+ Literal(Constant(pre)) /* can be null */ ,
+ Literal(Constant(newlabel)): Tree,
+ makeSymbolicAttrs,
+ Ident(_scope),
+ args);
+
+ atPos(pos) { Block(ts, Block(ts2, t)) }
+ }
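+
+ // Illustrative sketch (not part of the original patch): for an element such as
+ // <a>{x}</a> with no attributes and no namespace bindings, element(..) produces
+ // roughly
+ //   new scala.xml.Elem(null, "a", scala.xml.Null, $scope,
+ //     { val $buf = new scala.xml.NodeBuffer; $buf &+ x; $buf }: _*)
+ // where $buf and $scope are the fresh helper names defined near the top of this class.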
+}
+
diff --git a/src/compiler/scala/tools/nsc/ast/parser/SyntaxAnalyzer1.scala b/src/compiler/scala/tools/nsc/ast/parser/SyntaxAnalyzer1.scala
new file mode 100644
index 0000000000..4fe9db2036
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/ast/parser/SyntaxAnalyzer1.scala
@@ -0,0 +1,29 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2009 LAMP/EPFL
+ * @author Martin Odersky
+ */
+// $Id: SyntaxAnalyzer.scala 16893 2009-01-13 13:09:22Z cunei $
+
+package scala.tools.nsc.ast.parser
+
+import javac._
+
+/** An nsc sub-component.
+ */
+abstract class SyntaxAnalyzer1 extends SubComponent with Parsers1 with MarkupParsers1 with Scanners1 with JavaParsers with JavaScanners {
+
+ val phaseName = "parser"
+
+ def newPhase(prev: Phase): StdPhase = new ParserPhase(prev)
+
+ class ParserPhase(prev: scala.tools.nsc.Phase) extends StdPhase(prev) {
+ override val checkable = false
+ def apply(unit: global.CompilationUnit) {
+ global.informProgress("parsing " + unit)
+ unit.body =
+ if (unit.source.file.name.endsWith(".java")) new JavaUnitParser(unit).parse()
+ else new UnitParser(unit).parse()
+ }
+ }
+}
+
diff --git a/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala b/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala
index 18aaddd1fe..b6b179b6b5 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala
@@ -9,7 +9,7 @@ package scala.tools.nsc.backend.icode
//import scala.tools.nsc.ast._
import scala.collection.mutable.{Map, Set}
-import scala.collection.jcl.LinkedHashSet
+import scala.collection.mutable.LinkedHashSet
import scala.tools.nsc.util.{Position,NoPosition}
import scala.tools.nsc.backend.icode.analysis.ProgramPoint
diff --git a/src/compiler/scala/tools/nsc/backend/icode/Linearizers.scala b/src/compiler/scala/tools/nsc/backend/icode/Linearizers.scala
index b619ff949e..13bb286517 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/Linearizers.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/Linearizers.scala
@@ -37,7 +37,7 @@ trait Linearizers { self: ICodes =>
blocks = Nil;
run {
- worklist ++= (m.exh map (_.startBlock));
+ worklist pushAll (m.exh map (_.startBlock));
worklist.push(b);
}
@@ -46,7 +46,7 @@ trait Linearizers { self: ICodes =>
def linearizeAt(m: IMethod, start: BasicBlock): List[BasicBlock] = {
blocks = Nil
- worklist.clear
+ worklist.clear()
linearize(start)
}
@@ -82,13 +82,14 @@ trait Linearizers { self: ICodes =>
* Prepend b to the list, if not already scheduled.
* TODO: use better test than linear search
*/
- def add(b: BasicBlock) =
+ def add(b: BasicBlock) {
if (blocks.contains(b))
()
else {
blocks = b :: blocks;
worklist push b;
}
+ }
def add(bs: List[BasicBlock]): Unit = bs foreach add;
}
diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/DataFlowAnalysis.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/DataFlowAnalysis.scala
index 104a6267ff..848cd657e7 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/analysis/DataFlowAnalysis.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/analysis/DataFlowAnalysis.scala
@@ -7,8 +7,7 @@
package scala.tools.nsc.backend.icode.analysis
-import scala.collection.jcl.{HashMap, Set, HashSet, LinkedHashSet}
-import scala.collection.mutable.Map
+import scala.collection.mutable.{Map, HashMap, Set, HashSet, LinkedHashSet}
/** A generic framework for data flow analysis.
*/
diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/ReachingDefinitions.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/ReachingDefinitions.scala
index 00ed5279db..e11a22fa37 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/analysis/ReachingDefinitions.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/analysis/ReachingDefinitions.scala
@@ -8,7 +8,7 @@
package scala.tools.nsc.backend.icode.analysis
import scala.collection.immutable.{Set, ListSet, HashSet}
-import scala.collection.jcl.{HashMap, Map}
+import scala.collection.mutable.{HashMap, Map}
/** Compute reaching definitions. We are only interested in reaching
* definitions for local variables, since values on the stack
@@ -132,7 +132,7 @@ abstract class ReachingDefinitions {
var prod = instr.produced
depth = depth + prod
while (prod > 0) {
- stackOut = (new collection.immutable.Set1((b, idx))) :: stackOut
+ stackOut = collection.immutable.Set((b, idx)) :: stackOut
prod = prod - 1
}
}
@@ -184,7 +184,7 @@ abstract class ReachingDefinitions {
var prod = instr.produced
while (prod > 0) {
- stack = (new collection.immutable.Set1((b, idx))) :: stack
+ stack = collection.immutable.Set((b, idx)) :: stack
prod -= 1
}
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala
index 1c0dae7ab8..605c17d0a2 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala
@@ -10,7 +10,7 @@ package scala.tools.nsc.backend.jvm
import java.io.{DataOutputStream, File, OutputStream}
import java.nio.ByteBuffer
-import scala.collection.immutable.{Set, ListSet}
+import scala.collection.immutable.Set
import scala.collection.mutable.{Map, HashMap, HashSet}
import scala.tools.nsc.io.AbstractFile
import scala.tools.nsc.symtab._
@@ -101,7 +101,7 @@ abstract class GenJVM extends SubComponent {
var jmethod: JMethod = _
// var jcode: JExtendedCode = _
- var innerClasses: Set[Symbol] = ListSet.empty // referenced inner classes
+ var innerClasses: Set[Symbol] = Set.empty // referenced inner classes
val fjbgContext = new FJBGContext(49, 0)
@@ -152,7 +152,7 @@ abstract class GenJVM extends SubComponent {
def genClass(c: IClass) {
clasz = c
- innerClasses = ListSet.empty
+ innerClasses = Set.empty
var parents = c.symbol.info.parents
var ifaces = JClass.NO_INTERFACES
diff --git a/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala b/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala
index dbaf548e8f..3847c97a18 100644
--- a/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala
+++ b/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala
@@ -57,7 +57,7 @@ abstract class DeadCodeElimination extends SubComponent {
var defs: Map[(BasicBlock, Int), Set[rdef.lattice.Definition]] = HashMap.empty
/** Useful instructions which have not been scanned yet. */
- val worklist: mutable.Set[(BasicBlock, Int)] = new jcl.LinkedHashSet
+ val worklist: mutable.Set[(BasicBlock, Int)] = new mutable.LinkedHashSet
/** what instructions have been marked as useful? */
val useful: mutable.Map[BasicBlock, mutable.BitSet] = new mutable.HashMap
diff --git a/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala b/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala
index b90051a6e9..3673e42afb 100644
--- a/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala
+++ b/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala
@@ -141,7 +141,7 @@ abstract class Inliners extends SubComponent {
val afterBlock = newBlock;
/** Map from nw.init instructions to their matching NEW call */
- val pending: collection.jcl.Map[Instruction, NEW] = new collection.jcl.HashMap
+ val pending: collection.mutable.Map[Instruction, NEW] = new collection.mutable.HashMap
/** Map an instruction from the callee to one suitable for the caller. */
def map(i: Instruction): Instruction = {
diff --git a/src/compiler/scala/tools/nsc/dependencies/Files.scala b/src/compiler/scala/tools/nsc/dependencies/Files.scala
index 96e9e0f4be..e2bd341c47 100644
--- a/src/compiler/scala/tools/nsc/dependencies/Files.scala
+++ b/src/compiler/scala/tools/nsc/dependencies/Files.scala
@@ -141,7 +141,7 @@ trait Files{
def lastModified = underlying.lastModified
def list : Iterable[File] =
- assertExists.assertDirectory.underlying.listFiles.projection.map(toFile)
+ assertExists.assertDirectory.underlying.listFiles.view.map(toFile)
def / (file : File) : File =
new JFile(assertDirectory.toString,
diff --git a/src/compiler/scala/tools/nsc/doc/DefaultDocDriver.scala b/src/compiler/scala/tools/nsc/doc/DefaultDocDriver.scala
index eec98f5c20..a7d38d8245 100644
--- a/src/compiler/scala/tools/nsc/doc/DefaultDocDriver.scala
+++ b/src/compiler/scala/tools/nsc/doc/DefaultDocDriver.scala
@@ -6,9 +6,9 @@
package scala.tools.nsc.doc
+import scala.collection.mutable
import java.util.zip.ZipFile
-import scala.collection.jcl
import symtab.Flags._
import scala.xml._
@@ -19,7 +19,7 @@ abstract class DefaultDocDriver extends DocDriver with ModelFrames with ModelToX
import global._
import definitions.{AnyClass, AnyRefClass}
- lazy val additions = new jcl.LinkedHashSet[Symbol]
+ lazy val additions = new mutable.LinkedHashSet[Symbol]
lazy val additions0 = new ModelAdditions(global) {
override def addition(sym: global.Symbol) = {
super.addition(sym)
@@ -115,7 +115,7 @@ abstract class DefaultDocDriver extends DocDriver with ModelFrames with ModelToX
new NavigationFrame with Frame { }
new ListClassFrame with Frame {
def classes = for (p <- allClasses; d <- p._2) yield d
- object organized extends jcl.LinkedHashMap[(List[String],Boolean),List[ClassOrObject]] {
+ object organized extends mutable.LinkedHashMap[(List[String],Boolean),List[ClassOrObject]] {
override def default(key : (List[String],Boolean)) = Nil;
classes.foreach(cls => {
val path = cls.path.map(_.name);
@@ -131,14 +131,15 @@ abstract class DefaultDocDriver extends DocDriver with ModelFrames with ModelToX
val path = cls.path.map(_.name)
val key = (cls.path.map(_.name), cls.isInstanceOf[Clazz])
assert(!organized(key).isEmpty);
- (if (!organized(key).tail.isEmpty) Text(" (" +{
+
+ ((if (!organized(key).tail.isEmpty) Text(" (" +{
//Console.println("CONFLICT: " + path + " " + organized(key));
val str = cls.path(0).sym.owner.fullNameString('.');
val idx = str.lastIndexOf('.');
if (idx == -1) str;
else str.substring(idx + 1);
- }+ ")");
- else NodeSeq.Empty) ++ super.optional(cls);
+ }+ ")");
+ else NodeSeq.Empty) ++ super.optional(cls))(NodeSeq.builderFactory)
}
}
@@ -176,7 +177,7 @@ abstract class DefaultDocDriver extends DocDriver with ModelFrames with ModelToX
import DocUtil._
override def classBody(entity: ClassOrObject)(implicit from: Frame): NodeSeq =
- (subClasses.get(entity.sym) match {
+ (((subClasses.get(entity.sym) match {
case Some(symbols) =>
(<dl>
<dt style="margin:10px 0 0 20px;"><b>Direct Known Subclasses:</b></dt>
@@ -186,7 +187,7 @@ abstract class DefaultDocDriver extends DocDriver with ModelFrames with ModelToX
</dl><hr/>);
case None =>
NodeSeq.Empty
- })++super.classBody(entity);
+ }): NodeSeq)++super.classBody(entity))//(NodeSeq.builderFactory)
protected def urlFor(sym: Symbol)(implicit frame: Frame) = frame.urlFor(sym)
@@ -213,7 +214,7 @@ abstract class DefaultDocDriver extends DocDriver with ModelFrames with ModelToX
super.decodeOption(tag,option)
}
- object roots extends jcl.LinkedHashMap[String,String];
+ object roots extends mutable.LinkedHashMap[String,String];
roots("classes") = "http://java.sun.com/j2se/1.5.0/docs/api";
roots("rt") = roots("classes");
private val SCALA_API_ROOT = "http://www.scala-lang.org/docu/files/api/";
@@ -260,19 +261,19 @@ abstract class DefaultDocDriver extends DocDriver with ModelFrames with ModelToX
protected def anchor(entity: Symbol)(implicit frame: Frame): NodeSeq =
(<a name={Text(frame.docName(entity))}></a>)
- object symbols extends jcl.LinkedHashSet[Symbol]
+ object symbols extends mutable.LinkedHashSet[Symbol]
- object allClasses extends jcl.LinkedHashMap[Package, jcl.LinkedHashSet[ClassOrObject]] {
- override def default(pkg: Package): jcl.LinkedHashSet[ClassOrObject] = {
- object ret extends jcl.LinkedHashSet[ClassOrObject]
+ object allClasses extends mutable.LinkedHashMap[Package, mutable.LinkedHashSet[ClassOrObject]] {
+ override def default(pkg: Package): mutable.LinkedHashSet[ClassOrObject] = {
+ object ret extends mutable.LinkedHashSet[ClassOrObject]
this(pkg) = ret
ret
}
}
- object subClasses extends jcl.LinkedHashMap[Symbol, jcl.LinkedHashSet[ClassOrObject]] {
+ object subClasses extends mutable.LinkedHashMap[Symbol, mutable.LinkedHashSet[ClassOrObject]] {
override def default(key: Symbol) = {
- val ret = new jcl.LinkedHashSet[ClassOrObject]
+ val ret = new mutable.LinkedHashSet[ClassOrObject]
this(key) = ret
ret
}
diff --git a/src/compiler/scala/tools/nsc/doc/DocUtil.scala b/src/compiler/scala/tools/nsc/doc/DocUtil.scala
index 3a6f2812dd..c719ad1cc6 100644
--- a/src/compiler/scala/tools/nsc/doc/DocUtil.scala
+++ b/src/compiler/scala/tools/nsc/doc/DocUtil.scala
@@ -92,8 +92,8 @@ object DocUtil {
var ts = ts0
for (t <- ts1.elements) {
if (!ts.contains(t._1))
- ts = ts.update(t._1, new TreeSet[S]);
- ts = ts.update(t._1, merge(ts(t._1), t._2))
+ ts = ts.add(t._1, new TreeSet[S]);
+ ts = ts.add(t._1, merge(ts(t._1), t._2))
}
ts
}
diff --git a/src/compiler/scala/tools/nsc/doc/ModelAdditions.scala b/src/compiler/scala/tools/nsc/doc/ModelAdditions.scala
index 5ea74bd59f..2a95b80b5e 100644
--- a/src/compiler/scala/tools/nsc/doc/ModelAdditions.scala
+++ b/src/compiler/scala/tools/nsc/doc/ModelAdditions.scala
@@ -398,7 +398,8 @@ class ModelAdditions(val global: Global) {
};
//("Float" :: "Long" :: "Number" :: "Integer" :: Nil).foreach(boxedValDescr);
*/
- object exceptions extends collection.jcl.TreeMap[String,(Symbol,String)] {
+ object exceptions extends collection.JavaConversions.JMapWrapper[String,(Symbol,String)](
+ new java.util.TreeMap()) {
def f(name: String) {
this("Predef." + name) = (definitions.PredefModule, name)
}
diff --git a/src/compiler/scala/tools/nsc/doc/ModelExtractor.scala b/src/compiler/scala/tools/nsc/doc/ModelExtractor.scala
index c0acacb616..0e94ab90f7 100644
--- a/src/compiler/scala/tools/nsc/doc/ModelExtractor.scala
+++ b/src/compiler/scala/tools/nsc/doc/ModelExtractor.scala
@@ -6,7 +6,7 @@
package scala.tools.nsc.doc
-import scala.collection.jcl
+import scala.collection.mutable
import compat.Platform.{EOL => LINE_SEPARATOR}
@@ -32,7 +32,7 @@ trait ModelExtractor {
case class Comment(body: String, attributes: List[Tag]) {
def decodeAttributes = {
- val map = new jcl.LinkedHashMap[String, List[(String, String)]] {
+ val map = new mutable.LinkedHashMap[String, List[(String, String)]] {
override def default(key: String) = Nil
}
attributes.foreach(a => {
@@ -221,11 +221,11 @@ trait ModelExtractor {
def path: List[ClassOrObject] = this :: Nil
override def listName = path map (_.name) mkString "."
- object freshParents extends jcl.LinkedHashSet[Type] {
- this addAll sym.tpe.parents
- this.toList foreach (this removeAll _.parents)
+ object freshParents extends mutable.LinkedHashSet[Type] {
+ this ++= sym.tpe.parents
+ this.toList foreach (this --= _.parents)
}
- object constructorArgs extends jcl.LinkedHashMap[Symbol, ValueParam] {
+ object constructorArgs extends mutable.LinkedHashMap[Symbol, ValueParam] {
import symtab.Flags._
sym.constrParamAccessors.filter(arg => ! (arg hasFlag SYNTHETIC)).foreach(arg => {
val str = flagsToString(arg.flags)
@@ -242,7 +242,7 @@ trait ModelExtractor {
this(param) = new ConstructorParam(param)
});
}
- object decls extends jcl.LinkedHashMap[Symbol, Member] {
+ object decls extends mutable.LinkedHashMap[Symbol, Member] {
sym.tpe.decls.elements.foreach(e => {
if (!constructorArgs.contains(e)) {
val m = Member(e)
@@ -250,9 +250,9 @@ trait ModelExtractor {
}
});
}
- def members0(f: Symbol => Boolean) = decls.projection.filterKeys(f).valueSet
+ def members0(f: Symbol => Boolean) = decls.filterKeys(f).values.toList
def members(c: Category): Iterable[Member] = members0(c.f)
- object inherited extends jcl.LinkedHashMap[Symbol, List[Member]]() {
+ object inherited extends mutable.LinkedHashMap[Symbol, List[Member]]() {
override def default(tpe: Symbol) = Nil
for (m <- sym.tpe.members if !sym.tpe.decls.elements.contains(m) &&
(Values.f(m) || Methods.f(m))) {
@@ -281,15 +281,15 @@ trait ModelExtractor {
override def resultType = Some(resultType0)
protected def resultType0: Type
override def overridden: Iterable[Symbol] = {
- var ret: jcl.LinkedHashSet[Symbol] = null
+ var ret: mutable.LinkedHashSet[Symbol] = null
for (parent <- ClassOrObject.this.parents) {
val sym0 = sym.overriddenSymbol(parent.typeSymbol)
if (sym0 != NoSymbol) {
- if (ret == null) ret = new jcl.LinkedHashSet[Symbol];
+ if (ret == null) ret = new mutable.LinkedHashSet[Symbol];
ret += sym0
}
}
- if (ret == null) Nil else ret.readOnly
+ if (ret == null) Nil else ret
}
}
case class Def(override val sym : TermSymbol) extends ValDef(sym) {
@@ -417,7 +417,7 @@ trait ModelExtractor {
"[ \t]*@(exception|param|throws)[ \t]+(\\p{Graph}*)[ \t]*(.*)")
def sort[E <: Entity](entities: Iterable[E]): Iterable[E] = {
- val set = new jcl.TreeSet[E]()({eA: E => new Ordered[E] {
+ val set = new collection.immutable.TreeSet[E]()({eA: E => new Ordered[E] {
def compare(eB: E): Int = {
if (eA eq eB) return 0;
(eA, eB) match {
@@ -443,7 +443,6 @@ trait ModelExtractor {
eA.equals(other) || (other match { case that: AnyRef => this.eq(that)
case _ => false })
}})
- set addAll entities;
- set
+ set ++ entities
}
}
diff --git a/src/compiler/scala/tools/nsc/doc/ModelFrames.scala b/src/compiler/scala/tools/nsc/doc/ModelFrames.scala
index 0c1e51c9cc..8475502737 100644
--- a/src/compiler/scala/tools/nsc/doc/ModelFrames.scala
+++ b/src/compiler/scala/tools/nsc/doc/ModelFrames.scala
@@ -8,7 +8,7 @@ package scala.tools.nsc.doc
import java.io.{File, FileWriter}
import util.NameTransformer
-import scala.collection.jcl
+import scala.collection.mutable
import scala.compat.Platform.{EOL => LINE_SEPARATOR}
import scala.xml.{NodeSeq, Text, Unparsed, Utility}
@@ -228,7 +228,7 @@ trait ModelFrames extends ModelExtractor {
{aref(navPath, contentFrame, navLabel)}
</td></tr>
</table>);
- val ids = new jcl.LinkedHashSet[String]
+ val ids = new mutable.LinkedHashSet[String]
def idFor(kind: Category, t: Entity)(seq : NodeSeq): NodeSeq = {
val ch = t.listName.charAt(0);
val id = kind.plural + "_" + ch;
diff --git a/src/compiler/scala/tools/nsc/doc/ModelToXML.scala b/src/compiler/scala/tools/nsc/doc/ModelToXML.scala
index 5f7137d7a3..b3c1a6ab26 100644
--- a/src/compiler/scala/tools/nsc/doc/ModelToXML.scala
+++ b/src/compiler/scala/tools/nsc/doc/ModelToXML.scala
@@ -257,7 +257,7 @@ trait ModelToXML extends ModelExtractor {
var seq: NodeSeq = NodeSeq.Empty
if (xs.elements.hasNext) {
// alphabetic
- val set = new scala.collection.jcl.TreeSet[entity.Member]()(mA => new Ordered[entity.Member] {
+ val set = new scala.collection.immutable.TreeSet[entity.Member]()(mA => new Ordered[entity.Member] {
def compare(mB: entity.Member): Int =
if (mA eq mB) 0
else {
@@ -273,8 +273,7 @@ trait ModelToXML extends ModelExtractor {
other match { case that: entity.Member => compare(that) == 0
case that: AnyRef => this.eq(that)
case _ => false }
- });
- set addAll xs;
+ })++xs
seq = seq ++ <table cellpadding="3" class="member" summary="">
<tr><td colspan="2" class="title">{Text(category.label + " Summary")}</td></tr>
{set.mkXML("","\n","")(mmbr => shortHeader(mmbr))}
@@ -369,8 +368,10 @@ trait ModelToXML extends ModelExtractor {
if (entity.sym.hasFlag(symtab.Flags.CASE)) NodeSeq.Empty;
else {
val sep = Text("@")
+ val seq = // !!! does it still get confused otherwise?
for (attr <- entity.attributes)
yield Group({(sep ++ attrFor(attr) ++ <br/>)})
+ seq
}
}
}
diff --git a/src/compiler/scala/tools/nsc/matching/ParallelMatching.scala b/src/compiler/scala/tools/nsc/matching/ParallelMatching.scala
index 6a811b77c7..15f712413d 100644
--- a/src/compiler/scala/tools/nsc/matching/ParallelMatching.scala
+++ b/src/compiler/scala/tools/nsc/matching/ParallelMatching.scala
@@ -660,7 +660,7 @@ trait ParallelMatching {
}
val indexOfAlternative = pat findIndexOf isAlternative
- val pats: List[Tree] = List.map2(pat, pat.indices)(classifyPat)
+ val pats: List[Tree] = List.map2(pat, pat.indices.toList)(classifyPat)
lazy val (prefix, alts :: suffix) = pats.splitAt(indexOfAlternative)
lazy val alternativeBranches = getAlternativeBranches(alts) map { p => replace(prefix ::: p :: suffix) }
diff --git a/src/compiler/scala/tools/nsc/models/SemanticTokens.scala b/src/compiler/scala/tools/nsc/models/SemanticTokens.scala
index 9a7566cd22..56e80cf5bf 100644
--- a/src/compiler/scala/tools/nsc/models/SemanticTokens.scala
+++ b/src/compiler/scala/tools/nsc/models/SemanticTokens.scala
@@ -199,7 +199,7 @@ class SemanticTokens(val compiler: Global) {
val list = new TokenList
//build(unit.body)
- val map = new scala.collection.jcl.LinkedHashMap[Int,Symbol]
+ val map = new scala.collection.mutable.LinkedHashMap[Int,Symbol]
map.clear // populate the map.
class visitor extends walker.Visitor {
def contains(pos : Position) = map.contains(pos.offset.get)
diff --git a/src/compiler/scala/tools/nsc/plugins/Plugin.scala b/src/compiler/scala/tools/nsc/plugins/Plugin.scala
index bce4876c41..2e09c6e231 100644
--- a/src/compiler/scala/tools/nsc/plugins/Plugin.scala
+++ b/src/compiler/scala/tools/nsc/plugins/Plugin.scala
@@ -146,6 +146,8 @@ object Plugin {
* the compiler it is to be used in.
*/
def instantiate(clazz: AnyClass, global: Global): Plugin = {
+ //println("instantiating "+clazz)
+ //println(clazz.getDeclaredConstructors)
val constructor = clazz.getConstructor(classOf[Global])
constructor.newInstance(global).asInstanceOf[Plugin]
}
diff --git a/src/compiler/scala/tools/nsc/symtab/IdeSupport.scala b/src/compiler/scala/tools/nsc/symtab/IdeSupport.scala
index ad831a8b14..694c8e45b2 100644
--- a/src/compiler/scala/tools/nsc/symtab/IdeSupport.scala
+++ b/src/compiler/scala/tools/nsc/symtab/IdeSupport.scala
@@ -1,7 +1,6 @@
package scala.tools.nsc.symtab
import scala.tools.nsc.util._
-import scala.collection.jcl._
-import scala.collection.jcl
+import scala.collection.mutable._
import scala.tools.nsc.io._
trait IdeSupport extends SymbolTable { // added to global, not analyzers.
@@ -67,8 +66,8 @@ trait IdeSupport extends SymbolTable { // added to global, not analyzers.
this(what) = set; set
}
}
- private val emptySet = new jcl.LinkedList[Symbol]
- val reuseMap = new LinkedHashMap[PersistentScope,jcl.LinkedList[Symbol]] {
+ private val emptySet = new ListBuffer[Symbol]
+ val reuseMap = new LinkedHashMap[PersistentScope,ListBuffer[Symbol]] {
override def default(key : PersistentScope) = emptySet
}
def reuse(scope : PersistentScope, sym : Symbol) = {
@@ -85,9 +84,9 @@ trait IdeSupport extends SymbolTable { // added to global, not analyzers.
if (e != null && e.sym == sym) {
val list = reuseMap.get(scope) match {
- case Some(list) => list
- case None =>
- val list = new jcl.LinkedList[Symbol]
+ case Some(list) => list
+ case None =>
+ val list = new ListBuffer[Symbol]
reuseMap(scope) = list; list
}
check(!sym.isPackage, "" +sym)
@@ -135,12 +134,12 @@ trait IdeSupport extends SymbolTable { // added to global, not analyzers.
}
private def reuse(scope : PersistentScope) : PersistentScope = {
if (currentClient.makeNoChanges) return scope
- val buf = new jcl.LinkedList[Symbol]
+ val buf = new ListBuffer[Symbol]
scope.toList.foreach{sym =>
if (false && sym.hasFlag(Flags.CASE) && sym.hasFlag(Flags.SYNTHETIC)) {
check(sym != null, "")
} else {
- buf add sym
+ buf += sym
scope unlink sym
}
}
@@ -155,25 +154,28 @@ trait IdeSupport extends SymbolTable { // added to global, not analyzers.
}
def reloadSource(file : AbstractFile) = {
-
- if (!currentClient.makeNoChanges) topDefs.removeKey(file) match {
- case None =>
- case Some(symbols) => symbols.foreach{sym =>
- def f(sym : Symbol) = sym.owner.info.decls match {
- case scope : PersistentScope => reuse(scope, (sym))
- case scope =>
- check(false, scope + " is not persistent")
- }
- if (sym.isModuleClass) {
- if (check(sym.name.isTypeName,"") && sym.hasRawInfo)
- if (sym.linkedModuleOfClass != NoSymbol) f(sym.linkedModuleOfClass)
- } else {
- if (check(sym.name.isTypeName, ""))
- f(sym)
+ if (!currentClient.makeNoChanges)
+ topDefs removeKey file match {
+ case None => ;
+ case Some(symbols) =>
+ symbols.foreach{
+ sym =>
+ def f(sym : Symbol) = sym.owner.info.decls match {
+ case scope : PersistentScope => reuse(scope, (sym))
+ case scope =>
+ check(false, scope + " is not persistent")
+ }
+ if (sym.isModuleClass) {
+ if (check(sym.name.isTypeName,"") && sym.hasRawInfo)
+ if (sym.linkedModuleOfClass != NoSymbol) f(sym.linkedModuleOfClass)
+ } else {
+ if (check(sym.name.isTypeName, ""))
+ f(sym)
+ }
+ }
}
- }
- }
}
+
override def attachSource(clazz : ClassSymbol, file : io.AbstractFile) = {
topDefs(file) += clazz
super.attachSource(clazz, file)
@@ -234,7 +236,7 @@ trait IdeSupport extends SymbolTable { // added to global, not analyzers.
va.isEmpty && vb.isEmpty
case (newS:Scope,oldS:Scope) =>
val set = new LinkedHashSet[Symbol]
- set addAll newS.toList
+ set ++= newS.toList
oldS.toList.forall{oldS => if (!set.remove(oldS)) {
var other = newS.lookupEntry(oldS.name)
while (other != null && !compareTypes(other.sym.info,oldType(oldS), syms))
@@ -382,38 +384,36 @@ trait IdeSupport extends SymbolTable { // added to global, not analyzers.
if (symbol == NoSymbol) return symbol
// catch double defs.
record(currentClient, symbol.name)
- val i = reuseMap(this).elements
- while (i.hasNext) {
- var existing = i.next
- if (existing == symbol) return {
- i.remove
- finish(existing)
- }
- else if ({
- if (existing.hasFlag(symtab.Flags.SYNTHETIC) && existing.name == symbol.name) true
- else (symbol.pos,existing.pos) match {
+ // Martin: I changed the rest of this method to avoid Iterator.remove
+ val buf = reuseMap(this)
+ if (buf contains symbol) {
+ buf -= symbol
+ finish(symbol)
+ } else buf find { existing =>
+ if (existing.hasFlag(symtab.Flags.SYNTHETIC) && existing.name == symbol.name) true
+ else (symbol.pos,existing.pos) match {
case (apos : TrackedPosition, bpos : TrackedPosition) => apos == bpos
case (apos : OffsetPosition , bpos : OffsetPosition) => apos == bpos
case _ => existing.name == symbol.name
- }
- }) {
+ }
+ } match {
+ case Some(existing) =>
if (check(existing != NoSymbol,"")) {
val oldName = existing.name
compatible(existing, symbol) match {
case NotCompatible =>
case code@GoResult(existing0) =>
- i.remove
- existing = existing0
+ buf -= existing
if (code.isInstanceOf[Updated]) {
invalidate(oldName)
- invalidate(existing.name)
+ invalidate(existing0.name)
}
- return (reuse(existing))
+ return (reuse(existing0))
}
}
- }
+ case None =>
}
invalidate(symbol.name)
return finish(symbol)
@@ -553,7 +553,6 @@ trait IdeSupport extends SymbolTable { // added to global, not analyzers.
object owner extends ReallyHasClients
new PersistentScope(null, owner)
}
- import scala.collection.jcl
override def newPackageScope(depends0 : PackageScopeDependMap) : PackageScope = {
object owner extends ReallyHasClients
object myPackageScope extends PersistentScope(null, owner) with PackageScope {
diff --git a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala
index 17cc7dab44..7e04e9c5fe 100644
--- a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala
+++ b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala
@@ -296,7 +296,6 @@ abstract class SymbolLoaders {
}
// IDE hook.
protected def completeClassfile(root : Symbol, loader : ClassfileLoader)(f : => Unit) : Unit = f
- import scala.collection.jcl
// incremental builder hook
protected def computeDepends(loader : PackageLoader) : PackageScopeDependMap = {
null
diff --git a/src/compiler/scala/tools/nsc/symtab/SymbolWalker.scala b/src/compiler/scala/tools/nsc/symtab/SymbolWalker.scala
index bdd06d836c..265ae9d826 100644
--- a/src/compiler/scala/tools/nsc/symtab/SymbolWalker.scala
+++ b/src/compiler/scala/tools/nsc/symtab/SymbolWalker.scala
@@ -4,7 +4,7 @@ trait SymbolWalker {
val global : Global
import scala.tools.nsc.util._
import global._
- import scala.collection.jcl._
+ import scala.collection.mutable.LinkedHashSet
trait Visitor {
def update(pos : Position, sym : Symbol) : Unit
def contains(pos : Position) : Boolean
diff --git a/src/compiler/scala/tools/nsc/symtab/Symbols.scala b/src/compiler/scala/tools/nsc/symtab/Symbols.scala
index 6cae7d5835..64f108b644 100644
--- a/src/compiler/scala/tools/nsc/symtab/Symbols.scala
+++ b/src/compiler/scala/tools/nsc/symtab/Symbols.scala
@@ -342,15 +342,18 @@ trait Symbols {
final def isRefinementClass = isClass && name == nme.REFINE_CLASS_NAME.toTypeName; // no lifting for refinement classes
final def isModuleClass = isClass && hasFlag(MODULE)
final def isPackageClass = isClass && hasFlag(PACKAGE)
+ final def isPackageObject = isModule && name == nme.PACKAGEkw && owner.isPackageClass
final def isPackageObjectClass = isModuleClass && name.toTermName == nme.PACKAGEkw && owner.isPackageClass
final def definedInPackage = owner.isPackageClass || owner.isPackageObjectClass
final def isRoot = isPackageClass && name == nme.ROOT.toTypeName
final def isRootPackage = isPackage && name == nme.ROOTPKG
final def isEmptyPackage = isPackage && name == nme.EMPTY_PACKAGE_NAME
final def isEmptyPackageClass = isPackageClass && name == nme.EMPTY_PACKAGE_NAME.toTypeName
- final def isPredefModule = isModule && name == nme.Predef // not printed as a prefix
- final def isScalaPackage = isPackage && name == nme.scala_ // not printed as a prefix
- final def isScalaPackageClass = isPackageClass && name == nme.scala_.toTypeName // not printed as a prefix
+ final def isPredefModule = isModule && name == nme.Predef && owner.isScalaPackageClass // not printed as a prefix
+ final def isScalaPackage = isPackage && name == nme.scala_ && owner.isRoot || // not printed as a prefix
+ isPackageObject && owner.isScalaPackageClass
+ final def isScalaPackageClass: Boolean = isPackageClass && owner.isRoot && name == nme.scala_.toTypeName ||
+ isPackageObjectClass && owner.isScalaPackageClass // not printed as a prefix
/** Is symbol a monomorphic type?
* assumption: if a type starts out as monomorphic, it will not acquire
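The new `isPackageObject` predicate, and the widened `isScalaPackage`/`isScalaPackageClass`, exist because a package can now carry members through a synthetic module named `package`. For reference, this is what such a definition looks like at the source level (a made-up package, not part of the patch):

    // Members of `package object mylib` become members of package `mylib`;
    // the compiler models this as a module named `package` owned by the package class.
    package object mylib {
      val version = "0.1"
    }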
diff --git a/src/compiler/scala/tools/nsc/symtab/Types.scala b/src/compiler/scala/tools/nsc/symtab/Types.scala
index 9aa29a7395..84d34f62c4 100644
--- a/src/compiler/scala/tools/nsc/symtab/Types.scala
+++ b/src/compiler/scala/tools/nsc/symtab/Types.scala
@@ -7,11 +7,10 @@
package scala.tools.nsc.symtab
import scala.collection.immutable
-import scala.collection.mutable.{ListBuffer, HashMap}
+import scala.collection.mutable.{ListBuffer, HashMap, WeakHashMap}
import scala.compat.Platform.currentTime
import scala.tools.nsc.ast.TreeGen
import scala.tools.nsc.util.{HashSet, Position, NoPosition}
-import scala.collection.jcl.WeakHashMap
import Flags._
/* A standard type pattern match:
@@ -1587,8 +1586,15 @@ A type's typeSymbol should never be inspected directly.
if (normed ne this) return normed.toString
}
}
- var str = (pre.prefixString + sym.nameString +
- (if (args.isEmpty) "" else args.mkString("[", ",", "]")))
+ val monopart =
+ if (!settings.debug.value &&
+ (shorthands contains sym.fullNameString) &&
+ (sym.ownerChain forall (_.isClass))) // ensure that symbol is not a local copy with a name coincidence
+ sym.name.toString
+ else
+ pre.prefixString + sym.nameString
+
+ var str = monopart + (if (args.isEmpty) "" else args.mkString("[", ",", "]"))
//if (sym.nameString startsWith "moduleType")
// str += ("_in_"+sym.ownerChain)
if (sym.isPackageClass)
@@ -4468,4 +4474,14 @@ A type's typeSymbol should never be inspected directly.
def objToAny(tp: Type): Type =
if (!phase.erasedTypes && tp.typeSymbol == ObjectClass) AnyClass.tpe
else tp
+
+ val shorthands = Set(
+ "scala.collection.immutable.List",
+ "scala.collection.immutable.Nil",
+ "scala.collection.Sequence",
+ "scala.collection.Traversible",
+ "scala.collection.Iterable",
+ "scala.collection.mutable.StringBuilder",
+ "scala.collection.Vector",
+ "scala.collection.Iterator")
}
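The `shorthands` set added above drives a purely cosmetic change to type printing: for a small whitelist of collection types the qualified prefix is dropped. A minimal sketch of the same idea outside the compiler (the helper below is illustrative, not compiler API):

    // Print "List[Int]" rather than "scala.collection.immutable.List[Int]"
    // when the full name is in the whitelist.
    val shorthands = Set("scala.collection.immutable.List", "scala.collection.Iterator")

    def showType(fullName: String, simpleName: String, args: List[String]): String = {
      val base = if (shorthands contains fullName) simpleName else fullName
      if (args.isEmpty) base else args.mkString(base + "[", ",", "]")
    }

    // showType("scala.collection.immutable.List", "List", List("Int")) == "List[Int]"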
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala
index 24ba34c572..c17a754d31 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala
@@ -460,7 +460,7 @@ abstract class ICodeReader extends ClassfileParser {
size += 8
assert(low <= high, "Value low not <= high for tableswitch.")
- val tags = List.tabulate(high - low + 1, n => List(low + n))
+ val tags = List.tabulate(high - low + 1)(n => List(low + n))
val targets = for (_ <- tags) yield parseJumpTargetW
code.emit(LSWITCH(tags, targets ::: List(default)))
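The `List.tabulate` call site changes because the new collections give `tabulate` curried parameter lists: the size first, then the generator. For example:

    // Old: List.tabulate(n, f)   New: List.tabulate(n)(f)
    val low = 0; val high = 3
    val tags = List.tabulate(high - low + 1)(n => List(low + n))
    // tags == List(List(0), List(1), List(2), List(3))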
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala
index eb23571053..2b37a28853 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala
@@ -66,7 +66,7 @@ abstract class Pickler extends SubComponent {
private class Pickle(root: Symbol, rootName: Name, rootOwner: Symbol)
extends PickleBuffer(new Array[Byte](4096), -1, 0) {
- import scala.collection.jcl.LinkedHashMap
+ import scala.collection.mutable.LinkedHashMap
private var entries = new Array[AnyRef](256)
private var ep = 0
private val index = new LinkedHashMap[AnyRef, Int]
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/UnPickler.scala b/src/compiler/scala/tools/nsc/symtab/classfile/UnPickler.scala
index 2c0a10ecc8..490db843e5 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/UnPickler.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/UnPickler.scala
@@ -68,7 +68,7 @@ abstract class UnPickler {
private def checkVersion() {
val major = readNat()
val minor = readNat()
- if (major != MajorVersion || minor > MinorVersion)
+ if (major < 4 /*!= MajorVersion*/ || minor > MinorVersion) // !!! temporarily accept 4 as version.
throw new IOException("Scala signature " + classRoot.name +
" has wrong version\n expected: " +
MajorVersion + "." + MinorVersion +
diff --git a/src/compiler/scala/tools/nsc/transform/Erasure.scala b/src/compiler/scala/tools/nsc/transform/Erasure.scala
index 3ff464bf97..0d4078dc38 100644
--- a/src/compiler/scala/tools/nsc/transform/Erasure.scala
+++ b/src/compiler/scala/tools/nsc/transform/Erasure.scala
@@ -640,7 +640,7 @@ abstract class Erasure extends AddInterfaces with typechecker.Analyzer {
qual1 = box(qual1);
else if (!isValueType(qual1.tpe.typeSymbol) && isUnboxedValueMember(tree.symbol))
qual1 = unbox(qual1, tree.symbol.owner.tpe)
- else if (tree.symbol.owner == ArrayClass && qual1.tpe.typeSymbol == ObjectClass)
+ else if (tree.symbol.owner == ArrayClass && (BoxedArrayClass isSubClass qual1.tpe.typeSymbol))
qual1 = cast(qual1, BoxedArrayClass.tpe)
if (isUnboxedClass(tree.symbol.owner) && !isUnboxedClass(qual1.tpe.typeSymbol))
diff --git a/src/compiler/scala/tools/nsc/transform/LiftCode.scala b/src/compiler/scala/tools/nsc/transform/LiftCode.scala
index 0e6504725d..b17a0efea2 100644
--- a/src/compiler/scala/tools/nsc/transform/LiftCode.scala
+++ b/src/compiler/scala/tools/nsc/transform/LiftCode.scala
@@ -64,7 +64,7 @@ abstract class LiftCode extends Transform with Reifiers {
}
def objectName(value: Any): String = value match {
- case Nil => "scala.Nil"
+ case Nil => "scala.collection.immutable.Nil"
case reflect.NoSymbol => "scala.reflect.NoSymbol"
case reflect.RootSymbol => "scala.reflect.RootSymbol"
case reflect.NoPrefix => "scala.reflect.NoPrefix"
diff --git a/src/compiler/scala/tools/nsc/transform/TailCalls.scala b/src/compiler/scala/tools/nsc/transform/TailCalls.scala
index d2fd0bec42..b26d3aab39 100644
--- a/src/compiler/scala/tools/nsc/transform/TailCalls.scala
+++ b/src/compiler/scala/tools/nsc/transform/TailCalls.scala
@@ -326,7 +326,7 @@ abstract class TailCalls extends Transform
ctx.accessed = true
//println("fun: " + fun + " args: " + args)
val t = atPos(fun.pos)(Apply(Ident(ctx.label), args))
- //println(t)
+ // println("TAIL: "+t)
typed(t)
}
diff --git a/src/compiler/scala/tools/nsc/transform/UnCurry.scala b/src/compiler/scala/tools/nsc/transform/UnCurry.scala
index e793fb463a..ea31126324 100644
--- a/src/compiler/scala/tools/nsc/transform/UnCurry.scala
+++ b/src/compiler/scala/tools/nsc/transform/UnCurry.scala
@@ -66,7 +66,7 @@ abstract class UnCurry extends InfoTransform with TypingTransformers {
case TypeRef(pre, sym, List(arg)) if (sym == ByNameParamClass) =>
apply(functionType(List(), arg))
case TypeRef(pre, sym, args) if (sym == RepeatedParamClass) =>
- apply(rawTypeRef(pre, SeqClass, args))
+ apply(appliedType(SeqClass.typeConstructor, args))
case _ =>
expandAlias(mapOver(tp))
}
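Replacing `rawTypeRef(pre, SeqClass, args)` with `appliedType(SeqClass.typeConstructor, args)` builds the `Seq[...]` type by applying the type constructor rather than assembling a `TypeRef` by hand. The runtime-reflection API exposes the same operation; a small analogue of the compiler-internal call (not the patch's own code):

    import scala.reflect.runtime.universe._

    // Apply Seq's type constructor to Int to obtain Seq[Int].
    val seqOfInt = appliedType(typeOf[Seq[Int]].typeConstructor, List(typeOf[Int]))
    // seqOfInt =:= typeOf[Seq[Int]]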
diff --git a/src/compiler/scala/tools/nsc/typechecker/IdeSupport.scala b/src/compiler/scala/tools/nsc/typechecker/IdeSupport.scala
index 08bba9f6e1..c933b281cb 100644
--- a/src/compiler/scala/tools/nsc/typechecker/IdeSupport.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/IdeSupport.scala
@@ -1,15 +1,15 @@
package scala.tools.nsc.typechecker;
-import scala.collection.jcl.WeakHashMap
+import scala.collection.mutable.{WeakHashMap, LinkedHashSet}
trait IdeSupport extends Analyzer {
val global : Global with symtab.IdeSupport
import global._
private class ContextInternMap extends WeakHashMap[Context,ref.WeakReference[Context]] {
- var last : Context = _
+ var lastContext : Context = _
override def default(txt : Context) : ref.WeakReference[Context] = {
if (txt eq NoContext) new ref.WeakReference(NoContext)
val txt0 = txt.intern0
- last = txt0 // to prevent collection
+ lastContext = txt0 // to prevent collection
val ret = new ref.WeakReference(txt0)
this(txt0) = ret
ret
@@ -132,7 +132,7 @@ trait IdeSupport extends Analyzer {
}
}
}
- private val toComplete = new scala.collection.jcl.LinkedHashSet[Symbol]
+ private val toComplete = new LinkedHashSet[Symbol]
def finishTyping = while (!toComplete.isEmpty) {
toComplete.toList.foreach(sym => if (sym.pos match {
case pos : TrackedPosition if !pos.isValid => toComplete.remove(sym); false
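The renamed `lastContext` field in `ContextInternMap` keeps a strong reference to the most recently interned context so the weak map does not drop it straight away. A self-contained sketch of that interning pattern with plain types (class and field names here are illustrative):

    import scala.collection.mutable.WeakHashMap
    import scala.ref.WeakReference

    class Interner[A <: AnyRef] {
      private val cache = new WeakHashMap[A, WeakReference[A]]
      private var last: Option[A] = None   // strong ref: keeps the latest entry alive
      def intern(x: A): A = cache.get(x).flatMap(_.get) match {
        case Some(cached) => cached
        case None =>
          last = Some(x)
          cache(x) = new WeakReference(x)
          x
      }
    }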
diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
index 9bc6968e02..6ca93116d6 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
@@ -494,7 +494,7 @@ self: Analyzer =>
* - the parts of its base types
*/
private def parts(tp: Type): List[Type] = {
- val partMap = new collection.jcl.LinkedHashMap[Symbol, List[Type]]
+ val partMap = new collection.mutable.LinkedHashMap[Symbol, List[Type]]
/** Add a new type to partMap, unless a subtype of it with the same
* type symbol exists already.
*/
@@ -641,8 +641,9 @@ self: Analyzer =>
val resultTree = implicitManifest(pt)
if (resultTree != EmptyTree) result = new SearchResult(resultTree, EmptyTreeTypeSubstituter)
}
- if (result == SearchFailure && settings.verbose.value) //!!!
+ if (result == SearchFailure && settings.debug.value)
println("no implicits found for "+pt+" "+pt.typeSymbol.info.baseClasses+" "+parts(pt)+implicitsOfExpectedType)
+
if (util.Statistics.enabled) impltime += (currentTime - startTime)
result
}
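`partMap` switches from the jcl wrapper to `scala.collection.mutable.LinkedHashMap`, which preserves insertion order just as the java.util-backed version did, so the implicit search still visits parts deterministically. For instance (example keys are made up):

    import scala.collection.mutable.LinkedHashMap

    val partMap = new LinkedHashMap[String, List[Int]]
    partMap("Seq") = List(1)
    partMap("List") = List(2)
    // partMap.keys iterates in insertion order: Seq, List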
diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala
index fb018647ff..71d5f5b9c5 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala
@@ -871,8 +871,8 @@ trait Infer {
val _arityMismatches = new ListBuffer[(Symbol, Symbol)]
val _varianceMismatches = new ListBuffer[(Symbol, Symbol)]
val _stricterBounds = new ListBuffer[(Symbol, Symbol)]
- def varianceMismatch(a: Symbol, p: Symbol) { _varianceMismatches += (a, p) }
- def stricterBound(a: Symbol, p: Symbol) { _stricterBounds += (a, p) }
+ def varianceMismatch(a: Symbol, p: Symbol) { _varianceMismatches += ((a, p)) }
+ def stricterBound(a: Symbol, p: Symbol) { _stricterBounds += ((a, p)) }
def arityMismatches(as: Iterable[(Symbol, Symbol)]) { _arityMismatches ++= as }
def varianceMismatches(as: Iterable[(Symbol, Symbol)]) { _varianceMismatches ++= as }
def stricterBounds(as: Iterable[(Symbol, Symbol)]) { _stricterBounds ++= as }
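The extra parentheses in `+= ((a, p))` matter with the new collections: `Growable` also offers a multi-element `+=(elem1, elem2, elems*)`, so `buf += (a, p)` would be read as two arguments rather than one pair. A small example with plain string pairs:

    import scala.collection.mutable.ListBuffer

    val pairs = new ListBuffer[(String, String)]
    pairs += (("a", "p"))    // appends the single pair ("a", "p")
    // pairs += ("a", "p")   // would be taken as the two-argument overload and not compile here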
diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
index 41b6c61d74..71ec6d417d 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
@@ -581,8 +581,7 @@ trait Typers { self: Analyzer =>
private def isInPackageObject(sym: Symbol, pkg: Symbol) =
pkg.isPackageClass &&
- sym.owner.isModuleClass &&
- sym.owner.name.toTermName == nme.PACKAGEkw &&
+ sym.owner.isPackageObjectClass &&
sym.owner.owner == pkg
/** Post-process an identifier or selection node, performing the following:
diff --git a/src/compiler/scala/tools/nsc/util/CharArrayReader1.scala b/src/compiler/scala/tools/nsc/util/CharArrayReader1.scala
new file mode 100755
index 0000000000..6d5843b0ec
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/util/CharArrayReader1.scala
@@ -0,0 +1,107 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2009 LAMP/EPFL
+ * @author Martin Odersky
+ */
+// $Id: NewCharArrayReader.scala 16893 2009-01-13 13:09:22Z cunei $
+
+package scala.tools.nsc.util
+
+import scala.tools.nsc.util.SourceFile.{LF, FF, CR, SU}
+
+abstract class CharArrayReader1 { self =>
+
+ val buf: Array[Char]
+
+ def decodeUni: Boolean = true
+
+ /** An error routine to call on bad unicode escapes \\uxxxx. */
+ protected def error(offset: Int, msg: String)
+
+ /** the last read character */
+ var ch: Char = _
+
+ /** The offset one past the last read character */
+ var charOffset: Int = 0
+
+ /** The start offset of the current line */
+ var lineStartOffset: Int = 0
+
+ /** The start offset of the line before the current one */
+ var lastLineStartOffset: Int = 0
+
+ private var lastUnicodeOffset = -1
+
+ /** Is last character a unicode escape \\uxxxx? */
+ def isUnicodeEscape = charOffset == lastUnicodeOffset
+
+ /** Advance one character */
+ final def nextChar() {
+ if (charOffset >= buf.length) {
+ ch = SU
+ } else {
+ val c = buf(charOffset)
+ ch = c
+ charOffset += 1
+ if (c == '\\') potentialUnicode()
+ else if (c < ' ') potentialLineEnd()
+// print("`"+ch+"'")
+ }
+ }
+
+ /** Interpret \\uxxxx escapes */
+ private def potentialUnicode() {
+ def evenSlashPrefix: Boolean = {
+ var p = charOffset - 2
+ while (p >= 0 && buf(p) == '\\') p -= 1
+ (charOffset - p) % 2 == 0
+ }
+ def udigit: Int = {
+ val d = digit2int(buf(charOffset), 16)
+ if (d >= 0) charOffset += 1
+ else error(charOffset, "error in unicode escape")
+ d
+ }
+ if (charOffset < buf.length && buf(charOffset) == 'u' && decodeUni && evenSlashPrefix) {
+ do charOffset += 1
+ while (charOffset < buf.length && buf(charOffset) == 'u')
+ val code = udigit << 12 | udigit << 8 | udigit << 4 | udigit
+ lastUnicodeOffset = charOffset
+ ch = code.toChar
+ }
+ }
+
+ /** Handle line ends, replace CR+LF by LF */
+ private def potentialLineEnd() {
+ if (ch == CR)
+ if (charOffset < buf.length && buf(charOffset) == LF) {
+ charOffset += 1
+ ch = LF
+ }
+ if (ch == LF || ch == FF) {
+ lastLineStartOffset = lineStartOffset
+ lineStartOffset = charOffset
+ }
+ }
+
+ /** Convert a character digit to an Int according to given base,
+ * -1 if no success */
+ def digit2int(ch: Char, base: Int): Int = {
+ if ('0' <= ch && ch <= '9' && ch < '0' + base)
+ ch - '0'
+ else if ('A' <= ch && ch < 'A' + base - 10)
+ ch - 'A' + 10
+ else if ('a' <= ch && ch < 'a' + base - 10)
+ ch - 'a' + 10
+ else
+ -1
+ }
+
+ /** A new reader that takes off at the current character position */
+ def lookaheadReader = new CharArrayReader1 {
+ val buf = self.buf
+ charOffset = self.charOffset
+ ch = self.ch
+ override def decodeUni = self.decodeUni
+ def error(offset: Int, msg: String) = self.error(offset, msg)
+ }
+}
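The new `CharArrayReader1` leaves only `buf` and `error` abstract; everything else has defaults. A minimal, hypothetical instantiation (assuming the class above is on the classpath), just to show how the reader is driven:

    import scala.tools.nsc.util.CharArrayReader1

    val reader = new CharArrayReader1 {
      val buf = "val x = 1\r\nval y = 2".toCharArray
      def error(offset: Int, msg: String) = throw new RuntimeException(msg + " at " + offset)
    }
    reader.nextChar()   // reader.ch == 'v', reader.charOffset == 1
    // CR+LF in the buffer is collapsed to a single LF as the reader advances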
diff --git a/src/compiler/scala/tools/nsc/util/NewCharArrayReader.scala b/src/compiler/scala/tools/nsc/util/NewCharArrayReader.scala
index 3dfb84d581..9bec95297f 100644
--- a/src/compiler/scala/tools/nsc/util/NewCharArrayReader.scala
+++ b/src/compiler/scala/tools/nsc/util/NewCharArrayReader.scala
@@ -34,7 +34,7 @@ class NewCharArrayReader(val buf: RandomAccessSeq[Char], // should not change
var ch = buf(idx)
idx = idx + 1
ch match {
- case CR if buf.safeIs(idx + 1, LF) =>
+ case CR if (idx + 1 < length && buf(idx + 1) == LF) =>
idx += 1; ch = LF
case LF | FF =>
case '\\' =>
diff --git a/src/compiler/scala/tools/util/AbstractTimer.scala b/src/compiler/scala/tools/util/AbstractTimer.scala
index e92987030c..ab86533ba1 100644
--- a/src/compiler/scala/tools/util/AbstractTimer.scala
+++ b/src/compiler/scala/tools/util/AbstractTimer.scala
@@ -36,7 +36,7 @@ abstract class AbstractTimer {
/** Starts a new timer. */
def start() {
- starts += currentTime
+ starts push currentTime
}
/** Ends the current timer. */