author    michelou <michelou@epfl.ch>  2007-04-23 11:59:28 +0000
committer michelou <michelou@epfl.ch>  2007-04-23 11:59:28 +0000
commit    a9cc141f197da8741b395c4e2321c4f8b15e3c0c (patch)
tree      24b0a21c63e7fef334f3d67cd7c044d1df86f5c0 /src
parent    97f23516de046b12a5ee4ed1ac91d656d199418c (diff)
updated self aliases, for-comprehension, += ops
Diffstat (limited to 'src')
-rw-r--r--  src/compiler/scala/tools/nsc/CompileClient.scala |   1
-rw-r--r--  src/compiler/scala/tools/nsc/MainTokenMetric.scala |  10
-rw-r--r--  src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala |  26
-rw-r--r--  src/compiler/scala/tools/nsc/ast/TreeGen.scala |  15
-rw-r--r--  src/compiler/scala/tools/nsc/ast/TreeInfo.scala |   8
-rw-r--r--  src/compiler/scala/tools/nsc/ast/TreePrinters.scala |  21
-rw-r--r--  src/compiler/scala/tools/nsc/ast/Trees.scala |   6
-rw-r--r--  src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala |  24
-rw-r--r--  src/compiler/scala/tools/nsc/ast/parser/Parsers.scala |  53
-rw-r--r--  src/compiler/scala/tools/nsc/ast/parser/Scanners.scala | 124
-rw-r--r--  src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala |  14
-rw-r--r--  src/compiler/scala/tools/nsc/backend/ScalaPrimitives.scala |  12
-rw-r--r--  src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala |  36
-rw-r--r--  src/compiler/scala/tools/nsc/backend/icode/GenICode.scala |  79
-rw-r--r--  src/compiler/scala/tools/nsc/backend/opt/ClosureElimination.scala |  73
-rw-r--r--  src/compiler/scala/tools/nsc/backend/opt/Inliners.scala |  61
-rw-r--r--  src/compiler/scala/tools/nsc/doc/DocDriver.scala |  89
-rw-r--r--  src/compiler/scala/tools/nsc/doc/DocGenerator.scala |   6
-rw-r--r--  src/compiler/scala/tools/nsc/doc/DocUtil.scala |   4
-rw-r--r--  src/compiler/scala/tools/nsc/doc/ModelExtractor.scala |  64
-rw-r--r--  src/compiler/scala/tools/nsc/doc/ModelFrames.scala |  36
-rw-r--r--  src/compiler/scala/tools/nsc/doc/ModelToXML.scala |  18
-rw-r--r--  src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala |  60
-rw-r--r--  src/compiler/scala/tools/nsc/symtab/Types.scala |  40
-rw-r--r--  src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala |  73
-rw-r--r--  src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala | 183
-rw-r--r--  src/compiler/scala/tools/nsc/symtab/classfile/PickleBuffer.scala |  20
-rw-r--r--  src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala |  26
-rw-r--r--  src/compiler/scala/tools/nsc/symtab/classfile/UnPickler.scala |  19
-rw-r--r--  src/compiler/scala/tools/nsc/transform/UnCurry.scala |   6
-rw-r--r--  src/compiler/scala/tools/nsc/util/NameTransformer.scala |  12
-rw-r--r--  src/compiler/scala/tools/nsc/util/SourceFile.scala |  26
32 files changed, 621 insertions, 624 deletions
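
Note: the commit message above names three mechanical syntax migrations that recur throughout the diff below: "requires X" clauses replaced by self-type aliases, the old "for (val x <- xs; cond)" generator form replaced by "for (x <- xs if cond)", and "x = x + 1" style updates replaced by "+=". The following is a minimal, self-contained sketch of those rewrites in one place; Tokenizer, TokenizerDemo, and the tiny SyntaxAnalyzer trait defined here are hypothetical stand-ins for illustration, not the compiler's actual classes.

    // Hypothetical stand-in; the real SyntaxAnalyzer lives in scala.tools.nsc.ast.parser.
    trait SyntaxAnalyzer {
      def tokens: List[String]
    }

    trait Tokenizer {
      self: SyntaxAnalyzer =>        // self-type alias, replacing the old `requires SyntaxAnalyzer`

      def countNonEmpty: Int = {
        var total = 0
        // old form: for (val t <- tokens; t.nonEmpty) total = total + 1
        for (t <- tokens if t.nonEmpty) // `val` dropped, `;` filter written as an `if` guard
          total += 1                    // `total = total + 1` written as `+=`
        total
      }
    }

    object TokenizerDemo extends App {
      val t = new Tokenizer with SyntaxAnalyzer {
        def tokens = List("a", "", "b")
      }
      println(t.countNonEmpty)          // prints 2
    }
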
diff --git a/src/compiler/scala/tools/nsc/CompileClient.scala b/src/compiler/scala/tools/nsc/CompileClient.scala
index 7c3a664641..32023cccc9 100644
--- a/src/compiler/scala/tools/nsc/CompileClient.scala
+++ b/src/compiler/scala/tools/nsc/CompileClient.scala
@@ -8,7 +8,6 @@ package scala.tools.nsc
import java.io.{BufferedReader, File, InputStreamReader, PrintWriter}
-import scala.compat.StringBuilder
import scala.tools.util.StringOps
/** The main class for NSC, a compiler for the programming
diff --git a/src/compiler/scala/tools/nsc/MainTokenMetric.scala b/src/compiler/scala/tools/nsc/MainTokenMetric.scala
index e936dd85e9..5444aac3fd 100644
--- a/src/compiler/scala/tools/nsc/MainTokenMetric.scala
+++ b/src/compiler/scala/tools/nsc/MainTokenMetric.scala
@@ -7,7 +7,7 @@
package scala.tools.nsc
import compat.Math.log
-import scala.tools.nsc.reporters.{Reporter, ConsoleReporter}
+import scala.tools.nsc.reporters.ConsoleReporter
/** The main class for NSC, a compiler for the programming
* language Scala.
@@ -21,10 +21,10 @@ object MainTokenMetric {
import compiler.syntaxAnalyzer.UnitScanner
import ast.parser.Tokens.EOF
var totale = 0
- for (val source <- fnames) {
+ for (source <- fnames) {
val s = new UnitScanner(new CompilationUnit(compiler.getSourceFile(source)))
var i = 0
- while(s.token != EOF) {
+ while (s.token != EOF) {
i += 1
s.nextToken
}
@@ -35,7 +35,7 @@ object MainTokenMetric {
Console.print(i.toString())
Console.print(" ")
Console.println(source.toString())
- totale = totale + i
+ totale += i
}
Console.println(totale.toString()+" total")
}
@@ -55,7 +55,7 @@ object MainTokenMetric {
}
}
- def main(args: Array[String]): unit = {
+ def main(args: Array[String]) {
process(args)
exit(if (reporter.hasErrors) 1 else 0)
}
diff --git a/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala b/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala
index fc5131358e..8ab01e697a 100644
--- a/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala
+++ b/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala
@@ -1,28 +1,22 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2006 LAMP/EPFL
+ * Copyright 2005-2007 LAMP/EPFL
* @author Martin Odersky
*/
// $Id$
package scala.tools.nsc.ast
-import compat.StringBuilder
-import scala.concurrent.Lock
-import symtab.Flags._
-
-import java.lang.Math
-import java.util.HashMap
+import java.awt.{List => awtList, _}
+import java.awt.event._
import java.io.StringWriter
-import javax.swing.tree._
-import javax.swing.event.TreeModelListener
import javax.swing._
+import javax.swing.event.TreeModelListener
+import javax.swing.tree._
-import java.awt.BorderLayout
-import java.awt.{List => awtList, _}
-import java.awt.event._
-
+import scala.concurrent.Lock
import scala.text._
+import symtab.Flags._
/**
* Tree browsers can show the AST in a graphical and interactive
@@ -77,7 +71,7 @@ abstract class TreeBrowsers {
val phase: Phase = globalPhase
var unitList: List[UnitTree] = Nil
- for (val i <- units)
+ for (i <- units)
unitList = UnitTree(i) :: unitList
val tm = new ASTTreeModel(ProgramTree(unitList))
@@ -230,8 +224,8 @@ abstract class TreeBrowsers {
str.append(buf.toString())
str.append("\nSymbol tpe: ")
if (t.symbol ne null) {
- str.append(t.symbol.tpe).append("\n");
- buf = new StringWriter();
+ str.append(t.symbol.tpe).append("\n")
+ buf = new StringWriter()
TypePrinter.toDocument(t.symbol.tpe).format(getWidth() / getColumnWidth(), buf)
str.append(buf.toString())
}
diff --git a/src/compiler/scala/tools/nsc/ast/TreeGen.scala b/src/compiler/scala/tools/nsc/ast/TreeGen.scala
index 510e07c0cf..ff3b2a21d6 100644
--- a/src/compiler/scala/tools/nsc/ast/TreeGen.scala
+++ b/src/compiler/scala/tools/nsc/ast/TreeGen.scala
@@ -1,14 +1,13 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2006 LAMP/EPFL
+ * Copyright 2005-2007 LAMP/EPFL
* @author Martin Odersky
*/
// $Id$
package scala.tools.nsc.ast
-import scala.tools.nsc.util.Position
-import symtab.Flags._
import scala.collection.mutable.ListBuffer
+import symtab.Flags._
abstract class TreeGen {
@@ -38,7 +37,7 @@ abstract class TreeGen {
if (sym.isRoot) {
mkAttributedThis(sym)
} else if (sym.isModuleClass) {
- val qual = mkAttributedRef(pre, sym.sourceModule);
+ val qual = mkAttributedRef(pre, sym.sourceModule)
qual.tpe match {
case MethodType(List(), restpe) =>
Apply(qual, List()) setType restpe
@@ -193,8 +192,8 @@ abstract class TreeGen {
.setInfo(accessor.tpe.finalResultType)
.setFlag(MODULEVAR);
if (mvar.owner.isClass) {
- mvar setFlag (PRIVATE | LOCAL | SYNTHETIC);
- mvar.owner.info.decls.enter(mvar);
+ mvar setFlag (PRIVATE | LOCAL | SYNTHETIC)
+ mvar.owner.info.decls.enter(mvar)
}
ValDef(mvar, if (mvar.owner.isClass) EmptyTree else Literal(Constant(null)))
}
@@ -204,7 +203,7 @@ abstract class TreeGen {
DefDef(accessor, vparamss =>
mkCached(mvar,
New(TypeTree(mvar.tpe),
- List(for (val pt <- mvar.tpe.symbol.primaryConstructor.info.paramTypes)
+ List(for (pt <- mvar.tpe.symbol.primaryConstructor.info.paramTypes)
yield This(accessor.owner.enclClass)))))
// def m: T;
@@ -228,7 +227,7 @@ abstract class TreeGen {
def evalOnceAll(exprs: List[Tree], owner: Symbol, unit: CompilationUnit)(within: (List[() => Tree]) => Tree): Tree = {
val vdefs = new ListBuffer[ValDef]
val exprs1 = new ListBuffer[() => Tree]
- for (val expr <- exprs) {
+ for (expr <- exprs) {
if (treeInfo.isPureExpr(expr)) {
exprs1 += (() => expr)
} else {
diff --git a/src/compiler/scala/tools/nsc/ast/TreeInfo.scala b/src/compiler/scala/tools/nsc/ast/TreeInfo.scala
index 66dc60fe45..dd567af5fe 100644
--- a/src/compiler/scala/tools/nsc/ast/TreeInfo.scala
+++ b/src/compiler/scala/tools/nsc/ast/TreeInfo.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2006 LAMP/EPFL
+ * Copyright 2005-2007 LAMP/EPFL
* @author Martin Odersky
*/
// $Id$
@@ -7,7 +7,7 @@
package scala.tools.nsc.ast
import symtab.Flags._
-import util.{Set, HashSet}
+import util.HashSet
/** This class ...
*
@@ -151,7 +151,7 @@ abstract class TreeInfo {
/** The value definitions marked PRESUPER in this statement sequence */
def preSuperFields(stats: List[Tree]): List[ValDef] =
- for (val vdef @ ValDef(mods, _, _, _) <- stats; mods hasFlag PRESUPER) yield vdef
+ for (vdef @ ValDef(mods, _, _, _) <- stats if mods hasFlag PRESUPER) yield vdef
def isPreSuper(tree: Tree) = tree match {
case ValDef(mods, _, _, _) => mods hasFlag PRESUPER
@@ -161,7 +161,7 @@ abstract class TreeInfo {
/** Is name a left-associative operator? */
def isLeftAssoc(operator: Name): boolean =
- operator.length > 0 && operator(operator.length - 1) != ':';
+ operator.length > 0 && operator(operator.length - 1) != ':'
private val reserved = new HashSet[Name]
reserved addEntry nme.false_
diff --git a/src/compiler/scala/tools/nsc/ast/TreePrinters.scala b/src/compiler/scala/tools/nsc/ast/TreePrinters.scala
index c32728ac81..1ae4120bb2 100644
--- a/src/compiler/scala/tools/nsc/ast/TreePrinters.scala
+++ b/src/compiler/scala/tools/nsc/ast/TreePrinters.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2006 LAMP/EPFL
+ * Copyright 2005-2007 LAMP/EPFL
* @author Martin Odersky
*/
// $Id: TreePrinters.scala 9925 2007-02-07 18:30:46 +0000 (Wed, 07 Feb 2007) odersky $
@@ -7,7 +7,6 @@
package scala.tools.nsc.ast
import compat.Platform.{EOL => LINE_SEPARATOR}
-import compat.StringBuilder
import java.io.{OutputStream, PrintWriter, Writer}
import symtab.Flags._
@@ -23,13 +22,13 @@ abstract class TreePrinters {
def flush = out.flush()
- def indent = indentMargin = indentMargin + indentStep
- def undent = indentMargin = indentMargin - indentStep
+ def indent = indentMargin += indentStep
+ def undent = indentMargin -= indentStep
def println = {
out.println()
while (indentMargin > indentString.length())
- indentString = indentString + indentString
+ indentString += indentString
if (indentMargin > 0)
out.write(indentString, 0, indentMargin)
}
@@ -195,7 +194,7 @@ abstract class TreePrinters {
if (!args.isEmpty)
printRow(args, "(", ",", ")")
if (!elements.isEmpty)
- print((for (val Assign(name, value) <- elements) yield "val " + name + " = " + value).
+ print((for (Assign(name, value) <- elements) yield "val " + name + " = " + value).
mkString("{", ",", "}"))
case Template(parents, body) =>
@@ -308,7 +307,7 @@ abstract class TreePrinters {
if (!args.isEmpty)
printRow(args, "(", ",", ")")
if (!elements.isEmpty)
- print((for (val Assign(name, value) <- elements) yield "val " + name + " = " + value).
+ print((for (Assign(name, value) <- elements) yield "val " + name + " = " + value).
mkString("{", ",", "}"))
}
if (tree.isType) { printAnnot(); print(" "); print(tree) }
@@ -353,7 +352,7 @@ abstract class TreePrinters {
} else tree)
}
- def print(unit: CompilationUnit): unit = {
+ def print(unit: CompilationUnit) {
print("// Scala source: " + unit.source + LINE_SEPARATOR)
if (unit.body ne null) {
print(unit.body); println
@@ -363,10 +362,10 @@ abstract class TreePrinters {
println; flush
}
- def printAll(): unit = {
+ def printAll() {
print("[[syntax trees at end of " + phase + "]]")
atPhase(phase.next) {
- for (val unit <- global.currentRun.units) print(unit)
+ for (unit <- global.currentRun.units) print(unit)
}
}
}
@@ -381,7 +380,7 @@ abstract class TreePrinters {
object ConsoleWriter extends Writer {
override def write(str: String): unit = Console.print(str)
- def write(cbuf: Array[char], off: int, len: int): unit = {
+ def write(cbuf: Array[char], off: int, len: int) {
val str = new String(cbuf, off, len)
write(str)
}
diff --git a/src/compiler/scala/tools/nsc/ast/Trees.scala b/src/compiler/scala/tools/nsc/ast/Trees.scala
index d792b084fe..9a163b698b 100644
--- a/src/compiler/scala/tools/nsc/ast/Trees.scala
+++ b/src/compiler/scala/tools/nsc/ast/Trees.scala
@@ -120,14 +120,14 @@ trait Trees {
val that0 = that.asInstanceOf[List[Any]]
if (thiz.length != that0.length) false
else {
- val results0 = for (val i <- 0.until(thiz.length).toList)
+ val results0 = for (i <- 0.until(thiz.length).toList)
yield equals0(thiz(i), that0(i))
results0.foldLeft(true)((x,y) => x && y)
}
case thiz =>
thiz == that
}
- val results = for (val i <- 0.until(this0.productArity).toList) yield
+ val results = for (i <- 0.until(this0.productArity).toList) yield
equals0(this0.productElement(i), that0.productElement(i))
val b = results.foldLeft(true)((x,y) => x && y)
b && (if (tpe == null || tpe == NoType) that.tpe == null || that.tpe == NoType
@@ -1495,7 +1495,7 @@ trait Trees {
case Template(parents, body) =>
tree.symbol = NoSymbol
tree.tpe = null
- for (val stat <- body)
+ for (stat <- body)
if (stat.isDef) erasedSyms.addEntry(stat.symbol)
super.traverse(tree)
case _: DefTree =>
diff --git a/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala b/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala
index 25af76f466..f70eeac08a 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala
@@ -1,12 +1,11 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2006 LAMP/EPFL
+ * Copyright 2005-2007 LAMP/EPFL
* @author Burak Emir
*/
// $Id$
package scala.tools.nsc.ast.parser
-import compat.StringBuilder
import scala.collection.immutable.ListMap
import scala.collection.mutable
import scala.tools.nsc.util.Position
@@ -17,7 +16,8 @@ import scala.xml.{Text, TextBuffer}
* @author Burak Emir
* @version 1.0
*/
-trait MarkupParsers requires SyntaxAnalyzer {
+trait MarkupParsers {
+ self: SyntaxAnalyzer =>
import global._
//import posAssigner.atPos
@@ -26,7 +26,7 @@ class MarkupParser(p: UnitParser, presWS: boolean) /*with scala.xml.parsing.Mark
import Tokens.{EMPTY, LBRACE, RBRACE}
- final val preserveWS = presWS;
+ final val preserveWS = presWS
import p.{symbXMLBuilder => handle}
def s = p.in
@@ -254,7 +254,7 @@ class MarkupParser(p: UnitParser, presWS: boolean) /*with scala.xml.parsing.Mark
xToken('-')
while (true) {
if( ch=='-' && { sb.append(ch); nextch; ch == '-' } ) {
- sb.setLength(sb.length() - 1);
+ sb.setLength(sb.length() - 1)
nextch
xToken('>')
return handle.comment(pos, sb.toString())
@@ -272,7 +272,7 @@ class MarkupParser(p: UnitParser, presWS: boolean) /*with scala.xml.parsing.Mark
/*[Duplicate]*/ def appendText(pos: Position, ts: mutable.Buffer[Tree],
txt: String): Unit =
if (!preserveWS) {
- for (val t <- TextBuffer.fromString(txt).toText) {
+ for (t <- TextBuffer.fromString(txt).toText) {
ts.append(handle.text(pos, t.text))
}
}
@@ -555,7 +555,7 @@ class MarkupParser(p: UnitParser, presWS: boolean) /*with scala.xml.parsing.Mark
def xLiteralPattern:Tree = try {
init; pushScannerState
val oldMode = handle.isPattern;
- handle.isPattern = true;
+ handle.isPattern = true
var tree = xPattern; xSpaceOpt;
handle.isPattern = oldMode;
s.next.token = Tokens.EMPTY;
@@ -571,12 +571,12 @@ class MarkupParser(p: UnitParser, presWS: boolean) /*with scala.xml.parsing.Mark
}
def xEmbeddedExpr: Tree = {
- sync;
+ sync
val b = p.block() //p.expr(true,false);
if(/*s.*/token != RBRACE) {
- reportSyntaxError(" expected end of Scala block");
+ reportSyntaxError(" expected end of Scala block")
}
- init;
+ init
return b
}
@@ -586,9 +586,9 @@ class MarkupParser(p: UnitParser, presWS: boolean) /*with scala.xml.parsing.Mark
sync;
val b = p.patterns(true);
if (/*s.*/token != RBRACE) {
- reportSyntaxError(" expected end of Scala patterns");
+ reportSyntaxError(" expected end of Scala patterns")
}
- init;
+ init
return b
}
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
index d41a5fa515..a2707969a5 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
@@ -49,14 +49,19 @@ import Tokens._
* </li>
* </ol>
*/
-trait Parsers requires SyntaxAnalyzer {
+trait Parsers {
+ self: SyntaxAnalyzer =>
import global._
//import RequiresIntsAsPositions._
private val glob: global.type = global
import global.posAssigner.atPos
- class UnitParser(unit : global.CompilationUnit) extends Parser {
+ /** ...
+ *
+ * @author Sean McDirmid
+ */
+ class UnitParser(unit: global.CompilationUnit) extends Parser {
val in = new UnitScanner(unit)
in.init
import in.ScanPosition
@@ -69,25 +74,24 @@ trait Parsers requires SyntaxAnalyzer {
}
abstract class Parser {
- protected val in : AbstractScanner
+ protected val in: AbstractScanner
import in.ScanPosition
- protected def freshName(prefix : String) : Name
- protected def posToReport : ScanPosition
+ protected def freshName(prefix: String): Name
+ protected def posToReport: ScanPosition
import in.{p2g, g2p}
/** the markup parser */
def xmlp = {
if (xmlp0 == null)
xmlp0 = this match {
- case in : UnitParser => new MarkupParser(in, true)
- case _ =>
- Console.println("Cannot create XML PARSER " + in)
- null
- }
- xmlp0;
+ case in: UnitParser => new MarkupParser(in, true)
+ case _ =>
+ Console.println("Cannot create XML PARSER " + in)
+ null
+ }
+ xmlp0
}
- private var xmlp0 : MarkupParser = null;
-
+ private var xmlp0: MarkupParser = null
object treeBuilder extends TreeBuilder {
val global: Parsers.this.global.type = Parsers.this.global
@@ -118,7 +122,7 @@ trait Parsers requires SyntaxAnalyzer {
/////// ERROR HANDLING //////////////////////////////////////////////////////
- private def skip(): Unit = {
+ private def skip() {
//System.out.println("<skipping> " + in.configuration.token2string(in.token))//DEBUG
var nparens = 0
var nbraces = 0
@@ -150,7 +154,7 @@ trait Parsers requires SyntaxAnalyzer {
def syntaxError(msg: String, skipIt: boolean): unit =
syntaxError(in.currentPos, msg, skipIt)
- def syntaxError(pos: ScanPosition, msg: String, skipIt: boolean): Unit = {
+ def syntaxError(pos: ScanPosition, msg: String, skipIt: boolean) {
if (pos != in.errpos) {
in.error(pos, msg)
in.errpos = pos
@@ -171,7 +175,7 @@ trait Parsers requires SyntaxAnalyzer {
in.errpos = in.currentPos
}
- def incompleteInputError(pos: ScanPosition, msg: String): Unit = {
+ def incompleteInputError(pos: ScanPosition, msg: String) {
if (pos != in.errpos) {
in.incompleteInputError(pos, msg)
in.errpos = pos
@@ -181,20 +185,20 @@ trait Parsers requires SyntaxAnalyzer {
def incompleteInputError(msg: String): Unit =
incompleteInputError(in.currentPos, msg) // in.currentPos should be at the EOF
- def syntaxErrorOrIncomplete(msg: String, skipIt: Boolean): Unit = {
- if(in.token == EOF)
+ def syntaxErrorOrIncomplete(msg: String, skipIt: Boolean) {
+ if (in.token == EOF)
incompleteInputError(msg)
else
syntaxError(in.currentPos, msg, skipIt)
}
- def mismatch(expected : Int, found : Int) = {
+ def mismatch(expected: Int, found: Int) {
val posToReport = this.posToReport
val msg =
in.configuration.token2string(expected) + " expected but " +
in.configuration.token2string(found) + " found."
- if(found == EOF)
+ if (found == EOF)
incompleteInputError(posToReport, msg)
else
syntaxError(posToReport, msg, true)
@@ -289,9 +293,8 @@ trait Parsers requires SyntaxAnalyzer {
*/
def joinComment(trees: => List[Tree]): List[Tree] = {
val buf = in.flushDoc
- if (buf ne null) {
- trees map (t => DocDef(buf, t) setPos t.pos)
- } else trees
+ if (buf ne null) trees map (t => DocDef(buf, t) setPos t.pos)
+ else trees
}
/////// TREE CONSTRUCTION ////////////////////////////////////////////////////
@@ -574,12 +577,12 @@ trait Parsers requires SyntaxAnalyzer {
in.nextToken
}
- def newLineOptWhenFollowedBy(token: int): unit = {
+ def newLineOptWhenFollowedBy(token: int) {
// note: next is defined here because current == NEWLINE
if (in.token == NEWLINE && in.next.token == token) newLineOpt()
}
- def newLineOptWhenFollowing(p: int => boolean): unit = {
+ def newLineOptWhenFollowing(p: int => boolean) {
// note: next is defined here because current == NEWLINE
if (in.token == NEWLINE && p(in.next.token)) newLineOpt()
}
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
index 4f6f06fb0b..42a89f142f 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
@@ -1,38 +1,41 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2006 LAMP/EPFL
+ * Copyright 2005-2007 LAMP/EPFL
* @author Martin Odersky
*/
// $Id$
package scala.tools.nsc.ast.parser
-import compat.StringBuilder
-import Tokens._
-import scala.tools.nsc.util.{Position, OffsetPosition, SourceFile}
+import scala.tools.nsc.util.{CharArrayReader, Position, OffsetPosition,
+ SourceFile}
import SourceFile.{LF, FF, CR, SU}
-import scala.tools.nsc.util.CharArrayReader
+import Tokens._
-trait Scanners requires SyntaxAnalyzer {
+trait Scanners {
+ self: SyntaxAnalyzer =>
import global._
+
+ /** ...
+ */
abstract class AbstractTokenData {
- def token : Int
+ def token: Int
type ScanPosition
- val NoPos : ScanPosition
- def pos : ScanPosition
- def currentPos : ScanPosition
- def name : Name
+ val NoPos: ScanPosition
+ def pos: ScanPosition
+ def currentPos: ScanPosition
+ def name: Name
}
+
/** A class for representing a token's data. */
trait TokenData extends AbstractTokenData {
type ScanPosition = Int
-
- val NoPos = -1
+ val NoPos: Int = -1
/** the next token */
var token: Int = EMPTY
/** the token's position */
- var pos: Int = (0)
- override def currentPos : Int = pos - 1
+ var pos: Int = 0
+ override def currentPos: Int = pos - 1
/** the first character position after the previous token */
var lastPos: Int = 0
@@ -51,39 +54,40 @@ trait Scanners requires SyntaxAnalyzer {
this.base = td.base
}
}
+
+ /** ...
+ */
abstract class AbstractScanner extends AbstractTokenData {
- implicit def p2g(pos : Position ) : ScanPosition
- implicit def g2p(pos : ScanPosition) : Position
- def configuration : ScannerConfiguration
- def warning(pos : ScanPosition, msg : String) : Unit
- def error (pos : ScanPosition, msg : String) : Unit
- def incompleteInputError(pos : ScanPosition, msg : String) : Unit
- def deprecationWarning(pos : ScanPosition, msg : String) : Unit
+ implicit def p2g(pos: Position): ScanPosition
+ implicit def g2p(pos: ScanPosition): Position
+ def configuration: ScannerConfiguration
+ def warning(pos: ScanPosition, msg: String): Unit
+ def error (pos: ScanPosition, msg: String): Unit
+ def incompleteInputError(pos: ScanPosition, msg: String): Unit
+ def deprecationWarning(pos: ScanPosition, msg: String): Unit
/** the last error position
*/
- var errpos : ScanPosition
- var lastPos : ScanPosition
+ var errpos: ScanPosition
+ var lastPos: ScanPosition
def skipToken: ScanPosition
def nextToken: Unit
- def next : AbstractTokenData
+ def next: AbstractTokenData
def intVal(negated: Boolean): Long
def floatVal(negated: Boolean): Double
- def intVal : Long = intVal(false)
- def floatVal : Double = floatVal(false)
+ def intVal: Long = intVal(false)
+ def floatVal: Double = floatVal(false)
//def token2string(token : Int) : String = configuration.token2string(token)
/* disabled in presentation compiler */
- var newNewLine : Boolean
+ var newNewLine: Boolean
/* disabled in presentation compiler */
- var skipping : Boolean
+ var skipping: Boolean
/** return recent scala doc, if any */
- def flushDoc : String
-
+ def flushDoc: String
}
-
trait ScannerConfiguration {
// Keywords -----------------------------------------------------------------
- /** Keyword array; maps from name indices to tokens */
+ /** Keyword array; maps from name indices to tokens */
private var key: Array[byte] = _
private var maxKey = 0
private var tokenName = new Array[Name](128)
@@ -93,7 +97,7 @@ trait Scanners requires SyntaxAnalyzer {
// Enter keywords
- def enterKeyword(n: Name, tokenId: int): unit = {
+ def enterKeyword(n: Name, tokenId: int) {
while (tokenId >= tokenName.length) {
val newTokName = new Array[Name](tokenName.length * 2)
Array.copy(tokenName, 0, newTokName, 0, newTokName.length)
@@ -155,10 +159,10 @@ trait Scanners requires SyntaxAnalyzer {
enterKeyword(nme.ATkw, AT)
// Build keyword array
- key = new Array[byte](maxKey+1)
- for (val i <- 0 to maxKey)
+ key = new Array[byte](maxKey + 1)
+ for (i <- 0 to maxKey)
key(i) = IDENTIFIER
- for (val j <- 0 until tokenCount)
+ for (j <- 0 until tokenCount)
if (tokenName(j) ne null)
key(tokenName(j).start) = j.asInstanceOf[byte]
@@ -236,13 +240,12 @@ trait Scanners requires SyntaxAnalyzer {
* @version 1.1
*/
abstract class Scanner extends AbstractScanner with TokenData {
- import Tokens._
- import java.lang.{Integer, Long, Float, Double, Character}
+ import java.lang.{Integer, Long, Float, Double, Character} // MAX_VALUE, valueOf
override def intVal = super.intVal
override def floatVal = super.floatVal
- override var errpos : Int = -1
+ override var errpos: Int = NoPos
- val in : CharArrayReader
+ val in: CharArrayReader
/** character buffer for literals
*/
@@ -253,7 +256,7 @@ trait Scanners requires SyntaxAnalyzer {
protected def putChar(c: char): unit = cbuf.append(c)
/** Clear buffer and set name */
- private def setName: unit = {
+ private def setName {
name = newTermName(cbuf.toString())
cbuf.setLength(0)
}
@@ -263,7 +266,7 @@ trait Scanners requires SyntaxAnalyzer {
var docBuffer: StringBuilder = null
def flushDoc = {
- val ret = if (docBuffer != null) docBuffer.toString else null;
+ val ret = if (docBuffer != null) docBuffer.toString else null
docBuffer = null
ret
}
@@ -278,8 +281,8 @@ trait Scanners requires SyntaxAnalyzer {
protected def putDocChar(c: char): unit =
if (docBuffer ne null) docBuffer.append(c)
- private class TokenData0 extends TokenData {
- }
+ private class TokenData0 extends TokenData
+
/** we need one token lookahead
*/
val next : TokenData = new TokenData0
@@ -310,7 +313,7 @@ trait Scanners requires SyntaxAnalyzer {
(p - 1)
}
- def nextToken: Unit = {
+ def nextToken {
if (token == LPAREN) {
sepRegions = RPAREN :: sepRegions
} else if (token == LBRACKET) {
@@ -587,7 +590,7 @@ trait Scanners requires SyntaxAnalyzer {
in.next; token = COMMA
return
case '(' => //scala-mode: need to understand character quotes
- in.next; token = LPAREN;
+ in.next; token = LPAREN
return
case '{' =>
in.next; token = LBRACE
@@ -663,7 +666,7 @@ trait Scanners requires SyntaxAnalyzer {
} while (in.ch != '/' && in.ch != SU)
if (in.ch == '/') in.next
else incompleteInputError("unclosed comment")
- openComments = openComments - 1
+ openComments -= 1
}
true
} else {
@@ -739,7 +742,7 @@ trait Scanners requires SyntaxAnalyzer {
token = configuration.name2token(name)
return
case _ =>
- if (java.lang.Character.isUnicodeIdentifierPart(in.ch)) {
+ if (Character.isUnicodeIdentifierPart(in.ch)) {
putChar(in.ch)
in.next
} else {
@@ -953,7 +956,7 @@ trait Scanners requires SyntaxAnalyzer {
return 0
}
value = value * base + d
- i = i + 1
+ i += 1
}
if (negated) -value else value
}
@@ -966,7 +969,7 @@ trait Scanners requires SyntaxAnalyzer {
val limit: double =
if (token == DOUBLELIT) Double.MAX_VALUE else Float.MAX_VALUE
try {
- val value : double = Double.valueOf(name.toString()).doubleValue()
+ val value: double = Double.valueOf(name.toString()).doubleValue()
if (value > limit)
syntaxError("floating point number too large")
if (negated) -value else value
@@ -978,7 +981,7 @@ trait Scanners requires SyntaxAnalyzer {
}
/** read a number into name and set base
*/
- protected def getNumber:unit = {
+ protected def getNumber {
while (in.digit2int(in.ch, if (base < 10) 10 else base) >= 0) {
putChar(in.ch)
in.next
@@ -1076,14 +1079,17 @@ trait Scanners requires SyntaxAnalyzer {
nextToken
}
}
- class UnitScanner(unit : CompilationUnit) extends Scanner with ScannerConfiguration {
+
+ /** ...
+ */
+ class UnitScanner(unit: CompilationUnit) extends Scanner with ScannerConfiguration {
override def configuration = this
val in = new CharArrayReader(unit.source.getContent(), !settings.nouescape.value, syntaxError)
- def warning(pos : Int, msg : String) = unit.warning(pos, msg)
- def error (pos : Int, msg : String) = unit. error(pos, msg)
- def incompleteInputError(pos : Int, msg : String) = unit.incompleteInputError(pos, msg)
- def deprecationWarning(pos : Int, msg : String) = unit.deprecationWarning(pos, msg)
- implicit def p2g(pos : Position) : Int = pos.offset.get(-1)
- implicit def g2p(pos : Int ) : Position = new OffsetPosition(unit.source, pos)
+ def warning(pos: Int, msg: String) = unit.warning(pos, msg)
+ def error (pos: Int, msg: String) = unit. error(pos, msg)
+ def incompleteInputError(pos: Int, msg: String) = unit.incompleteInputError(pos, msg)
+ def deprecationWarning(pos: Int, msg: String) = unit.deprecationWarning(pos, msg)
+ implicit def p2g(pos: Position): Int = pos.offset.get(-1)
+ implicit def g2p(pos: Int): Position = new OffsetPosition(unit.source, pos)
}
}
diff --git a/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala b/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala
index ce5d39e43b..7f9a7e385c 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2006 LAMP/EPFL
+ * Copyright 2005-2007 LAMP/EPFL
* @author Burak Emir
*/
// $Id$
@@ -89,7 +89,7 @@ abstract class SymbolicXMLBuilder(make: TreeBuilder, p: Parsers # Parser, preser
private def bufferToArray(buf: mutable.Buffer[Tree]): Array[Tree] = {
val arr = new Array[Tree](buf.length)
var i = 0
- for (val x <- buf.elements) { arr(i) = x; i = i + 1; }
+ for (x <- buf.elements) { arr(i) = x; i = i + 1; }
arr
}
*/
@@ -110,7 +110,7 @@ abstract class SymbolicXMLBuilder(make: TreeBuilder, p: Parsers # Parser, preser
pre, label, Ident( nme.WILDCARD ) /* md */ , Ident( nme.WILDCARD )) /* scope */ ::: children.toList )
}
} else {
- var ab = List(pre, label, attrs, scope);
+ var ab = List(pre, label, attrs, scope)
if (children.length > 0)
ab = ab ::: List(Typed(makeXMLseq(pos, children), Ident(nme.WILDCARD_STAR.toTypeName)));
atPos(pos) { New( _scala_xml_Elem, List(ab) )}
@@ -183,7 +183,7 @@ abstract class SymbolicXMLBuilder(make: TreeBuilder, p: Parsers # Parser, preser
val ns = xml.Utility.parseAttributeValue(s)
val ts:collection.mutable.ListBuffer[Tree] = new collection.mutable.ListBuffer
val it = ns.elements
- while(it.hasNext) it.next match {
+ while (it.hasNext) it.next match {
case Text(s) => ts += text(pos, s) // makeText1(Literal(Constant(s)))
case EntityRef(s) => ts += entityRef(pos, s)
}
@@ -201,7 +201,7 @@ abstract class SymbolicXMLBuilder(make: TreeBuilder, p: Parsers # Parser, preser
buf.remove(i)
buf.insert(i, t2)
}
- i = i + 1
+ i += 1
}
}
@@ -261,7 +261,7 @@ abstract class SymbolicXMLBuilder(make: TreeBuilder, p: Parsers # Parser, preser
val attrIt = attrMap.keys
while (attrIt.hasNext) {
val z = attrIt.next
- if (z.startsWith("xmlns")) { // handle namespace
+ if (z startsWith "xmlns") { // handle namespace
val i = z.indexOf(':')
if( i == -1 )
handleNamespaceBinding(null, attrMap(z))
@@ -311,7 +311,7 @@ abstract class SymbolicXMLBuilder(make: TreeBuilder, p: Parsers # Parser, preser
Assign(Ident(_md), New(_scala_xml_UnprefixedAttribute,
LL(Literal(Constant(key)),value,Ident(_md))
))};
- tlist2 = t :: tlist2;
+ tlist2 = t :: tlist2
}
var it = attrMap.elements
diff --git a/src/compiler/scala/tools/nsc/backend/ScalaPrimitives.scala b/src/compiler/scala/tools/nsc/backend/ScalaPrimitives.scala
index d32e44afa5..90fec80c0b 100644
--- a/src/compiler/scala/tools/nsc/backend/ScalaPrimitives.scala
+++ b/src/compiler/scala/tools/nsc/backend/ScalaPrimitives.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2006 LAMP/EPFL
+ * Copyright 2005-2007 LAMP/EPFL
* @author Martin Odersky
*/
@@ -206,7 +206,7 @@ abstract class ScalaPrimitives {
private var primitives: Map[Symbol, Int] = _
/** Initialize the primitive map */
- def init: Unit = {
+ def init {
primitives = new HashMap()
// scala.Any
@@ -468,24 +468,24 @@ abstract class ScalaPrimitives {
}
/** Add a primitive operation to the map */
- def addPrimitive(s: Symbol, code: Int): Unit = {
+ def addPrimitive(s: Symbol, code: Int) {
assert(!(primitives contains s), "Duplicate primitive " + s)
primitives(s) = code
}
- def addPrimitives(cls: Symbol, method: Name, code: Int): Unit = {
+ def addPrimitives(cls: Symbol, method: Name, code: Int) {
val tpe = cls.info
val sym = tpe.member(method)
if (sym == NoSymbol)
inform("Unknown primitive method " + cls + "." + method)
- for (val s <- sym.alternatives)
+ for (s <- sym.alternatives)
addPrimitive(
s,
if (code == ADD && s.info.paramTypes.head == definitions.StringClass.tpe) CONCAT
else code)
}
- def isCoercion(code: Int): Boolean = (code >= B2B) && (code <= D2D);
+ def isCoercion(code: Int): Boolean = (code >= B2B) && (code <= D2D)
/** Check whether the given operation code is an array operation. */
def isArrayOp(code: Int): Boolean =
diff --git a/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala b/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala
index ac96121d78..dc38fef872 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala
@@ -7,13 +7,13 @@
package scala.tools.nsc.backend.icode
-import compat.StringBuilder
-import scala.tools.nsc.ast._
+//import scala.tools.nsc.ast._
import scala.collection.mutable.{Map, Set, LinkedHashSet}
import scala.tools.nsc.util.{Position,NoPosition}
import scala.tools.nsc.backend.icode.analysis.ProgramPoint
-trait BasicBlocks requires ICodes {
+trait BasicBlocks {
+ self: ICodes =>
import opcodes._
/** This class represents a basic block. Each
@@ -70,7 +70,7 @@ trait BasicBlocks requires ICodes {
instrs
}
- def fromList(is: List[Instruction]): Unit = {
+ def fromList(is: List[Instruction]) {
instrs = toInstructionArray(is)
closed = true
}
@@ -85,7 +85,7 @@ trait BasicBlocks requires ICodes {
var i = 0
while (i < instrs.length) {
if (instrs(i) eq inst) return i
- i = i + 1
+ i += 1
}
-1
}
@@ -105,8 +105,8 @@ trait BasicBlocks requires ICodes {
def traverseBackwards(f: Instruction => Unit) = {
var i = instrs.length - 1
while (i >= 0) {
- f(instrs(i));
- i = i - 1
+ f(instrs(i))
+ i -= 1
}
}
@@ -125,11 +125,11 @@ trait BasicBlocks requires ICodes {
var i = pos
var d = 0
while (i > 0) {
- i = i - 1
+ i -= 1
val prod = instrs(i).produced
if (prod > 0 && d == 0)
return Some(i)
- d = d + (instrs(i).consumed - instrs(i).produced);
+ d += (instrs(i).consumed - instrs(i).produced)
}
None
}
@@ -172,7 +172,7 @@ trait BasicBlocks requires ICodes {
instrs(i) = newInstr
changed = true
}
- i = i + 1
+ i += 1
}
changed
}
@@ -192,7 +192,7 @@ trait BasicBlocks requires ICodes {
var changed = false
while (i < instrs.length && (instrs(i) ne iold))
- i = i + 1;
+ i += 1
if (i < instrs.length) {
val newInstrs = new Array[Instruction](instrs.length + is.length - 1);
@@ -201,7 +201,7 @@ trait BasicBlocks requires ICodes {
var j = i
for (val x <- is) {
newInstrs(j) = x
- j = j + 1
+ j += 1
}
if (i + 1 < instrs.length)
Array.copy(instrs, i + 1, newInstrs, j, instrs.length - i - 1)
@@ -222,7 +222,7 @@ trait BasicBlocks requires ICodes {
var j = i
for (val x <- is) {
newInstrs(j) = x
- j = j + 1
+ j += 1
}
if (i + 1 < instrs.length)
Array.copy(instrs, i + 1, newInstrs, j, instrs.length - i)
@@ -234,7 +234,7 @@ trait BasicBlocks requires ICodes {
*
* @param positions ...
*/
- def removeInstructionsAt(positions: Int*): Unit = {
+ def removeInstructionsAt(positions: Int*) {
assert(closed)
val removed = positions.toList
val newInstrs = new Array[Instruction](instrs.length - positions.length)
@@ -243,9 +243,9 @@ trait BasicBlocks requires ICodes {
while (i < instrs.length) {
if (!removed.contains(i)) {
newInstrs(j) = instrs(i)
- j = j + 1
+ j += 1
}
- i = i + 1
+ i += 1
}
instrs = newInstrs
}
@@ -278,7 +278,7 @@ trait BasicBlocks requires ICodes {
}
}
- private def substOnList(map: Map[Instruction, Instruction]): Unit = {
+ private def substOnList(map: Map[Instruction, Instruction]) {
def subst(l: List[Instruction]): List[Instruction] = l match {
case Nil => Nil
case x :: xs =>
@@ -371,7 +371,7 @@ trait BasicBlocks requires ICodes {
var array = new Array[Instruction](l.length)
var i: Int = 0
- l foreach (x => { array(i) = x; i = i + 1 })
+ l foreach (x => { array(i) = x; i += 1 })
array
}
diff --git a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala
index 3dab1034a8..5752b96240 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2006 LAMP/EPFL
+ * Copyright 2005-2007 LAMP/EPFL
* @author Martin Odersky
*/
@@ -7,8 +7,7 @@
package scala.tools.nsc.backend.icode
-import compat.StringBuilder
-import scala.collection.mutable.{Map, HashMap, ListBuffer, Buffer, Set, HashSet}
+import scala.collection.mutable.{Map, HashMap, ListBuffer, Buffer, HashSet}
import scala.tools.nsc.symtab._
@@ -67,9 +66,7 @@ abstract class GenICode extends SubComponent {
def gen(trees: List[Tree], ctx: Context): Context = {
var ctx1 = ctx
- for (val t <- trees)
- ctx1 = gen(t, ctx1)
-
+ for (t <- trees) ctx1 = gen(t, ctx1)
ctx1
}
@@ -141,7 +138,7 @@ abstract class GenICode extends SubComponent {
private def genStat(trees: List[Tree], ctx: Context): Context = {
var currentCtx = ctx
- for (val t <- trees)
+ for (t <- trees)
currentCtx = genStat(t, currentCtx)
currentCtx
@@ -457,7 +454,7 @@ abstract class GenICode extends SubComponent {
val returnedKind = toTypeKind(expr.tpe)
var ctx1 = genLoad(expr, ctx, returnedKind)
val oldcleanups = ctx1.cleanups
- for (val op <- ctx1.cleanups) op match {
+ for (op <- ctx1.cleanups) op match {
case MonitorRelease(m) =>
ctx1.bb.emit(LOAD_LOCAL(m))
ctx1.bb.emit(MONITOR_EXIT())
@@ -477,7 +474,7 @@ abstract class GenICode extends SubComponent {
case Try(block, catches, finalizer) =>
val kind = toTypeKind(tree.tpe)
- var handlers = for (val CaseDef(pat, _, body) <- catches.reverse)
+ var handlers = for (CaseDef(pat, _, body) <- catches.reverse)
yield pat match {
case Typed(Ident(nme.WILDCARD), tpt) => (tpt.tpe.symbol, kind, {
ctx: Context =>
@@ -924,29 +921,28 @@ abstract class GenICode extends SubComponent {
var tags: List[Int] = Nil
var default: BasicBlock = afterCtx.bb
- for (val caze <- cases)
- caze match {
- case CaseDef(Literal(value), EmptyTree, body) =>
- tags = value.intValue :: tags
- val tmpCtx = ctx1.newBlock
- targets = tmpCtx.bb :: targets
+ for (caze <- cases) caze match {
+ case CaseDef(Literal(value), EmptyTree, body) =>
+ tags = value.intValue :: tags
+ val tmpCtx = ctx1.newBlock
+ targets = tmpCtx.bb :: targets
- caseCtx = genLoad(body, tmpCtx , kind)
- caseCtx.bb.emit(JUMP(afterCtx.bb), caze.pos)
- caseCtx.bb.close
+ caseCtx = genLoad(body, tmpCtx , kind)
+ caseCtx.bb.emit(JUMP(afterCtx.bb), caze.pos)
+ caseCtx.bb.close
- case CaseDef(Ident(nme.WILDCARD), EmptyTree, body) =>
- val tmpCtx = ctx1.newBlock
- default = tmpCtx.bb
+ case CaseDef(Ident(nme.WILDCARD), EmptyTree, body) =>
+ val tmpCtx = ctx1.newBlock
+ default = tmpCtx.bb
- caseCtx = genLoad(body, tmpCtx , kind)
- caseCtx.bb.emit(JUMP(afterCtx.bb), caze.pos)
- caseCtx.bb.close
+ caseCtx = genLoad(body, tmpCtx , kind)
+ caseCtx.bb.emit(JUMP(afterCtx.bb), caze.pos)
+ caseCtx.bb.close
- case _ =>
- abort("Invalid case statement in switch-like pattern match: " +
- tree + " at: " + (tree.pos))
- }
+ case _ =>
+ abort("Invalid case statement in switch-like pattern match: " +
+ tree + " at: " + (tree.pos))
+ }
ctx1.bb.emit(SWITCH(tags.reverse map (x => List(x)),
(default :: targets).reverse), tree.pos)
ctx1.bb.close
@@ -1219,7 +1215,7 @@ abstract class GenICode extends SubComponent {
log("Lifted string concatenations for " + tree + "\n to: " + concatenations);
ctx1.bb.emit(CALL_PRIMITIVE(StartConcat), tree.pos);
- for (val elem <- concatenations) {
+ for (elem <- concatenations) {
val kind = toTypeKind(elem.tpe)
ctx1 = genLoad(elem, ctx1, kind)
ctx1.bb.emit(CALL_PRIMITIVE(StringConcat(kind)), elem.pos)
@@ -1462,7 +1458,7 @@ abstract class GenICode extends SubComponent {
assert(ctx.clazz.symbol eq cls,
"Classes are not the same: " + ctx.clazz.symbol + ", " + cls)
- for (val f <- cls.info.decls.elements)
+ for (f <- cls.info.decls.elements)
if (!f.isMethod && f.isTerm)
ctx.clazz.addField(new IField(f));
}
@@ -1476,7 +1472,7 @@ abstract class GenICode extends SubComponent {
case Nil => ()
case vparams :: Nil =>
- for (val p <- vparams) {
+ for (p <- vparams) {
val lv = new Local(p.symbol, toTypeKind(p.symbol.info), true)
ctx.method.addParam(lv)
ctx.scope.add(lv)
@@ -1569,8 +1565,8 @@ abstract class GenICode extends SubComponent {
}
if (changed) {
log("Removing block: " + block)
- method.code.removeBlock(block);
- for (val e <- method.exh) {
+ method.code.removeBlock(block)
+ for (e <- method.exh) {
e.covered = e.covered filter (.!=(block))
e.blocks = e.blocks filter (.!=(block))
if (e.startBlock eq block)
@@ -1782,7 +1778,12 @@ abstract class GenICode extends SubComponent {
this
}
- /** Prepare a new context upon entry into a method */
+ /** Prepare a new context upon entry into a method.
+ *
+ * @param m ...
+ * @param d ...
+ * @return ...
+ */
def enterMethod(m: IMethod, d: DefDef): Context = {
val ctx1 = new Context(this) setMethod(m)
ctx1.labels = new HashMap()
@@ -1804,7 +1805,7 @@ abstract class GenICode extends SubComponent {
}
block.varsInScope = new HashSet()
block.varsInScope ++= scope.varsInScope
- new Context(this) setBasicBlock block;
+ new Context(this) setBasicBlock block
}
def enterScope = {
@@ -1824,7 +1825,7 @@ abstract class GenICode extends SubComponent {
* previously active handlers).
*/
def newHandler(cls: Symbol, resultKind: TypeKind): ExceptionHandler = {
- handlerCount = handlerCount + 1
+ handlerCount += 1
val exh = new ExceptionHandler(method, "" + handlerCount, cls)
exh.resultKind = resultKind
method.addHandler(exh)
@@ -1945,11 +1946,11 @@ abstract class GenICode extends SubComponent {
* Patch the code by replacing pseudo call instructions with
* jumps to the given basic block.
*/
- def patch(code: Code): Unit = {
+ def patch(code: Code) {
def substMap: Map[Instruction, Instruction] = {
- val map = new HashMap[Instruction, Instruction]();
+ val map = new HashMap[Instruction, Instruction]()
- toPatch foreach (i => map += i -> patch(i));
+ toPatch foreach (i => map += i -> patch(i))
map
}
diff --git a/src/compiler/scala/tools/nsc/backend/opt/ClosureElimination.scala b/src/compiler/scala/tools/nsc/backend/opt/ClosureElimination.scala
index b13656353a..63c865b79e 100644
--- a/src/compiler/scala/tools/nsc/backend/opt/ClosureElimination.scala
+++ b/src/compiler/scala/tools/nsc/backend/opt/ClosureElimination.scala
@@ -1,5 +1,5 @@
- /* NSC -- new scala compiler
- * Copyright 2005 LAMP/EPFL
+ /* NSC -- new Scala compiler
+ * Copyright 2005-2007 LAMP/EPFL
* @author Iulian Dragos
*/
@@ -12,23 +12,24 @@ import scala.tools.nsc.backend.icode.analysis.LubError;
import scala.tools.nsc.symtab._;
/**
+ * @author Iulian Dragos
*/
abstract class ClosureElimination extends SubComponent {
- import global._;
- import icodes._;
- import icodes.opcodes._;
+ import global._
+ import icodes._
+ import icodes.opcodes._
- val phaseName = "closelim";
+ val phaseName = "closelim"
/** Create a new phase */
- override def newPhase(p: Phase) = new ClosureEliminationPhase(p);
+ override def newPhase(p: Phase) = new ClosureEliminationPhase(p)
/** The Inlining phase.
*/
class ClosureEliminationPhase(prev: Phase) extends ICodePhase(prev) {
def name = phaseName
- val closser = new ClosureElim;
+ val closser = new ClosureElim
override def apply(c: IClass): Unit =
closser.analyzeClass(c)
@@ -43,11 +44,11 @@ abstract class ClosureElimination extends SubComponent {
class ClosureElim {
/* fresh name counter */
- var count = 0;
+ var count = 0
def freshName(s: String) = {
- val ret = s + this.count;
- this.count = this.count + 1;
+ val ret = s + this.count
+ this.count += 1
ret
}
@@ -72,46 +73,46 @@ abstract class ClosureElimination extends SubComponent {
else
Some(DROP(REFERENCE(definitions.ObjectClass)) :: Nil);
- case _ => None;
+ case _ => None
});
def analyzeClass(cls: IClass): Unit = if (settings.Xcloselim.value) {
cls.methods.foreach { m =>
- analyzeMethod(m);
+ analyzeMethod(m)
peephole.transformMethod(m);
}}
- val cpp = new copyPropagation.CopyAnalysis;
+ val cpp = new copyPropagation.CopyAnalysis
- import copyPropagation._;
+ import copyPropagation._
/* Some embryonic copy propagation. */
def analyzeMethod(m: IMethod): Unit = try {if (m.code ne null) {
- log("Analyzing " + m);
- cpp.init(m);
- cpp.run;
+ log("Analyzing " + m)
+ cpp.init(m)
+ cpp.run
- for (val bb <- linearizer.linearize(m)) {
- var info = cpp.in(bb);
+ for (bb <- linearizer.linearize(m)) {
+ var info = cpp.in(bb)
- for (val i <- bb.toList) {
+ for (i <- bb.toList) {
i match {
case LOAD_LOCAL(l) if (info.bindings.isDefinedAt(LocalVar(l))) =>
- val t = info.getBinding(l);
+ val t = info.getBinding(l)
t match {
case Deref(LocalVar(v)) =>
bb.replaceInstruction(i, valueToInstruction(t));
- log("replaced " + i + " with " + t);
+ log("replaced " + i + " with " + t)
case This() =>
bb.replaceInstruction(i, valueToInstruction(t));
- log("replaced " + i + " with " + t);
+ log("replaced " + i + " with " + t)
case _ =>
bb.replaceInstruction(i, LOAD_LOCAL(info.getAlias(l)));
- log("replaced " + i + " with " + info.getAlias(l));
+ log("replaced " + i + " with " + info.getAlias(l))
}
@@ -144,13 +145,13 @@ abstract class ClosureElimination extends SubComponent {
case _ => ();
}
- info = cpp.interpret(info, i);
+ info = cpp.interpret(info, i)
}
}
}} catch {
case e: LubError =>
- Console.println("In method: " + m);
- Console.println(e);
+ Console.println("In method: " + m)
+ Console.println(e)
e.printStackTrace
}
@@ -176,20 +177,20 @@ abstract class ClosureElimination extends SubComponent {
/** Peephole optimization. */
class PeepholeOpt(peep: (Instruction, Instruction) => Option[List[Instruction]]) {
- private var method: IMethod = null;
+ private var method: IMethod = null
def transformMethod(m: IMethod): Unit = if (m.code ne null) {
- method = m;
- for (val b <- m.code.blocks)
- transformBlock(b);
+ method = m
+ for (b <- m.code.blocks)
+ transformBlock(b)
}
def transformBlock(b: BasicBlock): Unit = if (b.size >= 2) {
var newInstructions: List[Instruction] = Nil;
- newInstructions = b.toList;
+ newInstructions = b.toList
- var redo = false;
+ var redo = false
do {
var h = newInstructions.head;
var t = newInstructions.tail;
@@ -206,10 +207,10 @@ abstract class ClosureElimination extends SubComponent {
}
seen = h :: seen;
h = t.head;
- t = t.tail;
+ t = t.tail
}
} while (redo);
- b.fromList(newInstructions);
+ b.fromList(newInstructions)
}
}
diff --git a/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala b/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala
index d99aa70966..c8dbfc85f0 100644
--- a/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala
+++ b/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala
@@ -1,36 +1,37 @@
-/* NSC -- new scala compiler
- * Copyright 2005 LAMP/EPFL
+/* NSC -- new Scala compiler
+ * Copyright 2005-2007 LAMP/EPFL
* @author Iulian Dragos
*/
// $Id$
-package scala.tools.nsc.backend.opt;
+package scala.tools.nsc.backend.opt
-import scala.collection.mutable.{Map, HashMap, Set, HashSet};
-import scala.tools.nsc.symtab._;
+import scala.collection.mutable.{Map, HashMap, Set, HashSet}
+import scala.tools.nsc.symtab._
/**
+ * @author Iulian Dragos
*/
abstract class Inliners extends SubComponent {
- import global._;
- import icodes._;
- import icodes.opcodes._;
+ import global._
+ import icodes._
+ import icodes.opcodes._
- val phaseName = "inliner";
+ val phaseName = "inliner"
/** The maximum size in basic blocks of methods considered for inlining. */
final val MAX_INLINE_SIZE = 16
/** Create a new phase */
- override def newPhase(p: Phase) = new InliningPhase(p);
+ override def newPhase(p: Phase) = new InliningPhase(p)
/** The Inlining phase.
*/
class InliningPhase(prev: Phase) extends ICodePhase(prev) {
def name = phaseName
- val inliner = new Inliner;
+ val inliner = new Inliner
override def apply(c: IClass): Unit =
inliner.analyzeClass(c)
@@ -46,7 +47,7 @@ abstract class Inliners extends SubComponent {
val fresh = new HashMap[String, Int]
/* fresh name counter */
- var count = 0;
+ var count = 0
def freshName(s: String) = fresh.get(s) match {
case Some(count) =>
@@ -71,14 +72,14 @@ abstract class Inliners extends SubComponent {
case _ => "<nopos>"
}));
- val targetPos = instr.pos;
- val a = new analysis.MethodTFA(callee);
+ val targetPos = instr.pos
+ val a = new analysis.MethodTFA(callee)
/* The exception handlers that are active at the current block. */
- val activeHandlers = caller.exh.filter(.covered.contains(block));
+ val activeHandlers = caller.exh.filter(.covered.contains(block))
/* Map 'original' blocks to the ones inlined in the caller. */
- val inlinedBlock: Map[BasicBlock, BasicBlock] = new HashMap;
+ val inlinedBlock: Map[BasicBlock, BasicBlock] = new HashMap
val varsInScope: Set[Local] = new HashSet[Local] ++ block.varsInScope.elements
@@ -104,8 +105,8 @@ abstract class Inliners extends SubComponent {
/** Add a new block in the current context. */
def newBlock = {
- val b = caller.code.newBlock;
- activeHandlers.foreach (.addCoveredBlock(b));
+ val b = caller.code.newBlock
+ activeHandlers.foreach (.addCoveredBlock(b))
if (retVal ne null) b.varsInScope += retVal
b.varsInScope += inlinedThis
b.varsInScope ++= varsInScope
@@ -113,9 +114,9 @@ abstract class Inliners extends SubComponent {
}
def translateExh(e: ExceptionHandler) = {
- var handler: ExceptionHandler = e.dup;
- handler.covered = handler.covered.map(inlinedBlock);
- handler.setStartBlock(inlinedBlock(e.startBlock));
+ var handler: ExceptionHandler = e.dup
+ handler.covered = handler.covered.map(inlinedBlock)
+ handler.setStartBlock(inlinedBlock(e.startBlock))
handler
}
@@ -290,11 +291,11 @@ abstract class Inliners extends SubComponent {
if (m.code ne null) {
if (settings.debug.value)
log("Analyzing " + m + " count " + count);
- tfa.init(m);
- tfa.run;
- for (val bb <- linearizer.linearize(m)) {
+ tfa.init(m)
+ tfa.run
+ for (bb <- linearizer.linearize(m)) {
var info = tfa.in(bb);
- for (val i <- bb.toList) {
+ for (i <- bb.toList) {
if (!retry) {
i match {
case CALL_METHOD(msym, Dynamic) =>
@@ -349,9 +350,9 @@ abstract class Inliners extends SubComponent {
case _ => ();
}
- info = tfa.interpret(info, i);
+ info = tfa.interpret(info, i)
}}}}
- } while (retry && count < 15);
+ } while (retry && count < 15)
m.normalize
} catch {
case e =>
@@ -360,7 +361,7 @@ abstract class Inliners extends SubComponent {
"\nMethod owner: " + m.symbol.owner);
e.printStackTrace();
m.dump
- throw e;
+ throw e
}
/** Cache whether a method calls private members. */
@@ -385,8 +386,8 @@ abstract class Inliners extends SubComponent {
callsPrivate get (callee) match {
case Some(b) => callsPrivateMember = b;
case None =>
- for (val b <- callee.code.blocks)
- for (val i <- b.toList)
+ for (b <- callee.code.blocks)
+ for (i <- b.toList)
i match {
case CALL_METHOD(m, style) =>
if (m.hasFlag(Flags.PRIVATE) ||
diff --git a/src/compiler/scala/tools/nsc/doc/DocDriver.scala b/src/compiler/scala/tools/nsc/doc/DocDriver.scala
index 89e5be0b77..1df9e84c2f 100644
--- a/src/compiler/scala/tools/nsc/doc/DocDriver.scala
+++ b/src/compiler/scala/tools/nsc/doc/DocDriver.scala
@@ -4,11 +4,13 @@
*/
// $Id$
-package scala.tools.nsc.doc;
+package scala.tools.nsc.doc
-import scala.collection.jcl;
-import symtab.Flags._;
-import scala.xml._;
+import java.util.zip.ZipFile
+
+import scala.collection.jcl
+import symtab.Flags._
+import scala.xml._
/**
* @author Sean McDirmid
@@ -16,15 +18,15 @@ import scala.xml._;
abstract class DocDriver extends ModelFrames with ModelToXML {
import global._;
- object additions extends jcl.LinkedHashSet[Symbol];
+ object additions extends jcl.LinkedHashSet[Symbol]
object additions0 extends ModelAdditions(global) {
- override def addition(sym : global.Symbol) = {
- super.addition(sym);
+ override def addition(sym: global.Symbol) = {
+ super.addition(sym)
sym match {
- case sym : global.ClassSymbol => additions += sym.asInstanceOf[Symbol];
- case sym : global.ModuleSymbol => additions += sym.asInstanceOf[Symbol];
- case sym : global.TypeSymbol => additions += sym.asInstanceOf[Symbol];
- case _ =>
+ case sym : global.ClassSymbol => additions += sym.asInstanceOf[Symbol]
+ case sym : global.ModuleSymbol => additions += sym.asInstanceOf[Symbol]
+ case sym : global.TypeSymbol => additions += sym.asInstanceOf[Symbol]
+ case _ =>
}
}
def init : Unit = {}
@@ -56,19 +58,19 @@ abstract class DocDriver extends ModelFrames with ModelToXML {
units.foreach(unit => f(null, unit.body));
- for (val p <- allClasses; val d <- p._2) {
+ for (p <- allClasses; val d <- p._2) {
symbols += d.sym;
- for (val pp <- d.sym.tpe.parents) subClasses(pp.symbol) += d;
+ for (pp <- d.sym.tpe.parents) subClasses(pp.symbol) += d;
}
- additions0.init;
- copyResources;
- val packages0 = sort(allClasses.keySet);
- new AllPackagesFrame with Frame { def packages = packages0; };
- new PackagesContentFrame with Frame { def packages = packages0; };
- new NavigationFrame with Frame { };
+ additions0.init
+ copyResources
+ val packages0 = sort(allClasses.keySet)
+ new AllPackagesFrame with Frame { def packages = packages0; }
+ new PackagesContentFrame with Frame { def packages = packages0; }
+ new NavigationFrame with Frame { }
new ListClassFrame with Frame {
def classes = {
- for (val p <- allClasses; val d <- p._2) yield d;
+ for (p <- allClasses; d <- p._2) yield d;
}
object organized extends jcl.LinkedHashMap[(List[String],Boolean),List[ClassOrObject]] {
override def default(key : (List[String],Boolean)) = Nil;
@@ -97,7 +99,7 @@ abstract class DocDriver extends ModelFrames with ModelToXML {
}
}
- for (val (pkg0,classes0) <- allClasses) {
+ for ((pkg0,classes0) <- allClasses) {
new ListClassFrame with Frame {
def title =
"List of classes and objects in package " + pkg0.fullName('.')
@@ -109,7 +111,7 @@ abstract class DocDriver extends ModelFrames with ModelToXML {
def classes = classes0;
def pkg = pkg0;
}
- for (val clazz0 <- classes0) {
+ for (clazz0 <- classes0) {
new ClassContentFrame with Frame {
def clazz = clazz0;
def title =
@@ -117,7 +119,7 @@ abstract class DocDriver extends ModelFrames with ModelToXML {
}
}
}
- for (val sym <- additions) sym match {
+ for (sym <- additions) sym match {
case sym : ClassSymbol =>
val add = new TopLevelClass(sym);
new ClassContentFrame with Frame {
@@ -164,32 +166,35 @@ abstract class DocDriver extends ModelFrames with ModelToXML {
})++super.classBody(entity);
protected def urlFor(sym : Symbol)(implicit frame : Frame) = frame.urlFor(sym);
- override protected def decodeTag(tag : String) : String = tag match {
- case "exception" => "Throws"
- case "ex" => "Examples"
- case "param" => "Parameters"
- case "pre" => "Precondition"
- case "return" => "Returns"
- case "note" => "Notes"
- case "see" => "See Also"
- case tag => super.decodeTag(tag);
+ override protected def decodeTag(tag: String): String = tag match {
+ case "exception" => "Throws"
+ case "ex" => "Examples"
+ case "param" => "Parameters"
+ case "pre" => "Precondition"
+ case "return" => "Returns"
+ case "note" => "Notes"
+ case "see" => "See Also"
+ case tag => super.decodeTag(tag)
}
- override protected def decodeOption(tag : String, option : String) : NodeSeq = tag match {
- case "throws" if additions0.exceptions.contains(option) =>
- val (sym, s) = additions0.exceptions(option);
- val path = "../" //todo: fix path
- val href = path + sym.fullNameString('/') +
- (if (sym.isModule || sym.isModuleClass) NAME_SUFFIX_OBJECT else "") +
- "#" + s
- <a href={href}>{option}</a> ++ {Text(" - ")};
- case _ => super.decodeOption(tag,option);
+
+ override protected def decodeOption(tag: String, option: String): NodeSeq = tag match {
+ case "throws" if additions0.exceptions.contains(option) =>
+ val (sym, s) = additions0.exceptions(option);
+ val path = "../" //todo: fix path
+ val href = path + sym.fullNameString('/') +
+ (if (sym.isModule || sym.isModuleClass) NAME_SUFFIX_OBJECT else "") +
+ "#" + s
+ <a href={href}>{option}</a> ++ {Text(" - ")};
+ case _ =>
+ super.decodeOption(tag,option)
}
+
object roots extends jcl.LinkedHashMap[String,String];
roots("classes") = "http://java.sun.com/j2se/1.5.0/docs/api";
roots("rt") = roots("classes");
roots("scala-library") = "http://www.scala-lang.org/docu/files/api";
- private def keyFor(file : java.util.zip.ZipFile) : String = {
+ private def keyFor(file: ZipFile): String = {
var name = file.getName;
var idx = name.lastIndexOf(java.io.File.pathSeparator);
if (idx == -1) idx = name.lastIndexOf('/');
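Note on the generator rewrites in the hunks above: the `val` keyword in for-comprehension generators is the older Scala form; the replacement drops it, and bare filter conditions become `if` guards. A minimal, self-contained sketch of the two forms (the list `xs` is illustrative only, not taken from this commit):

    object ForSyntaxSketch {
      def main(args: Array[String]) {
        val xs = List(1, 2, 3)
        // old, deprecated generator form, as removed in the hunks above:
        //   for (val x <- xs; x % 2 == 1) println(x)
        // current form: no `val`, and the filter is written as an `if` guard:
        for (x <- xs if x % 2 == 1) println(x)   // prints 1 and 3
      }
    }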
diff --git a/src/compiler/scala/tools/nsc/doc/DocGenerator.scala b/src/compiler/scala/tools/nsc/doc/DocGenerator.scala
index 82f08f549e..bcdddd4820 100644
--- a/src/compiler/scala/tools/nsc/doc/DocGenerator.scala
+++ b/src/compiler/scala/tools/nsc/doc/DocGenerator.scala
@@ -26,7 +26,6 @@ abstract class DocGenerator extends Models {
import global._
import DocUtil._
import Kinds._
- import compat.StringBuilder
def outdir: String
@@ -336,7 +335,7 @@ abstract class DocGenerator extends Models {
if (!args.isEmpty)
buf.append(args.map(.escapedStringValue).mkString("(", ",", ")"))
if (!nvPairs.isEmpty)
- for (val ((name, value), index) <- nvPairs.zipWithIndex) {
+ for (((name, value), index) <- nvPairs.zipWithIndex) {
if (index > 0)
buf.append(", ")
buf.append(name).append(" = ").append(value)
@@ -419,8 +418,7 @@ abstract class DocGenerator extends Models {
<td colspan="2" class="title">{Text(labelFor(kind))} Summary</td>
</tr>
{ {
- for (val mmbr <- map(kind).toList) yield
- shortHeader(mmbr)
+ for (mmbr <- map(kind).toList) yield shortHeader(mmbr)
} }
</table>
else
diff --git a/src/compiler/scala/tools/nsc/doc/DocUtil.scala b/src/compiler/scala/tools/nsc/doc/DocUtil.scala
index 34844c1367..a10289c721 100644
--- a/src/compiler/scala/tools/nsc/doc/DocUtil.scala
+++ b/src/compiler/scala/tools/nsc/doc/DocUtil.scala
@@ -81,14 +81,14 @@ object DocUtil {
def merge[T](ts0: TreeSet[T], ts1: TreeSet[T]): TreeSet[T] = {
var ts = ts0
- for (val t <- ts1.toList) ts = ts + t
+ for (t <- ts1.toList) ts += t
ts
}
def merge[T,S <: Ordered[S]](ts0: ListMap[T,TreeSet[S]],
ts1: ListMap[T,TreeSet[S]]): ListMap[T,TreeSet[S]] = {
var ts = ts0
- for (val t <- ts1.elements) {
+ for (t <- ts1.elements) {
if (!ts.contains(t._1))
ts = ts.update(t._1, new TreeSet[S]);
ts = ts.update(t._1, merge(ts(t._1), t._2))
diff --git a/src/compiler/scala/tools/nsc/doc/ModelExtractor.scala b/src/compiler/scala/tools/nsc/doc/ModelExtractor.scala
index bb10ec205a..9969aa63f5 100644
--- a/src/compiler/scala/tools/nsc/doc/ModelExtractor.scala
+++ b/src/compiler/scala/tools/nsc/doc/ModelExtractor.scala
@@ -16,8 +16,8 @@ import compat.Platform.{EOL => LINE_SEPARATOR}
* @author Sean McDirmid
*/
trait ModelExtractor {
- val global : Global;
- import global._;
+ val global : Global
+ import global._
def assert(b : Boolean) {
if (!b)
throw new Error;
@@ -31,7 +31,7 @@ trait ModelExtractor {
attributes.foreach(a => {
map(a.tag) = map(a.tag) ::: ((a.option,a.body) :: Nil);
});
- map;
+ map
}
}
protected def decode(sym : Symbol) = {
@@ -237,12 +237,12 @@ trait ModelExtractor {
object inherited extends jcl.LinkedHashMap[Symbol,List[Member]]() {
override def default(tpe : Symbol) = Nil;
{
- for (val m <- sym.tpe.members; !sym.tpe.decls.elements.contains(m) &&
+ for (m <- sym.tpe.members if !sym.tpe.decls.elements.contains(m) &&
(Values.f(m) || Methods.f(m))) {
- val o = m.overridingSymbol(sym);
+ val o = m.overridingSymbol(sym)
if ((o == NoSymbol)) {
- val parent = decode(m.enclClass);
- val mo = Member(m);
+ val parent = decode(m.enclClass)
+ val mo = Member(m)
if (!mo.isEmpty) {
this(parent) = mo.get :: this(parent);
}
@@ -265,7 +265,7 @@ trait ModelExtractor {
protected def resultType0 : Type;
override def overridden : Iterable[Symbol] = {
var ret : jcl.LinkedHashSet[Symbol] = null;
- for (val parent <- ClassOrObject.this.parents) {
+ for (parent <- ClassOrObject.this.parents) {
val sym0 = sym.overriddenSymbol(parent.symbol);
if (sym0 != NoSymbol) {
if (ret == null) ret = new jcl.LinkedHashSet[Symbol];
@@ -282,17 +282,17 @@ trait ModelExtractor {
case Some(argss) if argss.length > 1 || (!argss.isEmpty && !argss(0).isEmpty) =>
argss.map(.map(Param));
case _ =>
- var i = 0;
- val ret = for (val tpe <- sym.tpe.paramTypes) yield {
+ var i = 0
+ val ret = for (tpe <- sym.tpe.paramTypes) yield {
val ret = sym.newValueParameter(sym.pos, newTermName("arg" + i));
ret.setInfo(tpe);
- i = i + 1;
- Param(ret);
+ i += 1
+ Param(ret)
}
- if (ret.isEmpty) Nil;
- else ret :: Nil;
+ if (ret.isEmpty) Nil
+ else ret :: Nil
}
- override def kind = "def";
+ override def kind = "def"
}
case class Val(override val sym : TermSymbol) extends ValDef(sym) {
def resultType0 : Type = sym.tpe;
@@ -320,19 +320,19 @@ trait ModelExtractor {
case class NestedObject(override val sym : ModuleSymbol) extends NestedClassOrObject(sym) with Object;
def isVisible(sym : Symbol) : Boolean = {
import symtab.Flags._;
- if (sym.isLocalClass) return false;
- if (sym.isLocal) return false;
- if (sym.isPrivateLocal) return false;
- if (sym.hasFlag(PRIVATE)) return false;
- if (sym.hasFlag(SYNTHETIC)) return false;
- if (sym.hasFlag(BRIDGE)) return false;
- if (sym.nameString.indexOf("$") != -1) return false;
- if (sym.hasFlag(CASE) && sym.isMethod) return false;
- return true;
+ if (sym.isLocalClass) return false
+ if (sym.isLocal) return false
+ if (sym.isPrivateLocal) return false
+ if (sym.hasFlag(PRIVATE)) return false
+ if (sym.hasFlag(SYNTHETIC)) return false
+ if (sym.hasFlag(BRIDGE)) return false
+ if (sym.nameString.indexOf("$") != -1) return false
+ if (sym.hasFlag(CASE) && sym.isMethod) return false
+ return true
}
- def Member(sym : Symbol) : Option[Member] = {
- import global._;
- import symtab.Flags;
+ def Member(sym: Symbol): Option[Member] = {
+ import global._
+ import symtab.Flags
if (!isVisible(sym)) return None;
if (sym.hasFlag(Flags.ACCESSOR)) {
if (sym.isSetter) return None;
@@ -343,8 +343,8 @@ trait ModelExtractor {
Console.println("SYM: " + sym + " " + sym.fullNameString('.'));
Console.println("FLA: " + Flags.flagsToString(sym.flags));
}
- assert(sym.hasFlag(Flags.JAVA));
- return Some[Member](new Val(sym.asInstanceOf[TermSymbol]));
+ assert(sym.hasFlag(Flags.JAVA))
+ return Some[Member](new Val(sym.asInstanceOf[TermSymbol]))
}
if (sym.isValue && !sym.isModule) {
val str = Flags.flagsToString(sym.flags);
@@ -359,8 +359,8 @@ trait ModelExtractor {
}
case class Category(label : String)(g : Symbol => Boolean) {
- val f = g;
- def plural = label + "s";
+ val f = g
+ def plural = label + "s"
}
val Constructors = new Category("Additional Constructor")(e => e.isConstructor && !e.isPrimaryConstructor) {
// override def plural = "Additional Constructors";
@@ -410,6 +410,6 @@ trait ModelExtractor {
}
}});
set addAll entities;
- set;
+ set
}
}
diff --git a/src/compiler/scala/tools/nsc/doc/ModelFrames.scala b/src/compiler/scala/tools/nsc/doc/ModelFrames.scala
index cd20387f00..256e910fa4 100644
--- a/src/compiler/scala/tools/nsc/doc/ModelFrames.scala
+++ b/src/compiler/scala/tools/nsc/doc/ModelFrames.scala
@@ -17,29 +17,27 @@ import scala.xml._
* @author Sean McDirmid, Stephane Micheloud
*/
trait ModelFrames extends ModelExtractor {
- import DocUtil._;
- def outdir: String;
- def windowTitle: String;
- def documentTitle: String;
- def contentFrame = "contentFrame";
- def classesFrame = "classesFrame";
- def modulesFrame = "modulesFrame";
- protected val FILE_EXTENSION_HTML = ".html";
- protected val NAME_SUFFIX_OBJECT = "$object";
- protected val NAME_SUFFIX_PACKAGE = "$package";
+ import DocUtil._
+ def outdir: String
+ def windowTitle: String
+ def documentTitle: String
+ def contentFrame = "contentFrame"
+ def classesFrame = "classesFrame"
+ def modulesFrame = "modulesFrame"
+ protected val FILE_EXTENSION_HTML = ".html"
+ protected val NAME_SUFFIX_OBJECT = "$object"
+ protected val NAME_SUFFIX_PACKAGE = "$package"
def rootTitle = <div class="page-title"> Scala 2<br/>API Specification</div>;
def rootDesc = <p>This document is the API specification for Scala 2.</p>;
- final def hasLink(sym: global.Symbol) : Boolean = {
- if (sym == global.NoSymbol) false;
- else if (hasLink0(sym)) true;
- else {
- hasLink(decode(sym.owner));
- }
- }
- def hasLink0(sym: global.Symbol): Boolean = true;
+ final def hasLink(sym: global.Symbol): Boolean =
+ if (sym == global.NoSymbol) false
+ else if (hasLink0(sym)) true
+ else hasLink(decode(sym.owner))
+
+ def hasLink0(sym: global.Symbol): Boolean = true
abstract class Frame extends UrlContext {
{ // just save.
@@ -310,7 +308,7 @@ trait ModelFrames extends ModelExtractor {
import java.io._
// The name of a resource is a '/'-separated path name that identifies the resource.
val rsrcdir = "scala/tools/nsc/doc/"
- for (val base <- List("style.css", "script.js")) {
+ for (base <- List("style.css", "script.js")) {
try {
val in = loader.getResourceAsStream(rsrcdir + base)
val out = new FileOutputStream(new File(outdir + File.separator + base))
diff --git a/src/compiler/scala/tools/nsc/doc/ModelToXML.scala b/src/compiler/scala/tools/nsc/doc/ModelToXML.scala
index cc4b7848b2..7e3e36cfa0 100644
--- a/src/compiler/scala/tools/nsc/doc/ModelToXML.scala
+++ b/src/compiler/scala/tools/nsc/doc/ModelToXML.scala
@@ -159,9 +159,9 @@ trait ModelToXML extends ModelExtractor {
<div>{xs.mkXML("","\n","")(m => longHeader(m))}</div>);
}
- def shortList(entity : ClassOrObject, category : Category)(implicit from : Frame) : NodeSeq = {
- val xs = entity.members(category);
- var seq : NodeSeq = NodeSeq.Empty;
+ def shortList(entity: ClassOrObject, category: Category)(implicit from: Frame) : NodeSeq = {
+ val xs = entity.members(category)
+ var seq : NodeSeq = NodeSeq.Empty
if (xs.elements.hasNext) {
// alphabetic
val set = new scala.collection.jcl.TreeSet[entity.Member]()(mA => new Ordered[entity.Member] {
@@ -171,7 +171,7 @@ trait ModelToXML extends ModelExtractor {
if (diff != 0) return diff;
val diff0 = mA.hashCode - mB.hashCode;
assert(diff0 != 0);
- return diff0;
+ return diff0
}
});
set addAll xs;
@@ -181,7 +181,7 @@ trait ModelToXML extends ModelExtractor {
</table>
}
// list inherited members...if any.
- for (val (tpe,members) <- entity.inherited) {
+ for ((tpe,members) <- entity.inherited) {
val members0 = members.filter(m => category.f(m.sym));
if (!members0.isEmpty) seq = seq ++ <table cellpadding="3" class="inherited" summary="">
<tr><td colspan="2" class="title">
@@ -200,8 +200,8 @@ trait ModelToXML extends ModelExtractor {
seq;
}
- protected def decodeOption(tag : String, string : String) : NodeSeq = <code>{Text(string + " - ")}</code>;
- protected def decodeTag(tag : String) : String =
+ protected def decodeOption(tag: String, string: String): NodeSeq = <code>{Text(string + " - ")}</code>;
+ protected def decodeTag(tag: String): String =
"" + Character.toUpperCase(tag.charAt(0)) + tag.substring(1);
def shortHeader(entity : Entity)(implicit from : Frame) : NodeSeq = {
@@ -227,7 +227,7 @@ trait ModelToXML extends ModelExtractor {
if (!args.isEmpty)
buf.append(args.map(.escapedStringValue).mkString("(", ",", ")"))
if (!nvPairs.isEmpty)
- for (val ((name, value), index) <- nvPairs.zipWithIndex) {
+ for (((name, value), index) <- nvPairs.zipWithIndex) {
if (index > 0)
buf.append(", ")
buf.append(name).append(" = ").append(value)
@@ -237,7 +237,7 @@ trait ModelToXML extends ModelExtractor {
if (entity.sym.hasFlag(symtab.Flags.CASE)) NodeSeq.Empty;
else {
val sep = Text("@")
- for (val attr <- entity.attributes)
+ for (attr <- entity.attributes)
yield Group({(sep ++ attrFor(attr) ++ <br/>)})
}
}
diff --git a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala
index 621edb305c..931060a594 100644
--- a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala
+++ b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2006 LAMP/EPFL
+ * Copyright 2005-2007 LAMP/EPFL
* @author Martin Odersky
*/
// $Id$
@@ -13,7 +13,7 @@ import scala.tools.nsc.io.AbstractFile
import scala.tools.nsc.util.{ClassPath, NameTransformer, Position, NoPosition}
import classfile.{ClassfileParser, SymblfileParser}
import Flags._
-import ch.epfl.lamp.compiler.msil.{Type => MSILType, Attribute => MSILAttribute};
+import ch.epfl.lamp.compiler.msil.{Type => MSILType, Attribute => MSILAttribute}
/** This class ...
*
@@ -142,8 +142,8 @@ abstract class SymbolLoaders {
val classes = new HashMap[String, global.classPath0.Context]
val packages = new HashMap[String, global.classPath0.Context]
- for (val dir <- directory.entries) if (dir.location ne null) {
- for (val file <- dir.location) {
+ for (dir <- directory.entries) if (dir.location ne null) {
+ for (file <- dir.location) {
if (file.isDirectory && directory.validPackage(file.name) && !packages.isDefinedAt(file.name))
packages(file.name) = directory.find(file.name, true);
else if (!global.forMSIL && !file.isDirectory && file.name.endsWith(".class")) {
@@ -155,8 +155,8 @@ abstract class SymbolLoaders {
}
}
}
- for (val dir <- directory.entries) if (dir.source ne null) {
- for (val file <- dir.source.location) {
+ for (dir <- directory.entries) if (dir.source ne null) {
+ for (file <- dir.source.location) {
if (file.isDirectory && directory.validPackage(file.name) && !packages.isDefinedAt(file.name))
packages(file.name) = directory.find(file.name, true)
else if (dir.source.compile && !file.isDirectory && file.name.endsWith(".scala")) {
@@ -174,7 +174,7 @@ abstract class SymbolLoaders {
}
// do classes first
- for (val (name, file) <- classes.elements) {
+ for ((name, file) <- classes.elements) {
val loader = if (!file.isSourceFile) {
new ClassfileLoader(file.classFile, file.sourceFile, file.sourcePath)
} else {
@@ -184,7 +184,7 @@ abstract class SymbolLoaders {
enterClassAndModule(name, loader)
}
- for (val (name, file) <- packages.elements)
+ for ((name, file) <- packages.elements)
enterPackage(name, newPackageLoader(file))
}
}
@@ -215,44 +215,44 @@ abstract class SymbolLoaders {
!types.contains(name)
}
- override protected def doComplete(root: Symbol): Unit = {
- clrTypes.collectMembers(root, types, namespaces);
+ override protected def doComplete(root: Symbol) {
+ clrTypes.collectMembers(root, types, namespaces)
- super.doComplete(root);
+ super.doComplete(root)
- for (val namespace <- namespaces.elements) {
- val oldPkg = root.info.decls.lookup(newTermName(namespace));
+ for (namespace <- namespaces.elements) {
+ val oldPkg = root.info.decls.lookup(newTermName(namespace))
if (oldPkg == NoSymbol)
- enterPackage(namespace, new NamespaceLoader(new classPath0.Context(List())));
- //else System.out.println("PackageLoader: package already in scope: " + oldPkg.fullNameString);
+ enterPackage(namespace, new NamespaceLoader(new classPath0.Context(List())))
+ //else System.out.println("PackageLoader: package already in scope: " + oldPkg.fullNameString)
}
// import the CLR types contained in the package (namespace)
- for (val (name, typ) <- types.elements) {
+ for ((name, typ) <- types.elements) {
assert(namespace == typ.Namespace, typ.FullName);
if (typ.IsDefined(clrTypes.SCALA_SYMTAB_ATTR, false)) {
- val attrs = typ.GetCustomAttributes(clrTypes.SCALA_SYMTAB_ATTR, false);
- assert (attrs.length == 1, attrs.length);
- val a = attrs(0).asInstanceOf[MSILAttribute];
+ val attrs = typ.GetCustomAttributes(clrTypes.SCALA_SYMTAB_ATTR, false)
+ assert (attrs.length == 1, attrs.length)
+ val a = attrs(0).asInstanceOf[MSILAttribute]
if (a.getConstructor() == clrTypes.SYMTAB_CONSTR)
- enterClassAndModule(name, new MSILTypeLoader(typ));
+ enterClassAndModule(name, new MSILTypeLoader(typ))
}
else
- enterClassAndModule(name, new MSILTypeLoader(typ));
+ enterClassAndModule(name, new MSILTypeLoader(typ))
}
}
} // NamespaceLoader
class MSILTypeLoader(typ: MSILType) extends SymbolLoader {
private object typeParser extends clr.TypeParser {
- val global: SymbolLoaders.this.global.type = SymbolLoaders.this.global;
+ val global: SymbolLoaders.this.global.type = SymbolLoaders.this.global
}
- protected def doComplete(root: Symbol): Unit = {
- typeParser.parse(typ, root);
+ protected def doComplete(root: Symbol) {
+ typeParser.parse(typ, root)
}
- protected def kindString: String = typ.FullName;
- protected def sourceString = typ.Assembly.FullName;
+ protected def kindString: String = typ.FullName
+ protected def sourceString = typ.Assembly.FullName
}
class ClassfileLoader(classFile: AbstractFile, override val sourceFile: AbstractFile, sourcePath0: AbstractFile) extends SymbolLoader {
@@ -286,13 +286,13 @@ abstract class SymbolLoaders {
object moduleClassLoader extends SymbolLoader {
protected def doComplete(root: Symbol): unit = root.sourceModule.initialize
- protected def kindString: String = "";
- protected def sourceString = "";
+ protected def kindString: String = ""
+ protected def sourceString = ""
}
object clrTypes extends clr.CLRTypes {
- val global: SymbolLoaders.this.global.type = SymbolLoaders.this.global;
- if (global.forMSIL) init();
+ val global: SymbolLoaders.this.global.type = SymbolLoaders.this.global
+ if (global.forMSIL) init()
}
}
diff --git a/src/compiler/scala/tools/nsc/symtab/Types.scala b/src/compiler/scala/tools/nsc/symtab/Types.scala
index 78b9f488f9..eb8b240378 100644
--- a/src/compiler/scala/tools/nsc/symtab/Types.scala
+++ b/src/compiler/scala/tools/nsc/symtab/Types.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2006 LAMP/EPFL
+ * Copyright 2005-2007 LAMP/EPFL
* @author Martin Odersky
*/
// $Id$
@@ -723,11 +723,11 @@ trait Types {
val pclosure = new Array[Array[Type]](nparents)
val index = new Array[int](nparents)
var i = 0
- for (val p <- parents) {
+ for (p <- parents) {
pclosure(i) = if (p.closure eq null) AnyClass.info.closure // cyclic reference
else p.closure
index(i) = 0
- i = i + 1
+ i += 1
}
def nextBaseType(i: int): Type = {
val j = index(i)
@@ -741,7 +741,7 @@ trait Types {
while (i < nparents) {
if (nextBaseType(i).symbol isLess minSym)
minSym = nextBaseType(i).symbol
- i = i + 1
+ i += 1
}
var minTypes: List[Type] = List()
i = 0
@@ -959,12 +959,12 @@ trait Types {
*/
private def computeRefs() {
refs = Array(Map(), Map())
- for (val tparam <- symbol.typeParams) {
+ for (tparam <- symbol.typeParams) {
val enterRefs = new TypeMap {
def apply(tp: Type): Type = {
tp match {
case TypeRef(_, sym, args) =>
- for (val (tparam1, arg) <- sym.info.typeParams zip args)
+ for ((tparam1, arg) <- sym.info.typeParams zip args)
if (arg contains tparam) {
addRef(NonExpansive, tparam, tparam1)
if (arg.symbol != tparam) addRef(Expansive, tparam, tparam1)
@@ -974,7 +974,7 @@ trait Types {
mapOver(tp)
}
}
- for (val p <- parents) enterRefs(p)
+ for (p <- parents) enterRefs(p)
}
state = Initializing
}
@@ -989,16 +989,16 @@ trait Types {
val lastRefs = Array(refs(0), refs(1))
state = Initialized
var change = false
- for (val (from, targets) <- refs(NonExpansive).elements)
- for (val target <- targets) {
+ for ((from, targets) <- refs(NonExpansive).elements)
+ for (target <- targets) {
var thatInfo = classInfo(target)
if (thatInfo.state != Initialized)
change = change | thatInfo.propagate()
addRefs(NonExpansive, from, thatInfo.getRefs(NonExpansive, target))
addRefs(Expansive, from, thatInfo.getRefs(Expansive, target))
}
- for (val (from, targets) <- refs(Expansive).elements)
- for (val target <- targets) {
+ for ((from, targets) <- refs(Expansive).elements)
+ for (target <- targets) {
var thatInfo = classInfo(target)
addRefs(Expansive, from, thatInfo.getRefs(NonExpansive, target))
}
@@ -1495,11 +1495,11 @@ A type's symbol should never be inspected directly.
else {
val result = refinedType(parents, original.symbol.owner)
val syms1 = decls.toList
- for (val sym <- syms1)
+ for (sym <- syms1)
result.decls.enter(sym.cloneSymbol(result.symbol))
val syms2 = result.decls.toList
val resultThis = result.symbol.thisType
- for (val sym <- syms2)
+ for (sym <- syms2)
sym.setInfo(sym.info.substSym(syms1, syms2).substThis(original.symbol, resultThis))
result
}
@@ -2135,14 +2135,14 @@ A type's symbol should never be inspected directly.
/** The maximum depth of all types in the closures of each of the types `tps' */
final def maxClosureDepth(tps: Seq[Type]): int = {
var d = 0
- for (val tp <- tps) d = max(d, tp.closureDepth)
+ for (tp <- tps) d = max(d, tp.closureDepth)
d
}
/** The maximum depth of all types `tps' */
final def maxDepth(tps: Seq[Type]): int = {
var d = 0
- for (val tp <- tps) d = max(d, maxDepth(tp))
+ for (tp <- tps) d = max(d, maxDepth(tp))
d
}
@@ -2558,7 +2558,7 @@ A type's symbol should never be inspected directly.
val glbs = glbList(tss1, depth)
val result = new Array[Type](glbs.length)
var i = 0
- for (val x <- glbs.elements) { result(i) = x; i = i + 1; }
+ for (x <- glbs.elements) { result(i) = x; i += 1 }
result
// Array(glbs: _*);
}
@@ -2688,7 +2688,7 @@ A type's symbol should never be inspected directly.
// efficiency.
alt != sym && !specializesSym(lubThisType, sym, tp, alt)))
}
- for (val sym <- lubBase.nonPrivateMembers)
+ for (sym <- lubBase.nonPrivateMembers)
// add a refinement symbol for all non-class members of lubBase
// which are refined by every type in ts.
if (!sym.isClass && !sym.isConstructor && (narrowts forall (t => refines(t, sym))))
@@ -2769,13 +2769,13 @@ A type's symbol should never be inspected directly.
if (symbounds.isEmpty)
mkTypeBounds(AllClass.tpe, AnyClass.tpe)
else glbBounds(symbounds)
- for (val t <- symtypes; !isTypeBound(t))
+ for (t <- symtypes if !isTypeBound(t))
if (result.bounds containsType t) result = t
else throw new MalformedClosure(symtypes);
result
})
}
- for (val t <- ts; val sym <- t.nonPrivateMembers)
+ for (t <- ts; val sym <- t.nonPrivateMembers)
if (!sym.isClass && !sym.isConstructor && !(glbThisType specializes sym))
try {
addMember(glbThisType, glbType, glbsym(sym))
@@ -2873,7 +2873,7 @@ A type's symbol should never be inspected directly.
if (settings.debug.value) log("add member " + sym)//debug
if (!(thistp specializes sym)) {
if (sym.isTerm)
- for (val alt <- tp.nonPrivateDecl(sym.name).alternatives)
+ for (alt <- tp.nonPrivateDecl(sym.name).alternatives)
if (specializesSym(thistp, sym, thistp, alt))
tp.decls unlink alt;
tp.decls enter sym
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
index 866c4c492f..92b54344bf 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2006 LAMP/EPFL
+ * Copyright 2005-2007 LAMP/EPFL
* @author Martin Odersky
*/
// $Id$
@@ -16,13 +16,14 @@
*/
package scala.tools.nsc.symtab.classfile
-import scala.tools.nsc.util.{Position,NoPosition}
-import scala.tools.nsc.io.AbstractFile
-import scala.collection.mutable.{ListBuffer, ArrayBuffer}
+import java.io.IOException
+import java.lang.Integer.toHexString
+
import scala.collection.immutable.{Map, ListMap}
+import scala.collection.mutable.{ListBuffer, ArrayBuffer}
+import scala.tools.nsc.io.AbstractFile
+import scala.tools.nsc.util.{Position,NoPosition}
-import java.lang.Integer.toHexString
-import java.io.IOException
/** This abstract class implements a class file parser.
*
@@ -121,7 +122,7 @@ abstract class ClassfileParser {
{ var i = 1
while (i < starts.length) {
starts(i) = in.bp
- i = i + 1
+ i += 1
in.nextByte match {
case CONSTANT_UTF8 | CONSTANT_UNICODE =>
in.skip(in.nextChar)
@@ -132,7 +133,7 @@ abstract class ClassfileParser {
in.skip(4)
case CONSTANT_LONG | CONSTANT_DOUBLE =>
in.skip(8)
- i = i + 1
+ i += 1
case _ =>
errorBadTag(in.bp - 1)
}
@@ -370,7 +371,7 @@ abstract class ClassfileParser {
val superType = if (isAnnotation) { in.nextChar; definitions.AnnotationClass.tpe }
else pool.getSuperClass(in.nextChar).tpe
val ifaceCount = in.nextChar
- var ifaces = for (val i <- List.range(0, ifaceCount)) yield pool.getSuperClass(in.nextChar).tpe
+ var ifaces = for (i <- List.range(0, ifaceCount)) yield pool.getSuperClass(in.nextChar).tpe
if (isAnnotation) ifaces = definitions.ClassfileAnnotationClass.tpe :: ifaces
val parents = superType :: ifaces
@@ -395,10 +396,10 @@ abstract class ClassfileParser {
staticModule.moduleClass.setFlag(JAVA)
in.bp = curbp
val fieldCount = in.nextChar
- for (val i <- 0 until fieldCount) parseField()
+ for (i <- 0 until fieldCount) parseField()
sawPrivateConstructor = false
val methodCount = in.nextChar
- for (val i <- 0 until methodCount) parseMethod()
+ for (i <- 0 until methodCount) parseMethod()
if (!sawPrivateConstructor &&
(instanceDefs.lookup(nme.CONSTRUCTOR) == NoSymbol &&
(sflags & INTERFACE) == 0))
@@ -424,7 +425,7 @@ abstract class ClassfileParser {
}
}
- def parseField(): unit = {
+ def parseField() {
val jflags = in.nextChar
var sflags = transFlags(jflags)
if ((sflags & FINAL) == 0) sflags = sflags | MUTABLE
@@ -442,7 +443,7 @@ abstract class ClassfileParser {
}
}
- def parseMethod(): unit = {
+ def parseMethod() {
val jflags = in.nextChar
var sflags = transFlags(jflags)
if ((jflags & JAVA_ACC_PRIVATE) != 0 && !global.settings.XbytecodeRead.value) {
@@ -500,7 +501,7 @@ abstract class ClassfileParser {
while (sig(index) != '>') {
sig(index) match {
case variance @ ('+' | '-' | '*') =>
- index = index + 1
+ index += 1
val bounds = variance match {
case '+' => mkTypeBounds(definitions.AllRefClass.typeConstructor,
sig2type(tparams, covariant))
@@ -526,7 +527,7 @@ abstract class ClassfileParser {
xs.toList
}
def sig2type(tparams: Map[Name,Symbol], covariant: Boolean): Type = {
- val tag = sig(index); index = index + 1
+ val tag = sig(index); index += 1
tag match {
case BYTE_TAG => definitions.ByteClass.tpe
case CHAR_TAG => definitions.CharClass.tpe
@@ -541,40 +542,40 @@ abstract class ClassfileParser {
var tpe = definitions.getClass(subName(c => ((c == ';') || (c == '<')))).tpe
if (sig(index) == '<')
tpe = appliedType(tpe, typeParams(tparams, covariant))
- index = index + 1
+ index += 1
tpe
case ARRAY_TAG =>
- while ('0' <= sig(index) && sig(index) <= '9') index = index + 1
+ while ('0' <= sig(index) && sig(index) <= '9') index += 1
appliedType(definitions.ArrayClass.tpe, List(sig2type(tparams, covariant)))
case '(' =>
val paramtypes = new ListBuffer[Type]()
while (sig(index) != ')') {
paramtypes += objToAny(sig2type(tparams, false))
}
- index = index + 1
+ index += 1
val restype = if (sym.isConstructor) {
assert(sig(index) == 'V')
- index = index + 1
+ index += 1
clazz.tpe
} else
sig2type(tparams, true)
MethodType(paramtypes.toList, restype)
case 'T' =>
val n = subName(';'.==).toTypeName
- index = index + 1
+ index += 1
tparams(n).typeConstructor
}
}
var tparams = classTParams
if (sig(index) == '<') {
- index = index + 1
+ index += 1
while (sig(index) != '>') {
val tpname = subName(':'.==).toTypeName
val s = sym.newTypeParameter(NoPosition, tpname)
tparams = tparams + tpname -> s
val ts = new ListBuffer[Type]
while (sig(index) == ':') {
- index = index + 1
+ index += 1
if (sig(index) != ':') // guard against empty class bound
ts += sig2type(tparams, false)
}
@@ -582,7 +583,7 @@ abstract class ClassfileParser {
intersectionType(ts.toList, sym)))
newTParams += s
}
- index = index + 1
+ index += 1
}
val tpe =
if (sym.isClass) {
@@ -599,14 +600,14 @@ abstract class ClassfileParser {
else PolyType(newTParams.toList, tpe)
}
- def parseAttributes(sym: Symbol, symtype: Type): unit = {
+ def parseAttributes(sym: Symbol, symtype: Type) {
def convertTo(c: Constant, pt: Type): Constant = {
if (pt.symbol == definitions.BooleanClass && c.tag == IntTag)
Constant(c.value != 0)
else
c convertTo pt
}
- def parseAttribute(): unit = {
+ def parseAttribute() {
val attrName = pool.getName(in.nextChar)
val attrLen = in.nextInt
val oldpb = in.bp
@@ -690,21 +691,21 @@ abstract class ClassfileParser {
Constant(s)
case ARRAY_TAG =>
val arr = new ArrayBuffer[Constant]()
- for (val i <- 0 until index) {
+ for (i <- 0 until index) {
arr += parseTaggedConstant
}
new ArrayConstant(arr.toArray,
appliedType(definitions.ArrayClass.typeConstructor, List(arr(0).tpe)))
}
}
- def parseAnnotations(len: Int): Unit = {
+ def parseAnnotations(len: Int) {
val nAttr = in.nextChar
- for (val n <- 0 until nAttr) {
+ for (n <- 0 until nAttr) {
val attrNameIndex = in.nextChar
val attrType = pool.getType(attrNameIndex)
val nargs = in.nextChar
val nvpairs = new ListBuffer[(Name,Constant)]
- for (val i <- 0 until nargs) {
+ for (i <- 0 until nargs) {
val name = pool.getName(in.nextChar)
nvpairs += (name, parseTaggedConstant)
}
@@ -712,8 +713,8 @@ abstract class ClassfileParser {
}
}
- def parseInnerClasses(): unit = {
- for (val i <- 0 until in.nextChar) {
+ def parseInnerClasses() {
+ for (i <- 0 until in.nextChar) {
val innerIndex = in.nextChar
val outerIndex = in.nextChar
val nameIndex = in.nextChar
@@ -735,19 +736,19 @@ abstract class ClassfileParser {
}
}
val attrCount = in.nextChar
- for (val i <- 0 until attrCount) parseAttribute()
+ for (i <- 0 until attrCount) parseAttribute()
}
- def skipAttributes(): unit = {
+ def skipAttributes() {
val attrCount = in.nextChar
- for (val i <- 0 until attrCount) {
+ for (i <- 0 until attrCount) {
in.skip(2); in.skip(in.nextInt)
}
}
- def skipMembers(): unit = {
+ def skipMembers() {
val memberCount = in.nextChar
- for (val i <- 0 until memberCount) {
+ for (i <- 0 until memberCount) {
in.skip(6); skipAttributes()
}
}
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala
index 7e7736ffd4..59e3ea016e 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2006 LAMP/EPFL
+ * Copyright 2005-2007 LAMP/EPFL
* @author Iulian Dragos
*/
// $Id$
@@ -53,7 +53,7 @@ abstract class ICodeReader extends ClassfileParser {
sym = cls.linkedClassOfModule
assert(classFile ne null, "No classfile for " + cls)
-// for (val s <- cls.info.members)
+// for (s <- cls.info.members)
// Console.println("" + s + ": " + s.tpe)
this.instanceCode = new IClass(sym)
this.staticCode = new IClass(sym.linkedClassOfClass)
@@ -86,9 +86,9 @@ abstract class ICodeReader extends ClassfileParser {
in.skip(2) // super class
in.skip(2 * in.nextChar) // interfaces
val fieldCount = in.nextChar
- for (val i <- 0 until fieldCount) parseField();
+ for (i <- 0 until fieldCount) parseField()
val methodCount = in.nextChar
- for (val i <- 0 until methodCount) parseMethod();
+ for (i <- 0 until methodCount) parseMethod();
instanceCode.methods = instanceCode.methods.reverse
staticCode.methods = staticCode.methods.reverse
}
@@ -122,17 +122,16 @@ abstract class ICodeReader extends ClassfileParser {
}
}
- override def parseMethod(): Unit = {
- val (jflags, sym) = parseMember();
+ override def parseMethod() {
+ val (jflags, sym) = parseMember()
if (sym != NoSymbol) {
Console.println("Parsing method " + sym.fullNameString + ": " + sym.tpe);
this.method = new IMethod(sym);
- getCode(jflags).addMethod(this.method);
+ getCode(jflags).addMethod(this.method)
if ((jflags & JAVA_ACC_NATIVE) != 0)
this.method.native = true
- val attributeCount = in.nextChar;
- for (val i <- 0 until attributeCount)
- parseAttribute();
+ val attributeCount = in.nextChar
+ for (i <- 0 until attributeCount) parseAttribute()
} else {
if (settings.debug.value) log("Skipping non-existent method.");
skipAttributes();
@@ -191,7 +190,7 @@ abstract class ICodeReader extends ClassfileParser {
/** Parse 32 bit jump target. */
def parseJumpTargetW = {
- size = size + 4
+ size += 4
val offset = in.nextInt
val target = pc + offset
assert(target >= 0 && target < codeLength, "Illegal jump target: " + target + "pc: " + pc + " offset: " + offset)
@@ -218,17 +217,17 @@ abstract class ICodeReader extends ClassfileParser {
case JVM.dconst_0 => code.emit(CONSTANT(Constant(0.0)))
case JVM.dconst_1 => code.emit(CONSTANT(Constant(1.0)))
- case JVM.bipush => code.emit(CONSTANT(Constant(in.nextByte))); size = size + 1;
- case JVM.sipush => code.emit(CONSTANT(Constant(in.nextChar))); size = size + 2;
- case JVM.ldc => code.emit(CONSTANT(pool.getConstant(toUnsignedByte(in.nextByte)))); size = size + 1;
- case JVM.ldc_w => code.emit(CONSTANT(pool.getConstant(in.nextChar))); size = size + 2;
- case JVM.ldc2_w => code.emit(CONSTANT(pool.getConstant(in.nextChar))); size = size + 2;
- case JVM.iload => code.emit(LOAD_LOCAL(code.getLocal(in.nextByte, INT))); size = size + 1;
- case JVM.lload => code.emit(LOAD_LOCAL(code.getLocal(in.nextByte, LONG))); size = size + 1;
- case JVM.fload => code.emit(LOAD_LOCAL(code.getLocal(in.nextByte, FLOAT))); size = size + 1;
- case JVM.dload => code.emit(LOAD_LOCAL(code.getLocal(in.nextByte, DOUBLE))); size = size + 1;
+ case JVM.bipush => code.emit(CONSTANT(Constant(in.nextByte))); size += 1
+ case JVM.sipush => code.emit(CONSTANT(Constant(in.nextChar))); size += 2
+ case JVM.ldc => code.emit(CONSTANT(pool.getConstant(toUnsignedByte(in.nextByte)))); size += 1
+ case JVM.ldc_w => code.emit(CONSTANT(pool.getConstant(in.nextChar))); size += 2
+ case JVM.ldc2_w => code.emit(CONSTANT(pool.getConstant(in.nextChar))); size += 2
+ case JVM.iload => code.emit(LOAD_LOCAL(code.getLocal(in.nextByte, INT))); size += 1
+ case JVM.lload => code.emit(LOAD_LOCAL(code.getLocal(in.nextByte, LONG))); size += 1
+ case JVM.fload => code.emit(LOAD_LOCAL(code.getLocal(in.nextByte, FLOAT))); size += 1
+ case JVM.dload => code.emit(LOAD_LOCAL(code.getLocal(in.nextByte, DOUBLE))); size += 1
case JVM.aload =>
- val local = in.nextByte; size = size + 1;
+ val local = in.nextByte; size += 1
if (local == 0 && !method.isStatic)
code.emit(THIS(method.symbol.owner));
else
@@ -268,11 +267,11 @@ abstract class ICodeReader extends ClassfileParser {
case JVM.caload => code.emit(LOAD_ARRAY_ITEM(CHAR))
case JVM.saload => code.emit(LOAD_ARRAY_ITEM(SHORT))
- case JVM.istore => code.emit(STORE_LOCAL(code.getLocal(in.nextByte, INT))); size = size + 1;
- case JVM.lstore => code.emit(STORE_LOCAL(code.getLocal(in.nextByte, LONG))); size = size + 1;
- case JVM.fstore => code.emit(STORE_LOCAL(code.getLocal(in.nextByte, FLOAT))); size = size + 1;
- case JVM.dstore => code.emit(STORE_LOCAL(code.getLocal(in.nextByte, DOUBLE))); size = size + 1;
- case JVM.astore => code.emit(STORE_LOCAL(code.getLocal(in.nextByte, OBJECT))); size = size + 1;
+ case JVM.istore => code.emit(STORE_LOCAL(code.getLocal(in.nextByte, INT))); size += 1
+ case JVM.lstore => code.emit(STORE_LOCAL(code.getLocal(in.nextByte, LONG))); size += 1
+ case JVM.fstore => code.emit(STORE_LOCAL(code.getLocal(in.nextByte, FLOAT))); size += 1
+ case JVM.dstore => code.emit(STORE_LOCAL(code.getLocal(in.nextByte, DOUBLE))); size += 1
+ case JVM.astore => code.emit(STORE_LOCAL(code.getLocal(in.nextByte, OBJECT))); size += 1
case JVM.istore_0 => code.emit(STORE_LOCAL(code.getLocal(0, INT)))
case JVM.istore_1 => code.emit(STORE_LOCAL(code.getLocal(1, INT)))
case JVM.istore_2 => code.emit(STORE_LOCAL(code.getLocal(2, INT)))
@@ -302,14 +301,14 @@ abstract class ICodeReader extends ClassfileParser {
case JVM.castore => code.emit(STORE_ARRAY_ITEM(CHAR))
case JVM.sastore => code.emit(STORE_ARRAY_ITEM(SHORT))
- case JVM.pop => code.emit(DROP(INT)); // any 1-word type would do
- case JVM.pop2 => code.emit(DROP(LONG)); // any 2-word type would do
- case JVM.dup => code.emit(DUP(OBJECT)); // TODO: Is the kind inside DUP ever needed?
- case JVM.dup_x1 => code.emit(DUP_X1); // Predef.error("Unsupported JVM bytecode: dup_x1")
- case JVM.dup_x2 => code.emit(DUP_X2); // Predef.error("Unsupported JVM bytecode: dup_x2")
- case JVM.dup2 => code.emit(DUP(LONG)); // TODO: Is the kind inside DUP ever needed?
- case JVM.dup2_x1 => code.emit(DUP2_X1); // Predef.error("Unsupported JVM bytecode: dup2_x1")
- case JVM.dup2_x2 => code.emit(DUP2_X2); // Predef.error("Unsupported JVM bytecode: dup2_x2")
+ case JVM.pop => code.emit(DROP(INT)) // any 1-word type would do
+ case JVM.pop2 => code.emit(DROP(LONG)) // any 2-word type would do
+ case JVM.dup => code.emit(DUP(OBJECT)) // TODO: Is the kind inside DUP ever needed?
+ case JVM.dup_x1 => code.emit(DUP_X1) // Predef.error("Unsupported JVM bytecode: dup_x1")
+ case JVM.dup_x2 => code.emit(DUP_X2) // Predef.error("Unsupported JVM bytecode: dup_x2")
+ case JVM.dup2 => code.emit(DUP(LONG)) // TODO: Is the kind inside DUP ever needed?
+ case JVM.dup2_x1 => code.emit(DUP2_X1) // Predef.error("Unsupported JVM bytecode: dup2_x1")
+ case JVM.dup2_x2 => code.emit(DUP2_X2) // Predef.error("Unsupported JVM bytecode: dup2_x2")
case JVM.swap => Predef.error("Unsupported JVM bytecode: swap")
case JVM.iadd => code.emit(CALL_PRIMITIVE(Arithmetic(ADD, INT)))
@@ -351,7 +350,7 @@ abstract class ICodeReader extends ClassfileParser {
case JVM.ixor => code.emit(CALL_PRIMITIVE(Logical(XOR, INT)))
case JVM.lxor => code.emit(CALL_PRIMITIVE(Logical(XOR, LONG)))
case JVM.iinc =>
- size = size + 2
+ size += 2
val local = code.getLocal(in.nextByte, INT)
code.emit(LOAD_LOCAL(local))
code.emit(CONSTANT(Constant(in.nextByte)))
@@ -401,38 +400,38 @@ abstract class ICodeReader extends ClassfileParser {
case JVM.ret => Predef.error("Cannot handle jsr/ret")
case JVM.tableswitch =>
val padding = if ((pc + size) % 4 != 0) 4 - ((pc + size) % 4) else 0
- size = size + padding;
- in.bp = in.bp + padding
+ size += padding
+ in.bp += padding
assert((pc + size % 4) != 0)
/* var byte1 = in.nextByte; size = size + 1;
while (byte1 == 0) { byte1 = in.nextByte; size = size + 1; }
val default = byte1 << 24 | in.nextByte << 16 | in.nextByte << 8 | in.nextByte;
size = size + 3
*/
- val default = pc + in.nextInt; size = size + 4
+ val default = pc + in.nextInt; size += 4
val low = in.nextInt
val high = in.nextInt
- size = size + 8
+ size += 8
assert(low <= high, "Value low not <= high for tableswitch.")
val tags = List.tabulate(high - low + 1, n => List(low + n))
- val targets = for (val _ <- tags) yield parseJumpTargetW
+ val targets = for (_ <- tags) yield parseJumpTargetW
code.emit(LSWITCH(tags, targets ::: List(default)))
case JVM.lookupswitch =>
val padding = if ((pc + size) % 4 != 0) 4 - ((pc + size) % 4) else 0
- size = size + padding;
- in.bp = in.bp + padding
+ size += padding
+ in.bp += padding
assert((pc + size % 4) != 0)
- val default = pc + in.nextInt; size = size + 4
- val npairs = in.nextInt; size = size + 4
+ val default = pc + in.nextInt; size += 4
+ val npairs = in.nextInt; size += 4
var tags: List[List[Int]] = Nil
var targets: List[Int] = Nil
var i = 0
while (i < npairs) {
- tags = List(in.nextInt) :: tags; size = size + 4;
+ tags = List(in.nextInt) :: tags; size += 4
targets = parseJumpTargetW :: targets; // parseJumpTargetW updates 'size' itself
- i = i + 1;
+ i += 1
}
targets = default :: targets
code.emit(LSWITCH(tags.reverse, targets.reverse))
@@ -445,40 +444,40 @@ abstract class ICodeReader extends ClassfileParser {
case JVM.return_ => code.emit(RETURN(UNIT))
case JVM.getstatic =>
- val field = pool.getMemberSymbol(in.nextChar, true); size = size + 2;
+ val field = pool.getMemberSymbol(in.nextChar, true); size += 2
if (field.hasFlag(Flags.MODULE))
code.emit(LOAD_MODULE(field))
else
code.emit(LOAD_FIELD(field, true))
case JVM.putstatic =>
- val field = pool.getMemberSymbol(in.nextChar, true); size = size + 2;
+ val field = pool.getMemberSymbol(in.nextChar, true); size += 2
code.emit(STORE_FIELD(field, true))
case JVM.getfield =>
- val field = pool.getMemberSymbol(in.nextChar, false); size = size + 2;
+ val field = pool.getMemberSymbol(in.nextChar, false); size += 2
code.emit(LOAD_FIELD(field, false))
case JVM.putfield =>
- val field = pool.getMemberSymbol(in.nextChar, false); size = size + 2;
+ val field = pool.getMemberSymbol(in.nextChar, false); size += 2
code.emit(STORE_FIELD(field, false))
case JVM.invokevirtual =>
- val m = pool.getMemberSymbol(in.nextChar, false); size = size + 2;
+ val m = pool.getMemberSymbol(in.nextChar, false); size += 2
code.emit(CALL_METHOD(m, Dynamic))
case JVM.invokeinterface =>
- val m = pool.getMemberSymbol(in.nextChar, false); size = size + 4;
+ val m = pool.getMemberSymbol(in.nextChar, false); size += 4
in.skip(2)
- code.emit(CALL_METHOD(m, Dynamic));
+ code.emit(CALL_METHOD(m, Dynamic))
case JVM.invokespecial =>
- val m = pool.getMemberSymbol(in.nextChar, false); size = size + 2;
+ val m = pool.getMemberSymbol(in.nextChar, false); size += 2
val style = if (m.name == nme.CONSTRUCTOR || m.hasFlag(Flags.PRIVATE)) Static(true)
else SuperCall(m.owner.name);
code.emit(CALL_METHOD(m, style))
case JVM.invokestatic =>
- val m = pool.getMemberSymbol(in.nextChar, true); size = size + 2;
+ val m = pool.getMemberSymbol(in.nextChar, true); size += 2
code.emit(CALL_METHOD(m, Static(false)))
case JVM.new_ =>
code.emit(NEW(REFERENCE(pool.getClassSymbol(in.nextChar))))
- size = size + 2
+ size += 2
case JVM.newarray =>
val kind = in.nextByte match {
case T_BOOLEAN => BOOL
@@ -490,35 +489,37 @@ abstract class ICodeReader extends ClassfileParser {
case T_INT => INT
case T_LONG => LONG
}
- size = size + 1
+ size += 1
code.emit(CREATE_ARRAY(kind))
case JVM.anewarray =>
- val tpe = pool.getClassOrArrayType(in.nextChar); size = size + 2;
+ val tpe = pool.getClassOrArrayType(in.nextChar); size += 2
code.emit(CREATE_ARRAY(toTypeKind(tpe)))
case JVM.arraylength => code.emit(CALL_PRIMITIVE(ArrayLength(OBJECT))); // the kind does not matter
case JVM.athrow => code.emit(THROW());
- case JVM.checkcast => code.emit(CHECK_CAST(toTypeKind(pool.getClassOrArrayType(in.nextChar)))); size = size + 2;
- case JVM.instanceof => code.emit(IS_INSTANCE(toTypeKind(pool.getClassOrArrayType(in.nextChar)))); size = size + 2;
- case JVM.monitorenter => code.emit(MONITOR_ENTER());
- case JVM.monitorexit => code.emit(MONITOR_EXIT());
+ case JVM.checkcast =>
+ code.emit(CHECK_CAST(toTypeKind(pool.getClassOrArrayType(in.nextChar)))); size += 2
+ case JVM.instanceof =>
+ code.emit(IS_INSTANCE(toTypeKind(pool.getClassOrArrayType(in.nextChar)))); size += 2
+ case JVM.monitorenter => code.emit(MONITOR_ENTER())
+ case JVM.monitorexit => code.emit(MONITOR_EXIT())
case JVM.wide =>
size = size + 1;
toUnsignedByte(in.nextByte) match {
- case JVM.iload => code.emit(LOAD_LOCAL(code.getLocal(in.nextChar, INT))); size = size + 2;
- case JVM.lload => code.emit(LOAD_LOCAL(code.getLocal(in.nextChar, LONG))); size = size + 2;
- case JVM.fload => code.emit(LOAD_LOCAL(code.getLocal(in.nextChar, FLOAT))); size = size + 2;
- case JVM.dload => code.emit(LOAD_LOCAL(code.getLocal(in.nextChar, DOUBLE))); size = size + 2;
- case JVM.aload => code.emit(LOAD_LOCAL(code.getLocal(in.nextChar, OBJECT))); size = size + 2;
- case JVM.istore => code.emit(STORE_LOCAL(code.getLocal(in.nextChar, INT))); size = size + 2;
- case JVM.lstore => code.emit(STORE_LOCAL(code.getLocal(in.nextChar, LONG))); size = size + 2;
- case JVM.fstore => code.emit(STORE_LOCAL(code.getLocal(in.nextChar, FLOAT))); size = size + 2;
- case JVM.dstore => code.emit(STORE_LOCAL(code.getLocal(in.nextChar, DOUBLE))); size = size + 2;
- case JVM.astore => code.emit(STORE_LOCAL(code.getLocal(in.nextChar, OBJECT))); size = size + 2;
+ case JVM.iload => code.emit(LOAD_LOCAL(code.getLocal(in.nextChar, INT))); size += 2
+ case JVM.lload => code.emit(LOAD_LOCAL(code.getLocal(in.nextChar, LONG))); size += 2
+ case JVM.fload => code.emit(LOAD_LOCAL(code.getLocal(in.nextChar, FLOAT))); size += 2
+ case JVM.dload => code.emit(LOAD_LOCAL(code.getLocal(in.nextChar, DOUBLE))); size += 2
+ case JVM.aload => code.emit(LOAD_LOCAL(code.getLocal(in.nextChar, OBJECT))); size += 2
+ case JVM.istore => code.emit(STORE_LOCAL(code.getLocal(in.nextChar, INT))); size += 2
+ case JVM.lstore => code.emit(STORE_LOCAL(code.getLocal(in.nextChar, LONG))); size += 2
+ case JVM.fstore => code.emit(STORE_LOCAL(code.getLocal(in.nextChar, FLOAT))); size += 2
+ case JVM.dstore => code.emit(STORE_LOCAL(code.getLocal(in.nextChar, DOUBLE))); size += 2
+ case JVM.astore => code.emit(STORE_LOCAL(code.getLocal(in.nextChar, OBJECT))); size += 2
case JVM.ret => Predef.error("Cannot handle jsr/ret")
case JVM.iinc =>
- size = size + 4
+ size += 4
val local = code.getLocal(in.nextChar, INT)
code.emit(CONSTANT(Constant(in.nextChar)))
code.emit(CALL_PRIMITIVE(Arithmetic(ADD, INT)))
@@ -527,7 +528,7 @@ abstract class ICodeReader extends ClassfileParser {
}
case JVM.multianewarray =>
- size = size + 3
+ size += 3
val tpe = toTypeKind(pool.getClassOrArrayType(in.nextChar))
val dim = in.nextByte
assert(dim == 1, "Cannot handle multidimensional arrays yet.")
@@ -540,20 +541,18 @@ abstract class ICodeReader extends ClassfileParser {
// case _ => Predef.error("Unknown bytecode")
}
- pc = pc + size
+ pc += size
}
// add parameters
var idx = if (method.isStatic) 0 else 1
- for (val t <- method.symbol.tpe.paramTypes) {
+ for (t <- method.symbol.tpe.paramTypes) {
this.method.addParam(code.freshLocal(idx, toTypeKind(t), true))
- idx = idx + 1
+ idx += 1
}
pc = 0
- while (pc < codeLength) {
- parseInstruction
- }
+ while (pc < codeLength) parseInstruction
val exceptionEntries = in.nextChar.toInt
var i = 0
@@ -564,7 +563,7 @@ abstract class ICodeReader extends ClassfileParser {
code.jmpTargets += in.nextChar
// skip the exception type
in.skip(2)
- i = i + 1
+ i += 1
}
skipAttributes()
@@ -607,8 +606,8 @@ abstract class ICodeReader extends ClassfileParser {
var bb = code.startBlock
def makeBasicBlocks: Map[Int, BasicBlock] = {
- val block: Map[Int, BasicBlock] = new HashMap;
- for (val pc <- jmpTargets) block += pc -> code.newBlock
+ val block: Map[Int, BasicBlock] = new HashMap
+ for (pc <- jmpTargets) block += pc -> code.newBlock
block
}
@@ -616,7 +615,7 @@ abstract class ICodeReader extends ClassfileParser {
var otherBlock: BasicBlock = null
var disableJmpTarget = false
- for (val (pc, instr) <- instrs.elements) {
+ for ((pc, instr) <- instrs.elements) {
// Console.println("> " + pc + ": " + instr);
if (jmpTargets contains pc) {
otherBlock = blocks(pc)
@@ -723,9 +722,9 @@ abstract class ICodeReader extends ClassfileParser {
// method.dump
tfa.init(method)
tfa.run
- for (val bb <- linearizer.linearize(method)) {
- var info = tfa.in(bb);
- for (val i <- bb.toList) {
+ for (bb <- linearizer.linearize(method)) {
+ var info = tfa.in(bb)
+ for (i <- bb.toList) {
i match {
case DUP_X1 =>
val one = info._2.types(0)
@@ -928,11 +927,11 @@ abstract class ICodeReader extends ClassfileParser {
/** Duplicate and exchange pseudo-instruction. Should be later
* replaced by proper ICode */
- abstract class DupX extends Instruction;
+ abstract class DupX extends Instruction
- case object DUP_X1 extends DupX;
- case object DUP_X2 extends DupX;
- case object DUP2_X1 extends DupX;
- case object DUP2_X2 extends DupX;
+ case object DUP_X1 extends DupX
+ case object DUP_X2 extends DupX
+ case object DUP2_X1 extends DupX
+ case object DUP2_X2 extends DupX
}
}
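Note on the assignment rewrites in the hunks above: for a mutable `var` whose type does not define its own `+=` method, Scala desugars `x += n` to `x = x + n`, so the rewritten lines behave identically to the originals. A minimal sketch, with names chosen only for illustration:

    object PlusEqualsSketch {
      def main(args: Array[String]) {
        var size = 0
        size = size + 2   // long form, as removed above
        size += 2         // the compiler desugars this to size = size + 2
        println(size)     // prints 4
      }
    }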
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/PickleBuffer.scala b/src/compiler/scala/tools/nsc/symtab/classfile/PickleBuffer.scala
index 6e85d606d4..f890eebaa2 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/PickleBuffer.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/PickleBuffer.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2006 LAMP/EPFL
+ * Copyright 2005-2007 LAMP/EPFL
* @author Martin Odersky
*/
// $Id$
@@ -31,16 +31,16 @@ class PickleBuffer(data: Array[byte], from: int, to: int) {
// -- Basic output routines --------------------------------------------
/** Write a byte of data */
- def writeByte(b: int): unit = {
+ def writeByte(b: int) {
if (writeIndex == bytes.length) dble
bytes(writeIndex) = b.asInstanceOf[byte]
- writeIndex = writeIndex + 1
+ writeIndex += 1
}
/** Write a natural number in big endian format, base 128.
* All but the last digits have bit 0x80 set.
*/
- def writeNat(x: int): unit = {
+ def writeNat(x: int) {
def writeNatPrefix(x: int): unit = {
val y = x >>> 7
if (y != 0) writeNatPrefix(y)
@@ -57,8 +57,8 @@ class PickleBuffer(data: Array[byte], from: int, to: int) {
* @param pos ...
* @param x ...
*/
- def patchNat(pos: int, x: int): unit = {
- def patchNatPrefix(x: int): unit = {
+ def patchNat(pos: int, x: int) {
+ def patchNatPrefix(x: int) {
writeByte(0)
Array.copy(bytes, pos, bytes, pos+1, writeIndex - (pos+1))
bytes(pos) = ((x & 0x7f) | 0x80).asInstanceOf[byte]
@@ -74,7 +74,7 @@ class PickleBuffer(data: Array[byte], from: int, to: int) {
*
* @param x The long number to be written.
*/
- def writeLong(x: long): unit = {
+ def writeLong(x: long) {
val y = x >> 8
val z = x & 0xff
if (-y != (z >> 7)) writeLong(y)
@@ -85,7 +85,7 @@ class PickleBuffer(data: Array[byte], from: int, to: int) {
/** Read a byte */
def readByte(): int = {
- val x = bytes(readIndex); readIndex = readIndex + 1; x
+ val x = bytes(readIndex); readIndex += 1; x
}
/** Read a natural number in big endian format, base 128.
@@ -106,7 +106,7 @@ class PickleBuffer(data: Array[byte], from: int, to: int) {
var i = 0
while (i < len) {
x = (x << 8) + (readByte() & 0xff)
- i = i + 1
+ i += 1
}
val leading = 64 - (len << 3)
x << leading >> leading
@@ -135,7 +135,7 @@ class PickleBuffer(data: Array[byte], from: int, to: int) {
*/
def createIndex: Array[int] = {
val index = new Array[int](readNat())
- for (val i <- Iterator.range(0, index.length)) {
+ for (i <- 0 until index.length) {
index(i) = readIndex
readByte()
readIndex = readNat() + readIndex
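Note on the loop rewrite just above: `0 until n` builds a `Range` that stops before `n`, so iterating over it visits the same indices as `Iterator.range(0, n)`. A small illustrative sketch (the array contents are hypothetical):

    object RangeSketch {
      def main(args: Array[String]) {
        val index = new Array[Int](3)
        // visits i = 0, 1, 2; `until` excludes the upper bound
        for (i <- 0 until index.length) index(i) = i * 10
        println(index.mkString(", "))   // prints 0, 10, 20
      }
    }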
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala
index 84c0efe5aa..33ecdcb479 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2006 LAMP/EPFL
+ * Copyright 2005-2007 LAMP/EPFL
* @author Martin Odersky
*/
// $Id$
@@ -7,7 +7,6 @@
package scala.tools.nsc.symtab.classfile
import java.lang.{Float, Double}
-import scala.collection.mutable.HashMap
import scala.tools.nsc.util.{Position, NoPosition, ShowPickled}
import Flags._
import PickleFormat._
@@ -57,6 +56,7 @@ abstract class Pickler extends SubComponent {
private class Pickle(rootName: Name, rootOwner: Symbol)
extends PickleBuffer(new Array[byte](4096), -1, 0) {
+ import scala.collection.mutable.HashMap
private var entries = new Array[AnyRef](256)
private var ep = 0
private val index = new HashMap[AnyRef, int]
@@ -111,7 +111,7 @@ abstract class Pickler extends SubComponent {
if (!sym.children.isEmpty)
putChildren(sym, sym.children.toList.sort((x, y) => x isLess y))
- for (val attr <- sym.attributes.reverse) {
+ for (attr <- sym.attributes.reverse) {
if (attr.atp.symbol isNonBottomSubClass definitions.StaticAnnotationClass)
putAnnotation(sym, attr)
}
@@ -158,7 +158,7 @@ abstract class Pickler extends SubComponent {
}
private def putTypes(tps: List[Type]): unit = tps foreach putType
- private def putTree(tree: reflect.Tree): unit = if(putEntry(tree)) {
+ private def putTree(tree: reflect.Tree): unit = if (putEntry(tree)) {
tree match {
case reflect.Ident(sym) => putSymbol(sym)
case reflect.Select(qual, sym) => putTree(qual); putSymbol(sym)
@@ -267,8 +267,8 @@ abstract class Pickler extends SubComponent {
private def putAnnotation(sym: Symbol, attr: AnnotationInfo[Constant]): unit = {
assert(putEntry((sym, attr)))
putType(attr.atp)
- for (val c <- attr.args) putConstant(c)
- for (val (name, c) <- attr.assocs) { putEntry(name); putConstant(c) }
+ for (c <- attr.args) putConstant(c)
+ for ((name, c) <- attr.assocs) { putEntry(name); putConstant(c) }
}
private def putAnnotation(annot: AnnotationInfo[Any]): unit =
@@ -276,7 +276,7 @@ abstract class Pickler extends SubComponent {
val AnnotationInfo(tpe, args, assocs) = annot
putType(tpe)
args foreach putTreeOrConstant
- for (val (name, rhs) <- assocs) { putEntry(name); putTreeOrConstant(rhs) }
+ for ((name, rhs) <- assocs) { putEntry(name); putTreeOrConstant(rhs) }
}
private def putTreeOrConstant(x: Any) {
@@ -395,12 +395,12 @@ abstract class Pickler extends SubComponent {
case (target: Symbol, attr @ AnnotationInfo(atp, args, assocs)) =>
writeRef(target)
writeRef(atp)
- for (val c <- args) writeRef(c.asInstanceOf[Constant])
- for (val (name, c) <- assocs) { writeRef(name); writeRef(c.asInstanceOf[Constant]) }
+ for (c <- args) writeRef(c.asInstanceOf[Constant])
+ for ((name, c) <- assocs) { writeRef(name); writeRef(c.asInstanceOf[Constant]) }
ATTRIBUTE
case (target: Symbol, children: List[_]) =>
writeRef(target)
- for (val c <- children) writeRef(c.asInstanceOf[Symbol])
+ for (c <- children) writeRef(c.asInstanceOf[Symbol])
CHILDREN
case reflect.Ident(sym) =>
writeNat(IDENTtree)
@@ -589,8 +589,8 @@ abstract class Pickler extends SubComponent {
case AnnotationInfo(target, args, assocs) =>
writeRef(target)
writeNat(args.length)
- for (val tree <- args) writeRef(tree.asInstanceOf[reflect.Tree])
- for (val (name, tree) <- assocs) {
+ for (tree <- args) writeRef(tree.asInstanceOf[reflect.Tree])
+ for ((name, tree) <- assocs) {
writeRef(name);
writeRef(tree.asInstanceOf[reflect.Tree])
}
@@ -612,7 +612,7 @@ abstract class Pickler extends SubComponent {
writeNat(MinorVersion)
writeNat(ep)
if (settings.debug.value) log("" + ep + " entries")//debug
- for (val i <- 0 until ep) writeEntry(entries(i));
+ for (i <- 0 until ep) writeEntry(entries(i))
if (settings.Xshowcls.value == rootName.toString) {
readIndex = 0
ShowPickled.printFile(this, Console.out)
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/UnPickler.scala b/src/compiler/scala/tools/nsc/symtab/classfile/UnPickler.scala
index 45e43db93e..75074162b9 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/UnPickler.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/UnPickler.scala
@@ -1,19 +1,20 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2006 LAMP/EPFL
+ * Copyright 2005-2007 LAMP/EPFL
* @author Martin Odersky
*/
// $Id$
package scala.tools.nsc.symtab.classfile
-import scala.tools.nsc.util.{Position,NoPosition}
-import scala.tools.util.UTF8Codec
+import java.io.IOException
import java.lang.{Float, Double}
+import scala.tools.nsc.util.{Position, NoPosition}
+import scala.tools.util.UTF8Codec
+
import Flags._
import PickleFormat._
import collection.mutable.{HashMap, ListBuffer}
-import java.io.IOException
/** This abstract class implements ..
*
@@ -45,7 +46,7 @@ abstract class UnPickler {
private val entries = new Array[AnyRef](index.length)
private val symScopes = new HashMap[Symbol, Scope]
- for (val i <- 0 until index.length) {
+ for (i <- 0 until index.length) {
if (isSymbolEntry(i)) { at(i, readSymbol); {} }
else if (isAnnotationEntry(i)) { at(i, readAnnotation); {} }
}
@@ -156,10 +157,10 @@ abstract class UnPickler {
sym = NoSymbol
case _ =>
val unusedPos : Int = {
- if (tag > PosOffset) readNat;
+ if (tag > PosOffset) readNat
else -1
}
- val pos : Position = NoPosition;
+ val pos: Position = NoPosition
val name = readNameRef()
val owner = readSymbolRef()
val flags = readNat()
@@ -530,7 +531,7 @@ abstract class UnPickler {
private class LazyTypeRef(i: int) extends LazyType {
private val definedAtRunId = currentRunId
- override def complete(sym: Symbol): unit = {
+ override def complete(sym: Symbol) {
val tp = at(i, readType)
sym setInfo tp
if (currentRunId != definedAtRunId) sym.setInfo(adaptToNewRunMap(tp))
@@ -539,7 +540,7 @@ abstract class UnPickler {
}
private class LazyTypeRefAndAlias(i: int, j: int) extends LazyTypeRef(i) {
- override def complete(sym: Symbol): unit = {
+ override def complete(sym: Symbol) {
super.complete(sym)
var alias = at(j, readSymbol)
if (alias hasFlag OVERLOADED)
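Note on the signature changes in the hunks above: dropping `: unit =` and writing `def complete(sym: Symbol) { ... }` uses procedure syntax, which still declares a `Unit` result type. A minimal sketch with hypothetical method names, for illustration only:

    object ProcedureSyntaxSketch {
      def completeOld(): Unit = { println("explicit Unit result type") }
      def completeNew() { println("procedure syntax; the result type is still Unit") }
      def main(args: Array[String]) {
        completeOld()
        completeNew()
      }
    }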
diff --git a/src/compiler/scala/tools/nsc/transform/UnCurry.scala b/src/compiler/scala/tools/nsc/transform/UnCurry.scala
index 2cfd6886f8..e6f766ccfc 100644
--- a/src/compiler/scala/tools/nsc/transform/UnCurry.scala
+++ b/src/compiler/scala/tools/nsc/transform/UnCurry.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2006 LAMP/EPFL
+ * Copyright 2005-2007 LAMP/EPFL
* @author
*/
// $Id$
@@ -8,7 +8,7 @@ package scala.tools.nsc.transform
import symtab.Flags._
import scala.collection.mutable.{HashMap, HashSet}
-import scala.tools.nsc.util.{Position}
+import scala.tools.nsc.util.Position
/*<export>*/
/** - uncurry all symbol and tree types (@see UnCurryPhase)
@@ -250,7 +250,7 @@ abstract class UnCurry extends InfoTransform with TypingTransformers {
val applyMethod = anonClass.newMethod(fun.pos, nme.apply)
.setFlag(FINAL).setInfo(MethodType(formals, restpe));
anonClass.info.decls enter applyMethod;
- for (val vparam <- fun.vparams) vparam.symbol.owner = applyMethod;
+ for (vparam <- fun.vparams) vparam.symbol.owner = applyMethod;
new ChangeOwnerTraverser(fun.symbol, applyMethod).traverse(fun.body);
var members = List(
DefDef(Modifiers(FINAL), nme.apply, List(), List(fun.vparams), TypeTree(restpe), fun.body)
diff --git a/src/compiler/scala/tools/nsc/util/NameTransformer.scala b/src/compiler/scala/tools/nsc/util/NameTransformer.scala
index 23ed83eb54..fa953b88c0 100644
--- a/src/compiler/scala/tools/nsc/util/NameTransformer.scala
+++ b/src/compiler/scala/tools/nsc/util/NameTransformer.scala
@@ -1,13 +1,11 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2006 LAMP/EPFL
+ * Copyright 2005-2007 LAMP/EPFL
* @author Martin Odersky
*/
// $Id$
package scala.tools.nsc.util
-import compat.StringBuilder
-
object NameTransformer {
private val nops = 128
private val ncodes = 26 * 26
@@ -62,7 +60,7 @@ object NameTransformer {
} else if (buf ne null) {
buf.append(c)
}
- i = i + 1
+ i += 1
}
if (buf eq null) name else buf.toString()
}
@@ -88,21 +86,21 @@ object NameTransformer {
val ch2 = name.charAt(i+2)
if ('a' <= ch2 && ch2 <= 'z') {
ops = code2op((ch1 - 'a') * 26 + ch2 - 'a')
- while ((ops ne null) && !name.startsWith(ops.code, i)) ops = ops.next;
+ while ((ops ne null) && !name.startsWith(ops.code, i)) ops = ops.next
if (ops ne null) {
if (buf eq null) {
buf = new StringBuilder()
buf.append(name.substring(0, i))
}
buf.append(ops.op)
- i = i + ops.code.length()
+ i += ops.code.length()
}
}
}
}
if (ops eq null) {
if (buf ne null) buf.append(c)
- i = i + 1
+ i += 1
}
}
//System.out.println("= " + (if (buf == null) name else buf.toString()));//DEBUG
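
NameTransformer maps symbolic operator characters to `$`-prefixed ASCII names (and decodes them back). A much-simplified sketch of that idea, covering only a couple of the mappings and using the `i += 1` form the patch adopts; the real class carries the full operator table and the reverse decoding, and the names below are illustrative.

object EncodeSketch {
  // A tiny subset of the operator table; the compiler's table is larger.
  private val op2code = Map('+' -> "$plus", '-' -> "$minus", ':' -> "$colon")

  def encode(name: String): String = {
    val buf = new StringBuilder
    var i = 0
    while (i < name.length) {
      val c = name.charAt(i)
      buf.append(op2code.getOrElse(c, c.toString))
      i += 1 // the `+=` form this commit switches to
    }
    buf.toString
  }

  def main(args: Array[String]): Unit =
    println(encode("+:")) // $plus$colon
}
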
diff --git a/src/compiler/scala/tools/nsc/util/SourceFile.scala b/src/compiler/scala/tools/nsc/util/SourceFile.scala
index 4f7f0607d4..28528ea455 100644
--- a/src/compiler/scala/tools/nsc/util/SourceFile.scala
+++ b/src/compiler/scala/tools/nsc/util/SourceFile.scala
@@ -1,17 +1,12 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2006, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
+/* NSC -- new Scala compiler
+ * Copyright 2005-2007 LAMP/EPFL
+ * @author Martin Odersky
+ */
// $Id$
package scala.tools.nsc.util
-
-import compat.StringBuilder
import scala.tools.nsc.io.{AbstractFile, VirtualFile}
/** Uses positions that are offsets rather than line/column pairs.
@@ -33,7 +28,7 @@ object SourceFile {
class SourceFile(val file: AbstractFile, _content: Array[Char]) {
import SourceFile._
- def this(_file: AbstractFile) = this(_file, _file.toCharArray);
+ def this(_file: AbstractFile) = this(_file, _file.toCharArray)
def this(sourceName: String, content: Array[Char]) =
this(new VirtualFile(sourceName), content)
@@ -46,7 +41,7 @@ class SourceFile(val file: AbstractFile, _content: Array[Char]) {
def isLineBreak(idx: Int) =
if (!SourceFile.isLineBreak(content(idx))) false
- else if (content(idx) == CR && idx + 1 < content.length && content(idx + 1) == LF) false;
+ else if (content(idx) == CR && idx + 1 < content.length && content(idx + 1) == LF) false
else true
def position(offset: Int) =
@@ -71,8 +66,6 @@ class SourceFile(val file: AbstractFile, _content: Array[Char]) {
var index = 0
var offset = 0
-
-
def find(toFind: Int, isIndex: Boolean): Int = {
if (toFind == 0) return 0
@@ -117,7 +110,7 @@ class SourceFile(val file: AbstractFile, _content: Array[Char]) {
while (idx < text.length()) {
if (offset + idx >= content.length) return false
if (content(offset + idx) != text.charAt(idx)) return false
- idx = idx + 1
+ idx += 1
}
return true
}
@@ -132,7 +125,7 @@ class SourceFile(val file: AbstractFile, _content: Array[Char]) {
val buf = new StringBuilder()
while (!isLineBreak(offset) && offset < content.length) {
buf.append(content(offset))
- offset = offset + 1
+ offset += 1
}
buf.toString()
}
@@ -221,7 +214,8 @@ extends SourceFile(name, contents)
stop)
override def positionInUltimateSource(position: Position) = {
- if (position.offset.isEmpty) super.positionInUltimateSource(position)
+ if (position.offset.isEmpty)
+ super.positionInUltimateSource(position)
else underlyingFile.positionInUltimateSource(
new OffsetPosition(underlyingFile, position.offset.get + start))
}
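
SourceFile works with flat character offsets and derives line information by scanning for line breaks, treating a CR immediately followed by LF as a single break (as in the `isLineBreak` context above). A self-contained sketch of that offset-to-line computation, simplified to CR and LF only; the method names are illustrative, not the class's API.

object OffsetLineSketch {
  private final val CR = '\r'
  private final val LF = '\n'

  // True if the character at idx ends a line, counting CR LF as one break at the LF.
  private def isLineBreak(content: Array[Char], idx: Int): Boolean =
    content(idx) match {
      case CR => !(idx + 1 < content.length && content(idx + 1) == LF)
      case LF => true
      case _  => false
    }

  // Zero-based line number containing the given offset.
  def offsetToLine(content: Array[Char], offset: Int): Int = {
    var line = 0
    var i = 0
    while (i < offset) {
      if (isLineBreak(content, i)) line += 1
      i += 1
    }
    line
  }

  def main(args: Array[String]): Unit = {
    val text = "first\r\nsecond\nthird".toCharArray
    println(offsetToLine(text, text.length - 1)) // 2
  }
}
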