author     michelou <michelou@epfl.ch>  2007-04-23 15:38:07 +0000
committer  michelou <michelou@epfl.ch>  2007-04-23 15:38:07 +0000
commit     bd7866c7940c41d5c4f3f88e09c7354126eabe15 (patch)
tree       7b2a2bd12160cf920fac5fd7867218ded4f0967c /src
parent     90c68e19144fa811671b8e1dc3ad0e7ecc5b487c (diff)
updated self aliases, for-comprehensions
Diffstat (limited to 'src')
-rw-r--r--  src/compiler/scala/tools/nsc/GenericRunnerSettings.scala        20
-rw-r--r--  src/compiler/scala/tools/nsc/ast/parser/Parsers.scala             4
-rw-r--r--  src/compiler/scala/tools/nsc/backend/msil/GenMSIL.scala          64
-rw-r--r--  src/compiler/scala/tools/nsc/matching/ParallelMatching.scala     47
-rw-r--r--  src/compiler/scala/tools/nsc/matching/PatternMatchers.scala      33
-rw-r--r--  src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala       6
-rw-r--r--  src/compiler/scala/tools/nsc/transform/LiftCode.scala             8
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Infer.scala             61
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Namers.scala            34
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala   2
-rw-r--r--  src/library/scala/util/automata/SubsetConstruction.scala        174
-rw-r--r--  src/library/scala/util/automata/WordBerrySethi.scala              4
-rw-r--r--  src/library/scala/xml/PrettyPrinter.scala                         6
-rw-r--r--  src/library/scala/xml/dtd/ContentModel.scala                     53
-rw-r--r--  src/library/scala/xml/dtd/DTD.scala                              44
-rw-r--r--  src/library/scala/xml/dtd/DocType.scala                          50
-rw-r--r--  src/library/scala/xml/factory/Binder.scala                       56
-rw-r--r--  src/manual/scala/tools/docutil/EmitManPage.scala                  2
18 files changed, 334 insertions, 334 deletions
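
The hunks below apply two syntax modernizations across the compiler and library sources: for-comprehensions drop the leading "val" in generators and introduce filters with "if", and "trait X requires Y" declarations are rewritten as explicit self-type aliases. A minimal standalone sketch of both idioms follows; the trait InferLike, the method pairs, and its arguments are illustrative names only, not part of the patched sources:

    trait Analyzer {
      def settingsOk: Boolean = true
    }

    // Old style removed by this commit:
    //   trait Infer requires Analyzer { ... }
    //   for (val x <- xs; val y <- ys; x != y) yield (x, y)

    trait InferLike {
      self: Analyzer =>                    // self-type alias replaces `requires`

      def pairs(xs: List[Int], ys: List[Int]): List[(Int, Int)] =
        for (x <- xs; y <- ys if x != y)   // no `val`; guard introduced with `if`
          yield (x, y)
    }
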
diff --git a/src/compiler/scala/tools/nsc/GenericRunnerSettings.scala b/src/compiler/scala/tools/nsc/GenericRunnerSettings.scala
index 4ba176d0fe..3e9c3e706b 100644
--- a/src/compiler/scala/tools/nsc/GenericRunnerSettings.scala
+++ b/src/compiler/scala/tools/nsc/GenericRunnerSettings.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2006 LAMP/EPFL
+ * Copyright 2006-2007 LAMP/EPFL
* @author Lex Spoon
*/
@@ -29,13 +29,11 @@ extends Settings(error) {
"-nocompdaemon",
"do not use the fsc compilation daemon")
- /* For some reason, "object defines extends Setting(...)"
- does not work here. The object is present but the setting
- is not added to allsettings. Thus,
- */
- class DefinesSetting
- extends Setting("set a Java property")
- {
+ /** For some reason, "object defines extends Setting(...)"
+ * does not work here. The object is present but the setting
+ * is not added to allsettings. Thus,
+ */
+ class DefinesSetting extends Setting("set a Java property") {
def name = "-D<prop>"
@@ -47,17 +45,15 @@ extends Settings(error) {
args match {
case arg0::rest
if arg0.startsWith("-D") =>
- {
val stripD = arg0.substring(2)
val eqidx = stripD.indexOf('=')
val addition =
- if(eqidx < 0)
+ if (eqidx < 0)
(stripD, "")
else
(stripD.substring(0, eqidx), stripD.substring(eqidx+1))
props += addition
rest
- }
case _ => args
}
@@ -66,7 +62,7 @@ extends Settings(error) {
/** Apply the specified properties to the current JVM */
def applyToCurrentJVM = {
val systemProps = getProperties
- for(val (key, value) <- props.toList)
+ for ((key, value) <- props.toList)
systemProps.setProperty(key, value)
}
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
index e9f388f1f3..bdbc3532b4 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
@@ -1877,7 +1877,7 @@ trait Parsers {
}
trees
}
- for (val p <- lhs.toList; val d <- mkDefs(p)) yield d
+ for (p <- lhs.toList; d <- mkDefs(p)) yield d
}
/** VarDef ::= Id {`,' Id} [`:' Type] `=' Expr
@@ -1903,7 +1903,7 @@ trait Parsers {
newmods = newmods | Flags.DEFERRED
EmptyTree
}
- for (val (pos, name) <- lhs.toList) yield
+ for ((pos, name) <- lhs.toList) yield
atPos(pos) { ValDef(newmods, name, tp.duplicate, rhs.duplicate) }
}
diff --git a/src/compiler/scala/tools/nsc/backend/msil/GenMSIL.scala b/src/compiler/scala/tools/nsc/backend/msil/GenMSIL.scala
index 1b5bd62ecd..e9061af154 100644
--- a/src/compiler/scala/tools/nsc/backend/msil/GenMSIL.scala
+++ b/src/compiler/scala/tools/nsc/backend/msil/GenMSIL.scala
@@ -1,5 +1,6 @@
/* NSC -- new scala compiler
* Copyright 2005-2007 LAMP/EPFL
+ * @author Nikolay Mihaylov
*/
// $Id$
@@ -12,7 +13,6 @@ import java.nio.{ByteBuffer, ByteOrder}
import scala.collection.mutable.{Map, HashMap, HashSet, Stack}
import scala.tools.nsc.symtab._
import scala.tools.nsc.util.Position
-import compat.StringBuilder
import ch.epfl.lamp.compiler.msil.{Type => MsilType, _}
import ch.epfl.lamp.compiler.msil.emit._
@@ -277,7 +277,7 @@ abstract class GenMSIL extends SubComponent {
val symtab: Array[Byte] = new Array[Byte](pickle.writeIndex + 8)
symtab(0) = 1.toByte
var size:Int = pickle.writeIndex
- for(val i <- Iterator.range(2, 6)) {
+ for (i <- 2 until 6) {
symtab(i) = (size & 0xff).toByte
size = size >> 8
}
@@ -298,7 +298,7 @@ abstract class GenMSIL extends SubComponent {
if (settings.debug.value)
log("creating attributes: " + attributes + " for member : " + member)
- for(val AnnotationInfo(typ, consts, nvPairs) <- attributes /* !typ.symbol.hasFlag(Flags.JAVA) */ ) {
+ for (AnnotationInfo(typ, consts, nvPairs) <- attributes /* !typ.symbol.hasFlag(Flags.JAVA) */ ) {
// assert(consts.length <= 1,
// "too many constant arguments for attribute; "+consts.toString())
@@ -455,7 +455,7 @@ abstract class GenMSIL extends SubComponent {
}
private def createTypes(): Unit =
- for (val sym <- classes.keys) {
+ for (sym <- classes.keys) {
if (settings.debug.value)
log("Calling CreatType for " + sym + ", " + types(sym))
types(sym).asInstanceOf[TypeBuilder].CreateType()
@@ -554,7 +554,7 @@ abstract class GenMSIL extends SubComponent {
}
if (mcode != null) {
- for (val local <- (m.locals.diff(m.params))) {
+ for (local <- m.locals.diff(m.params)) {
if (settings.debug.value)
log("add local var: " + local + ", of kind " + local.kind)
val t: MsilType = msilType(local.kind)
@@ -697,7 +697,7 @@ abstract class GenMSIL extends SubComponent {
var res = ""
res = res + TopBlock.indent + "BlockList0:\n"
TopBlock.indent = TopBlock.indent + " "
- for(val b <- blocks)
+ for (b <- blocks)
res = res + b + "\n"
TopBlock.indent = TopBlock.indent.substring(0,TopBlock.indent.length-2)
res
@@ -778,7 +778,7 @@ abstract class GenMSIL extends SubComponent {
TopBlock.indent = TopBlock.indent.substring(0,TopBlock.indent.length-4)
res = res + TopBlock.indent + " " + "catch:\n"
TopBlock.indent = TopBlock.indent + " "
- for(val b <- catchBlocks)
+ for (b <- catchBlocks)
res = res + b + "\n"
TopBlock.indent = TopBlock.indent.substring(0,TopBlock.indent.length-4)
res = res + TopBlock.indent + " " + "finally:\n"
@@ -887,9 +887,9 @@ abstract class GenMSIL extends SubComponent {
}
// get leaving blocks and their outside targets
def leavingBlocks(blocks: List[BasicBlock]): List[(BasicBlock, List[BasicBlock])] = {
- for {val b <- blocks
+ for {b <- blocks
val t = outsideTargets(b, blocks)
- t.length != 0 } yield (b, t)
+ if t.length != 0 } yield (b, t)
}
def replaceOutJumps(blocks: List[BasicBlock], leaving: List[(BasicBlock, List[BasicBlock])], exh: ExceptionHandler): (List[BasicBlock], Option[BasicBlock]) = {
@@ -1064,20 +1064,20 @@ abstract class GenMSIL extends SubComponent {
var orderedBlocks: List[BasicBlock] = Nil
def flatten(block: Block): Unit = {
if (block == TopBlock) {
- for (val b <- TopBlock.blocks) flatten(b)
+ for (b <- TopBlock.blocks) flatten(b)
} else block match {
case cb: CodeBlock =>
orderedBlocks = orderedBlocks ::: cb.basicBlocks
case bl: BlockList =>
- for (val b <- bl.blocks) flatten(b)
+ for (b <- bl.blocks) flatten(b)
case cb: CatchBlock =>
- for (val b <- cb.blocks) flatten(b)
+ for (b <- cb.blocks) flatten(b)
case eb: ExceptionBlock =>
val handler = eb.handler
addExHInstruction(eb.tryBlock.firstBasicBlock, new BeginExceptionBlock(handler))
omitJump(eb.tryBlock.lastBasicBlock)
flatten(eb.tryBlock)
- for(val c <- eb.catchBlocks) {
+ for (c <- eb.catchBlocks) {
val t: MsilType = (if (c.exSym == NoSymbol) EXCEPTION
else getType(c.exSym))
addExHInstruction(c.firstBasicBlock, new BeginCatchBlock(handler, t))
@@ -1480,14 +1480,14 @@ abstract class GenMSIL extends SubComponent {
mcode.Emit(OpCodes.Stloc, switchLocal)
var i: Int = 0
- for(val l <- tags) {
+ for (l <- tags) {
var targetLabel = labels(branches(i))
- for(val i <- l) {
+ for (i <- l) {
mcode.Emit(OpCodes.Ldloc, switchLocal)
loadI4(i, mcode)
mcode.Emit(OpCodes.Beq, targetLabel)
}
- i = i + 1
+ i += 1
}
val defaultTarget = labels(branches(i))
if (nextBlock != defaultTarget && !omitJumpBlocks.contains(currentBlock))
@@ -1732,7 +1732,7 @@ abstract class GenMSIL extends SubComponent {
def makeLabels(bs: List[BasicBlock]) = {
if (settings.debug.value)
log("Making labels for: " + method)
- for (val bb <- bs) labels(bb) = mcode.DefineLabel()
+ for (bb <- bs) labels(bb) = mcode.DefineLabel()
}
////////////////////// local vars ///////////////////////
@@ -1747,21 +1747,21 @@ abstract class GenMSIL extends SubComponent {
if (isStaticSymbol(m.symbol))
idx = 0
- for (val l <- params) {
+ for (l <- params) {
if (settings.debug.value)
log("Index value for parameter " + l + ": " + idx)
l.index = idx
- idx = idx + 1 // sizeOf(l.kind)
+ idx += 1 // sizeOf(l.kind)
}
val locvars = m.locals.diff(params)
idx = 0
- for (val l <- locvars) {
+ for (l <- locvars) {
if (settings.debug.value)
log("Index value for local variable " + l + ": " + idx)
l.index = idx
- idx = idx + 1 // sizeOf(l.kind)
+ idx += 1 // sizeOf(l.kind)
}
}
@@ -1899,7 +1899,7 @@ abstract class GenMSIL extends SubComponent {
false
}
- for (val m <- cls.methods) {
+ for (m <- cls.methods) {
if (isEntryPoint(m.symbol)) {
if (entryPoint == null)
entryPoint = m.symbol
@@ -1977,7 +1977,7 @@ abstract class GenMSIL extends SubComponent {
if (parents.length > 1) {
if (settings.debug.value){
log("interfaces:")
- for(val i <- Iterator.range(0, interfaces.length)){
+ for (i <- Iterator.range(0, interfaces.length)){
log(" type: " + parents(i + 1).symbol + ", msil type: " + interfaces(i))
}
}
@@ -2002,7 +2002,7 @@ abstract class GenMSIL extends SubComponent {
def createClassMembers0(iclass: IClass): Unit = {
val mtype = getType(iclass.symbol).asInstanceOf[TypeBuilder]
- for (val ifield <- iclass.fields) {
+ for (ifield <- iclass.fields) {
val sym = ifield.symbol
if (settings.debug.value)
log("Adding field: " + sym.fullNameString)
@@ -2014,7 +2014,7 @@ abstract class GenMSIL extends SubComponent {
}
if (iclass.symbol != definitions.ArrayClass)
- for (val m: IMethod <- iclass.methods) {
+ for (m: IMethod <- iclass.methods) {
val sym = m.symbol
if (settings.debug.value)
log("Creating MethodBuilder for " + Flags.flagsToString(sym.flags) + " " +
@@ -2028,7 +2028,7 @@ abstract class GenMSIL extends SubComponent {
if (m.symbol.isClassConstructor) {
val constr =
ownerType.DefineConstructor(attr, CallingConventions.Standard, paramTypes)
- for (val i <- Iterator.range(0, paramTypes.length)) {
+ for (i <- Iterator.range(0, paramTypes.length)) {
constr.DefineParameter(i, ParameterAttributes.None, msilName(m.params(i).sym))
}
mapConstructor(sym, constr)
@@ -2037,7 +2037,7 @@ abstract class GenMSIL extends SubComponent {
var resType = msilType(m.returnType)
val method =
ownerType.DefineMethod(getMethodName(sym), attr, resType, paramTypes)
- for (val i <- Iterator.range(0, paramTypes.length)){
+ for (i <- Iterator.range(0, paramTypes.length)){
method.DefineParameter(i, ParameterAttributes.None, msilName(m.params(i).sym))
}
if (!methods.contains(sym))
@@ -2145,15 +2145,15 @@ abstract class GenMSIL extends SubComponent {
MOBJECT,
MsilType.EmptyTypes)
- for (val m <- sym.tpe.nonPrivateMembers;
- m.owner != definitions.ObjectClass && !m.hasFlag(Flags.PROTECTED) &&
+ for (m <- sym.tpe.nonPrivateMembers
+ if m.owner != definitions.ObjectClass && !m.hasFlag(Flags.PROTECTED) &&
m.isMethod && !m.isClassConstructor && !isStaticSymbol(m) && !m.hasFlag(Flags.CASE))
{
if (settings.debug.value)
log(" Mirroring method: " + m)
val paramTypes = msilParamTypes(m)
val paramNames: Array[String] = new Array[String](paramTypes.length)
- for (val i <- Iterator.range(0, paramTypes.length))
+ for (i <- 0 until paramTypes.length)
paramNames(i) = "x_" + i
// CHECK: verify if getMethodName is better than msilName
@@ -2214,7 +2214,7 @@ abstract class GenMSIL extends SubComponent {
"$delegateCaller$$" + nbDelegateCallers,
MethodAttributes.Final | MethodAttributes.Public | MethodAttributes.Static,
msilType(returnType), paramTypes.map(msilType).toArray)
- for(val i <- Iterator.range(0, paramTypes.length))
+ for (i <- 0 until paramTypes.length)
caller.DefineParameter(i, ParameterAttributes.None, "arg" + i)
val delegCtor = msilType(delegateType).GetConstructor(Array(MOBJECT, INT_PTR))
mcode.Emit(OpCodes.Ldnull)
@@ -2226,7 +2226,7 @@ abstract class GenMSIL extends SubComponent {
val functionApply: MethodInfo = getMethod(functionType.member(nme.apply))
val dcode: ILGenerator = caller.GetILGenerator()
dcode.Emit(OpCodes.Ldsfld, anonfunField)
- for(val i <- Iterator.range(0, paramTypes.length)) {
+ for (i <- 0 until paramTypes.length) {
loadArg(dcode)(i)
emitBox(dcode, toTypeKind(paramTypes(i)))
}
diff --git a/src/compiler/scala/tools/nsc/matching/ParallelMatching.scala b/src/compiler/scala/tools/nsc/matching/ParallelMatching.scala
index ecca05b6c4..59a4523c3f 100644
--- a/src/compiler/scala/tools/nsc/matching/ParallelMatching.scala
+++ b/src/compiler/scala/tools/nsc/matching/ParallelMatching.scala
@@ -1,10 +1,19 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2007 LAMP/EPFL
+ * @author Burak Emir
+ */
+// $Id$
+
package scala.tools.nsc.matching
-import compat.StringBuilder
import scala.tools.nsc.util.Position
-/** utility methods (not just for ParallelMatching) */
-trait ParallelMatching requires (transform.ExplicitOuter with PatternMatchers with CodeFactory) {
+/** Utility methods (not just for ParallelMatching).
+ *
+ * @author Burak Emir
+ */
+trait ParallelMatching {
+ self: transform.ExplicitOuter with PatternMatchers with CodeFactory =>
import global._
@@ -12,7 +21,7 @@ trait ParallelMatching requires (transform.ExplicitOuter with PatternMatchers wi
// ---------------------------------- data
sealed trait RuleApplication
- case class ErrorRule extends RuleApplication
+ case class ErrorRule extends RuleApplication
case class VariableRule(subst:List[Pair[Symbol,Symbol]], guard: Tree, body: Tree) extends RuleApplication
def MixtureRule(scrutinee:Symbol, column:List[Tree], rest:Rep): MixtureRule = {
@@ -337,14 +346,13 @@ trait ParallelMatching requires (transform.ExplicitOuter with PatternMatchers wi
def combine(colcom: List[(Int,Set[Symbol])]): List[List[(Int,Symbol)]] = colcom match {
case Nil => Nil
case (i,syms)::Nil => syms.toList.map { sym => List((i,sym)) }
- case (i,syms)::cs => for(val s <- syms.toList; val rest <- combine(cs)) yield (i,s) :: rest
+ case (i,syms)::cs => for (s <- syms.toList; rest <- combine(cs)) yield (i,s) :: rest
}
if(!sealedCols.isEmpty) {
DEBUG("cols"+sealedCols)
DEBUG("comb")
- for(val com <- sealedComb)
- DEBUG(com.toString)
+ for (com <- sealedComb) DEBUG(com.toString)
val allcomb = combine(sealedCols zip sealedComb)
//Console.println("all comb!" + allcomb)
@@ -358,10 +366,9 @@ trait ParallelMatching requires (transform.ExplicitOuter with PatternMatchers wi
val coversAll = allcomb forall { combination => row exists { r => covers(r._1, combination)}}
//Console.println("all combinations covered? "+coversAll)
if(!coversAll) {
- val sb = new compat.StringBuilder()
+ val sb = new StringBuilder()
sb.append("match is not exhaustive!\n")
- for(val open <- allcomb;
- !(row exists { r => covers(r._1, open)})) {
+ for (open <- allcomb if !(row exists { r => covers(r._1, open)})) {
sb.append("missing combination ")
val NPAD = 15
def pad(s:String) = { Iterator.range(1,NPAD - s.length).foreach { x => sb.append(" ") }; sb.append(s) }
@@ -410,10 +417,10 @@ trait ParallelMatching requires (transform.ExplicitOuter with PatternMatchers wi
val sb = new StringBuilder
val NPAD = 15
def pad(s:String) = { Iterator.range(1,NPAD - s.length).foreach { x => sb.append(" ") }; sb.append(s) }
- for(val tmp <- temp) pad(tmp.name.toString)
+ for (tmp <- temp) pad(tmp.name.toString)
sb.append('\n')
- for(val (r,i) <- row.zipWithIndex) {
- for(val c <- r._1 ::: List(r._2, r._3)) {
+ for ((r,i) <- row.zipWithIndex) {
+ for (c <- r._1 ::: List(r._2, r._3)) {
pad(c.toString)
}
sb.append('\n')
@@ -556,20 +563,20 @@ trait ParallelMatching requires (transform.ExplicitOuter with PatternMatchers wi
}
def condition(tpe: Type, scrutineeTree: Tree): Tree = {
- assert( tpe ne NoType )
+ assert(tpe ne NoType)
assert(scrutineeTree.tpe ne NoType)
- if(tpe.isInstanceOf[SingletonType] && !tpe.isInstanceOf[ConstantType]) {
- if(scrutineeTree.tpe <:< definitions.AnyRefClass.tpe)
+ if (tpe.isInstanceOf[SingletonType] && !tpe.isInstanceOf[ConstantType]) {
+ if (scrutineeTree.tpe <:< definitions.AnyRefClass.tpe)
Eq(gen.mkAttributedRef(tpe.symbol), scrutineeTree) // object
else
Equals(gen.mkAttributedRef(tpe.symbol), scrutineeTree) // object
- } else if(tpe.isInstanceOf[ConstantType]) {
+ } else if (tpe.isInstanceOf[ConstantType]) {
val value = tpe.asInstanceOf[ConstantType].value
//if(false && value.isInstanceOf[NamedConstant])
// Equals(Ident(scrut), value.asInstanceOf[NamedConstant].tree) // constant
//assert(scrut.tpe <:< definitions.AnyRefClass.tpe, "stupid, should be caught by type checker "+value)
//else
- if(value == Constant(null) && scrutineeTree.tpe <:< definitions.AnyRefClass.tpe)
+ if (value == Constant(null) && scrutineeTree.tpe <:< definitions.AnyRefClass.tpe)
Eq(scrutineeTree, Literal(value)) // constant
else
Equals(scrutineeTree, Literal(value)) // constant
@@ -615,8 +622,8 @@ trait ParallelMatching requires (transform.ExplicitOuter with PatternMatchers wi
theRef = handleOuter(theRef)
val outerAcc = outerAccessor(tpe2test.symbol)
- if(outerAcc == NoSymbol) {
- if(settings.debug.value) cunit.warning(scrutinee.pos, "no outer acc for "+tpe2test.symbol)
+ if (outerAcc == NoSymbol) {
+ if (settings.debug.value) cunit.warning(scrutinee.pos, "no outer acc for "+tpe2test.symbol)
cond
} else
And(cond,
diff --git a/src/compiler/scala/tools/nsc/matching/PatternMatchers.scala b/src/compiler/scala/tools/nsc/matching/PatternMatchers.scala
index 601262b217..cb17975243 100644
--- a/src/compiler/scala/tools/nsc/matching/PatternMatchers.scala
+++ b/src/compiler/scala/tools/nsc/matching/PatternMatchers.scala
@@ -1,13 +1,12 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2006 LAMP/EPFL
+ * Copyright 2005-2007 LAMP/EPFL
* @author Burak Emir
*/
// $Id$
package scala.tools.nsc.matching
-import compat.StringBuilder
-import scala.tools.nsc.util.{Position,NoPosition}
+import scala.tools.nsc.util.{Position, NoPosition}
/** This trait ...
*
@@ -167,7 +166,7 @@ trait PatternMatchers requires (transform.ExplicitOuter with PatternNodes with P
if(i != -1) {
val CaseDef(_,_,b) = cases(i)
DEBUG("*** damn, unreachable!")
- //for(val b <- bodies) {
+ //for (b <- bodies) {
// Console.println(b)
//}
cunit.error(b.pos, "unreachable code")
@@ -205,7 +204,7 @@ trait PatternMatchers requires (transform.ExplicitOuter with PatternNodes with P
Console.println("****")
Console.println("**** falling back, cause " + e.getMessage)
Console.println("****")
- for(val CaseDef(pat,guard,_) <- cases)
+ for (CaseDef(pat,guard,_) <- cases)
Console.println(pat.toString)
}
}
@@ -228,17 +227,17 @@ trait PatternMatchers requires (transform.ExplicitOuter with PatternNodes with P
case _h: Header =>
val h = _h.findLast;
- // target.and is a header
- //
- //print()
- //cunit.error(pat.pos, "duplicate case")
- h.or = pDefaultPat(caseDef.pos, target.tpe)
- h.or.and = pBody(caseDef.pos, env.getBoundVars(), guard, body)
- //print()
- //Console.println("tao = "+target.and.or)
- //Console.println("tao = "+target.and.or.or)
+ // target.and is a header
+ //
+ //print()
+ //cunit.error(pat.pos, "duplicate case")
+ h.or = pDefaultPat(caseDef.pos, target.tpe)
+ h.or.and = pBody(caseDef.pos, env.getBoundVars(), guard, body)
+ //print()
+ //Console.println("tao = "+target.and.or)
+ //Console.println("tao = "+target.and.or.or)
case _ =>
- Predef.error("overlapping match at unit = " + cunit + "; cdef = " + caseDef);
+ Predef.error("overlapping match at unit = " + cunit + "; cdef = " + caseDef)
}
}
@@ -386,7 +385,7 @@ trait PatternMatchers requires (transform.ExplicitOuter with PatternNodes with P
var i = 0; while (i < ts.length) {
val target = enter1(ts(i), -1, subroot, subroot.casted, subenv)
target.and = pBody(tree.pos)
- i = i + 1
+ i += 1
}
pAltPat(tree.pos, subroot.and.asInstanceOf[Header])
/*
@@ -622,7 +621,7 @@ print()
}
var i = 0; while (i < pats.length) {
target = enter1(pats(i), i, target, casted, env)
- i = i + 1
+ i += 1
}
target
}
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala
index 33ecdcb479..145c86a6bd 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala
@@ -206,7 +206,7 @@ abstract class Pickler extends SubComponent {
putRefTypes(formals); putType(restpe)
case reflect.PolyType(typeParams, typeBounds, resultType) =>
putRefSymbols(typeParams)
- for(val (t1,t2) <- typeBounds) {
+ for ((t1,t2) <- typeBounds) {
putType(t1)
putType(t2)
}
@@ -479,7 +479,7 @@ abstract class Pickler extends SubComponent {
writeRef(sym)
writeRef(ret)
writeRef(rhs)
- for(val vparams <- vparamss) {
+ for (vparams <- vparamss) {
writeNat(vparams.length)
writeRefs(vparams)
}
@@ -540,7 +540,7 @@ abstract class Pickler extends SubComponent {
writeNat(POLYrtpe)
writeRef(resultType)
writeNat(typeBounds.length)
- for(val (t1,t2) <- typeBounds) {
+ for ((t1,t2) <- typeBounds) {
writeRef(t1)
writeRef(t2)
}
diff --git a/src/compiler/scala/tools/nsc/transform/LiftCode.scala b/src/compiler/scala/tools/nsc/transform/LiftCode.scala
index 21e94a41ac..a77411093f 100644
--- a/src/compiler/scala/tools/nsc/transform/LiftCode.scala
+++ b/src/compiler/scala/tools/nsc/transform/LiftCode.scala
@@ -98,8 +98,8 @@ abstract class LiftCode extends Transform {
reflect.TypeApply(reify(fun), args map (.tpe) map reify)
case Function(vparams, body) =>
- var env1 = env;
- for (val vparam <- vparams) {
+ var env1 = env
+ for (vparam <- vparams) {
val local = reflect.LocalValue(
currentOwner, vparam.symbol.name.toString(), reify(vparam.symbol.tpe));
env1.update(vparam.symbol, local);
@@ -142,7 +142,7 @@ abstract class LiftCode extends Transform {
reflect.ClassDef(rsym, rtpe, rimp.asInstanceOf[reflect.Template])
case tmpl @ Template(parents, body) =>
- val rparents = for(val p <- parents) yield { reify(p.tpe) }
+ val rparents = for (p <- parents) yield { reify(p.tpe) }
reflect.Template(rparents, body.map(reify))
case dd @ DefDef(mods, name, tparams, vparamss, tpt, rhs) =>
@@ -266,7 +266,7 @@ abstract class LiftCode extends Transform {
val name = className(c)
if (name.length() == 0) throw new Error("don't know how to inject " + value)
val injectedArgs = new ListBuffer[Tree]
- for (val i <- 0 until c.productArity)
+ for (i <- 0 until c.productArity)
injectedArgs += inject(c.productElement(i))
New(Ident(definitions.getClass(name)), List(injectedArgs.toList))
}
diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala
index 4624782479..f119288d54 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2006 LAMP/EPFL
+ * Copyright 2005-2007 LAMP/EPFL
* @author Martin Odersky
*/
// $Id$
@@ -14,7 +14,8 @@ import symtab.Flags._
* @author Martin Odersky
* @version 1.0
*/
-trait Infer requires Analyzer {
+trait Infer {
+ self: Analyzer =>
import global._
import definitions._
import posAssigner.atPos
@@ -46,7 +47,7 @@ trait Infer requires Analyzer {
}
if (isVarArgs(formals1)) {
val ft = formals1.last.normalize.typeArgs.head
- formals1.init ::: (for (val i <- List.range(formals1.length - 1, nargs)) yield ft)
+ formals1.init ::: (for (i <- List.range(formals1.length - 1, nargs)) yield ft)
} else formals1
}
@@ -157,7 +158,7 @@ trait Infer requires Analyzer {
val bound: Type = if (up) tparam.info.bounds.hi else tparam.info.bounds.lo
// Console.println("solveOne0 "+tvar+" "+config+" "+bound);//DEBUG
var cyclic = bound contains tparam
- for (val (tvar2, (tparam2, variance2)) <- config) {
+ for ((tvar2, (tparam2, variance2)) <- config) {
if (tparam2 != tparam &&
((bound contains tparam2) ||
up && (tparam2.info.bounds.lo =:= tparam.tpe) || //@M TODO: might be affected by change to tpe in Symbol
@@ -172,7 +173,7 @@ trait Infer requires Analyzer {
tvar.constr.hibounds =
bound.instantiateTypeParams(tparams, tvars) :: tvar.constr.hibounds
}
- for (val tparam2 <- tparams)
+ for (tparam2 <- tparams)
if (tparam2.info.bounds.lo =:= tparam.tpe) //@M TODO: might be affected by change to tpe in Symbol
tvar.constr.hibounds =
tparam2.tpe.instantiateTypeParams(tparams, tvars) :: tvar.constr.hibounds
@@ -181,7 +182,7 @@ trait Infer requires Analyzer {
tvar.constr.lobounds =
bound.instantiateTypeParams(tparams, tvars) :: tvar.constr.lobounds
}
- for (val tparam2 <- tparams)
+ for (tparam2 <- tparams)
if (tparam2.info.bounds.hi =:= tparam.tpe) //@M TODO: might be affected by change to tpe in Symbol
tvar.constr.lobounds =
tparam2.tpe.instantiateTypeParams(tparams, tvars) :: tvar.constr.lobounds
@@ -193,7 +194,7 @@ trait Infer requires Analyzer {
assertNonCyclic(tvar)//debug
}
}
- for (val (tvar, (tparam, variance)) <- config)
+ for ((tvar, (tparam, variance)) <- config)
solveOne(tvar, tparam, variance)
tvars map instantiate
}
@@ -310,9 +311,9 @@ trait Infer requires Analyzer {
val syms1 = typeRefs.collect(tp1)
val syms2 = typeRefs.collect(tp2)
for {
- val sym1 <- syms1
- val sym2 <- syms2
- sym1 != sym2 && sym1.toString == sym2.toString
+ sym1 <- syms1
+ sym2 <- syms2
+ if sym1 != sym2 && sym1.toString == sym2.toString
} yield {
val name = sym1.name
explainName(sym1)
@@ -324,7 +325,7 @@ trait Infer requires Analyzer {
val result = op
- for (val (sym1, sym2, name) <- patches) {
+ for ((sym1, sym2, name) <- patches) {
sym1.name = name
sym2.name = name
}
@@ -534,7 +535,7 @@ trait Infer requires Analyzer {
throw new DeferredNoInstance(() =>
"result type " + normalize(restpe) + " is incompatible with expected type " + pt)
}
- for (val tvar <- tvars)
+ for (tvar <- tvars)
if (!isFullyDefined(tvar)) tvar.constr.inst = NoType
// Then define remaining type variables from argument types.
@@ -700,16 +701,16 @@ trait Infer requires Analyzer {
def varianceMismatches(as: Iterable[(Symbol, Symbol)]): unit = _varianceMismatches ++= as
def stricterBounds(as: Iterable[(Symbol, Symbol)]): unit = _stricterBounds ++= as
- for(val (hkarg, hkparam) <- hkargs zip hkparams) {
- if(hkparam.typeParams.isEmpty) { // base-case: kind *
- if(!variancesMatch(hkarg, hkparam))
+ for ((hkarg, hkparam) <- hkargs zip hkparams) {
+ if (hkparam.typeParams.isEmpty) { // base-case: kind *
+ if (!variancesMatch(hkarg, hkparam))
varianceMismatch(hkarg, hkparam)
// instantiateTypeParams(tparams, targs) --> higher-order bounds may contain references to type arguments
// substSym(hkparams, hkargs) --> these types are going to be compared as types of kind *
// --> their arguments use different symbols, but are conceptually the same
// (could also replace the types by polytypes, but can't just strip the symbols, as ordering is lost then)
- if(!(hkparam.info.instantiateTypeParams(tparams, targs).bounds.substSym(hkparams, hkargs) <:< hkarg.info.bounds))
+ if (!(hkparam.info.instantiateTypeParams(tparams, targs).bounds.substSym(hkparams, hkargs) <:< hkarg.info.bounds))
stricterBound(hkarg, hkparam)
} else {
val (vm, sb) = checkKindBoundsHK(hkarg.typeParams, hkparam.typeParams)
@@ -728,20 +729,23 @@ trait Infer requires Analyzer {
else "invariant";
def qualify(a0: Symbol, b0: Symbol): String = if(a0.toString != b0.toString) "" else {
- assert((a0 ne b0) && (a0.owner ne b0.owner)); var a=a0; var b=b0
- while(a.owner.name == b.owner.name) {a=a.owner; b=b.owner}
- if(a.locationString ne "") " (" + a.locationString.trim + ")" else ""
+ assert((a0 ne b0) && (a0.owner ne b0.owner));
+ var a = a0; var b = b0
+ while (a.owner.name == b.owner.name) { a = a.owner; b = b.owner}
+ if (a.locationString ne "") " (" + a.locationString.trim + ")" else ""
}
val errors = new ListBuffer[String]
(tparams zip targs).foreach{ case (tparam, targ) if(targ.isHigherKinded) =>
val (varianceMismatches, stricterBounds) = checkKindBoundsHK(targ.typeParams, tparam.typeParams)
- if(!(varianceMismatches.isEmpty && stricterBounds.isEmpty)){
+ if (!(varianceMismatches.isEmpty && stricterBounds.isEmpty)){
errors += (targ+"'s type parameters do not match "+tparam+"'s expected parameters: "+
- (for(val (a,p) <- varianceMismatches) yield a+qualify(a,p)+ " is "+varStr(a)+", but "+
+ (for ((a, p) <- varianceMismatches)
+ yield a+qualify(a,p)+ " is "+varStr(a)+", but "+
p+qualify(p,a)+" is declared "+varStr(p)).toList.mkString("", ", ", "") +
- (for(val (a,p) <- stricterBounds) yield a+qualify(a,p)+"'s bounds "+a.info+" are stricter than "+
+ (for ((a, p) <- stricterBounds)
+ yield a+qualify(a,p)+"'s bounds "+a.info+" are stricter than "+
p+qualify(p,a)+"'s declared bounds "+p.info).toList.mkString("", ", ", ""))
}
case _ =>
@@ -962,7 +966,7 @@ trait Infer requires Analyzer {
else if (sym == AllClass || sym == AllRefClass)
error(pos, "this type cannot be used in a type pattern")
else
- for (val arg <- args) {
+ for (arg <- args) {
if (sym == ArrayClass) checkCheckable(pos, arg)
else arg match {
case TypeRef(_, sym, _) if isLocalBinding(sym) =>
@@ -973,7 +977,7 @@ trait Infer requires Analyzer {
}
checkCheckable(pos, pre)
case RefinedType(parents, decls) =>
- if (decls.isEmpty) for (val p <- parents) checkCheckable(pos, p)
+ if (decls.isEmpty) for (p <- parents) checkCheckable(pos, p)
else patternWarning(tp, "refinement ")
case ThisType(_) =>
;
@@ -984,8 +988,8 @@ trait Infer requires Analyzer {
}
}
- /** Type intersection of simple type `tp1' with general type `tp2'
- * The result eliminates some redundancies
+ /** Type intersection of simple type <code>tp1</code> with general
+ * type <code>tp2</code>. The result eliminates some redundancies.
*/
def intersect(tp1: Type, tp2: Type): Type = {
if (tp1 <:< tp2) tp1
@@ -1081,7 +1085,8 @@ trait Infer requires Analyzer {
/** A traverser to collect type parameters referred to in a type
*/
object freeTypeParamsOfTerms extends SymCollector {
- protected def includeCondition(sym: Symbol): boolean = sym.isAbstractType && sym.owner.isTerm
+ protected def includeCondition(sym: Symbol): boolean =
+ sym.isAbstractType && sym.owner.isTerm
}
object typeRefs extends SymCollector {
@@ -1098,7 +1103,7 @@ trait Infer requires Analyzer {
def checkNotShadowed(pos: Position, pre: Type, best: Symbol, eligible: List[Symbol]) =
if (!phase.erasedTypes)
- for (val alt <- eligible) {
+ for (alt <- eligible) {
if (alt.owner != best.owner && alt.owner.isSubClass(best.owner))
error(pos,
"erroneous reference to overloaded definition,\n"+
diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala
index 9adfbe07fc..cc89c8e623 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2006 LAMP/EPFL
+ * Copyright 2005-2007 LAMP/EPFL
* @author Martin Odersky
*/
// $Id$
@@ -210,7 +210,7 @@ trait Namers requires Analyzer {
def enterSyms(trees: List[Tree]): Namer = {
var namer : Namer = this
- for (val tree <- trees) {
+ for (tree <- trees) {
val txt = namer.enterSym(tree)
if (!(txt eq namer.context)) namer = new Namer(txt)
}
@@ -232,7 +232,7 @@ trait Namers requires Analyzer {
*/
def skolemize(tparams: List[AbsTypeDef]): unit = {
val tskolems = newTypeSkolems(tparams map (.symbol))
- for (val (tparam, tskolem) <- tparams zip tskolems) tparam.symbol = tskolem
+ for ((tparam, tskolem) <- tparams zip tskolems) tparam.symbol = tskolem
}
def applicableTypeParams(owner: Symbol): List[Symbol] =
@@ -241,14 +241,12 @@ trait Namers requires Analyzer {
def deSkolemize: TypeMap = new DeSkolemizeMap(applicableTypeParams(context.owner))
-
- private def doLateParams = if(!lateParams.isEmpty) {
- val todo = lateParams.toList
- lateParams.clear
- for(val rec <- todo) {
- rec._1.finishWith0(rec._2, rec._3, rec._4)
- }
- }
+ private def doLateParams = if (!lateParams.isEmpty) {
+ val todo = lateParams.toList
+ lateParams.clear
+ for (rec <- todo)
+ rec._1.finishWith0(rec._2, rec._3, rec._4)
+ }
private def finishWith0(sym: Symbol, tree: Tree, tparams: List[AbsTypeDef]): unit = {
if (settings.debug.value) log("entered " + sym + " in " + context.owner + ", scope-id = " + context.scope.hashCode());
@@ -519,7 +517,7 @@ trait Namers requires Analyzer {
// fill in result type and parameter types from overridden symbol if there is a unique one.
if (meth.owner.isClass && (tpt.isEmpty || vparamss.exists(.exists(.tpt.isEmpty)))) {
// try to complete from matching definition in base type
- for (val vparams <- vparamss; val vparam <- vparams)
+ for (vparams <- vparamss; vparam <- vparams)
if (vparam.tpt.isEmpty) vparam.symbol setInfo WildcardType
val overridden = overriddenSymbol
if (overridden != NoSymbol && !(overridden hasFlag OVERLOADED)) {
@@ -528,9 +526,9 @@ trait Namers requires Analyzer {
case mt => mt
}
- for (val vparams <- vparamss) {
+ for (vparams <- vparamss) {
var pfs = resultPt.paramTypes
- for (val vparam <- vparams) {
+ for (vparam <- vparams) {
if (vparam.tpt.isEmpty) {
vparam.tpt.tpe = pfs.head
vparam.symbol setInfo pfs.head
@@ -556,7 +554,7 @@ trait Namers requires Analyzer {
.info.isInstanceOf[MethodType])) {
vparamSymss = List(List())
}
- for (val vparams <- vparamss; val vparam <- vparams; vparam.tpt.isEmpty) {
+ for (vparams <- vparamss; vparam <- vparams if vparam.tpt.isEmpty) {
context.error(vparam.pos, "missing parameter type")
vparam.tpt.tpe = ErrorType
}
@@ -594,7 +592,7 @@ trait Namers requires Analyzer {
if (sym hasFlag IMPLICIT) {
val p = provided(tp);
//Console.println("check contractive: "+sym+" "+p+"/"+required(tp))
- for (val r <- required(tp)) {
+ for (r <- required(tp)) {
if (!isContainedIn(r, p) || (r =:= p)) {
context.error(sym.pos, "implicit " + sym + " is not contractive," +
"\n because the implicit parameter type " + r +
@@ -627,9 +625,9 @@ trait Namers requires Analyzer {
tree match {
case defn: MemberDef =>
val ainfos = for {
- val annot <- defn.mods.annotations
+ annot <- defn.mods.annotations
val ainfo = typer.typedAnnotation(annot, typer.getConstant)
- !ainfo.atp.isError
+ if !ainfo.atp.isError
} yield ainfo
if (!ainfos.isEmpty) {
val annotated = if (sym.isModule) sym.moduleClass else sym
diff --git a/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala b/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala
index 10c7818e6e..c395acc60b 100644
--- a/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala
@@ -81,7 +81,7 @@ trait SyntheticMethods requires Analyzer {
//val retTpe = lub(accs map (.tpe.resultType))
val method = syntheticMethod(nme.productElement, FINAL, MethodType(List(IntClass.tpe), AnyClass.tpe/*retTpe*/))
typed(DefDef(method, vparamss => Match(Ident(vparamss.head.head), {
- (for((sym,i) <- accs.zipWithIndex) yield {
+ (for ((sym,i) <- accs.zipWithIndex) yield {
CaseDef(Literal(Constant(i)),EmptyTree, Ident(sym))
}):::List(CaseDef(Ident(nme.WILDCARD), EmptyTree,
Throw(New(TypeTree(IndexOutOfBoundsExceptionClass.tpe), List(List(
diff --git a/src/library/scala/util/automata/SubsetConstruction.scala b/src/library/scala/util/automata/SubsetConstruction.scala
index 8786ada85c..46d4c916f5 100644
--- a/src/library/scala/util/automata/SubsetConstruction.scala
+++ b/src/library/scala/util/automata/SubsetConstruction.scala
@@ -1,7 +1,7 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2006, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
+** / __/ __// _ | / / / _ | (c) 2003-2007, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
@@ -9,28 +9,26 @@
// $Id$
-package scala.util.automata ;
+package scala.util.automata
class SubsetConstruction[T <: AnyRef](val nfa: NondetWordAutom[T]) {
- //import nfa.{ _labelT, labels };
- import nfa.labels ;
- import scala.collection.{immutable, mutable, Map} ;
-
- import immutable.{ BitSet, TreeMap, TreeSet } ;
+ import nfa.labels
+ import scala.collection.{immutable, mutable, Map}
+ import immutable.{BitSet, TreeMap, TreeSet}
implicit def toOrdered(bs: BitSet): Ordered[BitSet] = new Ordered[BitSet] {
- def compare (that: BitSet): Int = {
- val it1 = bs.elements;
- val it2 = that.elements;
- var res = 0;
+ def compare(that: BitSet): Int = {
+ val it1 = bs.elements
+ val it2 = that.elements
+ var res = 0
while((0 == res) && it1.hasNext) {
while((0 == res) && it2.hasNext) {
if (!it1.hasNext)
res = -1
else {
- val i1 = it1.next;
- val i2 = it2.next;
+ val i1 = it1.next
+ val i2 = it2.next
if (i1 < i2)
res = -1
else if (i1 > i2)
@@ -41,7 +39,7 @@ class SubsetConstruction[T <: AnyRef](val nfa: NondetWordAutom[T]) {
res = 1
}
if (it2.hasNext)
- res = -1;
+ res = -1
res
}
@@ -50,111 +48,108 @@ class SubsetConstruction[T <: AnyRef](val nfa: NondetWordAutom[T]) {
/** the set {0} */
final val _initialBitSet = {
- val rbs = new mutable.BitSet(1);
- rbs += 0;
- rbs.toImmutable;
+ val rbs = new mutable.BitSet(1)
+ rbs += 0
+ rbs.toImmutable
}
/** the set {} */
- final val _sinkBitSet = new mutable.BitSet(1).toImmutable;
+ final val _sinkBitSet = new mutable.BitSet(1).toImmutable
- final val _emptyBitSet = new scala.collection.mutable.BitSet(1).toImmutable;
+ final val _emptyBitSet = new scala.collection.mutable.BitSet(1).toImmutable
- def selectTag(Q:BitSet, finals:Array[Int]) = {
- val it = Q.elements;
- var mintag = compat.Math.MAX_INT;
- while(it.hasNext) {
- val tag = finals(it.next);
- if((0 < tag) && (tag < mintag))
+ def selectTag(Q: BitSet, finals: Array[Int]) = {
+ val it = Q.elements
+ var mintag = compat.Math.MAX_INT
+ while (it.hasNext) {
+ val tag = finals(it.next)
+ if ((0 < tag) && (tag < mintag))
mintag = tag
}
mintag
}
- def determinize: DetWordAutom[ T ] = {
+ def determinize: DetWordAutom[T] = {
// for assigning numbers to bitsets
- var indexMap = new TreeMap[ BitSet, Int ];
- var invIndexMap = new TreeMap[ Int, BitSet ];
- var ix = 0;
+ var indexMap = new TreeMap[BitSet, Int]
+ var invIndexMap = new TreeMap[Int, BitSet]
+ var ix = 0
// we compute the dfa with states = bitsets
- var states = new TreeSet[BitSet]();
+ var states = new TreeSet[BitSet]()
val delta = new mutable.HashMap[BitSet,
- mutable.HashMap[T, BitSet]];
- var deftrans = new TreeMap[BitSet, BitSet];
- var finals = new TreeMap[BitSet, Int];
+ mutable.HashMap[T, BitSet]]
+ var deftrans = new TreeMap[BitSet, BitSet]
+ var finals = new TreeMap[BitSet, Int]
- val q0 = _initialBitSet;
- states = states + q0;
+ val q0 = _initialBitSet
+ states = states + q0
- val sink = _emptyBitSet;
- states = states + sink;
+ val sink = _emptyBitSet
+ states = states + sink
deftrans = deftrans.update(q0,sink);
deftrans = deftrans.update(sink,sink);
val rest = new mutable.Stack[BitSet]();
- def add(Q: BitSet): Unit = {
- if(!states.contains(Q)) {
- states = states + Q;
- rest.push(Q);
- if(nfa.containsFinal(Q))
- finals = finals.update(Q, selectTag(Q,nfa.finals));
+ def add(Q: BitSet) {
+ if (!states.contains(Q)) {
+ states = states + Q
+ rest.push(Q)
+ if (nfa.containsFinal(Q))
+ finals = finals.update(Q, selectTag(Q,nfa.finals));
}
- }
- rest.push( sink );
- val sinkIndex = 1;
- rest.push( q0 );
- while(!rest.isEmpty) {
+ }
+ rest.push(sink)
+ val sinkIndex = 1
+ rest.push(q0)
+ while (!rest.isEmpty) {
// assign a number to this bitset
- val P = rest.pop;
- indexMap = indexMap.update(P,ix);
- invIndexMap = invIndexMap.update(ix,P);
- ix = ix + 1;
+ val P = rest.pop
+ indexMap = indexMap.update(P,ix)
+ invIndexMap = invIndexMap.update(ix,P)
+ ix += 1
// make transitiion map
- val Pdelta = new mutable.HashMap[T, BitSet];
- delta.update( P, Pdelta );
+ val Pdelta = new mutable.HashMap[T, BitSet]
+ delta.update(P, Pdelta)
val it = labels.elements; while(it.hasNext) {
- val label = it.next;
-
- val Q = nfa.next(P,label);
-
- Pdelta.update( label, Q );
-
- add(Q);
+ val label = it.next
+ val Q = nfa.next(P,label)
+ Pdelta.update(label, Q)
+ add(Q)
}
// collect default transitions
- val Pdef = nfa.nextDefault(P);
- deftrans = deftrans.update(P,Pdef);
- add(Pdef);
+ val Pdef = nfa.nextDefault(P)
+ deftrans = deftrans.update(P, Pdef)
+ add(Pdef)
};
// create DetWordAutom, using indices instead of sets
- val nstatesR = states.size;
- val deltaR = new Array[Map[T,Int]](nstatesR);
- val defaultR = new Array[Int](nstatesR);
- val finalsR = new Array[Int](nstatesR);
-
- for(val w <- states) {
- val Q = w;
- val q = indexMap(Q);
- val trans = delta(Q);
- val transDef = deftrans(Q);
- val qDef = indexMap(transDef);
- val ntrans = new mutable.HashMap[T,Int]();
+ val nstatesR = states.size
+ val deltaR = new Array[Map[T,Int]](nstatesR)
+ val defaultR = new Array[Int](nstatesR)
+ val finalsR = new Array[Int](nstatesR)
+
+ for (w <- states) {
+ val Q = w
+ val q = indexMap(Q)
+ val trans = delta(Q)
+ val transDef = deftrans(Q)
+ val qDef = indexMap(transDef)
+ val ntrans = new mutable.HashMap[T,Int]()
val it = trans.keys; while(it.hasNext) {
- val label = it.next;
- val p = indexMap(trans(label));
- if( p != qDef )
+ val label = it.next
+ val p = indexMap(trans(label))
+ if (p != qDef)
ntrans.update(label, p)
}
- deltaR.update(q, ntrans);
- defaultR.update(q, qDef);
+ deltaR.update(q, ntrans)
+ defaultR.update(q, qDef)
//cleanup? leave to garbage collector?
//delta.remove(Q);
@@ -162,17 +157,14 @@ class SubsetConstruction[T <: AnyRef](val nfa: NondetWordAutom[T]) {
}
- for(val fQ <- finals.keys) {
- finalsR(indexMap(fQ)) = finals(fQ);
- }
-
- new DetWordAutom [ T ] {
+ for (fQ <- finals.keys) finalsR(indexMap(fQ)) = finals(fQ)
+ new DetWordAutom [T] {
//type _labelT = SubsetConstruction.this.nfa._labelT;
- val nstates = nstatesR;
- val delta = deltaR;
- val default = defaultR;
- val finals = finalsR;
+ val nstates = nstatesR
+ val delta = deltaR
+ val default = defaultR
+ val finals = finalsR
}
}
}
diff --git a/src/library/scala/util/automata/WordBerrySethi.scala b/src/library/scala/util/automata/WordBerrySethi.scala
index ee031d67c6..a268344557 100644
--- a/src/library/scala/util/automata/WordBerrySethi.scala
+++ b/src/library/scala/util/automata/WordBerrySethi.scala
@@ -278,7 +278,7 @@ abstract class WordBerrySethi extends BaseBerrySethi {
System.out.println("#positions:" + pos);
System.out.println("posMap:");
- for( Iterator it = this.posMap.keySet().iterator();
+ for (Iterator it = this.posMap.keySet().iterator();
it.hasNext(); ) {
Tree t = (Tree) it.next();
switch(t) {
@@ -289,7 +289,7 @@ abstract class WordBerrySethi extends BaseBerrySethi {
}
}
System.out.println("\nfollow: ");
- for( int j = 1; j < pos; j++ ) {
+ for (int j = 1; j < pos; j++ ) {
TreeSet fol = (TreeSet) this.follow.get(new Integer(j));
System.out.print("("+j+" -> "+fol.toString()+") ");
//debugPrint( fol );
diff --git a/src/library/scala/xml/PrettyPrinter.scala b/src/library/scala/xml/PrettyPrinter.scala
index ad3f26c2ed..8f92149c18 100644
--- a/src/library/scala/xml/PrettyPrinter.scala
+++ b/src/library/scala/xml/PrettyPrinter.scala
@@ -187,9 +187,9 @@ class PrettyPrinter( width:Int, step:Int ) {
val sq:Seq[String] = stg.split(" ");
val it = sq.elements;
it.next;
- for(c <- it) {
- makeBox( ind+len2-2, c );
- makeBreak();
+ for (c <- it) {
+ makeBox(ind+len2-2, c)
+ makeBreak()
}
}*/
makeBox(ind, stg.substring(len2, stg.length()))
diff --git a/src/library/scala/xml/dtd/ContentModel.scala b/src/library/scala/xml/dtd/ContentModel.scala
index d1113b15b7..8b0b6e7ae3 100644
--- a/src/library/scala/xml/dtd/ContentModel.scala
+++ b/src/library/scala/xml/dtd/ContentModel.scala
@@ -11,19 +11,18 @@
package scala.xml.dtd
-import compat.StringBuilder
import scala.util.regexp.WordExp
-import scala.util.automata._
+import scala.util.automata.{DetWordAutom, SubsetConstruction, WordBerrySethi}
-object ContentModel extends WordExp {
+object ContentModel extends WordExp {
type _labelT = ElemName
type _regexpT = RegExp
- object Translator extends WordBerrySethi {
+ object Translator extends WordBerrySethi {
- override val lang: ContentModel.this.type = ContentModel.this;
- import lang._ ;
- //val re = Sequ(Star(Letter(IntConst( 3 ))));
+ override val lang: ContentModel.this.type = ContentModel.this
+ import lang._
+ //val re = Sequ(Star(Letter(IntConst( 3 ))))
//val aut = automatonFrom(re, 7)
}
@@ -32,21 +31,20 @@ object ContentModel extends WordExp {
override def toString() = "ElemName(\""+name+"\")"
}
- def isMixed(cm: ContentModel) = cm.isInstanceOf[MIXED];
+ def isMixed(cm: ContentModel) = cm.isInstanceOf[MIXED]
def containsText(cm: ContentModel) = (cm == PCDATA) || isMixed(cm)
def parse(s: String): ContentModel = ContentModelParser.parse(s)
def getLabels(r: RegExp): scala.collection.Set[String] = {
val s = new scala.collection.mutable.HashSet[String]()
- def traverse1(xs: Seq[RegExp]): Unit = {
- val it = xs.elements;
- while( it.hasNext )
- traverse( it.next );
- }
- def traverse(r: RegExp): Unit = {
+ def traverse1(xs: Seq[RegExp]) {
+ val it = xs.elements
+ while (it.hasNext) traverse(it.next)
+ }
+ def traverse(r: RegExp) {
r match {
- case Letter(ElemName( name )) => s += name;
+ case Letter(ElemName(name)) => s += name
case Star( x @ _ ) => traverse( x ) // bug if x@_*
case Sequ( xs @ _* ) => traverse1(xs)
case Alt( xs @ _* ) => traverse1(xs)
@@ -63,12 +61,11 @@ object ContentModel extends WordExp {
}
/* precond: rs.length >= 1 */
- private def toString(rs: Seq[RegExp], sb: StringBuilder, sep: Char): Unit = {
-
+ private def toString(rs: Seq[RegExp], sb: StringBuilder, sep: Char) {
val it = rs.elements
val fst = it.next
toString(fst, sb)
- for(val z <- it) {
+ for (z <- it) {
sb.append(sep)
toString(z, sb)
}
@@ -112,24 +109,24 @@ sealed abstract class ContentModel {
def toString(sb:StringBuilder): StringBuilder;
/*
def validate(cs: NodeSeq): Boolean = this.match {
- case ANY => true ;
- case EMPTY => cs.length == 0;
+ case ANY => true
+ case EMPTY => cs.length == 0
case PCDATA => cs.length == 0
- || (cs.length == 1 && cs(0).isInstanceOf[Text]);
- case m@MIXED(r) => m.runDFA(cs);
- case e@ELEMENTS(r) => e.runDFA(cs);
+ || (cs.length == 1 && cs(0).isInstanceOf[Text])
+ case m@MIXED(r) => m.runDFA(cs)
+ case e@ELEMENTS(r) => e.runDFA(cs)
}
*/
}
case object PCDATA extends ContentModel {
- override def toString(sb:StringBuilder): StringBuilder = sb.append("(#PCDATA)")
+ override def toString(sb: StringBuilder): StringBuilder = sb.append("(#PCDATA)")
}
case object EMPTY extends ContentModel {
- override def toString(sb:StringBuilder): StringBuilder = sb.append("EMPTY")
+ override def toString(sb: StringBuilder): StringBuilder = sb.append("EMPTY")
}
case object ANY extends ContentModel {
- override def toString(sb:StringBuilder): StringBuilder = sb.append("ANY")
+ override def toString(sb: StringBuilder): StringBuilder = sb.append("ANY")
}
sealed abstract class DFAContentModel extends ContentModel {
import ContentModel.ElemName
@@ -145,7 +142,7 @@ sealed abstract class DFAContentModel extends ContentModel {
}
}
case class MIXED(r:ContentModel.RegExp) extends DFAContentModel {
- import ContentModel.{ Alt, Eps, RegExp };
+ import ContentModel.{Alt, Eps, RegExp}
/*
def getIterator(ns:NodeSeq) = new Iterator[String] {
def cond(n:Node) =
@@ -167,7 +164,7 @@ Console.println("ns = "+ns);
}
}
*/
- override def toString(sb:StringBuilder): StringBuilder = {
+ override def toString(sb: StringBuilder): StringBuilder = {
sb.append("(#PCDATA|");
//r match {
// case Alt(Eps, rs@_*) => ContentModel.toString(Alt(rs:_*):RegExp, sb);
diff --git a/src/library/scala/xml/dtd/DTD.scala b/src/library/scala/xml/dtd/DTD.scala
index d1907fd5c0..0ca74a7926 100644
--- a/src/library/scala/xml/dtd/DTD.scala
+++ b/src/library/scala/xml/dtd/DTD.scala
@@ -1,7 +1,7 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2006, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
+** / __/ __// _ | / / / _ | (c) 2002-2007, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
@@ -9,44 +9,46 @@
// $Id$
-package scala.xml.dtd;
+package scala.xml.dtd
-import scala.collection.mutable.{ HashMap, Map }
+import scala.collection.mutable.{HashMap, Map}
-/** a document type declaration */
+/** A document type declaration.
+ *
+ * @author Burak Emir
+ */
abstract class DTD {
- var externalID: ExternalID = null;
+ var externalID: ExternalID = null
- def notations: Seq[NotationDecl] = Nil;
+ def notations: Seq[NotationDecl] = Nil
- def unparsedEntities: Seq[EntityDecl] = Nil;
+ def unparsedEntities: Seq[EntityDecl] = Nil
- var elem: Map[String, ElemDecl] = new HashMap[String, ElemDecl]();
+ var elem: Map[String, ElemDecl] = new HashMap[String, ElemDecl]()
- var attr: Map[String, AttListDecl] = new HashMap[String, AttListDecl]();
+ var attr: Map[String, AttListDecl] = new HashMap[String, AttListDecl]()
- var ent: Map[String, EntityDecl] = new HashMap[String, EntityDecl]();
+ var ent: Map[String, EntityDecl] = new HashMap[String, EntityDecl]()
- var decls: List[Decl] = Nil;
+ var decls: List[Decl] = Nil
- //def getElemDecl(elem:String): ElemDecl;
+ //def getElemDecl(elem:String): ElemDecl
- //def getAttribDecl(elem: String, attr: String): AttrDecl;
+ //def getAttribDecl(elem: String, attr: String): AttrDecl
override def toString() = {
- val sb = new compat.StringBuilder();
- sb.append("DTD [\n");
- if(null != externalID)
- sb.append(externalID.toString()).append('\n');
- for(val d <- decls)
- sb.append(d.toString()).append('\n');
+ val sb = new StringBuilder("DTD [\n")
+ if (null != externalID)
+ sb.append(externalID.toString()).append('\n')
+ for (d <- decls)
+ sb.append(d.toString()).append('\n')
sb.append("]").toString()
}
/*
def initializeEntities() = {
- for(val x <- decls) x match {
+ for (x <- decls) x match {
case y @ ParsedEntityDecl(name, _) => ent.update(name, y);
case y @ UnparsedEntityDecl(name, _, _) => ent.update(name, y);
case y @ ParameterEntityDecl(name, _) => ent.update(name, y);
diff --git a/src/library/scala/xml/dtd/DocType.scala b/src/library/scala/xml/dtd/DocType.scala
index e31f43990f..aa80a685e1 100644
--- a/src/library/scala/xml/dtd/DocType.scala
+++ b/src/library/scala/xml/dtd/DocType.scala
@@ -1,7 +1,7 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2006, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
+** / __/ __// _ | / / / _ | (c) 2003-2007, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
@@ -9,39 +9,37 @@
// $Id$
-package scala.xml.dtd;
+package scala.xml.dtd
-
-/** an XML node for document type declaration
+/** An XML node for document type declaration.
*
- * @author Burak Emir
- * @param target name of this DOCTYPE
- * @param extID None, or Some(external ID of this doctype)
- * @param intSubset sequence of internal subset declarations
-**/
-
-case class DocType( name:String, extID:ExternalID, intSubset:Seq[dtd.Decl]) {
+ * @author Burak Emir
+ *
+ * @param target name of this DOCTYPE
+ * @param extID None, or Some(external ID of this doctype)
+ * @param intSubset sequence of internal subset declarations
+ */
+case class DocType(name: String, extID: ExternalID, intSubset: Seq[dtd.Decl]) {
- if( !Utility.isName( name ) )
+ if (!Utility.isName(name))
throw new IllegalArgumentException(name+" must be an XML Name");
/** hashcode for this processing instruction */
- final override def hashCode() = name.hashCode() + 7 * extID.hashCode() + 41*intSubset.toList.hashCode();
+ final override def hashCode() =
+ name.hashCode() + 7 * extID.hashCode() + 41*intSubset.toList.hashCode();
/** returns "&lt;!DOCTYPE + name + extID? + ("["+intSubSet+"]")? >" */
final override def toString() = {
- val sb = new compat.StringBuilder().append("<!DOCTYPE ");
- sb.append( name );
- sb.append(' ');
- sb.append(extID.toString());
- if( intSubset.length > 0 ) {
- sb.append('[');
- for( val d <- intSubset ) {
- sb.append( d.toString() );
- }
- sb.append(']');
+ val sb = new StringBuilder("<!DOCTYPE ")
+ sb.append(name)
+ sb.append(' ')
+ sb.append(extID.toString())
+ if (intSubset.length > 0) {
+ sb.append('[')
+ for (d <- intSubset) sb.append(d.toString())
+ sb.append(']')
}
- sb.append('>');
- sb.toString();
+ sb.append('>')
+ sb.toString()
}
}
diff --git a/src/library/scala/xml/factory/Binder.scala b/src/library/scala/xml/factory/Binder.scala
index d6b6b3d08e..937f051bb0 100644
--- a/src/library/scala/xml/factory/Binder.scala
+++ b/src/library/scala/xml/factory/Binder.scala
@@ -1,7 +1,7 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2006, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
+** / __/ __// _ | / / / _ | (c) 2002-2007, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
@@ -9,47 +9,53 @@
// $Id$
-package scala.xml.factory;
+package scala.xml.factory
-import scala.xml.parsing.ValidatingMarkupHandler;
+import scala.xml.parsing.ValidatingMarkupHandler
+/**
+ * @author Burak Emir
+ */
abstract class Binder(val preserveWS: Boolean) extends ValidatingMarkupHandler {
- var result: NodeBuffer = new NodeBuffer();
+ var result: NodeBuffer = new NodeBuffer()
def reportSyntaxError(pos:Int, str:String) = {}
- final def procInstr(pos: Int, target: String, txt: String ) =
- ProcInstr(target, txt);
+ final def procInstr(pos: Int, target: String, txt: String) =
+ ProcInstr(target, txt)
- final def comment(pos: Int, txt: String ) =
- Comment( txt );
+ final def comment(pos: Int, txt: String) =
+ Comment(txt)
final def entityRef(pos: Int, n: String) =
- EntityRef( n );
+ EntityRef(n)
- final def text(pos: Int, txt:String) =
- Text( txt );
+ final def text(pos: Int, txt: String) =
+ Text(txt)
final def traverse(n:Node): Unit = n match {
- case x:ProcInstr => result &+ procInstr(0, x.target, x.text)
- case x:Comment => result &+ comment(0, x.text)
- case x:Text => result &+ text(0, x.data)
- case x:EntityRef => result &+ entityRef(0, x.entityName)
+ case x:ProcInstr =>
+ result &+ procInstr(0, x.target, x.text)
+ case x:Comment =>
+ result &+ comment(0, x.text)
+ case x:Text =>
+ result &+ text(0, x.data)
+ case x:EntityRef =>
+ result &+ entityRef(0, x.entityName)
case _ =>
- elemStart(0, n.prefix, n.label, n.attributes, n.scope);
- val old = result;
- result = new NodeBuffer();
- for(val m <- n.child)
- traverse(m);
+ elemStart(0, n.prefix, n.label, n.attributes, n.scope)
+ val old = result
+ result = new NodeBuffer()
+ for (m <- n.child) traverse(m)
result = old &+ elem(0, n.prefix, n.label, n.attributes, n.scope, NodeSeq.fromSeq(result)).toList;
- elemEnd(0, n.prefix, n.label);
+ elemEnd(0, n.prefix, n.label)
}
- final def validate(n:Node): Node = {
- this.rootLabel = n.label;
- traverse(n);
+ final def validate(n: Node): Node = {
+ this.rootLabel = n.label
+ traverse(n)
result(0)
}
}
diff --git a/src/manual/scala/tools/docutil/EmitManPage.scala b/src/manual/scala/tools/docutil/EmitManPage.scala
index 26efe4ee67..bb01dd0213 100644
--- a/src/manual/scala/tools/docutil/EmitManPage.scala
+++ b/src/manual/scala/tools/docutil/EmitManPage.scala
@@ -101,7 +101,7 @@ object EmitManPage {
out.println("\n.fi")
case lst:BulletList =>
- for(item <- lst.items) {
+ for (item <- lst.items) {
out.println(".IP")
emitText(item)
out.println