-rw-r--r--  src/compiler/scala/reflect/reify/phases/Reshape.scala | 2
-rw-r--r--  src/compiler/scala/reflect/reify/utils/NodePrinters.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/Global.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/MainTokenMetric.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala | 6
-rw-r--r--  src/compiler/scala/tools/nsc/ast/Trees.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/backend/ScalaPrimitives.scala | 6
-rw-r--r--  src/compiler/scala/tools/nsc/backend/icode/ExceptionHandlers.scala | 12
-rw-r--r--  src/compiler/scala/tools/nsc/backend/icode/GenICode.scala | 80
-rw-r--r--  src/compiler/scala/tools/nsc/backend/icode/ICodeCheckers.scala | 26
-rw-r--r--  src/compiler/scala/tools/nsc/backend/icode/Linearizers.scala | 64
-rw-r--r--  src/compiler/scala/tools/nsc/backend/icode/Members.scala | 24
-rw-r--r--  src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala | 40
-rw-r--r--  src/compiler/scala/tools/nsc/backend/icode/Primitives.scala | 8
-rw-r--r--  src/compiler/scala/tools/nsc/backend/icode/Printers.scala | 46
-rw-r--r--  src/compiler/scala/tools/nsc/backend/icode/analysis/CopyPropagation.scala | 46
-rw-r--r--  src/compiler/scala/tools/nsc/backend/icode/analysis/DataFlowAnalysis.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/backend/icode/analysis/Liveness.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/backend/icode/analysis/ReachingDefinitions.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala | 14
-rw-r--r--  src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala | 54
-rw-r--r--  src/compiler/scala/tools/nsc/backend/opt/ClosureElimination.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala | 12
-rw-r--r--  src/compiler/scala/tools/nsc/backend/opt/Inliners.scala | 10
-rw-r--r--  src/compiler/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala | 6
-rw-r--r--  src/compiler/scala/tools/nsc/interactive/Global.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/interactive/REPL.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/interpreter/ILoop.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/io/Pickler.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/javac/JavaParsers.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala | 69
-rw-r--r--  src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala | 6
-rw-r--r--  src/compiler/scala/tools/nsc/transform/Constructors.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/transform/Erasure.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/transform/Flatten.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/transform/LambdaLift.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/transform/Mixin.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/transform/OverridingPairs.scala | 12
-rw-r--r--  src/compiler/scala/tools/nsc/transform/TailCalls.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Contexts.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/RefChecks.scala | 22
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala | 8
-rw-r--r--  src/compiler/scala/tools/nsc/util/JavaCharArrayReader.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/util/ShowPickled.scala | 2
-rw-r--r--  src/compiler/scala/tools/reflect/MacroImplementations.scala | 4
-rw-r--r--  src/library/scala/beans/ScalaBeanInfo.scala | 4
-rw-r--r--  src/library/scala/collection/SortedMapLike.scala | 2
-rw-r--r--  src/library/scala/collection/generic/Signalling.scala | 4
-rw-r--r--  src/library/scala/collection/generic/Sorted.scala | 16
-rw-r--r--  src/library/scala/collection/generic/SortedSetFactory.scala | 2
-rw-r--r--  src/library/scala/collection/immutable/HashMap.scala | 2
-rw-r--r--  src/library/scala/collection/immutable/HashSet.scala | 4
-rw-r--r--  src/library/scala/collection/immutable/IntMap.scala | 6
-rw-r--r--  src/library/scala/collection/immutable/ListSet.scala | 6
-rw-r--r--  src/library/scala/collection/immutable/LongMap.scala | 4
-rw-r--r--  src/library/scala/collection/immutable/RedBlackTree.scala | 8
-rw-r--r--  src/library/scala/collection/mutable/HashTable.scala | 16
-rw-r--r--  src/library/scala/collection/mutable/ListBuffer.scala | 2
-rw-r--r--  src/library/scala/collection/mutable/OpenHashMap.scala | 38
-rw-r--r--  src/library/scala/collection/parallel/ParIterableLike.scala | 10
-rw-r--r--  src/library/scala/collection/parallel/ParIterableViewLike.scala | 3
-rw-r--r--  src/library/scala/collection/parallel/ParSeqLike.scala | 4
-rw-r--r--  src/library/scala/collection/parallel/ParSeqViewLike.scala | 4
-rw-r--r--  src/library/scala/collection/parallel/RemainsIterator.scala | 5
-rw-r--r--  src/library/scala/collection/parallel/Tasks.scala | 2
-rw-r--r--  src/library/scala/collection/parallel/immutable/ParRange.scala | 2
-rw-r--r--  src/library/scala/collection/parallel/mutable/ParArray.scala | 6
-rw-r--r--  src/library/scala/collection/parallel/mutable/ParHashMap.scala | 5
-rw-r--r--  src/library/scala/collection/parallel/mutable/ParHashSet.scala | 9
-rw-r--r--  src/library/scala/collection/parallel/mutable/UnrolledParArrayCombiner.scala | 3
-rw-r--r--  src/library/scala/io/ReadStdin.scala | 2
-rw-r--r--  src/library/scala/ref/SoftReference.scala | 3
-rwxr-xr-x  src/library/scala/reflect/NameTransformer.scala | 2
-rw-r--r--  src/library/scala/text/Document.scala | 2
-rw-r--r--  src/library/scala/util/MurmurHash.scala | 2
-rw-r--r--  src/library/scala/util/matching/Regex.scala | 2
-rwxr-xr-x  src/library/scala/xml/Utility.scala | 6
-rw-r--r--  src/library/scala/xml/dtd/ContentModelParser.scala | 60
-rw-r--r--  src/library/scala/xml/dtd/Decl.scala | 2
-rw-r--r--  src/library/scala/xml/dtd/Scanner.scala | 6
-rw-r--r--  src/library/scala/xml/dtd/ValidationException.scala | 2
-rwxr-xr-x  src/library/scala/xml/factory/Binder.scala | 2
-rw-r--r--  src/library/scala/xml/factory/LoggedNodeFactory.scala | 10
-rw-r--r--  src/library/scala/xml/include/sax/XIncludeFilter.scala | 16
-rw-r--r--  src/library/scala/xml/include/sax/XIncluder.scala | 30
-rwxr-xr-x  src/library/scala/xml/parsing/MarkupParser.scala | 22
-rw-r--r--  src/library/scala/xml/parsing/ValidatingMarkupHandler.scala | 6
-rw-r--r--  src/library/scala/xml/transform/BasicTransformer.scala | 2
-rw-r--r--  src/reflect/scala/reflect/internal/Constants.scala | 2
-rw-r--r--  src/reflect/scala/reflect/internal/InfoTransformers.scala | 2
-rw-r--r--  src/reflect/scala/reflect/internal/Kinds.scala | 2
-rw-r--r--  src/reflect/scala/reflect/internal/Names.scala | 8
-rw-r--r--  src/reflect/scala/reflect/internal/Printers.scala | 4
-rw-r--r--  src/reflect/scala/reflect/internal/Scopes.scala | 6
-rw-r--r--  src/reflect/scala/reflect/internal/SymbolTable.scala | 2
-rw-r--r--  src/reflect/scala/reflect/internal/Trees.scala | 4
-rw-r--r--  src/reflect/scala/reflect/internal/Types.scala | 24
-rw-r--r--  src/reflect/scala/reflect/internal/pickling/PickleBuffer.scala | 4
-rw-r--r--  src/reflect/scala/reflect/io/VirtualDirectory.scala | 2
-rw-r--r--  src/reflect/scala/reflect/io/VirtualFile.scala | 2
104 files changed, 538 insertions(+), 519 deletions(-)
diff --git a/src/compiler/scala/reflect/reify/phases/Reshape.scala b/src/compiler/scala/reflect/reify/phases/Reshape.scala
index 71fe4ddeea..4c27ba4da1 100644
--- a/src/compiler/scala/reflect/reify/phases/Reshape.scala
+++ b/src/compiler/scala/reflect/reify/phases/Reshape.scala
@@ -280,7 +280,7 @@ trait Reshape {
detectBeanAccessors("get")
detectBeanAccessors("set")
detectBeanAccessors("is")
- });
+ })
val stats1 = stats flatMap {
case vdef @ ValDef(mods, name, tpt, rhs) if !mods.isLazy =>
diff --git a/src/compiler/scala/reflect/reify/utils/NodePrinters.scala b/src/compiler/scala/reflect/reify/utils/NodePrinters.scala
index 0740f8d0b6..0903bc481c 100644
--- a/src/compiler/scala/reflect/reify/utils/NodePrinters.scala
+++ b/src/compiler/scala/reflect/reify/utils/NodePrinters.scala
@@ -71,10 +71,10 @@ trait NodePrinters {
s.trim
})
- val printout = scala.collection.mutable.ListBuffer[String]();
+ val printout = scala.collection.mutable.ListBuffer[String]()
printout += universe.trim
if (mirrorIsUsed) printout += mirror.replace("Mirror[", "scala.reflect.api.Mirror[").trim
- val imports = scala.collection.mutable.ListBuffer[String]();
+ val imports = scala.collection.mutable.ListBuffer[String]()
imports += nme.UNIVERSE_SHORT.toString
// if (buildIsUsed) imports += nme.build
if (mirrorIsUsed) imports += nme.MIRROR_SHORT.toString
diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala
index 7c8dbc211e..304bdf1536 100644
--- a/src/compiler/scala/tools/nsc/Global.scala
+++ b/src/compiler/scala/tools/nsc/Global.scala
@@ -1256,8 +1256,8 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
// this handler should not be nessasary, but it seems that `fsc`
// eats exceptions if they appear here. Need to find out the cause for
// this and fix it.
- inform("[reset] exception happened: "+ex);
- ex.printStackTrace();
+ inform("[reset] exception happened: "+ex)
+ ex.printStackTrace()
throw ex
}
diff --git a/src/compiler/scala/tools/nsc/MainTokenMetric.scala b/src/compiler/scala/tools/nsc/MainTokenMetric.scala
index 9eb162a377..584805b37e 100644
--- a/src/compiler/scala/tools/nsc/MainTokenMetric.scala
+++ b/src/compiler/scala/tools/nsc/MainTokenMetric.scala
@@ -43,8 +43,8 @@ object MainTokenMetric {
} catch {
case ex @ FatalError(msg) =>
if (command.settings.debug.value)
- ex.printStackTrace();
- reporter.error(null, "fatal error: " + msg)
+ ex.printStackTrace()
+ reporter.error(null, "fatal error: " + msg)
}
}
diff --git a/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala b/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala
index 329f0fa54b..b73016837d 100644
--- a/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala
+++ b/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala
@@ -32,7 +32,7 @@ abstract class TreeBrowsers {
val borderSize = 10
- def create(): SwingBrowser = new SwingBrowser();
+ def create(): SwingBrowser = new SwingBrowser()
/** Pseudo tree class, so that all JTree nodes are treated uniformly */
case class ProgramTree(units: List[UnitTree]) extends Tree {
@@ -189,7 +189,7 @@ abstract class TreeBrowsers {
frame.addWindowListener(new WindowAdapter() {
/** Release the lock, so compilation may resume after the window is closed. */
override def windowClosed(e: WindowEvent): Unit = lock.release()
- });
+ })
jTree = new JTree(treeModel) {
/** Return the string for a tree node. */
@@ -530,7 +530,7 @@ abstract class TreeBrowsers {
if ((s ne null) && (s != NoSymbol)) {
var str = s.flagString
- if (s.isStaticMember) str = str + " isStatic ";
+ if (s.isStaticMember) str = str + " isStatic "
(str + " annotations: " + s.annotations.mkString("", " ", "")
+ (if (s.isTypeSkolem) "\ndeSkolemized annotations: " + s.deSkolemize.annotations.mkString("", " ", "") else ""))
}
diff --git a/src/compiler/scala/tools/nsc/ast/Trees.scala b/src/compiler/scala/tools/nsc/ast/Trees.scala
index 4b5e23e177..c8b878225e 100644
--- a/src/compiler/scala/tools/nsc/ast/Trees.scala
+++ b/src/compiler/scala/tools/nsc/ast/Trees.scala
@@ -122,7 +122,7 @@ trait Trees extends scala.reflect.internal.Trees { self: Global =>
} else {
// convert (implicit ... ) to ()(implicit ... ) if its the only parameter section
if (vparamss1.isEmpty || !vparamss1.head.isEmpty && vparamss1.head.head.mods.isImplicit)
- vparamss1 = List() :: vparamss1;
+ vparamss1 = List() :: vparamss1
val superRef: Tree = atPos(superPos)(gen.mkSuperInitCall)
val superCall = pendingSuperCall // we can't know in advance which of the parents will end up as a superclass
// this requires knowing which of the parents is a type macro and which is not
diff --git a/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala b/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala
index add932441d..f361daa574 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala
@@ -411,7 +411,7 @@ abstract class TreeBuilder {
ValFrom(pos, pat, makeCombination(rhs.pos union test.pos, nme.withFilter, rhs, pat.duplicate, test)) :: rest,
body)
case ValFrom(pos, pat, rhs) :: rest =>
- val valeqs = rest.take(definitions.MaxTupleArity - 1).takeWhile(_.isInstanceOf[ValEq]);
+ val valeqs = rest.take(definitions.MaxTupleArity - 1).takeWhile(_.isInstanceOf[ValEq])
assert(!valeqs.isEmpty)
val rest1 = rest.drop(valeqs.length)
val pats = valeqs map { case ValEq(_, pat, _) => pat }
diff --git a/src/compiler/scala/tools/nsc/backend/ScalaPrimitives.scala b/src/compiler/scala/tools/nsc/backend/ScalaPrimitives.scala
index f6b0701f86..1f9862596c 100644
--- a/src/compiler/scala/tools/nsc/backend/ScalaPrimitives.scala
+++ b/src/compiler/scala/tools/nsc/backend/ScalaPrimitives.scala
@@ -494,8 +494,8 @@ abstract class ScalaPrimitives {
def isArraySet(code: Int): Boolean = code match {
case ZARRAY_SET | BARRAY_SET | SARRAY_SET | CARRAY_SET |
IARRAY_SET | LARRAY_SET | FARRAY_SET | DARRAY_SET |
- OARRAY_SET | UPDATE => true;
- case _ => false;
+ OARRAY_SET | UPDATE => true
+ case _ => false
}
/** Check whether the given code is a comparison operator */
@@ -514,7 +514,7 @@ abstract class ScalaPrimitives {
DIV | MOD => true; // binary
case OR | XOR | AND |
LSL | LSR | ASR => true; // bitwise
- case _ => false;
+ case _ => false
}
def isLogicalOp(code: Int): Boolean = code match {
diff --git a/src/compiler/scala/tools/nsc/backend/icode/ExceptionHandlers.scala b/src/compiler/scala/tools/nsc/backend/icode/ExceptionHandlers.scala
index a872e9cd00..7243264773 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/ExceptionHandlers.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/ExceptionHandlers.scala
@@ -24,11 +24,11 @@ trait ExceptionHandlers {
class ExceptionHandler(val method: IMethod, val label: TermName, val cls: Symbol, val pos: Position) {
def loadExceptionClass = if (cls == NoSymbol) ThrowableClass else cls
- private var _startBlock: BasicBlock = _;
- var finalizer: Finalizer = _;
+ private var _startBlock: BasicBlock = _
+ var finalizer: Finalizer = _
def setStartBlock(b: BasicBlock) = {
- _startBlock = b;
+ _startBlock = b
b.exceptionHandlerStart = true
}
def startBlock = _startBlock
@@ -46,11 +46,11 @@ trait ExceptionHandlers {
/** The body of this exception handler. May contain 'dead' blocks (which will not
* make it into generated code because linearizers may not include them) */
- var blocks: List[BasicBlock] = Nil;
+ var blocks: List[BasicBlock] = Nil
- def addBlock(b: BasicBlock): Unit = blocks = b :: blocks;
+ def addBlock(b: BasicBlock): Unit = blocks = b :: blocks
- override def toString() = "exh_" + label + "(" + cls.simpleName + ")";
+ override def toString() = "exh_" + label + "(" + cls.simpleName + ")"
/** A standard copy constructor */
def this(other: ExceptionHandler) = {
diff --git a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala
index f19fb56db0..122972039b 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala
@@ -91,7 +91,7 @@ abstract class GenICode extends SubComponent {
debuglog("Generating class: " + tree.symbol.fullName)
val outerClass = ctx.clazz
ctx setClass (new IClass(tree.symbol) setCompilationUnit unit)
- addClassFields(ctx, tree.symbol);
+ addClassFields(ctx, tree.symbol)
classes += (tree.symbol -> ctx.clazz)
unit.icode += ctx.clazz
gen(impl, ctx)
@@ -119,7 +119,7 @@ abstract class GenICode extends SubComponent {
m.native = m.symbol.hasAnnotation(definitions.NativeAttr)
if (!m.isAbstractMethod && !m.native) {
- ctx1 = genLoad(rhs, ctx1, m.returnType);
+ ctx1 = genLoad(rhs, ctx1, m.returnType)
// reverse the order of the local variables, to match the source-order
m.locals = m.locals.reverse
@@ -224,10 +224,10 @@ abstract class GenICode extends SubComponent {
// binary operation
case rarg :: Nil =>
- resKind = getMaxType(larg.tpe :: rarg.tpe :: Nil);
+ resKind = getMaxType(larg.tpe :: rarg.tpe :: Nil)
if (scalaPrimitives.isShiftOp(code) || scalaPrimitives.isBitwiseOp(code))
assert(resKind.isIntegralType | resKind == BOOL,
- resKind.toString() + " incompatible with arithmetic modulo operation: " + ctx1);
+ resKind.toString() + " incompatible with arithmetic modulo operation: " + ctx1)
ctx1 = genLoad(larg, ctx1, resKind)
ctx1 = genLoad(rarg,
@@ -271,7 +271,7 @@ abstract class GenICode extends SubComponent {
if (scalaPrimitives.isArrayGet(code)) {
// load argument on stack
debugassert(args.length == 1,
- "Too many arguments for array get operation: " + tree);
+ "Too many arguments for array get operation: " + tree)
ctx1 = genLoad(args.head, ctx1, INT)
generatedType = elem
ctx1.bb.emit(LOAD_ARRAY_ITEM(elementType), tree.pos)
@@ -283,7 +283,7 @@ abstract class GenICode extends SubComponent {
}
else if (scalaPrimitives.isArraySet(code)) {
debugassert(args.length == 2,
- "Too many arguments for array set operation: " + tree);
+ "Too many arguments for array set operation: " + tree)
ctx1 = genLoad(args.head, ctx1, INT)
ctx1 = genLoad(args.tail.head, ctx1, toTypeKind(args.tail.head.tpe))
// the following line should really be here, but because of bugs in erasure
@@ -404,8 +404,8 @@ abstract class GenICode extends SubComponent {
(pat.symbol.tpe.typeSymbol, kind, {
ctx: Context =>
- ctx.bb.emit(STORE_LOCAL(exception), pat.pos);
- genLoad(body, ctx, kind);
+ ctx.bb.emit(STORE_LOCAL(exception), pat.pos)
+ genLoad(body, ctx, kind)
})
}
}
@@ -491,7 +491,7 @@ abstract class GenICode extends SubComponent {
val pair = (tree.symbol -> (new Label(tree.symbol) anchor ctx1.bb setParams (params map (_.symbol))))
debuglog("Adding label " + tree.symbol.fullLocationString + " in genLoad.")
ctx1.labels += pair
- ctx.method.addLocals(params map (p => new Local(p.symbol, toTypeKind(p.symbol.info), false)));
+ ctx.method.addLocals(params map (p => new Local(p.symbol, toTypeKind(p.symbol.info), false)))
}
ctx.bb.closeWith(JUMP(ctx1.bb), tree.pos)
@@ -509,13 +509,13 @@ abstract class GenICode extends SubComponent {
val local = ctx.method.addLocal(new Local(sym, toTypeKind(sym.info), false))
if (rhs == EmptyTree) {
- debuglog("Uninitialized variable " + tree + " at: " + (tree.pos));
+ debuglog("Uninitialized variable " + tree + " at: " + (tree.pos))
ctx.bb.emit(getZeroOf(local.kind))
}
var ctx1 = ctx
if (rhs != EmptyTree)
- ctx1 = genLoad(rhs, ctx, local.kind);
+ ctx1 = genLoad(rhs, ctx, local.kind)
ctx1.bb.emit(STORE_LOCAL(local), tree.pos)
ctx1.scope.add(local)
@@ -624,7 +624,7 @@ abstract class GenICode extends SubComponent {
} else {
genCast(l, r, ctx1, cast)
}
- generatedType = if (cast) r else BOOL;
+ generatedType = if (cast) r else BOOL
ctx1
}
genLoadApply1
@@ -637,7 +637,7 @@ abstract class GenICode extends SubComponent {
// on the stack (contrary to what the type in the AST says).
case Apply(fun @ Select(Super(_, mix), _), args) =>
def genLoadApply2 = {
- debuglog("Call to super: " + tree);
+ debuglog("Call to super: " + tree)
val invokeStyle = SuperCall(mix)
// if (fun.symbol.isConstructor) Static(true) else SuperCall(mix);
@@ -700,7 +700,7 @@ abstract class GenICode extends SubComponent {
case Apply(fun @ _, List(expr)) if (definitions.isBox(fun.symbol)) =>
def genLoadApply4 = {
- debuglog("BOX : " + fun.symbol.fullName);
+ debuglog("BOX : " + fun.symbol.fullName)
val ctx1 = genLoad(expr, ctx, toTypeKind(expr.tpe))
val nativeKind = toTypeKind(expr.tpe)
if (settings.Xdce.value) {
@@ -757,7 +757,7 @@ abstract class GenICode extends SubComponent {
generatedType = resKind
newCtx
} else { // normal method call
- debuglog("Gen CALL_METHOD with sym: " + sym + " isStaticSymbol: " + sym.isStaticMember);
+ debuglog("Gen CALL_METHOD with sym: " + sym + " isStaticSymbol: " + sym.isStaticMember)
val invokeStyle =
if (sym.isStaticMember)
Static(false)
@@ -889,16 +889,16 @@ abstract class GenICode extends SubComponent {
def genLoadLiteral = {
if (value.tag != UnitTag) (value.tag, expectedType) match {
case (IntTag, LONG) =>
- ctx.bb.emit(CONSTANT(Constant(value.longValue)), tree.pos);
+ ctx.bb.emit(CONSTANT(Constant(value.longValue)), tree.pos)
generatedType = LONG
case (FloatTag, DOUBLE) =>
- ctx.bb.emit(CONSTANT(Constant(value.doubleValue)), tree.pos);
+ ctx.bb.emit(CONSTANT(Constant(value.doubleValue)), tree.pos)
generatedType = DOUBLE
case (NullTag, _) =>
- ctx.bb.emit(CONSTANT(value), tree.pos);
+ ctx.bb.emit(CONSTANT(value), tree.pos)
generatedType = NullReference
case _ =>
- ctx.bb.emit(CONSTANT(value), tree.pos);
+ ctx.bb.emit(CONSTANT(value), tree.pos)
generatedType = toTypeKind(tree.tpe)
}
ctx
@@ -946,7 +946,7 @@ abstract class GenICode extends SubComponent {
case Match(selector, cases) =>
def genLoadMatch = {
- debuglog("Generating SWITCH statement.");
+ debuglog("Generating SWITCH statement.")
val ctx1 = genLoad(selector, ctx, INT) // TODO: Java 7 allows strings in switches (so, don't assume INT and don't convert the literals using intValue)
val afterCtx = ctx1.newBlock()
var caseCtx: Context = null
@@ -1379,7 +1379,7 @@ abstract class GenICode extends SubComponent {
}
}
- debuglog("Entering genCond with tree: " + tree);
+ debuglog("Entering genCond with tree: " + tree)
// the default emission
def default() = {
@@ -1582,14 +1582,14 @@ abstract class GenICode extends SubComponent {
case _ => None
}
if (block.size == 1 && optCont.isDefined) {
- val Some(cont) = optCont;
- val pred = block.predecessors;
- debuglog("Preds: " + pred + " of " + block + " (" + optCont + ")");
+ val Some(cont) = optCont
+ val pred = block.predecessors
+ debuglog("Preds: " + pred + " of " + block + " (" + optCont + ")")
pred foreach { p =>
changed = true
p.lastInstruction match {
case CJUMP(succ, fail, cond, kind) if (succ == block || fail == block) =>
- debuglog("Pruning empty if branch.");
+ debuglog("Pruning empty if branch.")
p.replaceInstruction(p.lastInstruction,
if (block == succ)
if (block == fail)
@@ -1602,7 +1602,7 @@ abstract class GenICode extends SubComponent {
abort("Could not find block in preds: " + method + " " + block + " " + pred + " " + p))
case CZJUMP(succ, fail, cond, kind) if (succ == block || fail == block) =>
- debuglog("Pruning empty ifz branch.");
+ debuglog("Pruning empty ifz branch.")
p.replaceInstruction(p.lastInstruction,
if (block == succ)
if (block == fail)
@@ -1615,12 +1615,12 @@ abstract class GenICode extends SubComponent {
abort("Could not find block in preds"))
case JUMP(b) if (b == block) =>
- debuglog("Pruning empty JMP branch.");
+ debuglog("Pruning empty JMP branch.")
val replaced = p.replaceInstruction(p.lastInstruction, JUMP(cont))
debugassert(replaced, "Didn't find p.lastInstruction")
case SWITCH(tags, labels) if (labels contains block) =>
- debuglog("Pruning empty SWITCH branch.");
+ debuglog("Pruning empty SWITCH branch.")
p.replaceInstruction(p.lastInstruction,
SWITCH(tags, labels map (l => if (l == block) cont else l)))
@@ -1636,7 +1636,7 @@ abstract class GenICode extends SubComponent {
e.covered = e.covered filter (_ != block)
e.blocks = e.blocks filter (_ != block)
if (e.startBlock eq block)
- e setStartBlock cont;
+ e setStartBlock cont
}
}
}
@@ -1648,7 +1648,7 @@ abstract class GenICode extends SubComponent {
method.blocks foreach prune0
} while (changed)
- debuglog("Prune fixpoint reached in " + n + " iterations.");
+ debuglog("Prune fixpoint reached in " + n + " iterations.")
}
def getMaxType(ts: List[Type]): TypeKind =
@@ -1820,7 +1820,7 @@ abstract class GenICode extends SubComponent {
}
def addFinalizer(f: Tree, ctx: Context): this.type = {
- cleanups = Finalizer(f, ctx) :: cleanups;
+ cleanups = Finalizer(f, ctx) :: cleanups
this
}
@@ -1868,7 +1868,7 @@ abstract class GenICode extends SubComponent {
val exh = new ExceptionHandler(method, newTermNameCached("" + handlerCount), cls, pos)
method.addHandler(exh)
handlers = exh :: handlers
- debuglog("added handler: " + exh);
+ debuglog("added handler: " + exh)
exh
}
@@ -1878,7 +1878,7 @@ abstract class GenICode extends SubComponent {
private def addActiveHandler(exh: ExceptionHandler) {
handlerCount += 1
handlers = exh :: handlers
- debuglog("added handler: " + exh);
+ debuglog("added handler: " + exh)
}
/** Return a new context for generating code for the given
@@ -1962,11 +1962,11 @@ abstract class GenICode extends SubComponent {
val ctx = finalizerCtx.enterExceptionHandler(exh)
val exception = ctx.makeLocal(finalizer.pos, ThrowableClass.tpe, "exc")
loadException(ctx, exh, finalizer.pos)
- ctx.bb.emit(STORE_LOCAL(exception));
- val ctx1 = genLoad(finalizer, ctx, UNIT);
- ctx1.bb.emit(LOAD_LOCAL(exception));
- ctx1.bb.emit(THROW(ThrowableClass));
- ctx1.bb.enterIgnoreMode();
+ ctx.bb.emit(STORE_LOCAL(exception))
+ val ctx1 = genLoad(finalizer, ctx, UNIT)
+ ctx1.bb.emit(LOAD_LOCAL(exception))
+ ctx1.bb.emit(THROW(ThrowableClass))
+ ctx1.bb.enterIgnoreMode()
ctx1.bb.close()
finalizerCtx.endHandler()
}
@@ -2028,7 +2028,7 @@ abstract class GenICode extends SubComponent {
/** Add an instruction that refers to this label. */
def addCallingInstruction(i: Instruction) =
- toPatch = i :: toPatch;
+ toPatch = i :: toPatch
/**
* Patch the code by replacing pseudo call instructions with
@@ -2090,7 +2090,7 @@ abstract class GenICode extends SubComponent {
// register with the given label
if (!label.anchored)
- label.addCallingInstruction(this);
+ label.addCallingInstruction(this)
}
case class PJUMP(whereto: Label) extends PseudoJUMP(whereto)
diff --git a/src/compiler/scala/tools/nsc/backend/icode/ICodeCheckers.scala b/src/compiler/scala/tools/nsc/backend/icode/ICodeCheckers.scala
index 8cd7c70bf0..fb1ef311d2 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/ICodeCheckers.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/ICodeCheckers.scala
@@ -233,8 +233,8 @@ abstract class ICodeCheckers {
}
if (preds.nonEmpty) {
- in(bl) = (preds map out.apply) reduceLeft meet2;
- log("Input changed for block: " + bl +" to: " + in(bl));
+ in(bl) = (preds map out.apply) reduceLeft meet2
+ log("Input changed for block: " + bl +" to: " + in(bl))
}
}
@@ -380,9 +380,9 @@ abstract class ICodeCheckers {
def checkField(obj: TypeKind, field: Symbol): Unit = obj match {
case REFERENCE(sym) =>
if (sym.info.member(field.name) == NoSymbol)
- icodeError(" " + field + " is not defined in class " + clasz);
+ icodeError(" " + field + " is not defined in class " + clasz)
case _ =>
- icodeError(" expected reference type, but " + obj + " found");
+ icodeError(" expected reference type, but " + obj + " found")
}
/** Checks that tpe is a subtype of one of the allowed types */
@@ -419,11 +419,11 @@ abstract class ICodeCheckers {
receiver match {
case REFERENCE(sym) =>
checkBool(sym.info.member(method.name) != NoSymbol,
- "Method " + method + " does not exist in " + sym.fullName);
+ "Method " + method + " does not exist in " + sym.fullName)
if (method.isPrivate)
checkBool(method.owner == clasz.symbol,
"Cannot call private method of " + method.owner.fullName
- + " from " + clasz.symbol.fullName);
+ + " from " + clasz.symbol.fullName)
else if (method.isProtected) {
val isProtectedOK = (
(clasz.symbol isSubClass method.owner) ||
@@ -432,7 +432,7 @@ abstract class ICodeCheckers {
checkBool(isProtectedOK,
"Cannot call protected method of " + method.owner.fullName
- + " from " + clasz.symbol.fullName);
+ + " from " + clasz.symbol.fullName)
}
case ARRAY(_) =>
@@ -465,7 +465,7 @@ abstract class ICodeCheckers {
pushStack(elem)
case (a, b) =>
icodeError(" expected an INT and an array reference, but " +
- a + ", " + b + " found");
+ a + ", " + b + " found")
}
case LOAD_LOCAL(local) =>
@@ -483,10 +483,10 @@ abstract class ICodeCheckers {
case LOAD_MODULE(module) =>
checkBool((module.isModule || module.isModuleClass),
- "Expected module: " + module + " flags: " + module.flagString);
- pushStack(toTypeKind(module.tpe));
+ "Expected module: " + module + " flags: " + module.flagString)
+ pushStack(toTypeKind(module.tpe))
- case STORE_THIS(kind) =>
+ case STORE_THIS(kind) =>
val actualType = popStack
if (actualType.isReferenceType) subtypeTest(actualType, kind)
else icodeError("Expected this reference but found: " + actualType)
@@ -498,7 +498,7 @@ abstract class ICodeCheckers {
subtypeTest(k, elem)
case (a, b, c) =>
icodeError(" expected and array reference, and int and " + kind +
- " but " + a + ", " + b + ", " + c + " found");
+ " but " + a + ", " + b + ", " + c + " found")
}
case STORE_LOCAL(local) =>
@@ -653,7 +653,7 @@ abstract class ICodeCheckers {
case RETURN(kind) =>
val top = popStack
if (kind.isValueType) checkType(top, kind)
- else checkBool(!top.isValueType, "" + kind + " is a reference type, but " + top + " is not");
+ else checkBool(!top.isValueType, "" + kind + " is a reference type, but " + top + " is not")
case THROW(clasz) =>
checkType(popStack, toTypeKind(clasz.tpe))
diff --git a/src/compiler/scala/tools/nsc/backend/icode/Linearizers.scala b/src/compiler/scala/tools/nsc/backend/icode/Linearizers.scala
index 35eedc3539..c5fe3228a3 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/Linearizers.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/Linearizers.scala
@@ -35,15 +35,15 @@ trait Linearizers {
var blocks: List[BasicBlock] = Nil
def linearize(m: IMethod): List[BasicBlock] = {
- val b = m.startBlock;
- blocks = Nil;
+ val b = m.startBlock
+ blocks = Nil
run {
- worklist pushAll (m.exh map (_.startBlock));
- worklist.push(b);
+ worklist pushAll (m.exh map (_.startBlock))
+ worklist.push(b)
}
- blocks.reverse;
+ blocks.reverse
}
def linearizeAt(m: IMethod, start: BasicBlock): List[BasicBlock] = {
@@ -55,30 +55,30 @@ trait Linearizers {
/** Linearize another subtree and append it to the existing blocks. */
def linearize(startBlock: BasicBlock): List[BasicBlock] = {
//blocks = startBlock :: Nil;
- run( { worklist.push(startBlock); } );
- blocks.reverse;
+ run( { worklist.push(startBlock); } )
+ blocks.reverse
}
def processElement(b: BasicBlock) =
if (b.nonEmpty) {
- add(b);
+ add(b)
b.lastInstruction match {
case JUMP(whereto) =>
- add(whereto);
+ add(whereto)
case CJUMP(success, failure, _, _) =>
- add(success);
- add(failure);
+ add(success)
+ add(failure)
case CZJUMP(success, failure, _, _) =>
- add(success);
- add(failure);
+ add(success)
+ add(failure)
case SWITCH(_, labels) =>
- add(labels);
- case RETURN(_) => ();
- case THROW(clasz) => ();
+ add(labels)
+ case RETURN(_) => ()
+ case THROW(clasz) => ()
}
}
- def dequeue: Elem = worklist.pop();
+ def dequeue: Elem = worklist.pop()
/**
* Prepend b to the list, if not already scheduled.
@@ -88,25 +88,25 @@ trait Linearizers {
if (blocks.contains(b))
()
else {
- blocks = b :: blocks;
- worklist push b;
+ blocks = b :: blocks
+ worklist push b
}
}
- def add(bs: List[BasicBlock]): Unit = bs foreach add;
+ def add(bs: List[BasicBlock]): Unit = bs foreach add
}
/**
* Linearize code using a depth first traversal.
*/
class DepthFirstLinerizer extends Linearizer {
- var blocks: List[BasicBlock] = Nil;
+ var blocks: List[BasicBlock] = Nil
def linearize(m: IMethod): List[BasicBlock] = {
- blocks = Nil;
+ blocks = Nil
- dfs(m.startBlock);
- m.exh foreach (b => dfs(b.startBlock));
+ dfs(m.startBlock)
+ m.exh foreach (b => dfs(b.startBlock))
blocks.reverse
}
@@ -119,7 +119,7 @@ trait Linearizers {
def dfs(b: BasicBlock): Unit =
if (b.nonEmpty && add(b))
- b.successors foreach dfs;
+ b.successors foreach dfs
/**
* Prepend b to the list, if not already scheduled.
@@ -128,7 +128,7 @@ trait Linearizers {
*/
def add(b: BasicBlock): Boolean =
!(blocks contains b) && {
- blocks = b :: blocks;
+ blocks = b :: blocks
true
}
}
@@ -144,12 +144,12 @@ trait Linearizers {
val added = new mutable.BitSet
def linearize(m: IMethod): List[BasicBlock] = {
- blocks = Nil;
+ blocks = Nil
visited.clear()
- added.clear();
+ added.clear()
- m.exh foreach (b => rpo(b.startBlock));
- rpo(m.startBlock);
+ m.exh foreach (b => rpo(b.startBlock))
+ rpo(m.startBlock)
// if the start block has predecessors, it won't be the first one
// in the linearization, so we need to enforce it here
@@ -170,7 +170,7 @@ trait Linearizers {
def rpo(b: BasicBlock): Unit =
if (b.nonEmpty && !visited(b)) {
- visited += b;
+ visited += b
b.successors foreach rpo
add(b)
}
@@ -184,7 +184,7 @@ trait Linearizers {
if (!added(b.label)) {
added += b.label
- blocks = b :: blocks;
+ blocks = b :: blocks
}
}
}
diff --git a/src/compiler/scala/tools/nsc/backend/icode/Members.scala b/src/compiler/scala/tools/nsc/backend/icode/Members.scala
index fe837216ed..5c90fbf366 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/Members.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/Members.scala
@@ -80,7 +80,7 @@ trait Members {
}
/** This methods returns a string representation of the ICode */
- override def toString = "ICode '" + name + "'";
+ override def toString = "ICode '" + name + "'"
/* Compute a unique new label */
def nextLabel: Int = {
@@ -92,8 +92,8 @@ trait Members {
*/
def newBlock(): BasicBlock = {
touched = true
- val block = new BasicBlock(nextLabel, method);
- blocks += block;
+ val block = new BasicBlock(nextLabel, method)
+ blocks += block
block
}
}
@@ -115,17 +115,17 @@ trait Members {
var cunit: CompilationUnit = _
def addField(f: IField): this.type = {
- fields = f :: fields;
+ fields = f :: fields
this
}
def addMethod(m: IMethod): this.type = {
- methods = m :: methods;
+ methods = m :: methods
this
}
def setCompilationUnit(unit: CompilationUnit): this.type = {
- this.cunit = unit;
+ this.cunit = unit
this
}
@@ -180,7 +180,7 @@ trait Members {
def hasCode = code ne NoCode
def setCode(code: Code): IMethod = {
- this.code = code;
+ this.code = code
this
}
@@ -220,10 +220,10 @@ trait Members {
val nextBlock: mutable.Map[BasicBlock, BasicBlock] = mutable.HashMap.empty
for (b <- code.blocks.toList
if b.successors.length == 1;
- succ = b.successors.head;
- if succ ne b;
- if succ.predecessors.length == 1;
- if succ.predecessors.head eq b;
+ succ = b.successors.head
+ if succ ne b
+ if succ.predecessors.length == 1
+ if succ.predecessors.head eq b
if !(exh.exists { (e: ExceptionHandler) =>
(e.covers(succ) && !e.covers(b)) || (e.covers(b) && !e.covers(succ)) })) {
nextBlock(b) = succ
@@ -235,7 +235,7 @@ trait Members {
bb.open()
var succ = bb
do {
- succ = nextBlock(succ);
+ succ = nextBlock(succ)
val lastInstr = bb.lastInstruction
/* Ticket SI-5672
* Besides removing the control-flow instruction at the end of `bb` (usually a JUMP), we have to pop any values it pushes.
diff --git a/src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala b/src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala
index 137e2b556f..d8aac8e9db 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala
@@ -64,7 +64,7 @@ import scala.reflect.internal.util.{Position,NoPosition}
* in the source files.
*/
trait Opcodes { self: ICodes =>
- import global.{Symbol, NoSymbol, Name, Constant};
+ import global.{Symbol, NoSymbol, Name, Constant}
// categories of ICode instructions
final val localsCat = 1
@@ -195,7 +195,7 @@ trait Opcodes { self: ICodes =>
case class LOAD_FIELD(field: Symbol, isStatic: Boolean) extends Instruction {
/** Returns a string representation of this instruction */
override def toString(): String =
- "LOAD_FIELD " + (if (isStatic) field.fullName else field.toString());
+ "LOAD_FIELD " + (if (isStatic) field.fullName else field.toString())
override def consumed = if (isStatic) 0 else 1
override def produced = 1
@@ -257,16 +257,17 @@ trait Opcodes { self: ICodes =>
case class STORE_FIELD(field: Symbol, isStatic: Boolean) extends Instruction {
/** Returns a string representation of this instruction */
override def toString(): String =
- "STORE_FIELD "+field + (if (isStatic) " (static)" else " (dynamic)");
+ "STORE_FIELD "+field + (if (isStatic) " (static)" else " (dynamic)")
- override def consumed = if(isStatic) 1 else 2;
- override def produced = 0;
+ override def consumed = if(isStatic) 1 else 2
+
+ override def produced = 0
override def consumedTypes =
if (isStatic)
toTypeKind(field.tpe) :: Nil
else
- REFERENCE(field.owner) :: toTypeKind(field.tpe) :: Nil;
+ REFERENCE(field.owner) :: toTypeKind(field.tpe) :: Nil
override def category = fldsCat
}
@@ -420,10 +421,12 @@ trait Opcodes { self: ICodes =>
*/
case class NEW(kind: REFERENCE) extends Instruction {
/** Returns a string representation of this instruction */
- override def toString(): String = "NEW "+ kind;
+ override def toString(): String = "NEW "+ kind
+
+ override def consumed = 0
+
+ override def produced = 1
- override def consumed = 0;
- override def produced = 1;
override def producedTypes = kind :: Nil
/** The corresponding constructor call. */
@@ -439,11 +442,13 @@ trait Opcodes { self: ICodes =>
*/
case class CREATE_ARRAY(elem: TypeKind, dims: Int) extends Instruction {
/** Returns a string representation of this instruction */
- override def toString(): String ="CREATE_ARRAY "+elem + " x " + dims;
+ override def toString(): String ="CREATE_ARRAY "+elem + " x " + dims
+
+ override def consumed = dims
- override def consumed = dims;
override def consumedTypes = List.fill(dims)(INT)
- override def produced = 1;
+ override def produced = 1
+
override def producedTypes = ARRAY(elem) :: Nil
override def category = arraysCat
@@ -532,7 +537,7 @@ trait Opcodes { self: ICodes =>
override def toString(): String = (
"CJUMP (" + kind + ")" +
cond + " ? "+successBlock.label+" : "+failureBlock.label
- );
+ )
override def consumed = 2
override def produced = 0
@@ -555,7 +560,7 @@ trait Opcodes { self: ICodes =>
override def toString(): String = (
"CZJUMP (" + kind + ")" +
cond + " ? "+successBlock.label+" : "+failureBlock.label
- );
+ )
override def consumed = 1
override def produced = 0
@@ -647,10 +652,11 @@ trait Opcodes { self: ICodes =>
*/
case class MONITOR_EXIT() extends Instruction {
/** Returns a string representation of this instruction */
- override def toString(): String ="MONITOR_EXIT";
+ override def toString(): String ="MONITOR_EXIT"
- override def consumed = 1;
- override def produced = 0;
+ override def consumed = 1
+
+ override def produced = 0
override def consumedTypes = ObjectReference :: Nil
diff --git a/src/compiler/scala/tools/nsc/backend/icode/Primitives.scala b/src/compiler/scala/tools/nsc/backend/icode/Primitives.scala
index 351d99f51a..5eceb1cf6b 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/Primitives.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/Primitives.scala
@@ -6,9 +6,9 @@
package scala.tools.nsc
package backend
-package icode;
+package icode
-import java.io.PrintWriter;
+import java.io.PrintWriter
trait Primitives { self: ICodes =>
@@ -51,12 +51,12 @@ trait Primitives { self: ICodes =>
// type : (src) => dst
// range: src,dst <- { Ix, Ux, Rx }
// jvm : i2{l, f, d}, l2{i, f, d}, f2{i, l, d}, d2{i, l, f}, i2{b, c, s}
- case class Conversion(src: TypeKind, dst: TypeKind) extends Primitive;
+ case class Conversion(src: TypeKind, dst: TypeKind) extends Primitive
// type : (Array[REF]) => I4
// range: type <- { BOOL, Ix, Ux, Rx, REF }
// jvm : arraylength
- case class ArrayLength(kind: TypeKind) extends Primitive;
+ case class ArrayLength(kind: TypeKind) extends Primitive
// type : (buf,el) => buf
// range: lf,rg <- { BOOL, Ix, Ux, Rx, REF, STR }
diff --git a/src/compiler/scala/tools/nsc/backend/icode/Printers.scala b/src/compiler/scala/tools/nsc/backend/icode/Printers.scala
index 253f766469..5b47e3cfff 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/Printers.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/Printers.scala
@@ -27,7 +27,7 @@ trait Printers { self: ICodes =>
def print(o: Any) { print(o.toString()) }
def println(s: String) {
- print(s);
+ print(s)
println()
}
@@ -35,7 +35,7 @@ trait Printers { self: ICodes =>
out.println()
var i = 0
while (i < margin) {
- print(" ");
+ print(" ")
i += 1
}
}
@@ -53,26 +53,26 @@ trait Printers { self: ICodes =>
}
def printClass(cls: IClass) {
- print(cls.symbol.toString()); print(" extends ");
- printList(cls.symbol.info.parents, ", ");
- indent(); println(" {");
- println("// fields:");
- cls.fields.foreach(printField); println();
- println("// methods");
- cls.methods.foreach(printMethod);
- undent(); println();
+ print(cls.symbol.toString()); print(" extends ")
+ printList(cls.symbol.info.parents, ", ")
+ indent(); println(" {")
+ println("// fields:")
+ cls.fields.foreach(printField); println()
+ println("// methods")
+ cls.methods.foreach(printMethod)
+ undent(); println()
println("}")
}
def printField(f: IField) {
- print(f.symbol.keyString); print(" ");
- print(f.symbol.nameString); print(": ");
- println(f.symbol.info.toString());
+ print(f.symbol.keyString); print(" ")
+ print(f.symbol.nameString); print(": ")
+ println(f.symbol.info.toString())
}
def printMethod(m: IMethod) {
- print("def "); print(m.symbol.name);
- print("("); printList(printParam)(m.params, ", "); print(")");
+ print("def "); print(m.symbol.name)
+ print("("); printList(printParam)(m.params, ", "); print(")")
print(": "); print(m.symbol.info.resultType)
if (!m.isAbstractMethod) {
@@ -93,23 +93,23 @@ trait Printers { self: ICodes =>
}
def printParam(p: Local) {
- print(p.sym.name); print(": "); print(p.sym.info);
+ print(p.sym.name); print(": "); print(p.sym.info)
print(" ("); print(p.kind); print(")")
}
def printExceptionHandler(e: ExceptionHandler) {
- indent();
- println("catch (" + e.cls.simpleName + ") in " + e.covered.toSeq.sortBy(_.label) + " starting at: " + e.startBlock);
- println("consisting of blocks: " + e.blocks);
- undent();
- println("with finalizer: " + e.finalizer);
-// linearizer.linearize(e.startBlock) foreach printBlock;
+ indent()
+ println("catch (" + e.cls.simpleName + ") in " + e.covered.toSeq.sortBy(_.label) + " starting at: " + e.startBlock)
+ println("consisting of blocks: " + e.blocks)
+ undent()
+ println("with finalizer: " + e.finalizer)
+ // linearizer.linearize(e.startBlock) foreach printBlock;
}
def printBlock(bb: BasicBlock) {
print(bb.label)
if (bb.loopHeader) print("[loop header]")
- print(": ");
+ print(": ")
if (settings.debug.value) print("pred: " + bb.predecessors + " succs: " + bb.successors + " flags: " + bb.flagsString)
indent(); println()
bb.toList foreach printInstruction
diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/CopyPropagation.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/CopyPropagation.scala
index 7f32b2b764..941d200d13 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/analysis/CopyPropagation.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/analysis/CopyPropagation.scala
@@ -123,7 +123,7 @@ abstract class CopyPropagation {
}
override def toString(): String =
- "\nBindings: " + bindings + "\nStack: " + stack;
+ "\nBindings: " + bindings + "\nStack: " + stack
def dup: State = {
val b: Bindings = mutable.HashMap()
@@ -164,7 +164,7 @@ abstract class CopyPropagation {
val resBindings = mutable.HashMap[Location, Value]()
for ((k, v) <- a.bindings if b.bindings.isDefinedAt(k) && v == b.bindings(k))
- resBindings += (k -> v);
+ resBindings += (k -> v)
new State(resBindings, resStack)
}
}
@@ -189,11 +189,11 @@ abstract class CopyPropagation {
debuglog("CopyAnalysis added point: " + b)
}
m.exh foreach { e =>
- in(e.startBlock) = new copyLattice.State(copyLattice.emptyBinding, copyLattice.exceptionHandlerStack);
+ in(e.startBlock) = new copyLattice.State(copyLattice.emptyBinding, copyLattice.exceptionHandlerStack)
}
// first block is special: it's not bottom, but a precisely defined state with no bindings
- in(m.startBlock) = new lattice.State(lattice.emptyBinding, Nil);
+ in(m.startBlock) = new lattice.State(lattice.emptyBinding, Nil)
}
}
@@ -202,7 +202,7 @@ abstract class CopyPropagation {
if (settings.debug.value) {
linearizer.linearize(method).foreach(b => if (b != method.startBlock)
assert(in(b) != lattice.bottom,
- "Block " + b + " in " + this.method + " has input equal to bottom -- not visited?"));
+ "Block " + b + " in " + this.method + " has input equal to bottom -- not visited?"))
}
}
@@ -227,7 +227,7 @@ abstract class CopyPropagation {
case CONSTANT(k) =>
if (k.tag != UnitTag)
- out.stack = Const(k) :: out.stack;
+ out.stack = Const(k) :: out.stack
case LOAD_ARRAY_ITEM(_) =>
out.stack = (Unknown :: out.stack.drop(2))
@@ -276,14 +276,14 @@ abstract class CopyPropagation {
v match {
case Deref(LocalVar(other)) =>
if (other != local)
- out.bindings += (LocalVar(local) -> v);
+ out.bindings += (LocalVar(local) -> v)
case _ =>
out.bindings += (LocalVar(local) -> v)
}
case Nil =>
sys.error("Incorrect icode in " + method + ". Expecting something on the stack.")
}
- out.stack = out.stack drop 1;
+ out.stack = out.stack drop 1
case STORE_THIS(_) =>
cleanReferencesTo(out, This)
@@ -291,14 +291,14 @@ abstract class CopyPropagation {
case STORE_FIELD(field, isStatic) =>
if (isStatic)
- out.stack = out.stack.drop(1);
+ out.stack = out.stack.drop(1)
else {
- out.stack = out.stack.drop(2);
- cleanReferencesTo(out, Field(AllRecords, field));
+ out.stack = out.stack.drop(2)
+ cleanReferencesTo(out, Field(AllRecords, field))
in.stack match {
case v :: Record(_, bindings) :: vs =>
bindings += (field -> v)
- case _ => ();
+ case _ => ()
}
}
@@ -319,7 +319,7 @@ abstract class CopyPropagation {
case Record(_, bindings) =>
for (v <- out.stack.take(method.info.paramTypes.length + 1)
if v ne obj) {
- bindings ++= getBindingsForPrimaryCtor(in, method);
+ bindings ++= getBindingsForPrimaryCtor(in, method)
}
case _ => ()
}
@@ -390,7 +390,7 @@ abstract class CopyPropagation {
out.stack = out.stack.head :: out.stack
case MONITOR_ENTER() =>
- out.stack = out.stack.drop(1);
+ out.stack = out.stack.drop(1)
case MONITOR_EXIT() =>
out.stack = out.stack.drop(1)
@@ -438,7 +438,7 @@ abstract class CopyPropagation {
case Deref(loc1) if (loc1 == target) => false
case Boxed(loc1) if (loc1 == target) => false
case rec @ Record(_, _) =>
- cleanRecord(rec);
+ cleanRecord(rec)
true
case _ => true
}) &&
@@ -454,12 +454,12 @@ abstract class CopyPropagation {
* If the method is impure, all bindings to record fields are cleared.
*/
final def simulateCall(state: copyLattice.State, method: Symbol, static: Boolean): copyLattice.State = {
- val out = new copyLattice.State(state.bindings, state.stack);
- out.stack = out.stack.drop(method.info.paramTypes.length + (if (static) 0 else 1));
+ val out = new copyLattice.State(state.bindings, state.stack)
+ out.stack = out.stack.drop(method.info.paramTypes.length + (if (static) 0 else 1))
if (method.info.resultType != definitions.UnitClass.tpe && !method.isConstructor)
- out.stack = Unknown :: out.stack;
+ out.stack = Unknown :: out.stack
if (!isPureMethod(method))
- invalidateRecords(out);
+ invalidateRecords(out)
out
}
@@ -500,8 +500,8 @@ abstract class CopyPropagation {
* they are passed on the stack. It works for primary constructors.
*/
private def getBindingsForPrimaryCtor(in: copyLattice.State, ctor: Symbol): mutable.Map[Symbol, Value] = {
- val paramAccessors = ctor.owner.constrParamAccessors;
- var values = in.stack.take(1 + ctor.info.paramTypes.length).reverse.drop(1);
+ val paramAccessors = ctor.owner.constrParamAccessors
+ var values = in.stack.take(1 + ctor.info.paramTypes.length).reverse.drop(1)
val bindings = mutable.HashMap[Symbol, Value]()
debuglog("getBindings for: " + ctor + " acc: " + paramAccessors)
@@ -527,8 +527,8 @@ abstract class CopyPropagation {
// + " having acc: " + (paramAccessors map (_.tpe))+ " vs. params" + paramTypes
// + "\n\t failed at pos " + i + " with " + p.tpe + " == " + paramTypes(i))
if (p.tpe == paramTypes(i))
- bindings += (p -> values.head);
- values = values.tail;
+ bindings += (p -> values.head)
+ values = values.tail
}
debuglog("\t" + bindings)
diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/DataFlowAnalysis.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/DataFlowAnalysis.scala
index a9783b43dc..704439e178 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/analysis/DataFlowAnalysis.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/analysis/DataFlowAnalysis.scala
@@ -30,7 +30,7 @@ trait DataFlowAnalysis[L <: SemiLattice] {
/* Implement this function to initialize the worklist. */
def init(f: => Unit): Unit = {
iterations = 0
- in.clear(); out.clear(); worklist.clear(); visited.clear();
+ in.clear(); out.clear(); worklist.clear(); visited.clear()
f
}
@@ -46,7 +46,7 @@ trait DataFlowAnalysis[L <: SemiLattice] {
while (!worklist.isEmpty) {
if (stat) iterations += 1
//Console.println("worklist in: " + worklist);
- val point = worklist.iterator.next(); worklist -= point; visited += point;
+ val point = worklist.iterator.next(); worklist -= point; visited += point
//Console.println("taking out point: " + point + " worklist out: " + worklist);
val output = f(point, in(point))
diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/Liveness.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/Liveness.scala
index abda639dec..14b57f287f 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/analysis/Liveness.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/analysis/Liveness.scala
@@ -77,7 +77,7 @@ abstract class Liveness {
if (settings.debug.value) {
linearizer.linearize(method).foreach(b => if (b != method.startBlock)
assert(lattice.bottom != in(b),
- "Block " + b + " in " + this.method + " has input equal to bottom -- not visited?"));
+ "Block " + b + " in " + this.method + " has input equal to bottom -- not visited?"))
}
}
diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/ReachingDefinitions.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/ReachingDefinitions.scala
index 48755d4424..2d29e6b14f 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/analysis/ReachingDefinitions.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/analysis/ReachingDefinitions.scala
@@ -147,7 +147,7 @@ abstract class ReachingDefinitions {
"Block " + b + " in " + this.method + " has input equal to bottom -- not visited? " + in(b)
+ ": bot: " + lattice.bottom
+ "\nin(b) == bottom: " + (in(b) == lattice.bottom)
- + "\nbottom == in(b): " + (lattice.bottom == in(b))));
+ + "\nbottom == in(b): " + (lattice.bottom == in(b))))
}
}
diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala
index 7b0627294e..227c1064ea 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala
@@ -139,7 +139,7 @@ abstract class TypeFlowAnalysis {
if (settings.debug.value) {
linearizer.linearize(method).foreach(b => if (b != method.startBlock)
assert(visited.contains(b),
- "Block " + b + " in " + this.method + " has input equal to bottom -- not visited? .." + visited));
+ "Block " + b + " in " + this.method + " has input equal to bottom -- not visited? .." + visited))
}
// log("" + method.symbol.fullName + " [" + method.code.blocks.size + " blocks] "
// + "\n\t" + iterations + " iterations: " + t + " ms."
@@ -207,7 +207,7 @@ abstract class TypeFlowAnalysis {
case Test(_, kind, zero) =>
stack.pop
if (!zero) { stack.pop }
- stack push BOOL;
+ stack push BOOL
case Comparison(_, _) => stack.pop2; stack push INT
@@ -396,7 +396,7 @@ abstract class TypeFlowAnalysis {
override def blockTransfer(b: BasicBlock, in: lattice.Elem): lattice.Elem = {
var result = lattice.IState(new VarBinding(in.vars), new TypeStack(in.stack))
- val stopAt = if(isOnPerimeter(b)) lastInstruction(b) else null;
+ val stopAt = if(isOnPerimeter(b)) lastInstruction(b) else null
var isPastLast = false
var instrs = b.toList
@@ -598,7 +598,7 @@ abstract class TypeFlowAnalysis {
return
} else if(staleOut.isEmpty && inlined.isEmpty && staleIn.isEmpty) {
// this promotes invoking reinit if in doubt, no performance degradation will ensue!
- return;
+ return
}
worklist.clear() // calling reinit(f: => Unit) would also clear visited, thus forgetting about blocks visited before reinit.
@@ -665,14 +665,14 @@ abstract class TypeFlowAnalysis {
override def forwardAnalysis(f: (P, lattice.Elem) => lattice.Elem): Unit = {
while (!worklist.isEmpty && relevantBBs.nonEmpty) {
if (stat) iterations += 1
- val point = worklist.iterator.next(); worklist -= point;
+ val point = worklist.iterator.next(); worklist -= point
if(relevantBBs(point)) {
shrinkedWatchlist = false
val output = f(point, in(point))
- visited += point;
+ visited += point
if(isOnPerimeter(point)) {
if(shrinkedWatchlist && !isWatching(point)) {
- relevantBBs -= point;
+ relevantBBs -= point
populatePerimeter()
}
} else {
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala
index 909c82ff23..8440a6cb49 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala
@@ -456,8 +456,8 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
}
def createJAttribute(name: String, b: Array[Byte], offset: Int, len: Int): asm.Attribute = {
- val dest = new Array[Byte](len);
- System.arraycopy(b, offset, dest, 0, len);
+ val dest = new Array[Byte](len)
+ System.arraycopy(b, offset, dest, 0, len)
new asm.CustomAttr(name, dest)
}
@@ -606,7 +606,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
def javaType(s: Symbol): asm.Type = {
if (s.isMethod) {
- val resT: asm.Type = if (s.isClassConstructor) asm.Type.VOID_TYPE else javaType(s.tpe.resultType);
+ val resT: asm.Type = if (s.isClassConstructor) asm.Type.VOID_TYPE else javaType(s.tpe.resultType)
asm.Type.getMethodType( resT, (s.tpe.paramTypes map javaType): _*)
} else { javaType(s.tpe) }
}
@@ -1297,7 +1297,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
}
val ps = c.symbol.info.parents
- val superInterfaces0: List[Symbol] = if(ps.isEmpty) Nil else c.symbol.mixinClasses;
+ val superInterfaces0: List[Symbol] = if(ps.isEmpty) Nil else c.symbol.mixinClasses
val superInterfaces = (superInterfaces0 ++ c.symbol.annotations.flatMap(ann => newParentForAttr(ann.symbol))).distinct
if(superInterfaces.isEmpty) EMPTY_STRING_ARRAY
@@ -1322,7 +1322,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
thisName = javaName(c.symbol) // the internal name of the class being emitted
val ps = c.symbol.info.parents
- val superClass: String = if(ps.isEmpty) JAVA_LANG_OBJECT.getInternalName else javaName(ps.head.typeSymbol);
+ val superClass: String = if(ps.isEmpty) JAVA_LANG_OBJECT.getInternalName else javaName(ps.head.typeSymbol)
val ifaces = getSuperInterfaces(c)
@@ -1680,7 +1680,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
val kind = toTypeKind(const.typeValue)
val toPush: asm.Type =
if (kind.isValueType) classLiteral(kind)
- else javaType(kind);
+ else javaType(kind)
mv.visitLdcInsn(toPush)
case EnumTag =>
@@ -1703,7 +1703,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
*/
object jcode {
- import asm.Opcodes;
+ import asm.Opcodes
final def boolconst(b: Boolean) { iconst(if(b) 1 else 0) }
@@ -1867,10 +1867,10 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
// use a table in which holes are filled with defaultBranch.
val keyRange = (keyMax - keyMin + 1)
val newBranches = new Array[asm.Label](keyRange)
- var oldPos = 0;
+ var oldPos = 0
var i = 0
while(i < keyRange) {
- val key = keyMin + i;
+ val key = keyMin + i
if (keys(oldPos) == key) {
newBranches(i) = branches(oldPos)
oldPos += 1
@@ -2069,7 +2069,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
// TODO in that case, ExceptionHandler.cls doesn't go through javaName(). What if cls is an inner class?
for (e <- this.method.exh ; if e.covered.nonEmpty ; p <- intervals(e)) {
debuglog("Adding exception handler " + e + "at block: " + e.startBlock + " for " + method +
- " from: " + p.start + " to: " + p.end + " catching: " + e.cls);
+ " from: " + p.start + " to: " + p.end + " catching: " + e.cls)
val cls: String = if (e.cls == NoSymbol || e.cls == ThrowableClass) null
else javaName(e.cls)
jmethod.visitTryCatchBlock(labels(p.start), linNext(p.end), labels(e.startBlock), cls)
@@ -2093,8 +2093,8 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
def overlaps(that: Interval): Boolean = { !(this.precedes(that) || that.precedes(this)) }
def mergeWith(that: Interval): Interval = {
- val newStart = if(this.start <= that.start) this.lstart else that.lstart;
- val newEnd = if(this.end <= that.end) that.lend else this.lend;
+ val newStart = if(this.start <= that.start) this.lstart else that.lstart
+ val newEnd = if(this.end <= that.end) that.lend else this.lend
Interval(newStart, newEnd)
}
@@ -2150,7 +2150,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
def getMerged(): scala.collection.Map[Local, List[Interval]] = {
// TODO should but isn't: unbalanced start(s) of scope(s)
- val shouldBeEmpty = pending filter { p => val Pair(_, st) = p; st.nonEmpty };
+ val shouldBeEmpty = pending filter { p => val Pair(_, st) = p; st.nonEmpty }
val merged = mutable.Map[Local, List[Interval]]()
def addToMerged(lv: Local, start: Label, end: Label) {
val intv = Interval(start, end)
@@ -2168,10 +2168,10 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
if(merged.isDefinedAt(k)) {
val balancedStart = merged(k).head.lstart
if(balancedStart.getOffset < start.getOffset) {
- start = balancedStart;
+ start = balancedStart
}
}
- val endOpt: Option[Label] = for(ranges <- merged.get(k)) yield ranges.last.lend;
+ val endOpt: Option[Label] = for(ranges <- merged.get(k)) yield ranges.last.lend
val end = endOpt.getOrElse(onePastLast)
addToMerged(k, start, end)
}
@@ -2204,7 +2204,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
for(Pair(local, ranges) <- scoping.getMerged()) {
var name = javaName(local.sym)
if (name == null) {
- anonCounter += 1;
+ anonCounter += 1
name = "<anon" + anonCounter + ">"
}
for(intrvl <- ranges) {
@@ -2372,7 +2372,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
case LOAD_MODULE(module) =>
// assert(module.isModule, "Expected module: " + module)
- debuglog("generating LOAD_MODULE for: " + module + " flags: " + module.flagString);
+ debuglog("generating LOAD_MODULE for: " + module + " flags: " + module.flagString)
if (clasz.symbol == module.moduleClass && jMethodName != nme.readResolve.toString) {
jmethod.visitVarInsn(Opcodes.ALOAD, 0)
} else {
@@ -2502,7 +2502,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
while (restTagss.nonEmpty) {
val currLabel = labels(restBranches.head)
for (cTag <- restTagss.head) {
- flatKeys(k) = cTag;
+ flatKeys(k) = cTag
flatBranches(k) = currLabel
k += 1
}
@@ -2701,7 +2701,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
def genPrimitive(primitive: Primitive, pos: Position) {
- import asm.Opcodes;
+ import asm.Opcodes
primitive match {
@@ -2879,7 +2879,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
* *Does not assume the parameters come first!*
*/
def computeLocalVarsIndex(m: IMethod) {
- var idx = if (m.symbol.isStaticMember) 0 else 1;
+ var idx = if (m.symbol.isStaticMember) 0 else 1
for (l <- m.params) {
debuglog("Index value for " + l + "{" + l.## + "}: " + idx)
@@ -2901,7 +2901,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
class JMirrorBuilder(bytecodeWriter: BytecodeWriter) extends JCommonBuilder(bytecodeWriter) {
private var cunit: CompilationUnit = _
- def getCurrentCUnit(): CompilationUnit = cunit;
+ def getCurrentCUnit(): CompilationUnit = cunit
/** Generate a mirror class for a top-level module. A mirror class is a class
* containing only static methods that forward to the corresponding method
@@ -2994,8 +2994,8 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
for (f <- clasz.fields if f.symbol.hasGetter;
g = f.symbol.getter(clasz.symbol);
- s = f.symbol.setter(clasz.symbol);
- if g.isPublic && !(f.symbol.name startsWith "$")
+ s = f.symbol.setter(clasz.symbol)
+ if g.isPublic && !(f.symbol.name startsWith "$")
) {
// inserting $outer breaks the bean
fieldList = javaName(f.symbol) :: javaName(g) :: (if (s != NoSymbol) javaName(s) else null) :: fieldList
@@ -3180,7 +3180,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
// leave infinite-loops in place
return (dest, hops filterNot (dest eq _))
}
- prev = dest;
+ prev = dest
false
case None => true
}
@@ -3268,11 +3268,11 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
/* remove from all containers that may contain a reference to */
def elide(redu: BasicBlock) {
assert(m.startBlock != redu, "startBlock should have been re-wired by now")
- m.code.removeBlock(redu);
+ m.code.removeBlock(redu)
}
var wasReduced = false
- val entryPoints: List[BasicBlock] = m.startBlock :: (m.exh map (_.startBlock));
+ val entryPoints: List[BasicBlock] = m.startBlock :: (m.exh map (_.startBlock))
val elided = mutable.Set.empty[BasicBlock] // debug
val newTargets = mutable.Set.empty[BasicBlock] // debug
@@ -3303,7 +3303,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
def normalize(m: IMethod) {
if(!m.hasCode) { return }
collapseJumpOnlyBlocks(m)
- var wasReduced = false;
+ var wasReduced = false
do {
wasReduced = false
// Prune from an exception handler those covered blocks which are jump-only.
diff --git a/src/compiler/scala/tools/nsc/backend/opt/ClosureElimination.scala b/src/compiler/scala/tools/nsc/backend/opt/ClosureElimination.scala
index 2d53eb2ed9..8f439fc800 100644
--- a/src/compiler/scala/tools/nsc/backend/opt/ClosureElimination.scala
+++ b/src/compiler/scala/tools/nsc/backend/opt/ClosureElimination.scala
@@ -108,7 +108,7 @@ abstract class ClosureElimination extends SubComponent {
val t = info.getBinding(l)
t match {
case Deref(This) | Const(_) =>
- bb.replaceInstruction(i, valueToInstruction(t));
+ bb.replaceInstruction(i, valueToInstruction(t))
debuglog(s"replaced $i with $t")
case _ =>
@@ -226,7 +226,7 @@ abstract class ClosureElimination extends SubComponent {
h = t.head
t = t.tail
}
- } while (redo);
+ } while (redo)
b fromList newInstructions
}
}
diff --git a/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala b/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala
index b998e3fbd2..3b94e2bd8d 100644
--- a/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala
+++ b/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala
@@ -54,7 +54,7 @@ abstract class DeadCodeElimination extends SubComponent {
}
}
- val rdef = new reachingDefinitions.ReachingDefinitionsAnalysis;
+ val rdef = new reachingDefinitions.ReachingDefinitionsAnalysis
/** Use-def chain: give the reaching definitions at the beginning of given instruction. */
var defs: immutable.Map[InstrLoc, immutable.Set[rdef.lattice.Definition]] = immutable.HashMap.empty
@@ -82,7 +82,7 @@ abstract class DeadCodeElimination extends SubComponent {
def dieCodeDie(m: IMethod) {
if (m.hasCode) {
- debuglog("dead code elimination on " + m);
+ debuglog("dead code elimination on " + m)
dropOf.clear()
localStores.clear()
clobbers.clear()
@@ -104,13 +104,13 @@ abstract class DeadCodeElimination extends SubComponent {
/** collect reaching definitions and initial useful instructions for this method. */
def collectRDef(m: IMethod): Unit = if (m.hasCode) {
- defs = immutable.HashMap.empty; worklist.clear(); useful.clear();
- rdef.init(m);
- rdef.run();
+ defs = immutable.HashMap.empty; worklist.clear(); useful.clear()
+ rdef.init(m)
+ rdef.run()
m foreachBlock { bb =>
useful(bb) = new mutable.BitSet(bb.size)
- var rd = rdef.in(bb);
+ var rd = rdef.in(bb)
for (Pair(i, idx) <- bb.toList.zipWithIndex) {
// utility for adding to worklist
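Aside on the semicolons kept above (defs = ...; worklist.clear(); useful.clear()): the patch only drops semicolons at end of line, where Scala inference supplies them anyway; separators between statements sharing a line still have to stay. A small sketch of the distinction:

    object SemicolonInference {
      // Needed: ';' separates two statements on the same line.
      val xs = List(1, 2, 3); val ys = xs.map(_ * 2)
      // Redundant at end of line: the line break already terminates the statement.
      val total = ys.sum
    }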
diff --git a/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala b/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala
index c834607203..010f5b8319 100644
--- a/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala
+++ b/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala
@@ -969,7 +969,7 @@ abstract class Inliners extends SubComponent {
}
if(sameSymbols) { // TODO but this also amounts to recursive, ie should lead to adding to tfa.knownNever, right?
- tfa.knownUnsafe += inc.sym;
+ tfa.knownUnsafe += inc.sym
return DontInlineHere("sameSymbols (ie caller == callee)")
}
@@ -1043,9 +1043,9 @@ abstract class Inliners extends SubComponent {
if (caller.isInClosure) score -= 2
else if (caller.inlinedCalls < 1) score -= 1 // only monadic methods can trigger the first inline
- if (inc.isSmall) score += 1;
+ if (inc.isSmall) score += 1
// if (inc.hasClosureParam) score += 2
- if (inc.isLarge) score -= 1;
+ if (inc.isLarge) score -= 1
if (caller.isSmall && isLargeSum) {
score -= 1
debuglog(s"inliner score decreased to $score because small caller $caller would become large")
@@ -1054,8 +1054,8 @@ abstract class Inliners extends SubComponent {
if (inc.isMonadic) score += 3
else if (inc.isHigherOrder) score += 1
- if (inc.isInClosure) score += 2;
- if (inlinedMethodCount(inc.sym) > 2) score -= 2;
+ if (inc.isInClosure) score += 2
+ if (inlinedMethodCount(inc.sym) > 2) score -= 2
score
}
}
diff --git a/src/compiler/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala b/src/compiler/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala
index 14b7b80ea5..92dd05e70a 100644
--- a/src/compiler/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala
+++ b/src/compiler/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala
@@ -31,7 +31,7 @@ class DotDiagramGenerator(settings: doc.Settings) extends DiagramGenerator {
private var counter = 0
def generate(diagram: Diagram, template: DocTemplateEntity, page: HtmlPage):NodeSeq = {
- counter = counter + 1;
+ counter = counter + 1
this.page = page
pathToLib = "../" * (page.templateToPath(template).size - 1) + "lib/"
val dot = generateDot(diagram)
@@ -207,7 +207,7 @@ class DotDiagramGenerator(settings: doc.Settings) extends DiagramGenerator {
private def node2Dot(node: Node) = {
// escape HTML characters in node names
- def escape(name: String) = name.replace("&", "&amp;").replace("<", "&lt;").replace(">", "&gt;");
+ def escape(name: String) = name.replace("&", "&amp;").replace("<", "&lt;").replace(">", "&gt;")
// assemble node attribues in a map
val attr = scala.collection.mutable.Map[String, String]()
@@ -319,7 +319,7 @@ class DotDiagramGenerator(settings: doc.Settings) extends DiagramGenerator {
var tSVG = -System.currentTimeMillis
val result = if (dotOutput != null) {
- val src = scala.io.Source.fromString(dotOutput);
+ val src = scala.io.Source.fromString(dotOutput)
try {
val cpa = scala.xml.parsing.ConstructingParser.fromSource(src, false)
val doc = cpa.document()
diff --git a/src/compiler/scala/tools/nsc/interactive/Global.scala b/src/compiler/scala/tools/nsc/interactive/Global.scala
index b0318f40c4..fa1d4a38b9 100644
--- a/src/compiler/scala/tools/nsc/interactive/Global.scala
+++ b/src/compiler/scala/tools/nsc/interactive/Global.scala
@@ -366,7 +366,7 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "")
.format(waitLoadedTypeResponses.size, getParsedEnteredResponses.size))
checkNoResponsesOutstanding()
- log.flush();
+ log.flush()
scheduler = new NoWorkScheduler
throw ShutdownReq
}
@@ -1025,7 +1025,7 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "")
getUnit(source) match {
case Some(unit) =>
if (unit.isUpToDate) {
- debugLog("already typed");
+ debugLog("already typed")
response set unit.body
} else if (ignoredFiles(source.file)) {
response.raise(lastException.getOrElse(CancelException))
diff --git a/src/compiler/scala/tools/nsc/interactive/REPL.scala b/src/compiler/scala/tools/nsc/interactive/REPL.scala
index be1c656c81..71dd0d3bbf 100644
--- a/src/compiler/scala/tools/nsc/interactive/REPL.scala
+++ b/src/compiler/scala/tools/nsc/interactive/REPL.scala
@@ -51,8 +51,8 @@ object REPL {
} catch {
case ex @ FatalError(msg) =>
if (true || command.settings.debug.value) // !!!
- ex.printStackTrace();
- reporter.error(null, "fatal error: " + msg)
+ ex.printStackTrace()
+ reporter.error(null, "fatal error: " + msg)
}
}
}
diff --git a/src/compiler/scala/tools/nsc/interpreter/ILoop.scala b/src/compiler/scala/tools/nsc/interpreter/ILoop.scala
index c7e682cb08..d08c9cb36c 100644
--- a/src/compiler/scala/tools/nsc/interpreter/ILoop.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/ILoop.scala
@@ -256,7 +256,7 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
private def findToolsJar() = {
val jdkPath = Directory(jdkHome)
- val jar = jdkPath / "lib" / "tools.jar" toFile;
+ val jar = jdkPath / "lib" / "tools.jar" toFile
if (jar isFile)
Some(jar)
diff --git a/src/compiler/scala/tools/nsc/io/Pickler.scala b/src/compiler/scala/tools/nsc/io/Pickler.scala
index 5d32c10143..862046eb66 100644
--- a/src/compiler/scala/tools/nsc/io/Pickler.scala
+++ b/src/compiler/scala/tools/nsc/io/Pickler.scala
@@ -167,7 +167,7 @@ object Pickler {
*/
def labelledPickler[T](label: String, p: Pickler[T]): Pickler[T] = new Pickler[T] {
def pickle(wr: Writer, x: T) = {
- wr.write(quoted(label));
+ wr.write(quoted(label))
wr.write("(")
p.pickle(wr, x)
wr.write(")")
diff --git a/src/compiler/scala/tools/nsc/javac/JavaParsers.scala b/src/compiler/scala/tools/nsc/javac/JavaParsers.scala
index 0a6716e396..8f5dca2702 100644
--- a/src/compiler/scala/tools/nsc/javac/JavaParsers.scala
+++ b/src/compiler/scala/tools/nsc/javac/JavaParsers.scala
@@ -845,7 +845,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
/** CompilationUnit ::= [package QualId semi] TopStatSeq
*/
def compilationUnit(): Tree = {
- var pos = in.currentPos;
+ var pos = in.currentPos
val pkg: RefTree =
if (in.token == AT || in.token == PACKAGE) {
annotations()
diff --git a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala
index 129331f435..5b5118a94f 100644
--- a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala
+++ b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala
@@ -178,7 +178,7 @@ abstract class SymbolLoaders {
if (!settings.isScaladoc)
globalError(
if (msg eq null) "i/o error while loading " + root.name
- else "error while loading " + root.name + ", " + msg);
+ else "error while loading " + root.name + ", " + msg)
}
try {
val start = currentTime
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
index 9f89f47240..a7e4006fbe 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
@@ -1128,7 +1128,7 @@ abstract class ClassfileParser {
case tpnme.ScalaSignatureATTR =>
isScala = true
val pbuf = new PickleBuffer(in.buf, in.bp, in.bp + attrLen)
- pbuf.readNat(); pbuf.readNat();
+ pbuf.readNat(); pbuf.readNat()
if (pbuf.readNat == 0) // a scala signature attribute with no entries means that the actual scala signature
isScalaAnnot = true // is in a ScalaSignature annotation.
in.skip(attrLen)
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala
index 7871ac8f20..39788ee3e7 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala
@@ -65,7 +65,7 @@ abstract class ICodeReader extends ClassfileParser {
val fieldCount = in.nextChar
for (i <- 0 until fieldCount) parseField()
val methodCount = in.nextChar
- for (i <- 0 until methodCount) parseMethod();
+ for (i <- 0 until methodCount) parseMethod()
instanceCode.methods = instanceCode.methods.reverse
staticCode.methods = staticCode.methods.reverse
}
@@ -131,13 +131,13 @@ abstract class ICodeReader extends ClassfileParser {
val attributeCount = in.nextChar
for (i <- 0 until attributeCount) parseAttribute()
} else {
- debuglog("Skipping non-existent method.");
- skipAttributes();
+ debuglog("Skipping non-existent method.")
+ skipAttributes()
}
} catch {
case e: MissingRequirementError =>
in.bp = beginning; skipAttributes()
- debuglog("Skipping non-existent method. " + e.msg);
+ debuglog("Skipping non-existent method. " + e.msg)
}
}
@@ -247,9 +247,9 @@ abstract class ICodeReader extends ClassfileParser {
case JVM.aload =>
val local = in.nextByte.toInt; size += 1
if (local == 0 && !method.isStatic)
- code.emit(THIS(method.symbol.owner));
+ code.emit(THIS(method.symbol.owner))
else
- code.emit(LOAD_LOCAL(code.getLocal(local, ObjectReference)));
+ code.emit(LOAD_LOCAL(code.getLocal(local, ObjectReference)))
case JVM.iload_0 => code.emit(LOAD_LOCAL(code.getLocal(0, INT)))
case JVM.iload_1 => code.emit(LOAD_LOCAL(code.getLocal(1, INT)))
@@ -269,9 +269,9 @@ abstract class ICodeReader extends ClassfileParser {
case JVM.dload_3 => code.emit(LOAD_LOCAL(code.getLocal(3, DOUBLE)))
case JVM.aload_0 =>
if (!method.isStatic)
- code.emit(THIS(method.symbol.owner));
+ code.emit(THIS(method.symbol.owner))
else
- code.emit(LOAD_LOCAL(code.getLocal(0, ObjectReference)));
+ code.emit(LOAD_LOCAL(code.getLocal(0, ObjectReference)))
case JVM.aload_1 => code.emit(LOAD_LOCAL(code.getLocal(1, ObjectReference)))
case JVM.aload_2 => code.emit(LOAD_LOCAL(code.getLocal(2, ObjectReference)))
case JVM.aload_3 => code.emit(LOAD_LOCAL(code.getLocal(3, ObjectReference)))
@@ -491,7 +491,7 @@ abstract class ICodeReader extends ClassfileParser {
case JVM.invokespecial =>
val m = pool.getMemberSymbol(in.nextChar, false); size += 2
val style = if (m.name == nme.CONSTRUCTOR || m.isPrivate) Static(true)
- else SuperCall(m.owner.name);
+ else SuperCall(m.owner.name)
code.emit(CALL_METHOD(m, style))
case JVM.invokestatic =>
val m = pool.getMemberSymbol(in.nextChar, true); size += 2
@@ -722,36 +722,36 @@ abstract class ICodeReader extends ClassfileParser {
i match {
case DUP_X1 =>
val (one, two) = stack.pop2
- push(one); push(two); push(one);
+ push(one); push(two); push(one)
case DUP_X2 =>
val (one, two, three) = stack.pop3
- push(one); push(three); push(two); push(one);
+ push(one); push(three); push(two); push(one)
case DUP2_X1 =>
val (one, two) = stack.pop2
if (one.isWideType) {
- push(one); push(two); push(one);
+ push(one); push(two); push(one)
} else {
val three = stack.pop
- push(two); push(one); push(three); push(two); push(one);
+ push(two); push(one); push(three); push(two); push(one)
}
case DUP2_X2 =>
val (one, two) = stack.pop2
if (one.isWideType && two.isWideType) {
- push(one); push(two); push(one);
+ push(one); push(two); push(one)
} else if (one.isWideType) {
val three = stack.pop
assert(!three.isWideType, "Impossible")
- push(one); push(three); push(two); push(one);
+ push(one); push(three); push(two); push(one)
} else {
val three = stack.pop
if (three.isWideType) {
- push(two); push(one); push(one); push(three); push(two); push(one);
+ push(two); push(one); push(one); push(three); push(two); push(one)
} else {
val four = stack.pop
- push(two); push(one); push(four); push(one); push(three); push(two); push(one);
+ push(two); push(one); push(four); push(one); push(three); push(two); push(one)
}
}
@@ -779,7 +779,7 @@ abstract class ICodeReader extends ClassfileParser {
STORE_LOCAL(tmp2),
LOAD_LOCAL(tmp1),
LOAD_LOCAL(tmp2),
- LOAD_LOCAL(tmp1)));
+ LOAD_LOCAL(tmp1)))
case DUP_X2 =>
val one = info.stack.types(0)
@@ -792,30 +792,30 @@ abstract class ICodeReader extends ClassfileParser {
STORE_LOCAL(tmp2),
LOAD_LOCAL(tmp1),
LOAD_LOCAL(tmp2),
- LOAD_LOCAL(tmp1)));
+ LOAD_LOCAL(tmp1)))
else {
- val tmp3 = freshLocal(info.stack.types(2));
+ val tmp3 = freshLocal(info.stack.types(2))
bb.replaceInstruction(i, List(STORE_LOCAL(tmp1),
STORE_LOCAL(tmp2),
STORE_LOCAL(tmp3),
LOAD_LOCAL(tmp1),
LOAD_LOCAL(tmp3),
LOAD_LOCAL(tmp2),
- LOAD_LOCAL(tmp1)));
+ LOAD_LOCAL(tmp1)))
}
case DUP2_X1 =>
val one = info.stack.types(0)
val two = info.stack.types(1)
- val tmp1 = freshLocal(one);
- val tmp2 = freshLocal(two);
+ val tmp1 = freshLocal(one)
+ val tmp2 = freshLocal(two)
if (one.isWideType) {
assert(!two.isWideType, "Impossible")
bb.replaceInstruction(i, List(STORE_LOCAL(tmp1),
STORE_LOCAL(tmp2),
LOAD_LOCAL(tmp1),
LOAD_LOCAL(tmp2),
- LOAD_LOCAL(tmp1)));
+ LOAD_LOCAL(tmp1)))
} else {
val tmp3 = freshLocal(info.stack.types(2))
bb.replaceInstruction(i, List(STORE_LOCAL(tmp1),
@@ -824,7 +824,7 @@ abstract class ICodeReader extends ClassfileParser {
LOAD_LOCAL(tmp1),
LOAD_LOCAL(tmp3),
LOAD_LOCAL(tmp2),
- LOAD_LOCAL(tmp1)));
+ LOAD_LOCAL(tmp1)))
}
case DUP2_X2 =>
@@ -837,21 +837,21 @@ abstract class ICodeReader extends ClassfileParser {
STORE_LOCAL(tmp2),
LOAD_LOCAL(tmp1),
LOAD_LOCAL(tmp2),
- LOAD_LOCAL(tmp1)));
+ LOAD_LOCAL(tmp1)))
} else if (one.isWideType) {
val three = info.stack.types(2)
assert(!two.isWideType && !three.isWideType, "Impossible")
- val tmp3 = freshLocal(three);
+ val tmp3 = freshLocal(three)
bb.replaceInstruction(i, List(STORE_LOCAL(tmp1),
STORE_LOCAL(tmp2),
STORE_LOCAL(tmp3),
LOAD_LOCAL(tmp1),
LOAD_LOCAL(tmp3),
LOAD_LOCAL(tmp2),
- LOAD_LOCAL(tmp1)));
+ LOAD_LOCAL(tmp1)))
} else {
val three = info.stack.types(2)
- val tmp3 = freshLocal(three);
+ val tmp3 = freshLocal(three)
if (three.isWideType) {
bb.replaceInstruction(i, List(STORE_LOCAL(tmp1),
STORE_LOCAL(tmp2),
@@ -860,10 +860,10 @@ abstract class ICodeReader extends ClassfileParser {
LOAD_LOCAL(tmp1),
LOAD_LOCAL(tmp3),
LOAD_LOCAL(tmp2),
- LOAD_LOCAL(tmp1)));
+ LOAD_LOCAL(tmp1)))
} else {
val four = info.stack.types(3)
- val tmp4 = freshLocal(three);
+ val tmp4 = freshLocal(three)
assert(!four.isWideType, "Impossible")
bb.replaceInstruction(i, List(STORE_LOCAL(tmp1),
STORE_LOCAL(tmp2),
@@ -874,7 +874,7 @@ abstract class ICodeReader extends ClassfileParser {
LOAD_LOCAL(tmp4),
LOAD_LOCAL(tmp3),
LOAD_LOCAL(tmp2),
- LOAD_LOCAL(tmp1)));
+ LOAD_LOCAL(tmp1)))
}
}
case _ =>
@@ -954,7 +954,7 @@ abstract class ICodeReader extends ClassfileParser {
/** Return a fresh Local variable for the given index.
*/
private def freshLocal(idx: Int, kind: TypeKind, isArg: Boolean) = {
- val sym = method.symbol.newVariable(newTermName("loc" + idx)).setInfo(kind.toType);
+ val sym = method.symbol.newVariable(newTermName("loc" + idx)).setInfo(kind.toType)
val l = new Local(sym, kind, isArg)
method.addLocal(l)
l
@@ -984,7 +984,8 @@ abstract class ICodeReader extends ClassfileParser {
jmpTargets += pc
}
- case class LJUMP(pc: Int) extends LazyJump(pc);
+ case class LJUMP(pc: Int) extends LazyJump(pc)
+
case class LCJUMP(success: Int, failure: Int, cond: TestOp, kind: TypeKind)
extends LazyJump(success) {
override def toString(): String = "LCJUMP (" + kind + ") " + success + " : " + failure
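On the DUP_X* hunks above: the reader replays each JVM stack-shuffling opcode on an abstract stack (pop2 returns the top value, then the one under it). A hypothetical sketch of DUP_X1 on a plain list, not the patched code:

    // DUP_X1 (list head = top of stack): ..., v2, v1  ->  ..., v1, v2, v1
    def dupX1[A](stack: List[A]): List[A] = stack match {
      case one :: two :: rest => one :: two :: one :: rest
      case _                  => sys.error("DUP_X1 needs at least two stack slots")
    }
    // dupX1(List("v1", "v2", "bottom")) == List("v1", "v2", "v1", "bottom")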
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala
index c8b7fcee8f..79d0df5a29 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala
@@ -179,7 +179,7 @@ abstract class Pickler extends SubComponent {
putSymbol(sym.privateWithin)
putType(sym.info)
if (sym.thisSym.tpeHK != sym.tpeHK)
- putType(sym.typeOfThis);
+ putType(sym.typeOfThis)
putSymbol(sym.alias)
if (!sym.children.isEmpty) {
val (locals, globals) = sym.children partition (_.isLocalClass)
@@ -246,8 +246,8 @@ abstract class Pickler extends SubComponent {
// val savedBoundSyms = boundSyms // boundSyms are known to be local based on the EXISTENTIAL flag (see isLocal)
// boundSyms = tparams ::: boundSyms
// try {
- putType(restpe);
-// } finally {
+ putType(restpe)
+ // } finally {
// boundSyms = savedBoundSyms
// }
putSymbols(tparams)
diff --git a/src/compiler/scala/tools/nsc/transform/Constructors.scala b/src/compiler/scala/tools/nsc/transform/Constructors.scala
index 79dd36803d..a4a6c3ff31 100644
--- a/src/compiler/scala/tools/nsc/transform/Constructors.scala
+++ b/src/compiler/scala/tools/nsc/transform/Constructors.scala
@@ -188,7 +188,7 @@ abstract class Constructors extends Transform with ast.TreeDSL {
// Lazy vals don't get the assignment in the constructor.
if (!stat.symbol.tpe.isInstanceOf[ConstantType]) {
if (rhs != EmptyTree && !stat.symbol.isLazy) {
- val rhs1 = intoConstructor(stat.symbol, rhs);
+ val rhs1 = intoConstructor(stat.symbol, rhs)
(if (canBeMoved(stat)) constrPrefixBuf else constrStatBuf) += mkAssign(
stat.symbol, rhs1)
}
diff --git a/src/compiler/scala/tools/nsc/transform/Erasure.scala b/src/compiler/scala/tools/nsc/transform/Erasure.scala
index 60eab773aa..141a63d36e 100644
--- a/src/compiler/scala/tools/nsc/transform/Erasure.scala
+++ b/src/compiler/scala/tools/nsc/transform/Erasure.scala
@@ -745,7 +745,7 @@ abstract class Erasure extends AddInterfaces
tree.symbol = NoSymbol
selectFrom(qual1)
} else if (isMethodTypeWithEmptyParams(qual1.tpe)) {
- assert(qual1.symbol.isStable, qual1.symbol);
+ assert(qual1.symbol.isStable, qual1.symbol)
val applied = Apply(qual1, List()) setPos qual1.pos setType qual1.tpe.resultType
adaptMember(selectFrom(applied))
} else if (!(qual1.isInstanceOf[Super] || (qual1.tpe.typeSymbol isSubClass tree.symbol.owner))) {
@@ -806,7 +806,7 @@ abstract class Erasure extends AddInterfaces
newCdef setType newCdef.body.tpe
}
def adaptBranch(branch: Tree): Tree =
- if (branch == EmptyTree) branch else adaptToType(branch, tree1.tpe);
+ if (branch == EmptyTree) branch else adaptToType(branch, tree1.tpe)
tree1 match {
case If(cond, thenp, elsep) =>
diff --git a/src/compiler/scala/tools/nsc/transform/Flatten.scala b/src/compiler/scala/tools/nsc/transform/Flatten.scala
index a370b45be0..44d39de205 100644
--- a/src/compiler/scala/tools/nsc/transform/Flatten.scala
+++ b/src/compiler/scala/tools/nsc/transform/Flatten.scala
@@ -85,7 +85,7 @@ abstract class Flatten extends InfoTransform {
val restp1 = apply(restp)
if (restp1 eq restp) tp else copyMethodType(tp, params, restp1)
case PolyType(tparams, restp) =>
- val restp1 = apply(restp);
+ val restp1 = apply(restp)
if (restp1 eq restp) tp else PolyType(tparams, restp1)
case _ =>
mapOver(tp)
diff --git a/src/compiler/scala/tools/nsc/transform/LambdaLift.scala b/src/compiler/scala/tools/nsc/transform/LambdaLift.scala
index a4b725d313..60815da967 100644
--- a/src/compiler/scala/tools/nsc/transform/LambdaLift.scala
+++ b/src/compiler/scala/tools/nsc/transform/LambdaLift.scala
@@ -143,7 +143,7 @@ abstract class LambdaLift extends InfoTransform {
ss addEntry sym
renamable addEntry sym
changedFreeVars = true
- debuglog("" + sym + " is free in " + enclosure);
+ debuglog("" + sym + " is free in " + enclosure)
if (sym.isVariable) sym setFlag CAPTURED
}
!enclosure.isClass
@@ -161,7 +161,7 @@ abstract class LambdaLift extends InfoTransform {
private val freeVarTraverser = new Traverser {
override def traverse(tree: Tree) {
try { //debug
- val sym = tree.symbol;
+ val sym = tree.symbol
tree match {
case ClassDef(_, _, _, _) =>
liftedDefs(tree.symbol) = Nil
diff --git a/src/compiler/scala/tools/nsc/transform/Mixin.scala b/src/compiler/scala/tools/nsc/transform/Mixin.scala
index b6d4bdb0c5..e33d665cd0 100644
--- a/src/compiler/scala/tools/nsc/transform/Mixin.scala
+++ b/src/compiler/scala/tools/nsc/transform/Mixin.scala
@@ -126,7 +126,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
" " + mixinClass + " " + base.info.baseClasses + "/" + bcs)
while (!bcs.isEmpty && sym == NoSymbol) {
if (settings.debug.value) {
- val other = bcs.head.info.nonPrivateDecl(member.name);
+ val other = bcs.head.info.nonPrivateDecl(member.name)
debuglog("rebindsuper " + bcs.head + " " + other + " " + other.tpe +
" " + other.isDeferred)
}
@@ -242,7 +242,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
}
}
}
- debuglog("new defs of " + clazz + " = " + clazz.info.decls);
+ debuglog("new defs of " + clazz + " = " + clazz.info.decls)
}
}
diff --git a/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala b/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala
index 822ef79cd0..2610679542 100644
--- a/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala
+++ b/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala
@@ -86,10 +86,10 @@ abstract class OverridingPairs {
{ def fillDecls(bcs: List[Symbol], deferredflag: Int) {
if (!bcs.isEmpty) {
fillDecls(bcs.tail, deferredflag)
- var e = bcs.head.info.decls.elems;
+ var e = bcs.head.info.decls.elems
while (e ne null) {
if (e.sym.getFlag(DEFERRED) == deferredflag.toLong && !exclude(e.sym))
- decls enter e.sym;
+ decls enter e.sym
e = e.next
}
}
@@ -134,7 +134,7 @@ abstract class OverridingPairs {
private val subParents = new Array[BitSet](size)
{ for (i <- List.range(0, size))
- subParents(i) = new BitSet(size);
+ subParents(i) = new BitSet(size)
for (p <- parents) {
val pIndex = index(p.typeSymbol)
if (pIndex >= 0)
@@ -190,7 +190,7 @@ abstract class OverridingPairs {
if (nextEntry ne null) {
do {
do {
- nextEntry = decls.lookupNextEntry(nextEntry);
+ nextEntry = decls.lookupNextEntry(nextEntry)
/* DEBUG
if ((nextEntry ne null) &&
!(nextEntry.sym hasFlag PRIVATE) &&
@@ -208,12 +208,12 @@ abstract class OverridingPairs {
// overriding and nextEntry.sym
} while ((nextEntry ne null) && (hasCommonParentAsSubclass(overriding, nextEntry.sym)))
if (nextEntry ne null) {
- overridden = nextEntry.sym;
+ overridden = nextEntry.sym
//Console.println("yield: " + overriding + overriding.locationString + " / " + overridden + overridden.locationString);//DEBUG
} else {
do {
curEntry = curEntry.next
- } while ((curEntry ne null) && (visited contains curEntry));
+ } while ((curEntry ne null) && (visited contains curEntry))
nextEntry = curEntry
next()
}
diff --git a/src/compiler/scala/tools/nsc/transform/TailCalls.scala b/src/compiler/scala/tools/nsc/transform/TailCalls.scala
index c375bc4362..b2d05f98b1 100644
--- a/src/compiler/scala/tools/nsc/transform/TailCalls.scala
+++ b/src/compiler/scala/tools/nsc/transform/TailCalls.scala
@@ -31,7 +31,7 @@ abstract class TailCalls extends Transform {
class Phase(prev: scala.tools.nsc.Phase) extends StdPhase(prev) {
def apply(unit: global.CompilationUnit) {
if (!(settings.debuginfo.value == "notailcalls")) {
- newTransformer(unit).transformUnit(unit);
+ newTransformer(unit).transformUnit(unit)
}
}
}
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala
index 3baa88002f..3ee9009116 100644
--- a/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala
+++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala
@@ -173,7 +173,7 @@ trait MatchTranslation { self: PatternMatching =>
(caseScrutSym, propagateSubstitution(translateCase(caseScrutSym, pt)(caseDef), EmptySubstitution))
}
- for(cases <- emitTypeSwitch(bindersAndCases, pt).toList;
+ for(cases <- emitTypeSwitch(bindersAndCases, pt).toList
if cases forall treeInfo.isCatchCase; // must check again, since it's not guaranteed -- TODO: can we eliminate this? e.g., a type test could test for a trait or a non-trivial prefix, which are not handled by the back-end
cse <- cases) yield fixerUpper(matchOwner, pos)(cse).asInstanceOf[CaseDef]
}
diff --git a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
index b070bd1b49..eb91251930 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
@@ -590,7 +590,7 @@ trait Contexts { self: Analyzer =>
def restoreTypeBounds(tp: Type): Type = {
var current = tp
for ((sym, info) <- savedTypeBounds) {
- debuglog("resetting " + sym + " to " + info);
+ debuglog("resetting " + sym + " to " + info)
sym.info match {
case TypeBounds(lo, hi) if (hi <:< lo && lo <:< hi) =>
current = current.instantiateTypeParams(List(sym), List(lo))
diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
index 0bd164a0cb..b7221a78ec 100644
--- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
@@ -95,7 +95,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
class RefCheckTransformer(unit: CompilationUnit) extends Transformer {
- var localTyper: analyzer.Typer = typer;
+ var localTyper: analyzer.Typer = typer
var currentApplication: Tree = EmptyTree
var inPattern: Boolean = false
var checkedCombinations = Set[List[Type]]()
@@ -386,11 +386,11 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
if (!isOverrideAccessOK) {
overrideAccessError()
} else if (other.isClass) {
- overrideError("cannot be used here - class definitions cannot be overridden");
+ overrideError("cannot be used here - class definitions cannot be overridden")
} else if (!other.isDeferred && member.isClass) {
- overrideError("cannot be used here - classes can only override abstract types");
+ overrideError("cannot be used here - classes can only override abstract types")
} else if (other.isEffectivelyFinal) { // (1.2)
- overrideError("cannot override final member");
+ overrideError("cannot override final member")
} else if (!other.isDeferred && !member.isAnyOverride && !member.isSynthetic) { // (*)
// (*) Synthetic exclusion for (at least) default getters, fixes SI-5178. We cannot assign the OVERRIDE flag to
// the default getter: one default getter might sometimes override, sometimes not. Example in comment on ticket.
@@ -449,7 +449,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
// @M: substSym
if( !(sameLength(member.typeParams, other.typeParams) && (memberTp.substSym(member.typeParams, other.typeParams) =:= otherTp)) ) // (1.6)
- overrideTypeError();
+ overrideTypeError()
}
else if (other.isAbstractType) {
//if (!member.typeParams.isEmpty) // (1.7) @MAT
@@ -502,7 +502,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
case rt: RefinedType if !(rt =:= otherTp) && !(checkedCombinations contains rt.parents) =>
// might mask some inconsistencies -- check overrides
checkedCombinations += rt.parents
- val tsym = rt.typeSymbol;
+ val tsym = rt.typeSymbol
if (tsym.pos == NoPosition) tsym setPos member.pos
checkAllOverrides(tsym, typesOnly = true)
case _ =>
@@ -523,7 +523,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
val opc = new overridingPairs.Cursor(clazz)
while (opc.hasNext) {
//Console.println(opc.overriding/* + ":" + opc.overriding.tpe*/ + " in "+opc.overriding.fullName + " overrides " + opc.overridden/* + ":" + opc.overridden.tpe*/ + " in "+opc.overridden.fullName + "/"+ opc.overridden.hasFlag(DEFERRED));//debug
- if (!opc.overridden.isClass) checkOverride(opc.overriding, opc.overridden);
+ if (!opc.overridden.isClass) checkOverride(opc.overriding, opc.overridden)
opc.next()
}
@@ -785,7 +785,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
// for (bc <- clazz.info.baseClasses.tail) Console.println("" + bc + " has " + bc.info.decl(member.name) + ":" + bc.info.decl(member.name).tpe);//DEBUG
val nonMatching: List[Symbol] = clazz.info.member(member.name).alternatives.filterNot(_.owner == clazz).filterNot(_.isFinal)
- def issueError(suffix: String) = unit.error(member.pos, member.toString() + " overrides nothing" + suffix);
+ def issueError(suffix: String) = unit.error(member.pos, member.toString() + " overrides nothing" + suffix)
nonMatching match {
case Nil =>
issueError("")
@@ -840,7 +840,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
case tp1 :: tp2 :: _ =>
unit.error(clazz.pos, "illegal inheritance;\n " + clazz +
" inherits different type instances of " + baseClass +
- ":\n" + tp1 + " and " + tp2);
+ ":\n" + tp1 + " and " + tp2)
explainTypes(tp1, tp2)
explainTypes(tp2, tp1)
}
@@ -905,7 +905,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
val e = currentLevel.scope.lookupEntry(sym.name)
if ((e ne null) && sym == e.sym) {
var l = currentLevel
- while (l.scope != e.owner) l = l.outer;
+ while (l.scope != e.owner) l = l.outer
val symindex = symIndex(sym)
if (l.maxindex < symindex) {
l.refpos = pos
@@ -1093,7 +1093,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
/* Convert a reference to a case factory of type `tpe` to a new of the class it produces. */
def toConstructor(pos: Position, tpe: Type): Tree = {
val rtpe = tpe.finalResultType
- assert(rtpe.typeSymbol hasFlag CASE, tpe);
+ assert(rtpe.typeSymbol hasFlag CASE, tpe)
localTyper.typedOperator {
atPos(pos) {
Select(New(TypeTree(rtpe)), rtpe.typeSymbol.primaryConstructor)
diff --git a/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala b/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala
index f2129992e5..d8cedd119b 100644
--- a/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala
@@ -129,11 +129,11 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
val clazz = sup.symbol
if (sym.isDeferred) {
- val member = sym.overridingSymbol(clazz);
+ val member = sym.overridingSymbol(clazz)
if (mix != tpnme.EMPTY || member == NoSymbol ||
!(member.isAbstractOverride && member.isIncompleteIn(clazz)))
unit.error(sel.pos, ""+sym.fullLocationString+" is accessed from super. It may not be abstract "+
- "unless it is overridden by a member declared `abstract' and `override'");
+ "unless it is overridden by a member declared `abstract' and `override'")
} else if (mix == tpnme.EMPTY && !sym.owner.isTrait){
// SI-4989 Check if an intermediate class between `clazz` and `sym.owner` redeclares the method as abstract.
val intermediateClasses = clazz.info.baseClasses.tail.takeWhile(_ != sym.owner)
@@ -332,8 +332,8 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
lhs.symbol.isJavaDefined &&
needsProtectedAccessor(lhs.symbol, tree.pos)) {
debuglog("Adding protected setter for " + tree)
- val setter = makeSetter(lhs);
- debuglog("Replaced " + tree + " with " + setter);
+ val setter = makeSetter(lhs)
+ debuglog("Replaced " + tree + " with " + setter)
transform(localTyper.typed(Apply(setter, List(qual, rhs))))
} else
super.transform(tree)
diff --git a/src/compiler/scala/tools/nsc/util/JavaCharArrayReader.scala b/src/compiler/scala/tools/nsc/util/JavaCharArrayReader.scala
index fc3dd2bac2..4f11d11e8f 100644
--- a/src/compiler/scala/tools/nsc/util/JavaCharArrayReader.scala
+++ b/src/compiler/scala/tools/nsc/util/JavaCharArrayReader.scala
@@ -49,13 +49,13 @@ class JavaCharArrayReader(buf: IndexedSeq[Char], start: Int, /* startline: int,
def udigit: Int = {
val d = digit2int(buf(bp), 16)
if (d >= 0) bp += 1
- else error("error in unicode escape");
+ else error("error in unicode escape")
d
}
if (buf(bp) == 'u' && decodeUni && evenSlashPrefix) {
do {
bp += 1 //; nextcol += 1
- } while (buf(bp) == 'u');
+ } while (buf(bp) == 'u')
val code = udigit << 12 | udigit << 8 | udigit << 4 | udigit
ch = code.asInstanceOf[Char]
isUnicode = true
diff --git a/src/compiler/scala/tools/nsc/util/ShowPickled.scala b/src/compiler/scala/tools/nsc/util/ShowPickled.scala
index b060ea90b8..4bc393bd0b 100644
--- a/src/compiler/scala/tools/nsc/util/ShowPickled.scala
+++ b/src/compiler/scala/tools/nsc/util/ShowPickled.scala
@@ -250,7 +250,7 @@ object ShowPickled extends Names {
case SYMANNOT =>
printSymbolRef(); printTypeRef(); buf.until(end, printAnnotArgRef)
case ANNOTATEDtpe =>
- printTypeRef(); buf.until(end, printAnnotInfoRef);
+ printTypeRef(); buf.until(end, printAnnotInfoRef)
case ANNOTINFO =>
printTypeRef(); buf.until(end, printAnnotArgRef)
case ANNOTARGARRAY =>
diff --git a/src/compiler/scala/tools/reflect/MacroImplementations.scala b/src/compiler/scala/tools/reflect/MacroImplementations.scala
index 47ffbda6ca..002a3fce82 100644
--- a/src/compiler/scala/tools/reflect/MacroImplementations.scala
+++ b/src/compiler/scala/tools/reflect/MacroImplementations.scala
@@ -26,7 +26,7 @@ abstract class MacroImplementations {
"too many arguments for interpolated string")
}
val stringParts = parts map {
- case Literal(Constant(s: String)) => s;
+ case Literal(Constant(s: String)) => s
case _ => throw new IllegalArgumentException("argument parts must be a list of string literals")
}
@@ -141,7 +141,7 @@ abstract class MacroImplementations {
Literal(Constant(fstring)),
newTermName("format")),
List(ids: _* )
- );
+ )
Block(evals.toList, atPos(origApplyPos.focus)(expr)) setPos origApplyPos.makeTransparent
}
diff --git a/src/library/scala/beans/ScalaBeanInfo.scala b/src/library/scala/beans/ScalaBeanInfo.scala
index 3a95335d71..c192a990f1 100644
--- a/src/library/scala/beans/ScalaBeanInfo.scala
+++ b/src/library/scala/beans/ScalaBeanInfo.scala
@@ -35,10 +35,10 @@ abstract class ScalaBeanInfo(clazz: java.lang.Class[_],
// override def getAdditionalBeanInfo() = Array(Introspector getBeanInfo clazz.getSuperclass)
private def init() {
- var i = 0;
+ var i = 0
while (i < props.length) {
pd(i/3) = new PropertyDescriptor(props(i), clazz, props(i+1), props(i+2))
- i = i + 3;
+ i = i + 3
}
}
diff --git a/src/library/scala/collection/SortedMapLike.scala b/src/library/scala/collection/SortedMapLike.scala
index 3c3e6095df..934ed831f5 100644
--- a/src/library/scala/collection/SortedMapLike.scala
+++ b/src/library/scala/collection/SortedMapLike.scala
@@ -69,7 +69,7 @@ self =>
* @param elems the remaining elements to add.
*/
override def + [B1 >: B] (elem1: (A, B1), elem2: (A, B1), elems: (A, B1) *): SortedMap[A, B1] = {
- var m = this + elem1 + elem2;
+ var m = this + elem1 + elem2
for (e <- elems) m = m + e
m
}
diff --git a/src/library/scala/collection/generic/Signalling.scala b/src/library/scala/collection/generic/Signalling.scala
index 498db7f8fa..1f2f224283 100644
--- a/src/library/scala/collection/generic/Signalling.scala
+++ b/src/library/scala/collection/generic/Signalling.scala
@@ -140,7 +140,7 @@ trait AtomicIndexFlag extends Signalling {
val old = intflag.get
if (f <= old) loop = false
else if (intflag.compareAndSet(old, f)) loop = false
- } while (loop);
+ } while (loop)
}
abstract override def setIndexFlagIfLesser(f: Int) = {
var loop = true
@@ -148,7 +148,7 @@ trait AtomicIndexFlag extends Signalling {
val old = intflag.get
if (f >= old) loop = false
else if (intflag.compareAndSet(old, f)) loop = false
- } while (loop);
+ } while (loop)
}
}
diff --git a/src/library/scala/collection/generic/Sorted.scala b/src/library/scala/collection/generic/Sorted.scala
index b3847fffc9..997a136d30 100644
--- a/src/library/scala/collection/generic/Sorted.scala
+++ b/src/library/scala/collection/generic/Sorted.scala
@@ -95,16 +95,16 @@ trait Sorted[K, +This <: Sorted[K, This]] {
val i = keySet.iterator
if (i.isEmpty) return j.isEmpty
- var in = i.next;
+ var in = i.next
while (j.hasNext) {
- val jn = j.next;
+ val jn = j.next
while ({
- val n = compare(jn, in);
- if (n == 0) false;
- else if (n < 0) return false;
- else if (!i.hasNext) return false;
- else true;
- }) in = i.next;
+ val n = compare(jn, in)
+ if (n == 0) false
+ else if (n < 0) return false
+ else if (!i.hasNext) return false
+ else true
+ }) in = i.next
}
true
}
diff --git a/src/library/scala/collection/generic/SortedSetFactory.scala b/src/library/scala/collection/generic/SortedSetFactory.scala
index 08bca04e42..2993209628 100644
--- a/src/library/scala/collection/generic/SortedSetFactory.scala
+++ b/src/library/scala/collection/generic/SortedSetFactory.scala
@@ -27,7 +27,7 @@ abstract class SortedSetFactory[CC[A] <: SortedSet[A] with SortedSetLike[A, CC[A
def newBuilder[A](implicit ord: Ordering[A]): Builder[A, CC[A]] = new SetBuilder[A, CC[A]](empty)
- implicit def newCanBuildFrom[A](implicit ord : Ordering[A]) : CanBuildFrom[Coll, A, CC[A]] = new SortedSetCanBuildFrom()(ord);
+ implicit def newCanBuildFrom[A](implicit ord : Ordering[A]) : CanBuildFrom[Coll, A, CC[A]] = new SortedSetCanBuildFrom()(ord)
class SortedSetCanBuildFrom[A](implicit ord: Ordering[A]) extends CanBuildFrom[Coll, A, CC[A]] {
def apply(from: Coll) = newBuilder[A](ord)
diff --git a/src/library/scala/collection/immutable/HashMap.scala b/src/library/scala/collection/immutable/HashMap.scala
index 83f0d2c8a2..44e5304e09 100644
--- a/src/library/scala/collection/immutable/HashMap.scala
+++ b/src/library/scala/collection/immutable/HashMap.scala
@@ -395,7 +395,7 @@ time { mNew.iterator.foreach( p => ()) }
*/
override def foreach[U](f: ((A, B)) => U): Unit = {
- var i = 0;
+ var i = 0
while (i < elems.length) {
elems(i).foreach(f)
i += 1
diff --git a/src/library/scala/collection/immutable/HashSet.scala b/src/library/scala/collection/immutable/HashSet.scala
index 87995f705f..e17f07c87b 100644
--- a/src/library/scala/collection/immutable/HashSet.scala
+++ b/src/library/scala/collection/immutable/HashSet.scala
@@ -301,8 +301,8 @@ time { mNew.iterator.foreach( p => ()) }
*/
override def foreach[U](f: A => U): Unit = {
- var i = 0;
- while (i < elems.length) {
+ var i = 0
+ while (i < elems.length) {
elems(i).foreach(f)
i += 1
}
diff --git a/src/library/scala/collection/immutable/IntMap.scala b/src/library/scala/collection/immutable/IntMap.scala
index ab1faf363e..83356b4932 100644
--- a/src/library/scala/collection/immutable/IntMap.scala
+++ b/src/library/scala/collection/immutable/IntMap.scala
@@ -50,8 +50,10 @@ object IntMap {
def apply(): Builder[(Int, B), IntMap[B]] = new MapBuilder[Int, B, IntMap[B]](empty[B])
}
- def empty[T] : IntMap[T] = IntMap.Nil;
- def singleton[T](key: Int, value: T): IntMap[T] = IntMap.Tip(key, value);
+ def empty[T] : IntMap[T] = IntMap.Nil
+
+ def singleton[T](key: Int, value: T): IntMap[T] = IntMap.Tip(key, value)
+
def apply[T](elems: (Int, T)*): IntMap[T] =
elems.foldLeft(empty[T])((x, y) => x.updated(y._1, y._2))
diff --git a/src/library/scala/collection/immutable/ListSet.scala b/src/library/scala/collection/immutable/ListSet.scala
index 6cf6c4259e..fd23276c8d 100644
--- a/src/library/scala/collection/immutable/ListSet.scala
+++ b/src/library/scala/collection/immutable/ListSet.scala
@@ -75,7 +75,7 @@ class ListSet[A] extends AbstractSet[A]
* @return number of set elements.
*/
override def size: Int = 0
- override def isEmpty: Boolean = true;
+ override def isEmpty: Boolean = true
/** Checks if this set contains element `elem`.
*
@@ -126,12 +126,12 @@ class ListSet[A] extends AbstractSet[A]
/**
* @throws Predef.NoSuchElementException
*/
- override def head: A = throw new NoSuchElementException("Set has no elements");
+ override def head: A = throw new NoSuchElementException("Set has no elements")
/**
* @throws Predef.NoSuchElementException
*/
- override def tail: ListSet[A] = throw new NoSuchElementException("Next of an empty set");
+ override def tail: ListSet[A] = throw new NoSuchElementException("Next of an empty set")
override def stringPrefix = "ListSet"
diff --git a/src/library/scala/collection/immutable/LongMap.scala b/src/library/scala/collection/immutable/LongMap.scala
index 60300c2a9e..506546c5ba 100644
--- a/src/library/scala/collection/immutable/LongMap.scala
+++ b/src/library/scala/collection/immutable/LongMap.scala
@@ -97,7 +97,7 @@ private[immutable] abstract class LongMapIterator[V, T](it: LongMap[V]) extends
buffer(index) = x.asInstanceOf[AnyRef]
index += 1
}
- push(it);
+ push(it)
/**
* What value do we assign to a tip?
@@ -178,7 +178,7 @@ extends AbstractMap[Long, T]
*/
override final def foreach[U](f: ((Long, T)) => U): Unit = this match {
case LongMap.Bin(_, _, left, right) => { left.foreach(f); right.foreach(f) }
- case LongMap.Tip(key, value) => f((key, value));
+ case LongMap.Tip(key, value) => f((key, value))
case LongMap.Nil =>
}
diff --git a/src/library/scala/collection/immutable/RedBlackTree.scala b/src/library/scala/collection/immutable/RedBlackTree.scala
index 1cd0128c05..d3ce3ab58c 100644
--- a/src/library/scala/collection/immutable/RedBlackTree.scala
+++ b/src/library/scala/collection/immutable/RedBlackTree.scala
@@ -273,13 +273,13 @@ object RedBlackTree {
}
private[this] def doRange[A, B](tree: Tree[A, B], from: A, until: A)(implicit ordering: Ordering[A]): Tree[A, B] = {
if (tree eq null) return null
- if (ordering.lt(tree.key, from)) return doRange(tree.right, from, until);
- if (ordering.lteq(until, tree.key)) return doRange(tree.left, from, until);
+ if (ordering.lt(tree.key, from)) return doRange(tree.right, from, until)
+ if (ordering.lteq(until, tree.key)) return doRange(tree.left, from, until)
val newLeft = doFrom(tree.left, from)
val newRight = doUntil(tree.right, until)
if ((newLeft eq tree.left) && (newRight eq tree.right)) tree
- else if (newLeft eq null) upd(newRight, tree.key, tree.value, false);
- else if (newRight eq null) upd(newLeft, tree.key, tree.value, false);
+ else if (newLeft eq null) upd(newRight, tree.key, tree.value, false)
+ else if (newRight eq null) upd(newLeft, tree.key, tree.value, false)
else rebalance(tree, newLeft, newRight)
}
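The doRange hunk above prunes whole subtrees by comparing the node key against the bounds before recursing. The same idea sketched on a plain unbalanced, value-less BST rather than the patched red-black code:

    sealed trait BST
    case object Leaf extends BST
    final case class Node(key: Int, left: BST, right: BST) extends BST

    // Keep only keys k with from <= k < until.
    def range(t: BST, from: Int, until: Int): BST = t match {
      case Leaf => Leaf
      case Node(k, l, r) =>
        if (k < from)        range(r, from, until)   // k and its left subtree fall below the range
        else if (until <= k) range(l, from, until)   // k and its right subtree are at or above `until`
        else Node(k, range(l, from, until), range(r, from, until))
    }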
diff --git a/src/library/scala/collection/mutable/HashTable.scala b/src/library/scala/collection/mutable/HashTable.scala
index 8fef1be66b..23b68b7969 100644
--- a/src/library/scala/collection/mutable/HashTable.scala
+++ b/src/library/scala/collection/mutable/HashTable.scala
@@ -382,7 +382,7 @@ private[collection] object HashTable {
/** The load factor for the hash table (in 0.001 step).
*/
private[collection] final def defaultLoadFactor: Int = 750 // corresponds to 75%
- private[collection] final def loadFactorDenum = 1000;
+ private[collection] final def loadFactorDenum = 1000
private[collection] final def newThreshold(_loadFactor: Int, size: Int) = ((size.toLong * _loadFactor) / loadFactorDenum).toInt
@@ -457,13 +457,13 @@ private[collection] object HashTable {
*/
private[collection] def powerOfTwo(target: Int): Int = {
/* See http://bits.stephan-brumme.com/roundUpToNextPowerOfTwo.html */
- var c = target - 1;
- c |= c >>> 1;
- c |= c >>> 2;
- c |= c >>> 4;
- c |= c >>> 8;
- c |= c >>> 16;
- c + 1;
+ var c = target - 1
+ c |= c >>> 1
+ c |= c >>> 2
+ c |= c >>> 4
+ c |= c >>> 8
+ c |= c >>> 16
+ c + 1
}
class Contents[A, Entry >: Null <: HashEntry[A, Entry]](
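The powerOfTwo hunk above smears the highest set bit rightwards to round a capacity up to the next power of two. The same bit trick in isolation, under the usual assumption 0 < target <= 2^30:

    def nextPowerOfTwo(target: Int): Int = {
      var c = target - 1
      c |= c >>> 1
      c |= c >>> 2
      c |= c >>> 4
      c |= c >>> 8
      c |= c >>> 16
      c + 1
    }
    // nextPowerOfTwo(750) == 1024; nextPowerOfTwo(1024) == 1024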
diff --git a/src/library/scala/collection/mutable/ListBuffer.scala b/src/library/scala/collection/mutable/ListBuffer.scala
index 97d469bca2..af1d7e4183 100644
--- a/src/library/scala/collection/mutable/ListBuffer.scala
+++ b/src/library/scala/collection/mutable/ListBuffer.scala
@@ -137,7 +137,7 @@ final class ListBuffer[A]
if (n < 0 || n >= len) throw new IndexOutOfBoundsException(n.toString)
if (exported) copy()
if (n == 0) {
- val newElem = new :: (x, start.tail);
+ val newElem = new :: (x, start.tail)
if (last0 eq start) {
last0 = newElem
}
diff --git a/src/library/scala/collection/mutable/OpenHashMap.scala b/src/library/scala/collection/mutable/OpenHashMap.scala
index 8b3e52470a..ad001fd79c 100644
--- a/src/library/scala/collection/mutable/OpenHashMap.scala
+++ b/src/library/scala/collection/mutable/OpenHashMap.scala
@@ -27,7 +27,7 @@ object OpenHashMap {
var value: Option[Value])
extends HashEntry[Key, OpenEntry[Key, Value]]
- private[mutable] def nextPowerOfTwo(i : Int) = highestOneBit(i) << 1;
+ private[mutable] def nextPowerOfTwo(i : Int) = highestOneBit(i) << 1
}
/** A mutable hash map based on an open hashing scheme. The precise scheme is
@@ -78,8 +78,8 @@ extends AbstractMap[Key, Value]
/** Returns a mangled hash code of the provided key. */
protected def hashOf(key: Key) = {
var h = key.##
- h ^= ((h >>> 20) ^ (h >>> 12));
- h ^ (h >>> 7) ^ (h >>> 4);
+ h ^= ((h >>> 20) ^ (h >>> 12))
+ h ^ (h >>> 7) ^ (h >>> 4)
}
private[this] def growTable() = {
@@ -89,7 +89,7 @@ extends AbstractMap[Key, Value]
table = new Array[Entry](newSize)
mask = newSize - 1
oldTable.foreach( entry =>
- if (entry != null && entry.value != None) addEntry(entry));
+ if (entry != null && entry.value != None) addEntry(entry))
deleted = 0
}
@@ -128,14 +128,14 @@ extends AbstractMap[Key, Value]
val index = findIndex(key, hash)
val entry = table(index)
if (entry == null) {
- table(index) = new OpenEntry(key, hash, Some(value));
+ table(index) = new OpenEntry(key, hash, Some(value))
modCount += 1
size += 1
None
} else {
val res = entry.value
if (entry.value == None) { size += 1; modCount += 1 }
- entry.value = Some(value);
+ entry.value = Some(value)
res
}
}
@@ -161,13 +161,13 @@ extends AbstractMap[Key, Value]
while(entry != null){
if (entry.hash == hash &&
entry.key == key){
- return entry.value;
+ return entry.value
}
- j = 5 * j + 1 + perturb;
- perturb >>= 5;
- index = j & mask;
- entry = table(index);
+ j = 5 * j + 1 + perturb
+ perturb >>= 5
+ index = j & mask
+ entry = table(index)
}
None
}
@@ -182,8 +182,8 @@ extends AbstractMap[Key, Value]
val initialModCount = modCount
private[this] def advance() {
- if (initialModCount != modCount) sys.error("Concurrent modification");
- while((index <= mask) && (table(index) == null || table(index).value == None)) index+=1;
+ if (initialModCount != modCount) sys.error("Concurrent modification")
+ while((index <= mask) && (table(index) == null || table(index).value == None)) index+=1
}
def hasNext = {advance(); index <= mask }
@@ -198,7 +198,7 @@ extends AbstractMap[Key, Value]
override def clone() = {
val it = new OpenHashMap[Key, Value]
- foreachUndeletedEntry(entry => it.put(entry.key, entry.hash, entry.value.get));
+ foreachUndeletedEntry(entry => it.put(entry.key, entry.hash, entry.value.get))
it
}
@@ -213,24 +213,24 @@ extends AbstractMap[Key, Value]
* @param f The function to apply to each key, value mapping.
*/
override def foreach[U](f : ((Key, Value)) => U) {
- val startModCount = modCount;
+ val startModCount = modCount
foreachUndeletedEntry(entry => {
if (modCount != startModCount) sys.error("Concurrent Modification")
f((entry.key, entry.value.get))}
- );
+ )
}
private[this] def foreachUndeletedEntry(f : Entry => Unit){
- table.foreach(entry => if (entry != null && entry.value != None) f(entry));
+ table.foreach(entry => if (entry != null && entry.value != None) f(entry))
}
override def transform(f : (Key, Value) => Value) = {
- foreachUndeletedEntry(entry => entry.value = Some(f(entry.key, entry.value.get)));
+ foreachUndeletedEntry(entry => entry.value = Some(f(entry.key, entry.value.get)))
this
}
override def retain(f : (Key, Value) => Boolean) = {
- foreachUndeletedEntry(entry => if (!f(entry.key, entry.value.get)) {entry.value = None; size -= 1; deleted += 1} );
+ foreachUndeletedEntry(entry => if (!f(entry.key, entry.value.get)) {entry.value = None; size -= 1; deleted += 1} )
this
}
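The probe loop kept above (j = 5 * j + 1 + perturb) is the open-addressing walk over slots until the key or an empty entry is found. A standalone sketch of the index sequence only; seeding perturb from the hash is an assumption here, not something the hunk shows:

    // First `limit` slot indices probed for `hash` in a table of 2^k slots (mask = size - 1).
    def probes(hash: Int, mask: Int, limit: Int): List[Int] = {
      var j       = hash & mask
      var perturb = hash
      var out     = List(j)
      while (out.length < limit) {
        j = 5 * j + 1 + perturb
        perturb >>= 5
        out = (j & mask) :: out
      }
      out.reverse
    }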
diff --git a/src/library/scala/collection/parallel/ParIterableLike.scala b/src/library/scala/collection/parallel/ParIterableLike.scala
index 6eda29e6b0..33af99067d 100644
--- a/src/library/scala/collection/parallel/ParIterableLike.scala
+++ b/src/library/scala/collection/parallel/ParIterableLike.scala
@@ -820,7 +820,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
def zip[U >: T, S, That](that: GenIterable[S])(implicit bf: CanBuildFrom[Repr, (U, S), That]): That = if (bf(repr).isCombiner && that.isParSeq) {
val thatseq = that.asParSeq
- tasksupport.executeAndWaitResult(new Zip(combinerFactory(() => bf(repr).asCombiner), splitter, thatseq.splitter) mapResult { _.resultWithTaskSupport });
+ tasksupport.executeAndWaitResult(new Zip(combinerFactory(() => bf(repr).asCombiner), splitter, thatseq.splitter) mapResult { _.resultWithTaskSupport })
} else setTaskSupport(seq.zip(that)(bf2seq(bf)), tasksupport)
def zipWithIndex[U >: T, That](implicit bf: CanBuildFrom[Repr, (U, Int), That]): That = this zip immutable.ParRange(0, size, 1, false)
@@ -831,11 +831,11 @@ self: ParIterableLike[T, Repr, Sequential] =>
new ZipAll(size max thatseq.length, thisElem, thatElem, combinerFactory(() => bf(repr).asCombiner), splitter, thatseq.splitter) mapResult {
_.resultWithTaskSupport
}
- );
+ )
} else setTaskSupport(seq.zipAll(that, thisElem, thatElem)(bf2seq(bf)), tasksupport)
protected def toParCollection[U >: T, That](cbf: () => Combiner[U, That]): That = {
- tasksupport.executeAndWaitResult(new ToParCollection(combinerFactory(cbf), splitter) mapResult { _.resultWithTaskSupport });
+ tasksupport.executeAndWaitResult(new ToParCollection(combinerFactory(cbf), splitter) mapResult { _.resultWithTaskSupport })
}
protected def toParMap[K, V, That](cbf: () => Combiner[(K, V), That])(implicit ev: T <:< (K, V)): That = {
@@ -1474,9 +1474,9 @@ self: ParIterableLike[T, Repr, Sequential] =>
/* alias methods */
- def /:[S](z: S)(op: (S, T) => S): S = foldLeft(z)(op);
+ def /:[S](z: S)(op: (S, T) => S): S = foldLeft(z)(op)
- def :\[S](z: S)(op: (T, S) => S): S = foldRight(z)(op);
+ def :\[S](z: S)(op: (T, S) => S): S = foldRight(z)(op)
/* debug information */
diff --git a/src/library/scala/collection/parallel/ParIterableViewLike.scala b/src/library/scala/collection/parallel/ParIterableViewLike.scala
index 0ecd6bd9ec..b2105e1e9e 100644
--- a/src/library/scala/collection/parallel/ParIterableViewLike.scala
+++ b/src/library/scala/collection/parallel/ParIterableViewLike.scala
@@ -50,7 +50,8 @@ extends GenIterableView[T, Coll]
self =>
override def foreach[U](f: T => U): Unit = super[ParIterableLike].foreach(f)
- override protected[this] def newCombiner: Combiner[T, This] = throw new UnsupportedOperationException(this + ".newCombiner");
+ override protected[this] def newCombiner: Combiner[T, This] = throw new UnsupportedOperationException(this + ".newCombiner")
+
protected[this] def viewIdentifier: String
protected[this] def viewIdString: String
diff --git a/src/library/scala/collection/parallel/ParSeqLike.scala b/src/library/scala/collection/parallel/ParSeqLike.scala
index 874cf6fee9..4aaadbaac5 100644
--- a/src/library/scala/collection/parallel/ParSeqLike.scala
+++ b/src/library/scala/collection/parallel/ParSeqLike.scala
@@ -252,7 +252,7 @@ self =>
def padTo[U >: T, That](len: Int, elem: U)(implicit bf: CanBuildFrom[Repr, U, That]): That = if (length < len) {
patch(length, new immutable.Repetition(elem, len - length), 0)
- } else patch(length, Nil, 0);
+ } else patch(length, Nil, 0)
override def zip[U >: T, S, That](that: GenIterable[S])(implicit bf: CanBuildFrom[Repr, (U, S), That]): That = if (bf(repr).isCombiner && that.isParSeq) {
val thatseq = that.asParSeq
@@ -260,7 +260,7 @@ self =>
new Zip(length min thatseq.length, combinerFactory(() => bf(repr).asCombiner), splitter, thatseq.splitter) mapResult {
_.resultWithTaskSupport
}
- );
+ )
} else super.zip(that)(bf)
/** Tests whether every element of this $coll relates to the
diff --git a/src/library/scala/collection/parallel/ParSeqViewLike.scala b/src/library/scala/collection/parallel/ParSeqViewLike.scala
index 04369d8fde..d03b377860 100644
--- a/src/library/scala/collection/parallel/ParSeqViewLike.scala
+++ b/src/library/scala/collection/parallel/ParSeqViewLike.scala
@@ -125,8 +125,8 @@ self =>
}
protected def newReversed: Transformed[T] = new Reversed { }
protected def newPatched[U >: T](_from: Int, _patch: GenSeq[U], _replaced: Int): Transformed[U] = new {
- val from = _from;
- val patch = _patch;
+ val from = _from
+ val patch = _patch
val replaced = _replaced
} with Patched[U]
diff --git a/src/library/scala/collection/parallel/RemainsIterator.scala b/src/library/scala/collection/parallel/RemainsIterator.scala
index 732ebc3709..726f5a2e93 100644
--- a/src/library/scala/collection/parallel/RemainsIterator.scala
+++ b/src/library/scala/collection/parallel/RemainsIterator.scala
@@ -517,7 +517,8 @@ self =>
def next = if (self.hasNext) {
if (that.hasNext) (self.next, that.next)
else (self.next, thatelem)
- } else (thiselem, that.next);
+ } else (thiselem, that.next)
+
def remaining = self.remaining max that.remaining
def dup: IterableSplitter[(U, S)] = self.dup.zipAllParSeq(that, thiselem, thatelem)
def split: Seq[IterableSplitter[(U, S)]] = {
@@ -606,7 +607,7 @@ self =>
} else Seq(sz)
}
val (selfszfrom, thatszfrom) = splitsizes.zip(szcum.init).span(_._2 < selfrem)
- val (selfsizes, thatsizes) = (selfszfrom map { _._1 }, thatszfrom map { _._1 });
+ val (selfsizes, thatsizes) = (selfszfrom map { _._1 }, thatszfrom map { _._1 })
// split iterators
val selfs = self.psplit(selfsizes: _*)
diff --git a/src/library/scala/collection/parallel/Tasks.scala b/src/library/scala/collection/parallel/Tasks.scala
index 4e350a2adf..ec1bcbb27a 100644
--- a/src/library/scala/collection/parallel/Tasks.scala
+++ b/src/library/scala/collection/parallel/Tasks.scala
@@ -191,7 +191,7 @@ trait AdaptiveWorkStealingTasks extends Tasks {
last = t
t.start()
}
- } while (head.body.shouldSplitFurther);
+ } while (head.body.shouldSplitFurther)
head.next = last
head
}
diff --git a/src/library/scala/collection/parallel/immutable/ParRange.scala b/src/library/scala/collection/parallel/immutable/ParRange.scala
index 0c9f82ba2a..a3f473c6a7 100644
--- a/src/library/scala/collection/parallel/immutable/ParRange.scala
+++ b/src/library/scala/collection/parallel/immutable/ParRange.scala
@@ -42,7 +42,7 @@ self =>
@inline final def length = range.length
- @inline final def apply(idx: Int) = range.apply(idx);
+ @inline final def apply(idx: Int) = range.apply(idx)
def splitter = new ParRangeIterator
diff --git a/src/library/scala/collection/parallel/mutable/ParArray.scala b/src/library/scala/collection/parallel/mutable/ParArray.scala
index 770599e9d3..0e9eac62e2 100644
--- a/src/library/scala/collection/parallel/mutable/ParArray.scala
+++ b/src/library/scala/collection/parallel/mutable/ParArray.scala
@@ -611,7 +611,8 @@ self =>
class ScanToArray[U >: T](tree: ScanTree[U], z: U, op: (U, U) => U, targetarr: Array[Any])
extends Task[Unit, ScanToArray[U]] {
- var result = ();
+ var result = ()
+
def leaf(prev: Option[Unit]) = iterate(tree)
private def iterate(tree: ScanTree[U]): Unit = tree match {
case ScanNode(left, right) =>
@@ -647,7 +648,8 @@ self =>
}
class Map[S](f: T => S, targetarr: Array[Any], offset: Int, howmany: Int) extends Task[Unit, Map[S]] {
- var result = ();
+ var result = ()
+
def leaf(prev: Option[Unit]) = {
val tarr = targetarr
val sarr = array
diff --git a/src/library/scala/collection/parallel/mutable/ParHashMap.scala b/src/library/scala/collection/parallel/mutable/ParHashMap.scala
index 541d75290b..e94db89865 100644
--- a/src/library/scala/collection/parallel/mutable/ParHashMap.scala
+++ b/src/library/scala/collection/parallel/mutable/ParHashMap.scala
@@ -97,7 +97,8 @@ self =>
class ParHashMapIterator(start: Int, untilIdx: Int, totalSize: Int, e: DefaultEntry[K, V])
extends EntryIterator[(K, V), ParHashMapIterator](start, untilIdx, totalSize, e) {
- def entry2item(entry: DefaultEntry[K, V]) = (entry.key, entry.value);
+ def entry2item(entry: DefaultEntry[K, V]) = (entry.key, entry.value)
+
def newIterator(idxFrom: Int, idxUntil: Int, totalSz: Int, es: DefaultEntry[K, V]) =
new ParHashMapIterator(idxFrom, idxUntil, totalSz, es)
}
@@ -303,7 +304,7 @@ extends scala.collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], Defau
private[parallel] object ParHashMapCombiner {
private[mutable] val discriminantbits = 5
private[mutable] val numblocks = 1 << discriminantbits
- private[mutable] val discriminantmask = ((1 << discriminantbits) - 1);
+ private[mutable] val discriminantmask = ((1 << discriminantbits) - 1)
private[mutable] val nonmasklength = 32 - discriminantbits
def apply[K, V] = new ParHashMapCombiner[K, V](HashTable.defaultLoadFactor) {} // was: with EnvironmentPassingCombiner[(K, V), ParHashMap[K, V]]
diff --git a/src/library/scala/collection/parallel/mutable/ParHashSet.scala b/src/library/scala/collection/parallel/mutable/ParHashSet.scala
index e5de6182e6..2431baf3e7 100644
--- a/src/library/scala/collection/parallel/mutable/ParHashSet.scala
+++ b/src/library/scala/collection/parallel/mutable/ParHashSet.scala
@@ -159,8 +159,8 @@ with scala.collection.mutable.FlatHashTable.HashUtils[T] {
sizeMapInit(table.length)
seedvalue = ParHashSetCombiner.this.seedvalue
for {
- buffer <- buckets;
- if buffer ne null;
+ buffer <- buckets
+ if buffer ne null
entry <- buffer
} addEntry(entry)
}
@@ -235,7 +235,8 @@ with scala.collection.mutable.FlatHashTable.HashUtils[T] {
class FillBlocks(buckets: Array[UnrolledBuffer[AnyRef]], table: AddingFlatHashTable, val offset: Int, val howmany: Int)
extends Task[(Int, UnrolledBuffer[AnyRef]), FillBlocks] {
- var result = (Int.MinValue, new UnrolledBuffer[AnyRef]);
+ var result = (Int.MinValue, new UnrolledBuffer[AnyRef])
+
def leaf(prev: Option[(Int, UnrolledBuffer[AnyRef])]) {
var i = offset
var totalinserts = 0
@@ -319,7 +320,7 @@ with scala.collection.mutable.FlatHashTable.HashUtils[T] {
private[parallel] object ParHashSetCombiner {
private[mutable] val discriminantbits = 5
private[mutable] val numblocks = 1 << discriminantbits
- private[mutable] val discriminantmask = ((1 << discriminantbits) - 1);
+ private[mutable] val discriminantmask = ((1 << discriminantbits) - 1)
private[mutable] val nonmasklength = 32 - discriminantbits
def apply[T] = new ParHashSetCombiner[T](FlatHashTable.defaultLoadFactor) {} //with EnvironmentPassingCombiner[T, ParHashSet[T]]
diff --git a/src/library/scala/collection/parallel/mutable/UnrolledParArrayCombiner.scala b/src/library/scala/collection/parallel/mutable/UnrolledParArrayCombiner.scala
index c3a379485d..f5c0b10526 100644
--- a/src/library/scala/collection/parallel/mutable/UnrolledParArrayCombiner.scala
+++ b/src/library/scala/collection/parallel/mutable/UnrolledParArrayCombiner.scala
@@ -69,7 +69,8 @@ extends Combiner[T, ParArray[T]] {
class CopyUnrolledToArray(array: Array[Any], offset: Int, howmany: Int)
extends Task[Unit, CopyUnrolledToArray] {
- var result = ();
+ var result = ()
+
def leaf(prev: Option[Unit]) = if (howmany > 0) {
var totalleft = howmany
val (startnode, startpos) = findStart(offset)
diff --git a/src/library/scala/io/ReadStdin.scala b/src/library/scala/io/ReadStdin.scala
index 429d7cec75..e82c26ef7a 100644
--- a/src/library/scala/io/ReadStdin.scala
+++ b/src/library/scala/io/ReadStdin.scala
@@ -218,7 +218,7 @@ private[scala] trait ReadStdin {
case x: java.lang.Float => x.floatValue()
case x: java.lang.Double => x.doubleValue()
case x => x
- }) :: res;
+ }) :: res
i -= 1
}
res
diff --git a/src/library/scala/ref/SoftReference.scala b/src/library/scala/ref/SoftReference.scala
index b414db6e97..e4ce667981 100644
--- a/src/library/scala/ref/SoftReference.scala
+++ b/src/library/scala/ref/SoftReference.scala
@@ -13,7 +13,8 @@ package scala.ref
* @author Sean McDirmid
*/
class SoftReference[+T <: AnyRef](value : T, queue : ReferenceQueue[T]) extends ReferenceWrapper[T] {
- def this(value : T) = this(value, null);
+ def this(value : T) = this(value, null)
+
val underlying: java.lang.ref.SoftReference[_ <: T] =
new SoftReferenceWithWrapper[T](value, queue, this)
}
diff --git a/src/library/scala/reflect/NameTransformer.scala b/src/library/scala/reflect/NameTransformer.scala
index 0beb840bed..8a1cce6b02 100755
--- a/src/library/scala/reflect/NameTransformer.scala
+++ b/src/library/scala/reflect/NameTransformer.scala
@@ -94,7 +94,7 @@ object NameTransformer {
def decode(name0: String): String = {
//System.out.println("decode: " + name);//DEBUG
val name = if (name0.endsWith("<init>")) name0.stripSuffix("<init>") + "this"
- else name0;
+ else name0
var buf: StringBuilder = null
val len = name.length()
var i = 0
diff --git a/src/library/scala/text/Document.scala b/src/library/scala/text/Document.scala
index b74fd152b5..59d5b1bf93 100644
--- a/src/library/scala/text/Document.scala
+++ b/src/library/scala/text/Document.scala
@@ -80,7 +80,7 @@ abstract class Document {
fmt(k, (i + ii, b, d) :: z)
case (i, true, DocBreak) :: z =>
writer write "\n"
- spaces(i);
+ spaces(i)
fmt(i, z)
case (i, false, DocBreak) :: z =>
writer write " "
diff --git a/src/library/scala/util/MurmurHash.scala b/src/library/scala/util/MurmurHash.scala
index a5bc8faf8d..b82259c217 100644
--- a/src/library/scala/util/MurmurHash.scala
+++ b/src/library/scala/util/MurmurHash.scala
@@ -164,7 +164,7 @@ object MurmurHash {
var k = hiddenMagicB
var j = 0
while (j+1 < s.length) {
- val i = (s.charAt(j)<<16) + s.charAt(j+1);
+ val i = (s.charAt(j)<<16) + s.charAt(j+1)
h = extendHash(h,i,c,k)
c = nextMagicA(c)
k = nextMagicB(k)
diff --git a/src/library/scala/util/matching/Regex.scala b/src/library/scala/util/matching/Regex.scala
index 7af75173d3..0cd0cfd7f6 100644
--- a/src/library/scala/util/matching/Regex.scala
+++ b/src/library/scala/util/matching/Regex.scala
@@ -233,7 +233,7 @@ class Regex private[matching](val pattern: Pattern, groupNames: String*) extends
new Iterator[Match] {
def hasNext = matchIterator.hasNext
def next: Match = {
- matchIterator.next;
+ matchIterator.next
new Match(matchIterator.source, matchIterator.matcher, matchIterator.groupNames).force
}
}
diff --git a/src/library/scala/xml/Utility.scala b/src/library/scala/xml/Utility.scala
index 9429e9caa7..f3c162fcc8 100755
--- a/src/library/scala/xml/Utility.scala
+++ b/src/library/scala/xml/Utility.scala
@@ -311,14 +311,14 @@ object Utility extends AnyRef with parsing.TokenTests {
while (i < value.length) {
value.charAt(i) match {
case '<' =>
- return "< not allowed in attribute value";
+ return "< not allowed in attribute value"
case '&' =>
val n = getName(value, i+1)
if (n eq null)
- return "malformed entity reference in attribute value ["+value+"]";
+ return "malformed entity reference in attribute value ["+value+"]"
i = i + n.length + 1
if (i >= value.length || value.charAt(i) != ';')
- return "malformed entity reference in attribute value ["+value+"]";
+ return "malformed entity reference in attribute value ["+value+"]"
case _ =>
}
i = i + 1
diff --git a/src/library/scala/xml/dtd/ContentModelParser.scala b/src/library/scala/xml/dtd/ContentModelParser.scala
index ace02193da..6bc9c05832 100644
--- a/src/library/scala/xml/dtd/ContentModelParser.scala
+++ b/src/library/scala/xml/dtd/ContentModelParser.scala
@@ -21,10 +21,10 @@ object ContentModelParser extends Scanner { // a bit too permissive concerning #
if (token != tok) {
if ((tok == STAR) && (token == END)) // common mistake
scala.sys.error("in DTDs, \n"+
- "mixed content models must be like (#PCDATA|Name|Name|...)*");
+ "mixed content models must be like (#PCDATA|Name|Name|...)*")
else
scala.sys.error("expected "+token2string(tok)+
- ", got unexpected token:"+token2string(token));
+ ", got unexpected token:"+token2string(token))
}
nextToken
}
@@ -44,43 +44,43 @@ object ContentModelParser extends Scanner { // a bit too permissive concerning #
case NAME => value match {
case "ANY" => ANY
case "EMPTY" => EMPTY
- case _ => scala.sys.error("expected ANY, EMPTY or '(' instead of " + value );
+ case _ => scala.sys.error("expected ANY, EMPTY or '(' instead of " + value )
}
case LPAREN =>
- nextToken;
- sOpt;
+ nextToken
+ sOpt
if (token != TOKEN_PCDATA)
- ELEMENTS(regexp);
+ ELEMENTS(regexp)
else {
- nextToken;
+ nextToken
token match {
case RPAREN =>
PCDATA
case CHOICE =>
- val res = MIXED(choiceRest(Eps));
- sOpt;
- accept( RPAREN );
- accept( STAR );
+ val res = MIXED(choiceRest(Eps))
+ sOpt
+ accept( RPAREN )
+ accept( STAR )
res
case _ =>
- scala.sys.error("unexpected token:" + token2string(token) );
+ scala.sys.error("unexpected token:" + token2string(token) )
}
}
case _ =>
- scala.sys.error("unexpected token:" + token2string(token) );
- }
+ scala.sys.error("unexpected token:" + token2string(token) )
+ }
// sopt ::= S?
- def sOpt() = if( token == S ) nextToken;
+ def sOpt() = if( token == S ) nextToken
// (' S? mixed ::= '#PCDATA' S? ')'
// | '#PCDATA' (S? '|' S? atom)* S? ')*'
// '(' S? regexp ::= cp S? [seqRest|choiceRest] ')' [ '+' | '*' | '?' ]
def regexp: RegExp = {
- val p = particle;
- sOpt;
+ val p = particle
+ sOpt
maybeSuffix(token match {
case RPAREN => nextToken; p
case CHOICE => val q = choiceRest( p );accept( RPAREN ); q
@@ -90,24 +90,24 @@ object ContentModelParser extends Scanner { // a bit too permissive concerning #
// seqRest ::= (',' S? cp S?)+
def seqRest(p: RegExp) = {
- var k = List(p);
+ var k = List(p)
while( token == COMMA ) {
- nextToken;
- sOpt;
- k = particle::k;
- sOpt;
+ nextToken
+ sOpt
+ k = particle::k
+ sOpt
}
Sequ( k.reverse:_* )
}
// choiceRest ::= ('|' S? cp S?)+
def choiceRest( p:RegExp ) = {
- var k = List( p );
+ var k = List( p )
while( token == CHOICE ) {
- nextToken;
- sOpt;
- k = particle::k;
- sOpt;
+ nextToken
+ sOpt
+ k = particle::k
+ sOpt
}
Alt( k.reverse:_* )
}
@@ -115,14 +115,14 @@ object ContentModelParser extends Scanner { // a bit too permissive concerning #
// particle ::= '(' S? regexp
// | name [ '+' | '*' | '?' ]
def particle = token match {
- case LPAREN => nextToken; sOpt; regexp;
+ case LPAREN => nextToken; sOpt; regexp
case NAME => val a = Letter(ElemName(value)); nextToken; maybeSuffix(a)
- case _ => scala.sys.error("expected '(' or Name, got:"+token2string(token));
+ case _ => scala.sys.error("expected '(' or Name, got:"+token2string(token))
}
// atom ::= name
def atom = token match {
case NAME => val a = Letter(ElemName(value)); nextToken; a
- case _ => scala.sys.error("expected Name, got:"+token2string(token));
+ case _ => scala.sys.error("expected Name, got:"+token2string(token))
}
}
diff --git a/src/library/scala/xml/dtd/Decl.scala b/src/library/scala/xml/dtd/Decl.scala
index dc4cb93ddf..fd2eaa30ba 100644
--- a/src/library/scala/xml/dtd/Decl.scala
+++ b/src/library/scala/xml/dtd/Decl.scala
@@ -123,7 +123,7 @@ case class ExtDef(extID:ExternalID) extends EntityDef {
/** a parsed entity reference */
case class PEReference(ent:String) extends MarkupDecl {
if( !Utility.isName( ent ))
- throw new IllegalArgumentException("ent must be an XML Name");
+ throw new IllegalArgumentException("ent must be an XML Name")
override def buildString(sb: StringBuilder): StringBuilder =
sb append '%' append ent append ';'
diff --git a/src/library/scala/xml/dtd/Scanner.scala b/src/library/scala/xml/dtd/Scanner.scala
index 9b64cc61e2..d4d648c8df 100644
--- a/src/library/scala/xml/dtd/Scanner.scala
+++ b/src/library/scala/xml/dtd/Scanner.scala
@@ -39,12 +39,12 @@ class Scanner extends Tokens with parsing.TokenTests {
// todo: see XML specification... probably isLetter,isDigit is fine
final def isIdentChar = ( ('a' <= c && c <= 'z')
- || ('A' <= c && c <= 'Z'));
+ || ('A' <= c && c <= 'Z'))
final def next() = if (it.hasNext) c = it.next else c = ENDCH
final def acc(d: Char) {
- if (c == d) next else scala.sys.error("expected '"+d+"' found '"+c+"' !");
+ if (c == d) next else scala.sys.error("expected '"+d+"' found '"+c+"' !")
}
final def accS(ds: Seq[Char]) { ds foreach acc }
@@ -70,7 +70,7 @@ class Scanner extends Tokens with parsing.TokenTests {
final def name = {
val sb = new StringBuilder()
- do { sb.append(c); next } while (isNameChar(c));
+ do { sb.append(c); next } while (isNameChar(c))
value = sb.toString()
NAME
}
diff --git a/src/library/scala/xml/dtd/ValidationException.scala b/src/library/scala/xml/dtd/ValidationException.scala
index 243db69ab7..15640e2da7 100644
--- a/src/library/scala/xml/dtd/ValidationException.scala
+++ b/src/library/scala/xml/dtd/ValidationException.scala
@@ -33,7 +33,7 @@ object MakeValidationException {
def fromMissingAttribute(allKeys: Set[String]) = {
val sb = new StringBuilder("missing value for REQUIRED attribute")
- if (allKeys.size > 1) sb.append('s');
+ if (allKeys.size > 1) sb.append('s')
allKeys foreach (k => sb append "'%s'".format(k))
new ValidationException(sb.toString())
}
diff --git a/src/library/scala/xml/factory/Binder.scala b/src/library/scala/xml/factory/Binder.scala
index bad4a4ea09..b463fda5ba 100755
--- a/src/library/scala/xml/factory/Binder.scala
+++ b/src/library/scala/xml/factory/Binder.scala
@@ -48,7 +48,7 @@ abstract class Binder(val preserveWS: Boolean) extends ValidatingMarkupHandler {
val old = result
result = new NodeBuffer()
for (m <- x.child) traverse(m)
- result = old &+ elem(0, x.prefix, x.label, x.attributes, x.scope, x.minimizeEmpty, NodeSeq.fromSeq(result)).toList;
+ result = old &+ elem(0, x.prefix, x.label, x.attributes, x.scope, x.minimizeEmpty, NodeSeq.fromSeq(result)).toList
elemEnd(0, x.prefix, x.label)
}
diff --git a/src/library/scala/xml/factory/LoggedNodeFactory.scala b/src/library/scala/xml/factory/LoggedNodeFactory.scala
index cac61acc39..49a6d622a7 100644
--- a/src/library/scala/xml/factory/LoggedNodeFactory.scala
+++ b/src/library/scala/xml/factory/LoggedNodeFactory.scala
@@ -46,7 +46,7 @@ trait LoggedNodeFactory[A <: Node] extends NodeFactory[A] with scala.util.loggin
override def makeNode(pre: String, label: String, attrSeq: MetaData,
scope: NamespaceBinding, children: Seq[Node]): A = {
if (logNode)
- log("[makeNode for "+label+"]");
+ log("[makeNode for "+label+"]")
val hash = Utility.hashCode(pre, label, attrSeq.##, scope.##, children)
@@ -59,26 +59,26 @@ trait LoggedNodeFactory[A <: Node] extends NodeFactory[A] with scala.util.loggin
}
*/
if (!cache.get( hash ).isEmpty && (logCompressLevel >= CACHE))
- log("[cache hit !]");
+ log("[cache hit !]")
super.makeNode(pre, label, attrSeq, scope, children)
}
override def makeText(s: String) = {
if (logText)
- log("[makeText:\""+s+"\"]");
+ log("[makeText:\""+s+"\"]")
super.makeText(s)
}
override def makeComment(s: String): Seq[Comment] = {
if (logComment)
- log("[makeComment:\""+s+"\"]");
+ log("[makeComment:\""+s+"\"]")
super.makeComment(s)
}
override def makeProcInstr(t: String, s: String): Seq[ProcInstr] = {
if (logProcInstr)
- log("[makeProcInstr:\""+t+" "+ s+"\"]");
+ log("[makeProcInstr:\""+t+" "+ s+"\"]")
super.makeProcInstr(t, s)
}
diff --git a/src/library/scala/xml/include/sax/XIncludeFilter.scala b/src/library/scala/xml/include/sax/XIncludeFilter.scala
index 103cddcb11..9079b5f9c7 100644
--- a/src/library/scala/xml/include/sax/XIncludeFilter.scala
+++ b/src/library/scala/xml/include/sax/XIncludeFilter.scala
@@ -147,10 +147,10 @@ class XIncludeFilter extends XMLFilterImpl {
if (parse equals "text") {
val encoding = atts getValue "encoding"
- includeTextDocument(href, encoding);
+ includeTextDocument(href, encoding)
}
else if (parse equals "xml") {
- includeXMLDocument(href);
+ includeXMLDocument(href)
}
// Need to check this also in DOM and JDOM????
else {
@@ -184,7 +184,7 @@ class XIncludeFilter extends XMLFilterImpl {
}
}
- private var depth = 0;
+ private var depth = 0
override def startDocument() {
level = 0
@@ -240,7 +240,7 @@ class XIncludeFilter extends XMLFilterImpl {
}
locationString = (" in document included from " + publicID
+ " at " + systemID
- + " at line " + line + ", column " + column);
+ + " at line " + line + ", column " + column)
locationString
}
@@ -258,7 +258,7 @@ class XIncludeFilter extends XMLFilterImpl {
*/
private def includeTextDocument(url: String, encoding1: String) {
var encoding = encoding1
- if (encoding == null || encoding.trim().equals("")) encoding = "UTF-8";
+ if (encoding == null || encoding.trim().equals("")) encoding = "UTF-8"
var source: URL = null
try {
val base = bases.peek().asInstanceOf[URL]
@@ -284,13 +284,13 @@ class XIncludeFilter extends XMLFilterImpl {
// MIME types are case-insensitive
// Java may be picking this up from file URL
if (contentType != null) {
- contentType = contentType.toLowerCase();
+ contentType = contentType.toLowerCase()
if (contentType.equals("text/xml")
|| contentType.equals("application/xml")
|| (contentType.startsWith("text/") && contentType.endsWith("+xml") )
|| (contentType.startsWith("application/") && contentType.endsWith("+xml"))) {
- encoding = EncodingHeuristics.readEncodingFromStream(in);
- }
+ encoding = EncodingHeuristics.readEncodingFromStream(in)
+ }
}
}
val reader = new InputStreamReader(in, encoding)
diff --git a/src/library/scala/xml/include/sax/XIncluder.scala b/src/library/scala/xml/include/sax/XIncluder.scala
index 81c5613541..8fcd66d4c0 100644
--- a/src/library/scala/xml/include/sax/XIncluder.scala
+++ b/src/library/scala/xml/include/sax/XIncluder.scala
@@ -28,7 +28,7 @@ class XIncluder(outs: OutputStream, encoding: String) extends ContentHandler wit
def startDocument() {
try {
out.write("<?xml version='1.0' encoding='"
- + encoding + "'?>\r\n");
+ + encoding + "'?>\r\n")
}
catch {
case e:IOException =>
@@ -52,16 +52,16 @@ class XIncluder(outs: OutputStream, encoding: String) extends ContentHandler wit
def startElement(namespaceURI: String, localName: String, qualifiedName: String, atts: Attributes) = {
try {
- out.write("<" + qualifiedName);
+ out.write("<" + qualifiedName)
var i = 0; while (i < atts.getLength()) {
- out.write(" ");
- out.write(atts.getQName(i));
- out.write("='");
- val value = atts.getValue(i);
+ out.write(" ")
+ out.write(atts.getQName(i))
+ out.write("='")
+ val value = atts.getValue(i)
// @todo Need to use character references if the encoding
// can't support the character
out.write(scala.xml.Utility.escape(value))
- out.write("'");
+ out.write("'")
i += 1
}
out.write(">")
@@ -87,20 +87,20 @@ class XIncluder(outs: OutputStream, encoding: String) extends ContentHandler wit
def characters(ch: Array[Char], start: Int, length: Int) {
try {
var i = 0; while (i < length) {
- val c = ch(start+i);
- if (c == '&') out.write("&amp;");
- else if (c == '<') out.write("&lt;");
+ val c = ch(start+i)
+ if (c == '&') out.write("&amp;")
+ else if (c == '<') out.write("&lt;")
// This next fix is normally not necessary.
// However, it is required if text contains ]]>
// (The end CDATA section delimiter)
- else if (c == '>') out.write("&gt;");
- else out.write(c);
+ else if (c == '>') out.write("&gt;")
+ else out.write(c)
i += 1
}
}
catch {
case e: IOException =>
- throw new SAXException("Write failed", e);
+ throw new SAXException("Write failed", e)
}
}
@@ -138,8 +138,8 @@ class XIncluder(outs: OutputStream, encoding: String) extends ContentHandler wit
// if this is the source document, output a DOCTYPE declaration
if (entities.isEmpty) {
var id = ""
- if (publicID != null) id = " PUBLIC \"" + publicID + "\" \"" + systemID + '"';
- else if (systemID != null) id = " SYSTEM \"" + systemID + '"';
+ if (publicID != null) id = " PUBLIC \"" + publicID + "\" \"" + systemID + '"'
+ else if (systemID != null) id = " SYSTEM \"" + systemID + '"'
try {
out.write("<!DOCTYPE " + name + id + ">\r\n")
}
diff --git a/src/library/scala/xml/parsing/MarkupParser.scala b/src/library/scala/xml/parsing/MarkupParser.scala
index 6b8f58dca3..228043e183 100755
--- a/src/library/scala/xml/parsing/MarkupParser.scala
+++ b/src/library/scala/xml/parsing/MarkupParser.scala
@@ -105,7 +105,7 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
lastChRead = curInput.next
pos = curInput.pos
} else {
- val ilen = inpStack.length;
+ val ilen = inpStack.length
//Console.println(" ilen = "+ilen+ " extIndex = "+extIndex);
if ((ilen != extIndex) && (ilen > 0)) {
/** for external source, inpStack == Nil ! need notify of eof! */
@@ -141,7 +141,7 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
xSpace
val (md,scp) = xAttributes(TopScope)
if (scp != TopScope)
- reportSyntaxError("no xmlns definitions here, please.");
+ reportSyntaxError("no xmlns definitions here, please.")
xToken('?')
xToken('>')
md
@@ -247,7 +247,7 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
case _:ProcInstr =>
case _:Comment =>
case _:EntityRef => // todo: fix entities, shouldn't be "special"
- reportSyntaxError("no entity references allowed here");
+ reportSyntaxError("no entity references allowed here")
case s:SpecialNode =>
if (s.toString.trim().length > 0) //non-empty text nodes not allowed
elemCount += 2
@@ -328,7 +328,7 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
}
if(!aMap.wellformed(scope))
- reportSyntaxError( "double attribute");
+ reportSyntaxError( "double attribute")
(aMap,scope)
}
@@ -389,10 +389,10 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
/* todo: move this into the NodeBuilder class */
def appendText(pos: Int, ts: NodeBuffer, txt: String): Unit = {
if (preserveWS)
- ts &+ handle.text(pos, txt);
+ ts &+ handle.text(pos, txt)
else
for (t <- TextBuffer.fromString(txt).toText) {
- ts &+ handle.text(pos, t.text);
+ ts &+ handle.text(pos, t.text)
}
}
@@ -446,7 +446,7 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
case '#' => // CharacterRef
nextch
val theChar = handle.text(tmppos, xCharRef(() => ch, () => nextch))
- xToken(';');
+ xToken(';')
ts &+ theChar
case _ => // EntityRef
val n = xName
@@ -597,7 +597,7 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
def systemLiteral(): String = {
val endch = ch
if (ch != '\'' && ch != '"')
- reportSyntaxError("quote ' or \" expected");
+ reportSyntaxError("quote ' or \" expected")
nextch
while (ch != endch && !eof) {
putChar(ch)
@@ -615,7 +615,7 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
def pubidLiteral(): String = {
val endch = ch
if (ch!='\'' && ch != '"')
- reportSyntaxError("quote ' or \" expected");
+ reportSyntaxError("quote ' or \" expected")
nextch
while (ch != endch && !eof) {
putChar(ch)
@@ -889,10 +889,10 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
val sysID = if (ch != '>')
systemLiteral()
else
- null;
+ null
new PublicID(pubID, sysID)
} else {
- reportSyntaxError("PUBLIC or SYSTEM expected");
+ reportSyntaxError("PUBLIC or SYSTEM expected")
scala.sys.error("died parsing notationdecl")
}
xSpaceOpt
diff --git a/src/library/scala/xml/parsing/ValidatingMarkupHandler.scala b/src/library/scala/xml/parsing/ValidatingMarkupHandler.scala
index 0edea043a5..018ae4d2cd 100644
--- a/src/library/scala/xml/parsing/ValidatingMarkupHandler.scala
+++ b/src/library/scala/xml/parsing/ValidatingMarkupHandler.scala
@@ -50,8 +50,8 @@ abstract class ValidatingMarkupHandler extends MarkupHandler with Logged {
log("advanceDFA(trans): " + trans)
trans.get(ContentModel.ElemName(label)) match {
case Some(qNew) => qCurrent = qNew
- case _ => reportValidationError(pos, "DTD says, wrong element, expected one of "+trans.keys);
- }
+ case _ => reportValidationError(pos, "DTD says, wrong element, expected one of "+trans.keys)
+ }
}
// advance in current automaton
log("[qCurrent = "+qCurrent+" visiting "+label+"]")
@@ -106,7 +106,7 @@ abstract class ValidatingMarkupHandler extends MarkupHandler with Logged {
}
final override def notationDecl(notat: String, extID: ExternalID) {
- decls = NotationDecl(notat, extID) :: decls;
+ decls = NotationDecl(notat, extID) :: decls
}
final override def peReference(name: String) {
diff --git a/src/library/scala/xml/transform/BasicTransformer.scala b/src/library/scala/xml/transform/BasicTransformer.scala
index 1402ccd6aa..e427071177 100644
--- a/src/library/scala/xml/transform/BasicTransformer.scala
+++ b/src/library/scala/xml/transform/BasicTransformer.scala
@@ -53,7 +53,7 @@ abstract class BasicTransformer extends Function1[Node,Node]
def apply(n: Node): Node = {
val seq = transform(n)
if (seq.length > 1)
- throw new UnsupportedOperationException("transform must return single node for root");
+ throw new UnsupportedOperationException("transform must return single node for root")
else seq.head
}
}
diff --git a/src/reflect/scala/reflect/internal/Constants.scala b/src/reflect/scala/reflect/internal/Constants.scala
index 28bc3e1dd0..5ed2f675b2 100644
--- a/src/reflect/scala/reflect/internal/Constants.scala
+++ b/src/reflect/scala/reflect/internal/Constants.scala
@@ -94,7 +94,7 @@ trait Constants extends api.Constants {
def booleanValue: Boolean =
if (tag == BooleanTag) value.asInstanceOf[Boolean]
- else throw new Error("value " + value + " is not a boolean");
+ else throw new Error("value " + value + " is not a boolean")
def byteValue: Byte = tag match {
case ByteTag => value.asInstanceOf[Byte]
diff --git a/src/reflect/scala/reflect/internal/InfoTransformers.scala b/src/reflect/scala/reflect/internal/InfoTransformers.scala
index 82904b0b68..4e84a29fd0 100644
--- a/src/reflect/scala/reflect/internal/InfoTransformers.scala
+++ b/src/reflect/scala/reflect/internal/InfoTransformers.scala
@@ -43,7 +43,7 @@ trait InfoTransformers {
if (from == this.pid) this
else if (from < this.pid)
if (prev.pid < from) this
- else prev.nextFrom(from);
+ else prev.nextFrom(from)
else if (next.pid == NoPhase.id) next
else next.nextFrom(from)
}
diff --git a/src/reflect/scala/reflect/internal/Kinds.scala b/src/reflect/scala/reflect/internal/Kinds.scala
index 5fecc06128..5d7df8c367 100644
--- a/src/reflect/scala/reflect/internal/Kinds.scala
+++ b/src/reflect/scala/reflect/internal/Kinds.scala
@@ -36,7 +36,7 @@ trait Kinds {
private def varStr(s: Symbol): String =
if (s.isCovariant) "covariant"
else if (s.isContravariant) "contravariant"
- else "invariant";
+ else "invariant"
private def qualify(a0: Symbol, b0: Symbol): String = if (a0.toString != b0.toString) "" else {
if((a0 eq b0) || (a0.owner eq b0.owner)) ""
diff --git a/src/reflect/scala/reflect/internal/Names.scala b/src/reflect/scala/reflect/internal/Names.scala
index b60d1e619f..8b64bf7a32 100644
--- a/src/reflect/scala/reflect/internal/Names.scala
+++ b/src/reflect/scala/reflect/internal/Names.scala
@@ -34,7 +34,7 @@ trait Names extends api.Names {
cs(offset) * (41 * 41) +
cs(offset + len - 1) * 41 +
cs(offset + (len >> 1)))
- else 0;
+ else 0
/** Is (the ASCII representation of) name at given index equal to
* cs[offset..offset+len-1]?
@@ -42,7 +42,7 @@ trait Names extends api.Names {
private def equals(index: Int, cs: Array[Char], offset: Int, len: Int): Boolean = {
var i = 0
while ((i < len) && (chrs(index + i) == cs(offset + i)))
- i += 1;
+ i += 1
i == len
}
@@ -275,7 +275,7 @@ trait Names extends api.Names {
var i = 0
while (i < prefix.length && start + i < len &&
chrs(index + start + i) == chrs(prefix.start + i))
- i += 1;
+ i += 1
i == prefix.length
}
@@ -287,7 +287,7 @@ trait Names extends api.Names {
var i = 1
while (i <= suffix.length && i <= end &&
chrs(index + end - i) == chrs(suffix.start + suffix.length - i))
- i += 1;
+ i += 1
i > suffix.length
}
diff --git a/src/reflect/scala/reflect/internal/Printers.scala b/src/reflect/scala/reflect/internal/Printers.scala
index a745a78a4f..9e72fb9145 100644
--- a/src/reflect/scala/reflect/internal/Printers.scala
+++ b/src/reflect/scala/reflect/internal/Printers.scala
@@ -208,7 +208,7 @@ trait Printers extends api.Printers { self: SymbolTable =>
case ModuleDef(mods, name, impl) =>
printAnnotations(tree)
- printModifiers(tree, mods);
+ printModifiers(tree, mods)
print("object " + symName(tree, name), " extends ", impl)
case ValDef(mods, name, tp, rhs) =>
@@ -423,7 +423,7 @@ trait Printers extends api.Printers { self: SymbolTable =>
printOpt(" >: ", lo); printOpt(" <: ", hi)
case ExistentialTypeTree(tpt, whereClauses) =>
- print(tpt);
+ print(tpt)
printColumn(whereClauses, " forSome { ", ";", "}")
// SelectFromArray is no longer visible in reflect.internal.
diff --git a/src/reflect/scala/reflect/internal/Scopes.scala b/src/reflect/scala/reflect/internal/Scopes.scala
index b1cfaa4774..850c497d4b 100644
--- a/src/reflect/scala/reflect/internal/Scopes.scala
+++ b/src/reflect/scala/reflect/internal/Scopes.scala
@@ -188,7 +188,7 @@ trait Scopes extends api.Scopes { self: SymbolTable =>
if (e1 == e) {
hashtable(index) = e.tail
} else {
- while (e1.tail != e) e1 = e1.tail;
+ while (e1.tail != e) e1 = e1.tail
e1.tail = e.tail
}
}
@@ -199,7 +199,7 @@ trait Scopes extends api.Scopes { self: SymbolTable =>
def unlink(sym: Symbol) {
var e = lookupEntry(sym.name)
while (e ne null) {
- if (e.sym == sym) unlink(e);
+ if (e.sym == sym) unlink(e)
e = lookupNextEntry(e)
}
}
@@ -300,7 +300,7 @@ trait Scopes extends api.Scopes { self: SymbolTable =>
if (hashtable ne null)
do { e = e.tail } while ((e ne null) && e.sym.name != entry.sym.name)
else
- do { e = e.next } while ((e ne null) && e.sym.name != entry.sym.name);
+ do { e = e.next } while ((e ne null) && e.sym.name != entry.sym.name)
e
}
diff --git a/src/reflect/scala/reflect/internal/SymbolTable.scala b/src/reflect/scala/reflect/internal/SymbolTable.scala
index b3a398a8d7..9b5778b9da 100644
--- a/src/reflect/scala/reflect/internal/SymbolTable.scala
+++ b/src/reflect/scala/reflect/internal/SymbolTable.scala
@@ -222,7 +222,7 @@ abstract class SymbolTable extends macros.Universe
def noChangeInBaseClasses(it: InfoTransformer, limit: Phase#Id): Boolean = (
it.pid >= limit ||
!it.changesBaseClasses && noChangeInBaseClasses(it.next, limit)
- );
+ )
period != 0 && runId(period) == currentRunId && {
val pid = phaseId(period)
if (phase.id > pid) noChangeInBaseClasses(infoTransformers.nextFrom(pid), phase.id)
diff --git a/src/reflect/scala/reflect/internal/Trees.scala b/src/reflect/scala/reflect/internal/Trees.scala
index 408c7c648f..ce33fd8408 100644
--- a/src/reflect/scala/reflect/internal/Trees.scala
+++ b/src/reflect/scala/reflect/internal/Trees.scala
@@ -522,7 +522,7 @@ trait Trees extends api.Trees { self: SymbolTable =>
case t => t
}
- orig = followOriginal(tree); setPos(tree.pos);
+ orig = followOriginal(tree); setPos(tree.pos)
this
}
@@ -1425,7 +1425,7 @@ trait Trees extends api.Trees { self: SymbolTable =>
def subst(from: List[Symbol], to: List[Tree]): Tree =
if (from.isEmpty) tree
else if (tree.symbol == from.head) to.head.shallowDuplicate // TODO: does it ever make sense *not* to perform a shallowDuplicate on `to.head`?
- else subst(from.tail, to.tail);
+ else subst(from.tail, to.tail)
subst(from, to)
case _ =>
super.transform(tree)
diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala
index 5cb6f78874..09f78d1d5b 100644
--- a/src/reflect/scala/reflect/internal/Types.scala
+++ b/src/reflect/scala/reflect/internal/Types.scala
@@ -908,7 +908,7 @@ trait Types extends api.Types { self: SymbolTable =>
(this eq that) ||
(if (explainSwitch) explain("=", isSameType, this, that)
else isSameType(this, that))
- );
+ )
/** Is this type close enough to that type so that members
* with the two type would override each other?
@@ -1488,7 +1488,7 @@ trait Types extends api.Types { self: SymbolTable =>
tpe.underlyingPeriod = currentPeriod
if (!isValid(period)) {
// [Eugene to Paul] needs review
- tpe.underlyingCache = if (tpe.sym == NoSymbol) ThisType(rootMirror.RootClass) else tpe.pre.memberType(tpe.sym).resultType;
+ tpe.underlyingCache = if (tpe.sym == NoSymbol) ThisType(rootMirror.RootClass) else tpe.pre.memberType(tpe.sym).resultType
assert(tpe.underlyingCache ne tpe, tpe)
}
}
@@ -1500,7 +1500,8 @@ trait Types extends api.Types { self: SymbolTable =>
if (trivial == UNKNOWN) trivial = fromBoolean(thistpe.isTrivial && supertpe.isTrivial)
toBoolean(trivial)
}
- override def isNotNull = true;
+ override def isNotNull = true
+
override def typeSymbol = thistpe.typeSymbol
override def underlying = supertpe
override def prefix: Type = supertpe.prefix
@@ -1637,8 +1638,8 @@ trait Types extends api.Types { self: SymbolTable =>
var bcs = sbcs
def isNew(clazz: Symbol): Boolean = (
superclazz.baseTypeIndex(clazz) < 0 &&
- { var p = bcs;
- while ((p ne sbcs) && (p.head != clazz)) p = p.tail;
+ { var p = bcs
+ while ((p ne sbcs) && (p.head != clazz)) p = p.tail
p eq sbcs
}
)
@@ -2874,7 +2875,8 @@ trait Types extends api.Types { self: SymbolTable =>
*/
case class AntiPolyType(pre: Type, targs: List[Type]) extends Type {
override def safeToString =
- pre.toString + targs.mkString("(with type arguments ", ", ", ")");
+ pre.toString + targs.mkString("(with type arguments ", ", ", ")")
+
override def memberType(sym: Symbol) = appliedType(pre.memberType(sym), targs)
override def kind = "AntiPolyType"
}
@@ -4976,7 +4978,7 @@ trait Types extends api.Types { self: SymbolTable =>
sym1.name == sym2.name && (sym1.isPackageClass || corresponds(sym1.owner, sym2.owner))
if (!corresponds(sym.owner, rebind0.owner)) {
debuglog("ADAPT1 pre = "+pre+", sym = "+sym.fullLocationString+", rebind = "+rebind0.fullLocationString)
- val bcs = pre.baseClasses.dropWhile(bc => !corresponds(bc, sym.owner));
+ val bcs = pre.baseClasses.dropWhile(bc => !corresponds(bc, sym.owner))
if (bcs.isEmpty)
assert(pre.typeSymbol.isRefinementClass, pre) // if pre is a refinementclass it might be a structural type => OK to leave it in.
else
@@ -6569,7 +6571,7 @@ trait Types extends api.Types { self: SymbolTable =>
}
}
def refines(tp: Type, sym: Symbol): Boolean = {
- val syms = tp.nonPrivateMember(sym.name).alternatives;
+ val syms = tp.nonPrivateMember(sym.name).alternatives
!syms.isEmpty && (syms forall (alt =>
// todo alt != sym is strictly speaking not correct, but without it we lose
// efficiency.
@@ -6708,8 +6710,8 @@ trait Types extends api.Types { self: SymbolTable =>
def glbsym(proto: Symbol): Symbol = {
val prototp = glbThisType.memberInfo(proto)
val syms = for (t <- ts;
- alt <- (t.nonPrivateMember(proto.name).alternatives);
- if glbThisType.memberInfo(alt) matches prototp
+ alt <- (t.nonPrivateMember(proto.name).alternatives)
+ if glbThisType.memberInfo(alt) matches prototp
) yield alt
val symtypes = syms map glbThisType.memberInfo
assert(!symtypes.isEmpty)
@@ -6891,7 +6893,7 @@ trait Types extends api.Types { self: SymbolTable =>
if (sym.isTerm)
for (alt <- tp.nonPrivateDecl(sym.name).alternatives)
if (specializesSym(thistp, sym, thistp, alt, depth))
- tp.decls unlink alt;
+ tp.decls unlink alt
tp.decls enter sym
}
}
diff --git a/src/reflect/scala/reflect/internal/pickling/PickleBuffer.scala b/src/reflect/scala/reflect/internal/pickling/PickleBuffer.scala
index 34c6fe234c..c9dfb7fe71 100644
--- a/src/reflect/scala/reflect/internal/pickling/PickleBuffer.scala
+++ b/src/reflect/scala/reflect/internal/pickling/PickleBuffer.scala
@@ -106,7 +106,7 @@ class PickleBuffer(data: Array[Byte], from: Int, to: Int) {
do {
b = readByte()
x = (x << 7) + (b & 0x7f)
- } while ((b & 0x80) != 0L);
+ } while ((b & 0x80) != 0L)
x
}
@@ -150,7 +150,7 @@ class PickleBuffer(data: Array[Byte], from: Int, to: Int) {
* Concatenate results into a list.
*/
def until[T](end: Int, op: () => T): List[T] =
- if (readIndex == end) List() else op() :: until(end, op);
+ if (readIndex == end) List() else op() :: until(end, op)
/** Perform operation `op` the number of
* times specified. Concatenate the results into a list.
diff --git a/src/reflect/scala/reflect/io/VirtualDirectory.scala b/src/reflect/scala/reflect/io/VirtualDirectory.scala
index 589076d693..09b99087e6 100644
--- a/src/reflect/scala/reflect/io/VirtualDirectory.scala
+++ b/src/reflect/scala/reflect/io/VirtualDirectory.scala
@@ -68,6 +68,6 @@ extends AbstractFile {
}
def clear() {
- files.clear();
+ files.clear()
}
}
diff --git a/src/reflect/scala/reflect/io/VirtualFile.scala b/src/reflect/scala/reflect/io/VirtualFile.scala
index 0dfa7d5473..6f98b8385b 100644
--- a/src/reflect/scala/reflect/io/VirtualFile.scala
+++ b/src/reflect/scala/reflect/io/VirtualFile.scala
@@ -41,7 +41,7 @@ class VirtualFile(val name: String, override val path: String) extends AbstractF
override def sizeOption: Option[Int] = Some(content.size)
- def input : InputStream = new ByteArrayInputStream(content);
+ def input : InputStream = new ByteArrayInputStream(content)
override def output: OutputStream = {
new ByteArrayOutputStream() {