author     Paul Phillips <paulp@improving.org>    2012-11-03 06:29:38 -0700
committer  Paul Phillips <paulp@improving.org>    2012-11-03 07:01:15 -0700
commit     9c09c170998f74fba03990977b285e3121db32a6 (patch)
tree       ee12a85233e6927e5ec3698fc83f26fb46ad43e6 /src/library
parent     d3da3ef83293c0e174e07aba643b3a1f46c110c5 (diff)
download   scala-9c09c170998f74fba03990977b285e3121db32a6.tar.gz
           scala-9c09c170998f74fba03990977b285e3121db32a6.tar.bz2
           scala-9c09c170998f74fba03990977b285e3121db32a6.zip
Removing unused locals and making vars into vals.
According to "git diff" the difference from master to this commit includes:

  Minus: 112 vals, 135 vars
  Plus:  165 vals, 2 vars

Assuming all the removed ones were vals, which is true from 10K feet, it suggests I removed 80 unused vals and turned 133 vars into vals. There are a few other -Xlint driven improvements bundled with this, like putting double-parentheses around Some((x, y)) so it doesn't trigger the "adapting argument list" warning.
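For readers skimming the diff below, here is a minimal sketch of the two kinds of change the message describes: a local that is never reassigned becomes a val, and a tuple argument is written with explicit double parentheses so -Xlint's "adapting argument list" warning stays quiet. The code is hypothetical (names and the Seq[Int] element type are assumptions), not lines taken from this patch.

    // Hypothetical sketch, not code from the patch itself.
    object LintSketch {
      // 1) A local iterator that is never reassigned: declare it as a val.
      def segmentLengthSketch(xs: Seq[Int], p: Int => Boolean, from: Int): Int = {
        var i  = 0
        val it = xs.iterator.drop(from)   // was: var it = xs.iterator.drop(from)
        while (it.hasNext && p(it.next()))
          i += 1
        i
      }

      // 2) Some(x, y) only compiles because the compiler adapts the two arguments
      //    into a tuple, which -Xlint reports; writing the tuple explicitly avoids it.
      def pairSketch(x: Int, y: Int): Option[(Int, Int)] =
        Some((x, y))                      // was: Some(x, y)
    }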
Diffstat (limited to 'src/library')
-rw-r--r--   src/library/scala/collection/SeqLike.scala   10
-rw-r--r--   src/library/scala/collection/concurrent/TrieMap.scala   2
-rw-r--r--   src/library/scala/collection/immutable/HashMap.scala   3
-rw-r--r--   src/library/scala/collection/immutable/TrieIterator.scala   1
-rw-r--r--   src/library/scala/collection/immutable/Vector.scala   16
-rw-r--r--   src/library/scala/collection/mutable/FlatHashTable.scala   2
-rw-r--r--   src/library/scala/collection/mutable/ListBuffer.scala   1
-rw-r--r--   src/library/scala/collection/parallel/Tasks.scala   2
-rw-r--r--   src/library/scala/collection/parallel/mutable/ParArray.scala   8
-rw-r--r--   src/library/scala/collection/parallel/mutable/ParHashMap.scala   2
-rw-r--r--   src/library/scala/collection/parallel/mutable/ParHashSet.scala   4
-rw-r--r--   src/library/scala/collection/parallel/mutable/ParHashTable.scala   2
-rw-r--r--   src/library/scala/collection/parallel/mutable/ResizableParArrayCombiner.scala   2
-rw-r--r--   src/library/scala/collection/script/Message.scala   2
-rw-r--r--   src/library/scala/util/automata/WordBerrySethi.scala   1
-rw-r--r--   src/library/scala/util/parsing/input/OffsetPosition.scala   2
-rwxr-xr-x   src/library/scala/xml/PrettyPrinter.scala   1
-rw-r--r--   src/library/scala/xml/dtd/ElementValidator.scala   2
-rw-r--r--   src/library/scala/xml/include/sax/XIncludeFilter.scala   2
-rwxr-xr-x   src/library/scala/xml/parsing/MarkupParser.scala   10
20 files changed, 29 insertions, 46 deletions
diff --git a/src/library/scala/collection/SeqLike.scala b/src/library/scala/collection/SeqLike.scala
index a1749a480b..33b6ab4165 100644
--- a/src/library/scala/collection/SeqLike.scala
+++ b/src/library/scala/collection/SeqLike.scala
@@ -103,7 +103,7 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[
def segmentLength(p: A => Boolean, from: Int): Int = {
var i = 0
- var it = iterator.drop(from)
+ val it = iterator.drop(from)
while (it.hasNext && p(it.next()))
i += 1
i
@@ -111,7 +111,7 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[
def indexWhere(p: A => Boolean, from: Int): Int = {
var i = from
- var it = iterator.drop(from)
+ val it = iterator.drop(from)
while (it.hasNext) {
if (p(it.next())) return i
else i += 1
@@ -177,10 +177,10 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[
result
}
private def swap(i: Int, j: Int) {
- var tmpI = idxs(i)
+ val tmpI = idxs(i)
idxs(i) = idxs(j)
idxs(j) = tmpI
- var tmpE = elms(i)
+ val tmpE = elms(i)
elms(i) = elms(j)
elms(j) = tmpE
}
@@ -777,7 +777,7 @@ object SeqLike {
val iter = S.iterator.drop(m0)
val Wopt = kmpOptimizeWord(W, n0, n1, true)
val T = kmpJumpTable(Wopt, n1-n0)
- var cache = new Array[AnyRef](n1-n0) // Ring buffer--need a quick way to do a look-behind
+ val cache = new Array[AnyRef](n1-n0) // Ring buffer--need a quick way to do a look-behind
var largest = 0
var i, m = 0
var answer = -1
diff --git a/src/library/scala/collection/concurrent/TrieMap.scala b/src/library/scala/collection/concurrent/TrieMap.scala
index b0736ecace..231f8157e4 100644
--- a/src/library/scala/collection/concurrent/TrieMap.scala
+++ b/src/library/scala/collection/concurrent/TrieMap.scala
@@ -545,7 +545,7 @@ private[collection] final class CNode[K, V](val bitmap: Int, val array: Array[Ba
// removed (those existing when the op began)
// - if there are only null-i-nodes below, returns null
def toCompressed(ct: TrieMap[K, V], lev: Int, gen: Gen) = {
- var bmp = bitmap
+ val bmp = bitmap
var i = 0
val arr = array
val tmparray = new Array[BasicNode](arr.length)
diff --git a/src/library/scala/collection/immutable/HashMap.scala b/src/library/scala/collection/immutable/HashMap.scala
index ee41e2aa3c..9b6183c0a4 100644
--- a/src/library/scala/collection/immutable/HashMap.scala
+++ b/src/library/scala/collection/immutable/HashMap.scala
@@ -471,9 +471,6 @@ time { mNew.iterator.foreach( p => ()) }
// condition below is due to 2 things:
// 1) no unsigned int compare on JVM
// 2) 0 (no lsb) should always be greater in comparison
- val a = thislsb - 1
- val b = thatlsb - 1
-
if (unsignedCompare(thislsb - 1, thatlsb - 1)) {
val m = thiselems(thisi)
totalelems += m.size
diff --git a/src/library/scala/collection/immutable/TrieIterator.scala b/src/library/scala/collection/immutable/TrieIterator.scala
index e8e904f1f9..3f1c5ea57a 100644
--- a/src/library/scala/collection/immutable/TrieIterator.scala
+++ b/src/library/scala/collection/immutable/TrieIterator.scala
@@ -177,7 +177,6 @@ private[collection] abstract class TrieIterator[+T](elems: Array[Iterable[T]]) e
if (depth > 0) {
// 2) topmost comes before (is not) arrayD
// steal a portion of top to create a new iterator
- val topmost = arrayStack(0)
if (posStack(0) == arrayStack(0).length - 1) {
// 2a) only a single entry left on top
// this means we have to modify this iterator - pop topmost
diff --git a/src/library/scala/collection/immutable/Vector.scala b/src/library/scala/collection/immutable/Vector.scala
index 1f90436636..7e1f3eadd0 100644
--- a/src/library/scala/collection/immutable/Vector.scala
+++ b/src/library/scala/collection/immutable/Vector.scala
@@ -242,8 +242,8 @@ override def companion: GenericCompanion[Vector] = Vector
private[immutable] def appendFront[B>:A](value: B): Vector[B] = {
if (endIndex != startIndex) {
- var blockIndex = (startIndex - 1) & ~31
- var lo = (startIndex - 1) & 31
+ val blockIndex = (startIndex - 1) & ~31
+ val lo = (startIndex - 1) & 31
if (startIndex != blockIndex + 32) {
val s = new Vector(startIndex - 1, endIndex, blockIndex)
@@ -339,8 +339,8 @@ override def companion: GenericCompanion[Vector] = Vector
// //println("------- append " + value)
// debug()
if (endIndex != startIndex) {
- var blockIndex = endIndex & ~31
- var lo = endIndex & 31
+ val blockIndex = endIndex & ~31
+ val lo = endIndex & 31
if (endIndex != blockIndex) {
//println("will make writable block (from "+focus+") at: " + blockIndex)
@@ -574,9 +574,7 @@ override def companion: GenericCompanion[Vector] = Vector
}
private def dropFront0(cutIndex: Int): Vector[A] = {
- var blockIndex = cutIndex & ~31
- var lo = cutIndex & 31
-
+ val blockIndex = cutIndex & ~31
val xor = cutIndex ^ (endIndex - 1)
val d = requiredDepth(xor)
val shift = (cutIndex & ~((1 << (5*d))-1))
@@ -606,9 +604,7 @@ override def companion: GenericCompanion[Vector] = Vector
}
private def dropBack0(cutIndex: Int): Vector[A] = {
- var blockIndex = (cutIndex - 1) & ~31
- var lo = ((cutIndex - 1) & 31) + 1
-
+ val blockIndex = (cutIndex - 1) & ~31
val xor = startIndex ^ (cutIndex - 1)
val d = requiredDepth(xor)
val shift = (startIndex & ~((1 << (5*d))-1))
diff --git a/src/library/scala/collection/mutable/FlatHashTable.scala b/src/library/scala/collection/mutable/FlatHashTable.scala
index 74f576b0f7..f1301d2011 100644
--- a/src/library/scala/collection/mutable/FlatHashTable.scala
+++ b/src/library/scala/collection/mutable/FlatHashTable.scala
@@ -266,7 +266,7 @@ trait FlatHashTable[A] extends FlatHashTable.HashUtils[A] {
val totalbuckets = totalSizeMapBuckets
var bucketidx = 0
var tableidx = 0
- var tbl = table
+ val tbl = table
var tableuntil = sizeMapBucketSize min tbl.length
while (bucketidx < totalbuckets) {
var currbucketsz = 0
diff --git a/src/library/scala/collection/mutable/ListBuffer.scala b/src/library/scala/collection/mutable/ListBuffer.scala
index bced92e663..0a61b537ce 100644
--- a/src/library/scala/collection/mutable/ListBuffer.scala
+++ b/src/library/scala/collection/mutable/ListBuffer.scala
@@ -269,7 +269,6 @@ final class ListBuffer[A]
if (exported) copy()
val n1 = n max 0
val count1 = count min (len - n1)
- var old = start.head
if (n1 == 0) {
var c = count1
while (c > 0) {
diff --git a/src/library/scala/collection/parallel/Tasks.scala b/src/library/scala/collection/parallel/Tasks.scala
index d6b75202da..af32faf0aa 100644
--- a/src/library/scala/collection/parallel/Tasks.scala
+++ b/src/library/scala/collection/parallel/Tasks.scala
@@ -66,7 +66,6 @@ trait Task[R, +Tp] {
private[parallel] def tryMerge(t: Tp @uncheckedVariance) {
val that = t.asInstanceOf[Task[R, Tp]]
- val local = result // ensure that any effects of modifying `result` are detected
if (this.throwable == null && that.throwable == null) merge(t)
mergeThrowables(that)
}
@@ -167,7 +166,6 @@ trait AdaptiveWorkStealingTasks extends Tasks {
while (last.next != null) {
// val lastresult = Option(last.body.result)
- val beforelast = last
last = last.next
if (last.tryCancel()) {
// println("Done with " + beforelast.body + ", next direct is " + last.body)
diff --git a/src/library/scala/collection/parallel/mutable/ParArray.scala b/src/library/scala/collection/parallel/mutable/ParArray.scala
index 5ac2725f11..7527c9a71a 100644
--- a/src/library/scala/collection/parallel/mutable/ParArray.scala
+++ b/src/library/scala/collection/parallel/mutable/ParArray.scala
@@ -184,7 +184,7 @@ self =>
override def aggregate[S](z: =>S)(seqop: (S, T) => S, combop: (S, S) => S): S = foldLeft[S](z)(seqop)
override def sum[U >: T](implicit num: Numeric[U]): U = {
- var s = sum_quick(num, arr, until, i, num.zero)
+ val s = sum_quick(num, arr, until, i, num.zero)
i = until
s
}
@@ -200,7 +200,7 @@ self =>
}
override def product[U >: T](implicit num: Numeric[U]): U = {
- var p = product_quick(num, arr, until, i, num.one)
+ val p = product_quick(num, arr, until, i, num.one)
i = until
p
}
@@ -432,7 +432,7 @@ self =>
private def filter2combiner_quick[U >: T, This](pred: T => Boolean, cb: Builder[U, This], a: Array[Any], ntil: Int, from: Int) {
var j = i
while(j < ntil) {
- var curr = a(j).asInstanceOf[T]
+ val curr = a(j).asInstanceOf[T]
if (pred(curr)) cb += curr
j += 1
}
@@ -447,7 +447,7 @@ self =>
private def filterNot2combiner_quick[U >: T, This](pred: T => Boolean, cb: Builder[U, This], a: Array[Any], ntil: Int, from: Int) {
var j = i
while(j < ntil) {
- var curr = a(j).asInstanceOf[T]
+ val curr = a(j).asInstanceOf[T]
if (!pred(curr)) cb += curr
j += 1
}
diff --git a/src/library/scala/collection/parallel/mutable/ParHashMap.scala b/src/library/scala/collection/parallel/mutable/ParHashMap.scala
index 3b2c66763e..d8f846dd10 100644
--- a/src/library/scala/collection/parallel/mutable/ParHashMap.scala
+++ b/src/library/scala/collection/parallel/mutable/ParHashMap.scala
@@ -231,7 +231,7 @@ extends scala.collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], Defau
def setSize(sz: Int) = tableSize = sz
def insertEntry(/*block: Int, */e: DefaultEntry[K, V]) = {
var h = index(elemHashCode(e.key))
- var olde = table(h).asInstanceOf[DefaultEntry[K, V]]
+ val olde = table(h).asInstanceOf[DefaultEntry[K, V]]
// check if key already exists
var ce = olde
diff --git a/src/library/scala/collection/parallel/mutable/ParHashSet.scala b/src/library/scala/collection/parallel/mutable/ParHashSet.scala
index 22f22c8305..cbfb09bfdd 100644
--- a/src/library/scala/collection/parallel/mutable/ParHashSet.scala
+++ b/src/library/scala/collection/parallel/mutable/ParHashSet.scala
@@ -263,12 +263,12 @@ with scala.collection.mutable.FlatHashTable.HashUtils[T] {
(elemsIn + leftoversIn, elemsLeft concat leftoversLeft)
}
private def insertAll(atPos: Int, beforePos: Int, elems: UnrolledBuffer[Any]): (Int, UnrolledBuffer[Any]) = {
- var leftovers = new UnrolledBuffer[Any]
+ val leftovers = new UnrolledBuffer[Any]
var inserted = 0
var unrolled = elems.headPtr
var i = 0
- var t = table
+ val t = table
while (unrolled ne null) {
val chunkarr = unrolled.array
val chunksz = unrolled.size
diff --git a/src/library/scala/collection/parallel/mutable/ParHashTable.scala b/src/library/scala/collection/parallel/mutable/ParHashTable.scala
index bb9a7b7823..b203ef8e5d 100644
--- a/src/library/scala/collection/parallel/mutable/ParHashTable.scala
+++ b/src/library/scala/collection/parallel/mutable/ParHashTable.scala
@@ -110,7 +110,7 @@ trait ParHashTable[K, Entry >: Null <: HashEntry[K, Entry]] extends scala.collec
} else Seq(this.asInstanceOf[IterRepr])
private def convertToArrayBuffer(chainhead: Entry): mutable.ArrayBuffer[T] = {
- var buff = mutable.ArrayBuffer[Entry]()
+ val buff = mutable.ArrayBuffer[Entry]()
var curr = chainhead
while (curr ne null) {
buff += curr
diff --git a/src/library/scala/collection/parallel/mutable/ResizableParArrayCombiner.scala b/src/library/scala/collection/parallel/mutable/ResizableParArrayCombiner.scala
index 68f37137f8..f78de073d6 100644
--- a/src/library/scala/collection/parallel/mutable/ResizableParArrayCombiner.scala
+++ b/src/library/scala/collection/parallel/mutable/ResizableParArrayCombiner.scala
@@ -26,7 +26,7 @@ trait ResizableParArrayCombiner[T] extends LazyCombiner[T, ParArray[T], ExposedA
override def sizeHint(sz: Int) = if (chain.length == 1) chain(0).sizeHint(sz)
// public method with private[mutable] type ExposedArrayBuffer in parameter type; cannot be overridden.
- def newLazyCombiner(c: ArrayBuffer[ExposedArrayBuffer[T]]) = ResizableParArrayCombiner(c)
+ final def newLazyCombiner(c: ArrayBuffer[ExposedArrayBuffer[T]]) = ResizableParArrayCombiner(c)
def allocateAndCopy = if (chain.size > 1) {
val arrayseq = new ArraySeq[T](size)
diff --git a/src/library/scala/collection/script/Message.scala b/src/library/scala/collection/script/Message.scala
index 7466f2ac3e..43cb8a6138 100644
--- a/src/library/scala/collection/script/Message.scala
+++ b/src/library/scala/collection/script/Message.scala
@@ -69,7 +69,7 @@ class Script[A] extends ArrayBuffer[Message[A]] with Message[A] {
override def toString(): String = {
var res = "Script("
- var it = this.iterator
+ val it = this.iterator
var i = 1
while (it.hasNext) {
if (i > 1)
diff --git a/src/library/scala/util/automata/WordBerrySethi.scala b/src/library/scala/util/automata/WordBerrySethi.scala
index b648d179c6..07d5d2ff08 100644
--- a/src/library/scala/util/automata/WordBerrySethi.scala
+++ b/src/library/scala/util/automata/WordBerrySethi.scala
@@ -140,7 +140,6 @@ abstract class WordBerrySethi extends BaseBerrySethi {
val delta1 = immutable.Map(deltaq.zipWithIndex map (_.swap): _*)
val finalsArr = (0 until pos map (k => finals.getOrElse(k, 0))).toArray // 0 == not final
- val initialsArr = initials.toArray
val deltaArr: Array[mutable.Map[_labelT, immutable.BitSet]] =
(0 until pos map { x =>
diff --git a/src/library/scala/util/parsing/input/OffsetPosition.scala b/src/library/scala/util/parsing/input/OffsetPosition.scala
index 3366584ab2..115741b9e9 100644
--- a/src/library/scala/util/parsing/input/OffsetPosition.scala
+++ b/src/library/scala/util/parsing/input/OffsetPosition.scala
@@ -22,7 +22,7 @@ case class OffsetPosition(source: java.lang.CharSequence, offset: Int) extends P
/** An index that contains all line starts, including first line, and eof. */
private lazy val index: Array[Int] = {
- var lineStarts = new ArrayBuffer[Int]
+ val lineStarts = new ArrayBuffer[Int]
lineStarts += 0
for (i <- 0 until source.length)
if (source.charAt(i) == '\n') lineStarts += (i + 1)
diff --git a/src/library/scala/xml/PrettyPrinter.scala b/src/library/scala/xml/PrettyPrinter.scala
index da82aca33a..8c0a101c2a 100755
--- a/src/library/scala/xml/PrettyPrinter.scala
+++ b/src/library/scala/xml/PrettyPrinter.scala
@@ -47,7 +47,6 @@ class PrettyPrinter(width: Int, step: Int) {
val tmp = width - cur
if (s.length <= tmp)
return List(Box(ind, s))
- val sb = new StringBuilder()
var i = s indexOf ' '
if (i > tmp || i == -1) throw new BrokenException() // cannot break
diff --git a/src/library/scala/xml/dtd/ElementValidator.scala b/src/library/scala/xml/dtd/ElementValidator.scala
index f97da1c8a3..5260d87b04 100644
--- a/src/library/scala/xml/dtd/ElementValidator.scala
+++ b/src/library/scala/xml/dtd/ElementValidator.scala
@@ -61,7 +61,7 @@ class ElementValidator() extends Function1[Node,Boolean] {
*/
def check(md: MetaData): Boolean = {
val len: Int = exc.length
- var ok = new mutable.BitSet(adecls.length)
+ val ok = new mutable.BitSet(adecls.length)
for (attr <- md) {
def attrStr = attr.value.toString
diff --git a/src/library/scala/xml/include/sax/XIncludeFilter.scala b/src/library/scala/xml/include/sax/XIncludeFilter.scala
index 52ddf6b476..ac5a8c8331 100644
--- a/src/library/scala/xml/include/sax/XIncludeFilter.scala
+++ b/src/library/scala/xml/include/sax/XIncludeFilter.scala
@@ -275,7 +275,7 @@ class XIncludeFilter extends XMLFilterImpl {
try {
val uc = source.openConnection()
val in = new BufferedInputStream(uc.getInputStream())
- var encodingFromHeader = uc.getContentEncoding()
+ val encodingFromHeader = uc.getContentEncoding()
var contentType = uc.getContentType()
if (encodingFromHeader != null)
encoding = encodingFromHeader
diff --git a/src/library/scala/xml/parsing/MarkupParser.scala b/src/library/scala/xml/parsing/MarkupParser.scala
index d4dc6da14d..8f8c25805c 100755
--- a/src/library/scala/xml/parsing/MarkupParser.scala
+++ b/src/library/scala/xml/parsing/MarkupParser.scala
@@ -154,7 +154,7 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
var info_enc: Option[String] = None
var info_stdl: Option[Boolean] = None
- var m = xmlProcInstr()
+ val m = xmlProcInstr()
var n = 0
if (isProlog)
@@ -303,10 +303,8 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
var scope: NamespaceBinding = pscope
var aMap: MetaData = Null
while (isNameStart(ch)) {
- val pos = this.pos
-
val qname = xName
- val _ = xEQ
+ xEQ // side effect
val value = xAttributeValue()
Utility.prefix(qname) match {
@@ -423,7 +421,7 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
* content1 ::= '<' content1 | '&' charref ...
* }}} */
def content(pscope: NamespaceBinding): NodeSeq = {
- var ts = new NodeBuffer
+ val ts = new NodeBuffer
var exit = eof
// todo: optimize seq repr.
def done = new NodeSeq { val theSeq = ts.toList }
@@ -582,7 +580,6 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
var exit = false
while (! exit) {
putChar(ch)
- val opos = pos
nextch
exit = eof || ( ch == '<' ) || ( ch == '&' )
@@ -828,7 +825,6 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
* }}} */
def entityDecl() = {
var isParameterEntity = false
- var entdef: EntityDef = null
xToken("NTITY")
xSpace
if ('%' == ch) {