author     Paul Phillips <paulp@improving.org>        2012-10-30 14:29:23 -0700
committer  Adriaan Moors <adriaan.moors@epfl.ch>      2012-11-01 18:54:17 -0700
commit     d5ebd7e069d6a60936267e239f74ce89a3851453 (patch)
tree       d32e72e3867d8f7c49db6a8d052b3d2e9e3cc9ff /src/library
parent     d0c4be6861109683d80513eda74e5c6ca88f1441 (diff)
Remove unused private members.
That's a lot of unused code. Most of this is pure cruft; a small amount is debugging code which somebody might want to keep around, but we should not be using trunk as a repository of our personal snippets of undocumented, unused, unintegrated debugging code. So let's make the easy decision to err in the removing direction. If it isn't built to last, it shouldn't be checked into master.
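The pattern is the same throughout: a private member that nothing else in its class references, so deleting it cannot change behavior. A minimal standalone sketch of that situation, simplified from the IndexedSeqLike.Elements change below (the class here is stripped down to illustrate the point; newer Scala compilers can flag such members automatically, for example via -Ywarn-unused in later 2.x releases):

    // Simplified sketch of the kind of member removed below in
    // IndexedSeqLike.Elements: a private helper that no other member calls.
    class Elements(start: Int, end: Int) {
      // Unused: nothing in the class references it, so removal is safe.
      private def initialSize = if (end <= start) 0 else end - start

      private var index = start
      private def available = (end - index) max 0

      def hasNext: Boolean = available > 0
      def next(): Int = { val i = index; index += 1; i }
    }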
Diffstat (limited to 'src/library')
-rw-r--r--  src/library/scala/collection/IndexedSeqLike.scala                    |  1
-rw-r--r--  src/library/scala/collection/Iterator.scala                          |  2
-rw-r--r--  src/library/scala/collection/concurrent/TrieMap.scala                |  4
-rw-r--r--  src/library/scala/collection/immutable/List.scala                    |  6
-rw-r--r--  src/library/scala/collection/immutable/NumericRange.scala            | 27
-rw-r--r--  src/library/scala/collection/immutable/StringLike.scala              |  9
-rw-r--r--  src/library/scala/collection/immutable/Vector.scala                  |  7
-rw-r--r--  src/library/scala/collection/mutable/IndexedSeqView.scala            |  2
-rw-r--r--  src/library/scala/collection/parallel/Tasks.scala                    |  9
-rw-r--r--  src/library/scala/collection/parallel/mutable/ParArray.scala         |  2
-rw-r--r--  src/library/scala/collection/parallel/mutable/ParFlatHashTable.scala | 10
-rw-r--r--  src/library/scala/collection/parallel/mutable/ParHashMap.scala       | 21
-rw-r--r--  src/library/scala/collection/parallel/mutable/ParHashSet.scala       |  5
-rw-r--r--  src/library/scala/concurrent/JavaConversions.scala                   |  4
-rw-r--r--  src/library/scala/sys/SystemProperties.scala                         |  1
-rw-r--r--  src/library/scala/util/automata/WordBerrySethi.scala                 |  1
-rw-r--r--  src/library/scala/util/matching/Regex.scala                          |  4
-rw-r--r--  src/library/scala/xml/persistent/SetStorage.scala                    |  6
18 files changed, 25 insertions(+), 96 deletions(-)
diff --git a/src/library/scala/collection/IndexedSeqLike.scala b/src/library/scala/collection/IndexedSeqLike.scala
index 7cac6154b9..22ad857119 100644
--- a/src/library/scala/collection/IndexedSeqLike.scala
+++ b/src/library/scala/collection/IndexedSeqLike.scala
@@ -53,7 +53,6 @@ trait IndexedSeqLike[+A, +Repr] extends Any with SeqLike[A, Repr] {
// pre: start >= 0, end <= self.length
@SerialVersionUID(1756321872811029277L)
protected class Elements(start: Int, end: Int) extends AbstractIterator[A] with BufferedIterator[A] with Serializable {
- private def initialSize = if (end <= start) 0 else end - start
private var index = start
private def available = (end - index) max 0
diff --git a/src/library/scala/collection/Iterator.scala b/src/library/scala/collection/Iterator.scala
index e12b8d231c..fddd436dde 100644
--- a/src/library/scala/collection/Iterator.scala
+++ b/src/library/scala/collection/Iterator.scala
@@ -562,7 +562,6 @@ trait Iterator[+A] extends TraversableOnce[A] {
* handling of structural calls. It's not what's intended here.
*/
class Leading extends AbstractIterator[A] {
- private var isDone = false
val lookahead = new mutable.Queue[A]
def advance() = {
self.hasNext && p(self.head) && {
@@ -572,7 +571,6 @@ trait Iterator[+A] extends TraversableOnce[A] {
}
def finish() = {
while (advance()) ()
- isDone = true
}
def hasNext = lookahead.nonEmpty || advance()
def next() = {
diff --git a/src/library/scala/collection/concurrent/TrieMap.scala b/src/library/scala/collection/concurrent/TrieMap.scala
index 82f62f3c85..b0736ecace 100644
--- a/src/library/scala/collection/concurrent/TrieMap.scala
+++ b/src/library/scala/collection/concurrent/TrieMap.scala
@@ -920,8 +920,8 @@ object TrieMap extends MutableMapFactory[TrieMap] {
private[collection] class TrieMapIterator[K, V](var level: Int, private var ct: TrieMap[K, V], mustInit: Boolean = true) extends Iterator[(K, V)] {
- private var stack = new Array[Array[BasicNode]](7)
- private var stackpos = new Array[Int](7)
+ private val stack = new Array[Array[BasicNode]](7)
+ private val stackpos = new Array[Int](7)
private var depth = -1
private var subiter: Iterator[(K, V)] = null
private var current: KVNode[K, V] = null
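Besides deleting dead members, several hunks (like the TrieMapIterator one above) narrow private var to private val where a field is filled in place but never rebound. A val binding to an Array forbids reassignment, not element mutation, so the change is behavior-preserving — a quick standalone sketch:

    // A val array can still be mutated element-wise; only rebinding is lost,
    // which is exactly what these iterator fields never needed.
    val stack = new Array[Int](7)
    stack(0) = 42                  // fine: element mutation
    // stack = new Array[Int](7)   // does not compile: reassignment to a val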
diff --git a/src/library/scala/collection/immutable/List.scala b/src/library/scala/collection/immutable/List.scala
index 47cac9a1d5..d825f5fb20 100644
--- a/src/library/scala/collection/immutable/List.scala
+++ b/src/library/scala/collection/immutable/List.scala
@@ -379,12 +379,6 @@ final case class ::[B](private var hd: B, private[scala] var tl: List[B]) extend
current = list
}
}
-
- private def oldWriteObject(out: ObjectOutputStream) {
- var xs: List[B] = this
- while (!xs.isEmpty) { out.writeObject(xs.head); xs = xs.tail }
- out.writeObject(ListSerializeEnd)
- }
}
/** $factoryInfo
diff --git a/src/library/scala/collection/immutable/NumericRange.scala b/src/library/scala/collection/immutable/NumericRange.scala
index ce04ef09af..2df7db4d22 100644
--- a/src/library/scala/collection/immutable/NumericRange.scala
+++ b/src/library/scala/collection/immutable/NumericRange.scala
@@ -81,17 +81,6 @@ extends AbstractSeq[T] with IndexedSeq[T] with Serializable {
// to guard against any (most likely illusory) performance drop. They should
// be eliminated one way or another.
- // Counts how many elements from the start meet the given test.
- private def skipCount(p: T => Boolean): Int = {
- var current = start
- var counted = 0
-
- while (counted < length && p(current)) {
- counted += 1
- current += step
- }
- counted
- }
// Tests whether a number is within the endpoints, without testing
// whether it is a member of the sequence (i.e. when step > 1.)
private def isWithinBoundaries(elem: T) = !isEmpty && (
@@ -124,21 +113,21 @@ extends AbstractSeq[T] with IndexedSeq[T] with Serializable {
if (idx < 0 || idx >= length) throw new IndexOutOfBoundsException(idx.toString)
else locationAfterN(idx)
}
-
+
import NumericRange.defaultOrdering
-
+
override def min[T1 >: T](implicit ord: Ordering[T1]): T =
if (ord eq defaultOrdering(num)) {
if (num.signum(step) > 0) start
else last
} else super.min(ord)
-
- override def max[T1 >: T](implicit ord: Ordering[T1]): T =
+
+ override def max[T1 >: T](implicit ord: Ordering[T1]): T =
if (ord eq defaultOrdering(num)) {
if (num.signum(step) > 0) last
else start
} else super.max(ord)
-
+
// Motivated by the desire for Double ranges with BigDecimal precision,
// we need some way to map a Range and get another Range. This can't be
// done in any fully general way because Ranges are not arbitrary
@@ -213,7 +202,7 @@ extends AbstractSeq[T] with IndexedSeq[T] with Serializable {
/** A companion object for numeric ranges.
*/
object NumericRange {
-
+
/** Calculates the number of elements in a range given start, end, step, and
* whether or not it is inclusive. Throws an exception if step == 0 or
* the number of elements exceeds the maximum Int.
@@ -272,7 +261,7 @@ object NumericRange {
new Exclusive(start, end, step)
def inclusive[T](start: T, end: T, step: T)(implicit num: Integral[T]): Inclusive[T] =
new Inclusive(start, end, step)
-
+
private[collection] val defaultOrdering = Map[Numeric[_], Ordering[_]](
Numeric.BigIntIsIntegral -> Ordering.BigInt,
Numeric.IntIsIntegral -> Ordering.Int,
@@ -284,6 +273,6 @@ object NumericRange {
Numeric.DoubleAsIfIntegral -> Ordering.Double,
Numeric.BigDecimalAsIfIntegral -> Ordering.BigDecimal
)
-
+
}
diff --git a/src/library/scala/collection/immutable/StringLike.scala b/src/library/scala/collection/immutable/StringLike.scala
index 4d28bf9518..4020f1f5b3 100644
--- a/src/library/scala/collection/immutable/StringLike.scala
+++ b/src/library/scala/collection/immutable/StringLike.scala
@@ -19,12 +19,11 @@ import scala.reflect.ClassTag
* @since 2.8
*/
object StringLike {
-
// just statics for companion class.
- private final val LF: Char = 0x0A
- private final val FF: Char = 0x0C
- private final val CR: Char = 0x0D
- private final val SU: Char = 0x1A
+ private final val LF = 0x0A
+ private final val FF = 0x0C
+ private final val CR = 0x0D
+ private final val SU = 0x1A
}
import StringLike._
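Dropping the explicit ": Char" ascriptions leaves these constants inferred as Int literals. Assuming they are only ever compared against characters (as in the line-splitting code that uses them), nothing observable changes, because Char widens numerically in comparisons — a small sketch of that assumption:

    // Hypothetical container object just to host the two constant forms.
    object StringLikeSketch {
      final val LF = 0x0A             // inferred as Int after the change
      final val LFChar: Char = 0x0A   // the old, explicitly ascribed form

      def main(args: Array[String]): Unit = {
        // Char widens to Int in ==, so both comparisons hold.
        assert('\n' == LF)
        assert('\n' == LFChar)
      }
    }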
diff --git a/src/library/scala/collection/immutable/Vector.scala b/src/library/scala/collection/immutable/Vector.scala
index 895d073869..1f90436636 100644
--- a/src/library/scala/collection/immutable/Vector.scala
+++ b/src/library/scala/collection/immutable/Vector.scala
@@ -630,14 +630,13 @@ override def companion: GenericCompanion[Vector] = Vector
}
-class VectorIterator[+A](_startIndex: Int, _endIndex: Int)
+class VectorIterator[+A](_startIndex: Int, endIndex: Int)
extends AbstractIterator[A]
with Iterator[A]
with VectorPointer[A @uncheckedVariance] {
private var blockIndex: Int = _startIndex & ~31
private var lo: Int = _startIndex & 31
- private var endIndex: Int = _endIndex
private var endLo = math.min(endIndex - blockIndex, 32)
@@ -667,13 +666,13 @@ extends AbstractIterator[A]
res
}
- private[collection] def remainingElementCount: Int = (_endIndex - (blockIndex + lo)) max 0
+ private[collection] def remainingElementCount: Int = (endIndex - (blockIndex + lo)) max 0
/** Creates a new vector which consists of elements remaining in this iterator.
* Such a vector can then be split into several vectors using methods like `take` and `drop`.
*/
private[collection] def remainingVector: Vector[A] = {
- val v = new Vector(blockIndex + lo, _endIndex, blockIndex + lo)
+ val v = new Vector(blockIndex + lo, endIndex, blockIndex + lo)
v.initFrom(this)
v
}
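The VectorIterator hunk above removes a field that merely mirrored a constructor parameter. In Scala, a plain class parameter that is read from a method body is retained by the compiler as a private[this] field anyway, so the extra var bought nothing — a sketch of that rule (the class name here is hypothetical):

    // A constructor parameter used outside the constructor body is kept as a
    // private[this] field automatically; no explicit var/val copy is needed.
    class IteratorSketch(startIndex: Int, endIndex: Int) {
      private var cursor = startIndex
      def hasNext: Boolean = cursor < endIndex      // endIndex becomes a field
      def next(): Int = { val i = cursor; cursor += 1; i }
    }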
diff --git a/src/library/scala/collection/mutable/IndexedSeqView.scala b/src/library/scala/collection/mutable/IndexedSeqView.scala
index ab3d0ec312..17ad459e2c 100644
--- a/src/library/scala/collection/mutable/IndexedSeqView.scala
+++ b/src/library/scala/collection/mutable/IndexedSeqView.scala
@@ -82,8 +82,6 @@ self =>
protected override def newTakenWhile(p: A => Boolean): Transformed[A] = new { val pred = p } with AbstractTransformed[A] with TakenWhile
protected override def newReversed: Transformed[A] = new AbstractTransformed[A] with Reversed
- private implicit def asThis(xs: Transformed[A]): This = xs.asInstanceOf[This]
-
override def filter(p: A => Boolean): This = newFiltered(p)
override def init: This = newSliced(SliceInterval(0, self.length - 1))
override def drop(n: Int): This = newSliced(SliceInterval(n, self.length))
diff --git a/src/library/scala/collection/parallel/Tasks.scala b/src/library/scala/collection/parallel/Tasks.scala
index 2556cd3f68..d6b75202da 100644
--- a/src/library/scala/collection/parallel/Tasks.scala
+++ b/src/library/scala/collection/parallel/Tasks.scala
@@ -67,19 +67,10 @@ trait Task[R, +Tp] {
private[parallel] def tryMerge(t: Tp @uncheckedVariance) {
val that = t.asInstanceOf[Task[R, Tp]]
val local = result // ensure that any effects of modifying `result` are detected
- // checkMerge(that)
if (this.throwable == null && that.throwable == null) merge(t)
mergeThrowables(that)
}
- private def checkMerge(that: Task[R, Tp] @uncheckedVariance) {
- if (this.throwable == null && that.throwable == null && (this.result == null || that.result == null)) {
- println("This: " + this + ", thr=" + this.throwable + "; merged with " + that + ", thr=" + that.throwable)
- } else if (this.throwable != null || that.throwable != null) {
- println("merging this: " + this + " with thr: " + this.throwable + " with " + that + ", thr=" + that.throwable)
- }
- }
-
private[parallel] def mergeThrowables(that: Task[_, _]) {
if (this.throwable != null && that.throwable != null) {
// merge exceptions, since there were multiple exceptions
diff --git a/src/library/scala/collection/parallel/mutable/ParArray.scala b/src/library/scala/collection/parallel/mutable/ParArray.scala
index deff9eda3b..5ac2725f11 100644
--- a/src/library/scala/collection/parallel/mutable/ParArray.scala
+++ b/src/library/scala/collection/parallel/mutable/ParArray.scala
@@ -579,8 +579,6 @@ self =>
/* operations */
- private def asTask[R, Tp](t: Any) = t.asInstanceOf[Task[R, Tp]]
-
private def buildsArray[S, That](c: Builder[S, That]) = c.isInstanceOf[ParArrayCombiner[_]]
override def map[S, That](f: T => S)(implicit bf: CanBuildFrom[ParArray[T], S, That]) = if (buildsArray(bf(repr))) {
diff --git a/src/library/scala/collection/parallel/mutable/ParFlatHashTable.scala b/src/library/scala/collection/parallel/mutable/ParFlatHashTable.scala
index c7f025207c..0b81d2c90a 100644
--- a/src/library/scala/collection/parallel/mutable/ParFlatHashTable.scala
+++ b/src/library/scala/collection/parallel/mutable/ParFlatHashTable.scala
@@ -38,10 +38,6 @@ trait ParFlatHashTable[T] extends scala.collection.mutable.FlatHashTable[T] {
}
}
- private def checkbounds() = if (idx >= itertable.length) {
- throw new IndexOutOfBoundsException(idx.toString)
- }
-
def newIterator(index: Int, until: Int, totalsize: Int): IterableSplitter[T]
def remaining = totalsize - traversed
@@ -102,11 +98,5 @@ trait ParFlatHashTable[T] extends scala.collection.mutable.FlatHashTable[T] {
}
count
}
-
- private def check() = if (table.slice(idx, until).count(_ != null) != remaining) {
- println("Invariant broken: " + debugInformation)
- assert(false)
- }
}
-
}
diff --git a/src/library/scala/collection/parallel/mutable/ParHashMap.scala b/src/library/scala/collection/parallel/mutable/ParHashMap.scala
index fad7ddad59..3b2c66763e 100644
--- a/src/library/scala/collection/parallel/mutable/ParHashMap.scala
+++ b/src/library/scala/collection/parallel/mutable/ParHashMap.scala
@@ -166,9 +166,8 @@ private[mutable] abstract class ParHashMapCombiner[K, V](private val tableLoadFa
extends scala.collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], DefaultEntry[K, V], ParHashMapCombiner[K, V]](ParHashMapCombiner.numblocks)
with scala.collection.mutable.HashTable.HashUtils[K]
{
- private var mask = ParHashMapCombiner.discriminantmask
- private var nonmasklen = ParHashMapCombiner.nonmasklength
- private var seedvalue = 27
+ private val nonmasklen = ParHashMapCombiner.nonmasklength
+ private val seedvalue = 27
def +=(elem: (K, V)) = {
sz += 1
@@ -232,7 +231,6 @@ extends scala.collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], Defau
def setSize(sz: Int) = tableSize = sz
def insertEntry(/*block: Int, */e: DefaultEntry[K, V]) = {
var h = index(elemHashCode(e.key))
- // assertCorrectBlock(h, block)
var olde = table(h).asInstanceOf[DefaultEntry[K, V]]
// check if key already exists
@@ -252,13 +250,6 @@ extends scala.collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], Defau
true
} else false
}
- private def assertCorrectBlock(h: Int, block: Int) {
- val blocksize = table.length / (1 << ParHashMapCombiner.discriminantbits)
- if (!(h >= block * blocksize && h < (block + 1) * blocksize)) {
- println("trying to put " + h + " into block no.: " + block + ", range: [" + block * blocksize + ", " + (block + 1) * blocksize + ">")
- assert(h >= block * blocksize && h < (block + 1) * blocksize)
- }
- }
protected def createNewEntry[X](key: K, x: X) = ???
}
@@ -288,7 +279,6 @@ extends scala.collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], Defau
val chunksz = unrolled.size
while (i < chunksz) {
val elem = chunkarr(i)
- // assertCorrectBlock(block, elem.key)
if (t.insertEntry(elem)) insertcount += 1
i += 1
}
@@ -297,13 +287,6 @@ extends scala.collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], Defau
}
insertcount
}
- private def assertCorrectBlock(block: Int, k: K) {
- val hc = improve(elemHashCode(k), seedvalue)
- if ((hc >>> nonmasklen) != block) {
- println(hc + " goes to " + (hc >>> nonmasklen) + ", while expected block is " + block)
- assert((hc >>> nonmasklen) == block)
- }
- }
def split = {
val fp = howmany / 2
List(new FillBlocks(buckets, table, offset, fp), new FillBlocks(buckets, table, offset + fp, howmany - fp))
diff --git a/src/library/scala/collection/parallel/mutable/ParHashSet.scala b/src/library/scala/collection/parallel/mutable/ParHashSet.scala
index aef9f6856b..22f22c8305 100644
--- a/src/library/scala/collection/parallel/mutable/ParHashSet.scala
+++ b/src/library/scala/collection/parallel/mutable/ParHashSet.scala
@@ -120,9 +120,8 @@ private[mutable] abstract class ParHashSetCombiner[T](private val tableLoadFacto
extends scala.collection.parallel.BucketCombiner[T, ParHashSet[T], Any, ParHashSetCombiner[T]](ParHashSetCombiner.numblocks)
with scala.collection.mutable.FlatHashTable.HashUtils[T] {
//self: EnvironmentPassingCombiner[T, ParHashSet[T]] =>
- private var mask = ParHashSetCombiner.discriminantmask
- private var nonmasklen = ParHashSetCombiner.nonmasklength
- private var seedvalue = 27
+ private val nonmasklen = ParHashSetCombiner.nonmasklength
+ private val seedvalue = 27
def +=(elem: T) = {
sz += 1
diff --git a/src/library/scala/concurrent/JavaConversions.scala b/src/library/scala/concurrent/JavaConversions.scala
index f66d64bc3b..1b32781afa 100644
--- a/src/library/scala/concurrent/JavaConversions.scala
+++ b/src/library/scala/concurrent/JavaConversions.scala
@@ -41,10 +41,6 @@ object JavaConversions {
exec.execute(task)
}
- def managedBlock(blocker: ManagedBlocker) {
- blocker.block()
- }
-
def shutdown() {
// do nothing
}
diff --git a/src/library/scala/sys/SystemProperties.scala b/src/library/scala/sys/SystemProperties.scala
index 5777c255c3..fbf2d940dc 100644
--- a/src/library/scala/sys/SystemProperties.scala
+++ b/src/library/scala/sys/SystemProperties.scala
@@ -64,7 +64,6 @@ object SystemProperties {
propertyHelp(p.key) = helpText
p
}
- private def str(key: String, helpText: String) = addHelp(Prop[String](key), helpText)
private def bool(key: String, helpText: String): BooleanProp = addHelp[BooleanProp](
if (key startsWith "java.") BooleanProp.valueIsTrue(key) else BooleanProp.keyExists(key),
helpText
diff --git a/src/library/scala/util/automata/WordBerrySethi.scala b/src/library/scala/util/automata/WordBerrySethi.scala
index 235a74dd7a..b648d179c6 100644
--- a/src/library/scala/util/automata/WordBerrySethi.scala
+++ b/src/library/scala/util/automata/WordBerrySethi.scala
@@ -152,7 +152,6 @@ abstract class WordBerrySethi extends BaseBerrySethi {
new NondetWordAutom[_labelT] {
val nstates = pos
val labels = WordBerrySethi.this.labels.toList
- val initials = initialsArr
val finals = finalsArr
val delta = deltaArr
val default = defaultArr
diff --git a/src/library/scala/util/matching/Regex.scala b/src/library/scala/util/matching/Regex.scala
index 63d049208a..9bd596a904 100644
--- a/src/library/scala/util/matching/Regex.scala
+++ b/src/library/scala/util/matching/Regex.scala
@@ -199,7 +199,7 @@ class Regex private[matching](val pattern: Pattern, groupNames: String*) extends
* Otherwise, this Regex is applied to the previously matched input,
* and the result of that match is used.
*/
- def unapplySeq(m: Match): Option[Seq[String]] =
+ def unapplySeq(m: Match): Option[Seq[String]] =
if (m.matched == null) None
else if (m.matcher.pattern == this.pattern) Some(1 to m.groupCount map m.group)
else unapplySeq(m.matched)
@@ -650,7 +650,7 @@ object Regex {
private[matching] trait Replacement {
protected def matcher: Matcher
- private var sb = new java.lang.StringBuffer
+ private val sb = new java.lang.StringBuffer
def replaced = {
val newsb = new java.lang.StringBuffer(sb)
diff --git a/src/library/scala/xml/persistent/SetStorage.scala b/src/library/scala/xml/persistent/SetStorage.scala
index 765d2a8393..56a0be6cf9 100644
--- a/src/library/scala/xml/persistent/SetStorage.scala
+++ b/src/library/scala/xml/persistent/SetStorage.scala
@@ -20,16 +20,14 @@ import java.io.File
*/
class SetStorage(file: File) extends CachedFileStorage(file) {
- private var theSet: mutable.HashSet[Node] = new mutable.HashSet[Node]
+ private val theSet = mutable.HashSet[Node]()
// initialize
{
val it = super.initialNodes
dirty = it.hasNext
- for(x <- it) {
- theSet += x;
- }
+ theSet ++= it
}
/* forwarding methods to hashset*/
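The loop-to-++= rewrite in the SetStorage hunk above relies on mutable.HashSet inheriting ++= from Growable, which accepts any TraversableOnce (IterableOnce in 2.13+), iterators included — a standalone sketch:

    import scala.collection.mutable

    // ++= drains any TraversableOnce, including an Iterator, into the set.
    val theSet = mutable.HashSet[Int]()
    theSet ++= Iterator(1, 2, 3, 2)
    assert(theSet == Set(1, 2, 3))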