Diffstat (limited to 'src')
-rw-r--r--  src/actors/scala/actors/scheduler/ThreadPoolConfig.scala            |   5
-rw-r--r--  src/compiler/scala/reflect/reify/package.scala                      |   5
-rw-r--r--  src/compiler/scala/tools/nsc/ast/parser/Parsers.scala               |   9
-rw-r--r--  src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala           |   6
-rw-r--r--  src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala |   6
-rw-r--r--  src/compiler/scala/tools/nsc/transform/Erasure.scala                |   5
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Implicits.scala            |  17
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala        |  21
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Typers.scala               |   6
-rw-r--r--  src/library/scala/concurrent/impl/Promise.scala                     |  85
-rw-r--r--  src/library/scala/util/Properties.scala                             |  12
-rw-r--r--  src/reflect/scala/reflect/internal/Definitions.scala                |  12
-rw-r--r--  src/reflect/scala/reflect/internal/SymbolTable.scala                |  13
-rw-r--r--  src/reflect/scala/reflect/internal/Types.scala                      |  38
-rw-r--r--  src/reflect/scala/reflect/internal/tpe/TypeComparers.scala          |  93
-rw-r--r--  src/reflect/scala/reflect/internal/tpe/TypeMaps.scala               |   3
-rw-r--r--  src/reflect/scala/reflect/internal/util/TriState.scala              |  26
-rw-r--r--  src/reflect/scala/reflect/internal/util/WeakHashSet.scala           | 447
18 files changed, 623 insertions, 186 deletions
diff --git a/src/actors/scala/actors/scheduler/ThreadPoolConfig.scala b/src/actors/scala/actors/scheduler/ThreadPoolConfig.scala
index ee8776f397..bfd4e7ac40 100644
--- a/src/actors/scala/actors/scheduler/ThreadPoolConfig.scala
+++ b/src/actors/scala/actors/scheduler/ThreadPoolConfig.scala
@@ -42,10 +42,7 @@ private[actors] object ThreadPoolConfig {
(propIsSetTo("actors.enableForkJoin", "true") || {
Debug.info(this+": java.version = "+javaVersion)
Debug.info(this+": java.vm.vendor = "+javaVmVendor)
-
- // on IBM J9 1.6 do not use ForkJoinPool
- // XXX this all needs to go into Properties.
- isJavaAtLeast("1.6") && ((javaVmVendor contains "Oracle") || (javaVmVendor contains "Sun") || (javaVmVendor contains "Apple"))
+ isJavaAtLeast("1.6")
})
catch {
case _: SecurityException => false
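For reference, the version check that survives this simplification is available from scala.util.Properties; a minimal sketch (assuming a standard Scala distribution, names as in the library):

    import scala.util.Properties.{ javaVersion, isJavaAtLeast }

    object JavaVersionSketch extends App {
      // Reads java.version and compares it against the requested version prefix.
      println(javaVersion)            // e.g. "1.7.0_45"
      println(isJavaAtLeast("1.6"))   // true on any 1.6+ JVM, regardless of vendor
    }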
diff --git a/src/compiler/scala/reflect/reify/package.scala b/src/compiler/scala/reflect/reify/package.scala
index ae0288186b..d3cae3d123 100644
--- a/src/compiler/scala/reflect/reify/package.scala
+++ b/src/compiler/scala/reflect/reify/package.scala
@@ -46,11 +46,14 @@ package object reify {
def reifyType(global: Global)(typer: global.analyzer.Typer, universe: global.Tree, mirror: global.Tree, tpe: global.Type, concrete: Boolean = false): global.Tree =
mkReifier(global)(typer, universe, mirror, tpe, concrete = concrete).reification.asInstanceOf[global.Tree]
- def reifyRuntimeClass(global: Global)(typer0: global.analyzer.Typer, tpe: global.Type, concrete: Boolean = true): global.Tree = {
+ def reifyRuntimeClass(global: Global)(typer0: global.analyzer.Typer, tpe0: global.Type, concrete: Boolean = true): global.Tree = {
import global._
import definitions._
import analyzer.enclosingMacroPosition
+ // SI-7375
+ val tpe = tpe0.dealiasWiden
+
if (tpe.isSpliceable) {
val classTagInScope = typer0.resolveClassTag(enclosingMacroPosition, tpe, allowMaterialization = false)
if (!classTagInScope.isEmpty) return Select(classTagInScope, nme.runtimeClass)
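A rough illustration of what the dealiasWiden above buys (a hypothetical sketch using the Scala 2.11 reflection API, not the reifier itself): an alias or singleton type is reduced to its underlying type before the runtime class is computed, so the SI-7375 case no longer trips on the alias.

    import scala.reflect.runtime.universe._

    object DealiasWidenSketch extends App {
      type Alias = java.lang.String                    // a plain type alias
      val tpe = typeOf[Alias]
      // dealias peels the alias; widen reduces singleton types to their underlying type
      println(tpe.dealias.widen =:= typeOf[String])    // expected: true
    }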
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
index 89f933f577..fc532f5d44 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
@@ -1423,8 +1423,9 @@ self =>
* }}}
*/
def postfixExpr(): Tree = {
- val base = opstack
- var top = prefixExpr()
+ val start = in.offset
+ val base = opstack
+ var top = prefixExpr()
while (isIdent) {
top = reduceStack(isExpr = true, base, top, precedence(in.name), leftAssoc = treeInfo.isLeftAssoc(in.name))
@@ -1442,9 +1443,7 @@ self =>
val topinfo = opstack.head
opstack = opstack.tail
val od = stripParens(reduceStack(isExpr = true, base, topinfo.operand, 0, leftAssoc = true))
- return atPos(od.pos.startOrPoint, topinfo.offset) {
- new PostfixSelect(od, topinfo.operator.encode)
- }
+ return makePostfixSelect(start, topinfo.offset, od, topinfo.operator)
}
}
reduceStack(isExpr = true, base, top, 0, leftAssoc = true)
diff --git a/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala b/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala
index f737fcc635..ec4e932aae 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala
@@ -242,6 +242,12 @@ abstract class TreeBuilder {
Assign(lhs, rhs)
}
+ /** Tree for `od op`, start is start0 if od.pos is borked. */
+ def makePostfixSelect(start0: Int, end: Int, od: Tree, op: Name): Tree = {
+ val start = if (od.pos.isDefined) od.pos.startOrPoint else start0
+ atPos(r2p(start, end, end)) { new PostfixSelect(od, op.encode) }
+ }
+
/** A type tree corresponding to (possibly unary) intersection type */
def makeIntersectionTypeTree(tps: List[Tree]): Tree =
if (tps.tail.isEmpty) tps.head
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
index 6948d02fd5..fb927d15d3 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
@@ -491,8 +491,10 @@ abstract class ClassfileParser {
}
}
- val c = if (currentIsTopLevel) pool.getClassSymbol(nameIdx) else clazz
- if (currentIsTopLevel) {
+ val isTopLevel = !(currentClass containsChar '$') // Java class name; *don't* try to use Scala name decoding (SI-7532)
+
+ val c = if (isTopLevel) pool.getClassSymbol(nameIdx) else clazz
+ if (isTopLevel) {
if (c != clazz) {
if ((clazz eq NoSymbol) && (c ne NoSymbol)) clazz = c
else mismatchError(c)
diff --git a/src/compiler/scala/tools/nsc/transform/Erasure.scala b/src/compiler/scala/tools/nsc/transform/Erasure.scala
index c9a7cc2e3f..0f65b11e9b 100644
--- a/src/compiler/scala/tools/nsc/transform/Erasure.scala
+++ b/src/compiler/scala/tools/nsc/transform/Erasure.scala
@@ -342,11 +342,6 @@ abstract class Erasure extends AddInterfaces
}
}
- // Each primitive value class has its own getClass for ultra-precise class object typing.
- private lazy val primitiveGetClassMethods = Set[Symbol](Any_getClass, AnyVal_getClass) ++ (
- ScalaValueClasses map (_.tpe member nme.getClass_)
- )
-
// ## requires a little translation
private lazy val poundPoundMethods = Set[Symbol](Any_##, Object_##)
// Methods on Any/Object which we rewrite here while we still know what
diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
index 5622f67459..b53efafdd4 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
@@ -931,22 +931,11 @@ trait Implicits {
if (DivergentImplicitRecovery.sym != null) {
DivergingImplicitExpansionError(tree, pt, DivergentImplicitRecovery.sym)(context)
}
- else if (invalidImplicits.nonEmpty) {
- val sym = invalidImplicits.head
- // We don't even dare look if errors are being buffered
- // !sym.hasFlag(LOCKED) is a hail mary between SI-2206 and SI-7486
- def isSensibleAddendum = !sym.hasFlag(LOCKED) && (pt match {
- case Function1(_, out) => out <:< sym.tpe.finalResultType
- case _ => pt <:< sym.tpe.finalResultType
- })
- // Don't pitch in with this theory unless it looks plausible that the
- // implicit would have helped
+
+ if (invalidImplicits.nonEmpty)
setAddendum(pos, () =>
- if (isSensibleAddendum)
- s"\n Note: implicit $sym is not applicable here because it comes after the application point and it lacks an explicit result type"
- else ""
+ s"\n Note: implicit ${invalidImplicits.head} is not applicable here because it comes after the application point and it lacks an explicit result type"
)
- }
}
best
diff --git a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala
index 03ad127498..8e9933f734 100644
--- a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala
@@ -282,11 +282,13 @@ trait NamesDefaults { self: Analyzer =>
case WildcardStarArg(expr) => expr.tpe
case _ => seqType(arg.tpe)
}
- else
- // Note stabilizing can lead to a non-conformant argument when existentials are involved, e.g. neg/t3507-old.scala, hence the filter.
- gen.stableTypeFor(arg).filter(_ <:< paramTpe).getOrElse(arg.tpe)
- // We have to deconst or types inferred from literal arguments will be Constant(_), e.g. pos/z1730.scala.
- ).deconst
+ else {
+ // TODO In 83c9c764b, we tried to use a stable type here to fix SI-7234. But the resulting TypeTree over a
+ // singleton type without an original TypeTree fails to retypecheck after a resetLocalAttrs (SI-7516),
+ // which is important for (at least) macros.
+ arg.tpe
+ }
+ ).widen // have to widen or types inferred from literal defaults will be singletons
val s = context.owner.newValue(unit.freshTermName("x$"), arg.pos, newFlags = ARTIFACT) setInfo (
if (byName) functionType(Nil, argTpe) else argTpe
)
@@ -328,9 +330,12 @@ trait NamesDefaults { self: Analyzer =>
// type the application without names; put the arguments in definition-site order
val typedApp = doTypedApply(tree, funOnly, reorderArgs(namelessArgs, argPos), mode, pt)
typedApp match {
- // Extract the typed arguments, restore the call-site evaluation order (using
- // ValDef's in the block), change the arguments to these local values.
- case Apply(expr, typedArgs) if !(typedApp :: typedArgs).exists(_.isErrorTyped) => // bail out with erroneous args, see SI-7238
+ case Apply(expr, typedArgs) if (typedApp :: typedArgs).exists(_.isErrorTyped) =>
+ setError(tree) // bail out with an erroneous Apply *or* erroneous arguments, see SI-7238, SI-7509
+ case Apply(expr, typedArgs) =>
+ // Extract the typed arguments, restore the call-site evaluation order (using
+ // ValDef's in the block), change the arguments to these local values.
+
// typedArgs: definition-site order
val formals = formalTypes(expr.tpe.paramTypes, typedArgs.length, removeByName = false, removeRepeated = false)
// valDefs: call-site order
diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
index c9aa27cf49..ec6b78f2de 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
@@ -602,10 +602,8 @@ trait Typers extends Adaptations with Tags {
// type of the qualifier is inaccessible, we can cause private types to
// escape scope here, e.g. pos/t1107. I'm not sure how to properly handle
// this so for now it requires the type symbol be public.
- def isGetClassCall = (
- (sym == Any_getClass || sym == Object_getClass)
- && pre.typeSymbol.isPublic
- )
+ def isGetClassCall = isGetClass(sym) && pre.typeSymbol.isPublic
+
def narrowIf(tree: Tree, condition: Boolean) =
if (condition) tree setType singleType(pre, sym) else tree
diff --git a/src/library/scala/concurrent/impl/Promise.scala b/src/library/scala/concurrent/impl/Promise.scala
index 7af70400ef..ffaea8de96 100644
--- a/src/library/scala/concurrent/impl/Promise.scala
+++ b/src/library/scala/concurrent/impl/Promise.scala
@@ -8,11 +8,14 @@
package scala.concurrent.impl
-import scala.concurrent.{ ExecutionContext, CanAwait, OnCompleteRunnable, TimeoutException, ExecutionException }
+import scala.concurrent.{ ExecutionContext, CanAwait, OnCompleteRunnable, TimeoutException, ExecutionException, blocking }
+import scala.concurrent.Future.InternalCallbackExecutor
import scala.concurrent.duration.{ Duration, Deadline, FiniteDuration, NANOSECONDS }
import scala.annotation.tailrec
import scala.util.control.NonFatal
import scala.util.{ Try, Success, Failure }
+import java.io.ObjectInputStream
+import java.util.concurrent.locks.AbstractQueuedSynchronizer
private[concurrent] trait Promise[T] extends scala.concurrent.Promise[T] with scala.concurrent.Future[T] {
def future: this.type = this
@@ -53,69 +56,68 @@ private[concurrent] object Promise {
case t => Failure(t)
}
+ /*
+ * Inspired by: http://gee.cs.oswego.edu/cgi-bin/viewcvs.cgi/jsr166/src/main/java/util/concurrent/locks/AbstractQueuedSynchronizer.java
+ * Written by Doug Lea with assistance from members of JCP JSR-166
+ * Expert Group and released to the public domain, as explained at
+ * http://creativecommons.org/publicdomain/zero/1.0/
+ */
+ private final class CompletionLatch[T] extends AbstractQueuedSynchronizer with (Try[T] => Unit) {
+ override protected def tryAcquireShared(ignored: Int): Int = if (getState != 0) 1 else -1
+ override protected def tryReleaseShared(ignore: Int): Boolean = {
+ setState(1)
+ true
+ }
+ override def apply(ignored: Try[T]): Unit = releaseShared(1)
+ }
+
+
/** Default promise implementation.
*/
class DefaultPromise[T] extends AbstractPromise with Promise[T] { self =>
updateState(null, Nil) // Start at "No callbacks"
- protected final def tryAwait(atMost: Duration): Boolean = {
- @tailrec
- def awaitUnsafe(deadline: Deadline, nextWait: FiniteDuration): Boolean = {
- if (!isCompleted && nextWait > Duration.Zero) {
- val ms = nextWait.toMillis
- val ns = (nextWait.toNanos % 1000000l).toInt // as per object.wait spec
-
- synchronized { if (!isCompleted) wait(ms, ns) }
-
- awaitUnsafe(deadline, deadline.timeLeft)
- } else
- isCompleted
- }
- @tailrec
- def awaitUnbounded(): Boolean = {
- if (isCompleted) true
- else {
- synchronized { if (!isCompleted) wait() }
- awaitUnbounded()
- }
- }
-
+ protected final def tryAwait(atMost: Duration): Boolean = if (!isCompleted) {
import Duration.Undefined
+ import scala.concurrent.Future.InternalCallbackExecutor
atMost match {
- case u if u eq Undefined => throw new IllegalArgumentException("cannot wait for Undefined period")
- case Duration.Inf => awaitUnbounded()
- case Duration.MinusInf => isCompleted
- case f: FiniteDuration => if (f > Duration.Zero) awaitUnsafe(f.fromNow, f) else isCompleted
+ case e if e eq Undefined => throw new IllegalArgumentException("cannot wait for Undefined period")
+ case Duration.Inf =>
+ val l = new CompletionLatch[T]()
+ onComplete(l)(InternalCallbackExecutor)
+ l.acquireSharedInterruptibly(1)
+ case Duration.MinusInf => // Drop out
+ case f: FiniteDuration =>
+ if (f > Duration.Zero) {
+ val l = new CompletionLatch[T]()
+ onComplete(l)(InternalCallbackExecutor)
+ l.tryAcquireSharedNanos(1, f.toNanos)
+ }
}
- }
+
+ isCompleted
+ } else true // Already completed
@throws(classOf[TimeoutException])
@throws(classOf[InterruptedException])
def ready(atMost: Duration)(implicit permit: CanAwait): this.type =
- if (isCompleted || tryAwait(atMost)) this
+ if (tryAwait(atMost)) this
else throw new TimeoutException("Futures timed out after [" + atMost + "]")
@throws(classOf[Exception])
def result(atMost: Duration)(implicit permit: CanAwait): T =
- ready(atMost).value.get match {
- case Failure(e) => throw e
- case Success(r) => r
- }
+ ready(atMost).value.get.get // ready throws TimeoutException if timeout so value.get is safe here
def value: Option[Try[T]] = getState match {
case c: Try[_] => Some(c.asInstanceOf[Try[T]])
case _ => None
}
- override def isCompleted: Boolean = getState match { // Cheaper than boxing result into Option due to "def value"
- case _: Try[_] => true
- case _ => false
- }
+ override def isCompleted: Boolean = getState.isInstanceOf[Try[_]]
def tryComplete(value: Try[T]): Boolean = {
val resolved = resolveTry(value)
- (try {
- @tailrec
+ @tailrec
def tryComplete(v: Try[T]): List[CallbackRunnable[T]] = {
getState match {
case raw: List[_] =>
@@ -124,10 +126,7 @@ private[concurrent] object Promise {
case _ => null
}
}
- tryComplete(resolved)
- } finally {
- synchronized { notifyAll() } //Notify any evil blockers
- }) match {
+ tryComplete(resolved) match {
case null => false
case rs if rs.isEmpty => true
case rs => rs.foreach(r => r.executeWithValue(resolved)); true
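For reference, a self-contained sketch (not the library code above) of the AbstractQueuedSynchronizer pattern that CompletionLatch relies on: a one-shot latch where waiters block in acquireSharedInterruptibly until some thread flips the state via releaseShared.

    import java.util.concurrent.locks.AbstractQueuedSynchronizer

    object LatchSketch extends App {
      final class OneShotLatch extends AbstractQueuedSynchronizer {
        // shared acquisition succeeds only once the state has been set to 1
        override protected def tryAcquireShared(ignored: Int): Int = if (getState != 0) 1 else -1
        override protected def tryReleaseShared(ignored: Int): Boolean = { setState(1); true }
      }

      val latch = new OneShotLatch
      val waiter = new Thread(new Runnable {
        def run(): Unit = {
          latch.acquireSharedInterruptibly(1)   // parks until the latch is released
          println("completed")
        }
      })
      waiter.start()
      Thread.sleep(50)                          // stand-in for the asynchronous completion
      latch.releaseShared(1)                    // wakes every waiter exactly once
      waiter.join()
    }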
diff --git a/src/library/scala/util/Properties.scala b/src/library/scala/util/Properties.scala
index 73469a955d..2240dde360 100644
--- a/src/library/scala/util/Properties.scala
+++ b/src/library/scala/util/Properties.scala
@@ -122,8 +122,7 @@ private[scala] trait PropertiesTrait {
*/
def lineSeparator = propOrElse("line.separator", "\n")
- /** Various well-known properties.
- */
+ /* Various well-known properties. */
def javaClassPath = propOrEmpty("java.class.path")
def javaHome = propOrEmpty("java.home")
def javaVendor = propOrEmpty("java.vendor")
@@ -139,10 +138,13 @@ private[scala] trait PropertiesTrait {
def userHome = propOrEmpty("user.home")
def userName = propOrEmpty("user.name")
- /** Some derived values.
- */
+ /* Some derived values. */
+ /** Returns `true` iff the underlying operating system is a version of Microsoft Windows. */
def isWin = osName startsWith "Windows"
- def isMac = javaVendor startsWith "Apple"
+ // See http://mail.openjdk.java.net/pipermail/macosx-port-dev/2012-November/005148.html for
+ // the reason why we don't follow developer.apple.com/library/mac/#technotes/tn2002/tn2110.
+ /** Returns `true` iff the underlying operating system is a version of Apple Mac OS X. */
+ def isMac = osName startsWith "Mac OS X"
// This is looking for javac, tools.jar, etc.
// Tries JDK_HOME first, then the more common but likely jre JAVA_HOME,
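For reference, a plain-JDK sketch of the same checks (the library itself reads the properties through propOrEmpty). The switch from java.vendor to os.name matters because OpenJDK-based Mac builds no longer report an Apple vendor string, while os.name still starts with "Mac OS X".

    object OsCheckSketch extends App {
      val osName = Option(System.getProperty("os.name")) getOrElse ""
      val isMac  = osName startsWith "Mac OS X"
      val isWin  = osName startsWith "Windows"
      println(s"os.name=$osName isMac=$isMac isWin=$isWin")
    }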
diff --git a/src/reflect/scala/reflect/internal/Definitions.scala b/src/reflect/scala/reflect/internal/Definitions.scala
index 5892053b5e..4f2b7e2642 100644
--- a/src/reflect/scala/reflect/internal/Definitions.scala
+++ b/src/reflect/scala/reflect/internal/Definitions.scala
@@ -111,8 +111,10 @@ trait Definitions extends api.StandardDefinitions {
/** Is symbol a numeric value class? */
def isNumericValueClass(sym: Symbol) = ScalaNumericValueClasses contains sym
- def isGetClass(sym: Symbol) =
- (sym.name == nme.getClass_) && flattensToEmpty(sym.paramss)
+ def isGetClass(sym: Symbol) = (
+ sym.name == nme.getClass_ // this condition is for performance only; this is called from `Typer#stabilize`.
+ && getClassMethods(sym)
+ )
lazy val UnitClass = valueClassSymbol(tpnme.Unit)
lazy val ByteClass = valueClassSymbol(tpnme.Byte)
@@ -747,6 +749,12 @@ trait Definitions extends api.StandardDefinitions {
lazy val Any_isInstanceOf = newT1NullaryMethod(AnyClass, nme.isInstanceOf_, FINAL)(_ => BooleanTpe)
lazy val Any_asInstanceOf = newT1NullaryMethod(AnyClass, nme.asInstanceOf_, FINAL)(_.typeConstructor)
+ lazy val primitiveGetClassMethods = Set[Symbol](Any_getClass, AnyVal_getClass) ++ (
+ ScalaValueClasses map (_.tpe member nme.getClass_)
+ )
+
+ lazy val getClassMethods: Set[Symbol] = primitiveGetClassMethods + Object_getClass
+
// A type function from T => Class[U], used to determine the return
// type of getClass calls. The returned type is:
//
diff --git a/src/reflect/scala/reflect/internal/SymbolTable.scala b/src/reflect/scala/reflect/internal/SymbolTable.scala
index 7f9811220f..2ae9f81a09 100644
--- a/src/reflect/scala/reflect/internal/SymbolTable.scala
+++ b/src/reflect/scala/reflect/internal/SymbolTable.scala
@@ -307,27 +307,20 @@ abstract class SymbolTable extends macros.Universe
}
object perRunCaches {
- import java.lang.ref.WeakReference
import scala.collection.generic.Clearable
// Weak references so the garbage collector will take care of
// letting us know when a cache is really out of commission.
- private val caches = mutable.HashSet[WeakReference[Clearable]]()
+ private val caches = WeakHashSet[Clearable]()
def recordCache[T <: Clearable](cache: T): T = {
- caches += new WeakReference(cache)
+ caches += cache
cache
}
def clearAll() = {
debuglog("Clearing " + caches.size + " caches.")
- caches foreach { ref =>
- val cache = ref.get()
- if (cache == null)
- caches -= ref
- else
- cache.clear()
- }
+ caches foreach (_.clear)
}
def newWeakMap[K, V]() = recordCache(mutable.WeakHashMap[K, V]())
diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala
index f27e6c979a..d942c71619 100644
--- a/src/reflect/scala/reflect/internal/Types.scala
+++ b/src/reflect/scala/reflect/internal/Types.scala
@@ -107,6 +107,18 @@ trait Types
protected val enableTypeVarExperimentals = settings.Xexperimental.value
+ /** Caching the most recent map has a 75-90% hit rate. */
+ private object substTypeMapCache {
+ private[this] var cached: SubstTypeMap = new SubstTypeMap(Nil, Nil)
+
+ def apply(from: List[Symbol], to: List[Type]): SubstTypeMap = {
+ if ((cached.from ne from) || (cached.to ne to))
+ cached = new SubstTypeMap(from, to)
+
+ cached
+ }
+ }
+
/** The current skolemization level, needed for the algorithms
* in isSameType, isSubType that do constraint solving under a prefix.
*/
@@ -698,8 +710,7 @@ trait Types
* symbols `from` in this type.
*/
def subst(from: List[Symbol], to: List[Type]): Type =
- if (from.isEmpty) this
- else new SubstTypeMap(from, to) apply this
+ if (from.isEmpty) this else substTypeMapCache(from, to)(this)
/** Substitute symbols `to` for occurrences of symbols `from` in this type.
*
@@ -1066,6 +1077,14 @@ trait Types
continue = false
val bcs0 = baseClasses
var bcs = bcs0
+ // omit PRIVATE LOCALS unless selector class is contained in class owning the def.
+ def admitPrivateLocal(owner: Symbol): Boolean = {
+ val selectorClass = this match {
+ case tt: ThisType => tt.sym // SI-7507 the first base class is not necessarily the selector class.
+ case _ => bcs0.head
+ }
+ selectorClass.hasTransOwner(owner)
+ }
while (!bcs.isEmpty) {
val decls = bcs.head.info.decls
var entry = decls.lookupEntry(name)
@@ -1075,10 +1094,10 @@ trait Types
if ((flags & required) == required) {
val excl = flags & excluded
if (excl == 0L &&
- (// omit PRIVATE LOCALS unless selector class is contained in class owning the def.
+ (
(bcs eq bcs0) ||
(flags & PrivateLocal) != PrivateLocal ||
- (bcs0.head.hasTransOwner(bcs.head)))) {
+ admitPrivateLocal(bcs.head))) {
if (name.isTypeName || (stableOnly && sym.isStable && !sym.hasVolatileType)) {
if (Statistics.canEnable) Statistics.popTimer(typeOpsStack, start)
return sym
@@ -3119,10 +3138,9 @@ trait Types
sameLength(typeArgs, tp.typeArgs) && {
val lhs = if (isLowerBound) tp.typeArgs else typeArgs
val rhs = if (isLowerBound) typeArgs else tp.typeArgs
- // this is a higher-kinded type var with same arity as tp.
- // side effect: adds the type constructor itself as a bound
- addBound(tp.typeConstructor)
- isSubArgs(lhs, rhs, params, AnyDepth)
+ // This is a higher-kinded type var with same arity as tp.
+ // If so (see SI-7517), side effect: adds the type constructor itself as a bound.
+ isSubArgs(lhs, rhs, params, AnyDepth) && { addBound(tp.typeConstructor); true }
}
}
// The type with which we can successfully unify can be hidden
@@ -3669,13 +3687,13 @@ trait Types
// Hash consing --------------------------------------------------------------
private val initialUniquesCapacity = 4096
- private var uniques: util.HashSet[Type] = _
+ private var uniques: util.WeakHashSet[Type] = _
private var uniqueRunId = NoRunId
protected def unique[T <: Type](tp: T): T = {
if (Statistics.canEnable) Statistics.incCounter(rawTypeCount)
if (uniqueRunId != currentRunId) {
- uniques = util.HashSet[Type]("uniques", initialUniquesCapacity)
+ uniques = util.WeakHashSet[Type](initialUniquesCapacity)
perRunCaches.recordCache(uniques)
uniqueRunId = currentRunId
}
diff --git a/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala b/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala
index 711a94d7bd..63f17dff34 100644
--- a/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala
+++ b/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala
@@ -4,7 +4,7 @@ package internal
package tpe
import scala.collection.{ mutable }
-import util.Statistics
+import util.{ Statistics, TriState }
import scala.annotation.tailrec
trait TypeComparers {
@@ -118,30 +118,20 @@ trait TypeComparers {
// if (subsametypeRecursions == 0) undoLog.clear()
}
- private def isSameType1(tp1: Type, tp2: Type): Boolean = {
- if ((tp1 eq tp2) ||
- (tp1 eq ErrorType) || (tp1 eq WildcardType) ||
- (tp2 eq ErrorType) || (tp2 eq WildcardType))
- true
- else if ((tp1 eq NoType) || (tp2 eq NoType))
- false
- else if (tp1 eq NoPrefix) // !! I do not see how this would be warranted by the spec
- tp2.typeSymbol.isPackageClass
- else if (tp2 eq NoPrefix) // !! I do not see how this would be warranted by the spec
- tp1.typeSymbol.isPackageClass
- else if (tp1.isInstanceOf[AnnotatedType] || tp2.isInstanceOf[AnnotatedType])
- annotationsConform(tp1, tp2) && annotationsConform(tp2, tp1) && (tp1.withoutAnnotations =:= tp2.withoutAnnotations)
- else {
- // We flush out any AnnotatedTypes before calling isSameType2 because
- // unlike most other subclasses of Type, we have to allow for equivalence of any
- // combination of { tp1, tp2 } { is, is not } an AnnotatedType - this because the
- // logic of "annotationsConform" is arbitrary and unknown.
- isSameType2(tp1, tp2) || {
- val tp1n = normalizePlus(tp1)
- val tp2n = normalizePlus(tp2)
- ((tp1n ne tp1) || (tp2n ne tp2)) && isSameType(tp1n, tp2n)
- }
- }
+ // @pre: at least one argument has annotations
+ private def sameAnnotatedTypes(tp1: Type, tp2: Type) = (
+ annotationsConform(tp1, tp2)
+ && annotationsConform(tp2, tp1)
+ && (tp1.withoutAnnotations =:= tp2.withoutAnnotations)
+ )
+ // We flush out any AnnotatedTypes before calling isSameType2 because
+ // unlike most other subclasses of Type, we have to allow for equivalence of any
+ // combination of { tp1, tp2 } { is, is not } an AnnotatedType - this because the
+ // logic of "annotationsConform" is arbitrary and unknown.
+ private def isSameType1(tp1: Type, tp2: Type): Boolean = typeRelationPreCheck(tp1, tp2) match {
+ case state if state.isKnown => state.booleanValue
+ case _ if typeHasAnnotations(tp1) || typeHasAnnotations(tp2) => sameAnnotatedTypes(tp1, tp2)
+ case _ => isSameType2(tp1, tp2)
}
private def isSameHKTypes(tp1: Type, tp2: Type) = (
@@ -186,6 +176,8 @@ trait TypeComparers {
}
def isSameType2(tp1: Type, tp2: Type): Boolean = {
+ def retry(lhs: Type, rhs: Type) = ((lhs ne tp1) || (rhs ne tp2)) && isSameType(lhs, rhs)
+
/* Here we highlight those unfortunate type-like constructs which
* are hidden bundles of mutable state, cruising the type system picking
* up any type constraints naive enough to get into their hot rods.
@@ -236,6 +228,7 @@ trait TypeComparers {
|| sameSingletonType
|| mutateNonTypeConstructs(tp1, tp2)
|| mutateNonTypeConstructs(tp2, tp1)
+ || retry(normalizePlus(tp1), normalizePlus(tp2))
)
}
@@ -276,12 +269,12 @@ trait TypeComparers {
else
try {
pendingSubTypes += p
- isSubType2(tp1, tp2, depth)
+ isSubType1(tp1, tp2, depth)
} finally {
pendingSubTypes -= p
}
} else {
- isSubType2(tp1, tp2, depth)
+ isSubType1(tp1, tp2, depth)
}
} finally if (!result) undoLog.undoTo(before)
@@ -294,6 +287,39 @@ trait TypeComparers {
// if (subsametypeRecursions == 0) undoLog.clear()
}
+ /** Check whether the subtype or type equivalence relationship
+ * between the arguments is predetermined. Returns a tri-state
+ * value: True means the arguments are always sub/same types,
+ * False means they never are, and Unknown means the caller
+ * will have to figure things out.
+ */
+ private def typeRelationPreCheck(tp1: Type, tp2: Type): TriState = {
+ def isTrue = (
+ (tp1 eq tp2)
+ || isErrorOrWildcard(tp1)
+ || isErrorOrWildcard(tp2)
+ || (tp1 eq NoPrefix) && tp2.typeSymbol.isPackageClass // !! I do not see how this would be warranted by the spec
+ || (tp2 eq NoPrefix) && tp1.typeSymbol.isPackageClass // !! I do not see how this would be warranted by the spec
+ )
+ // isFalse, assuming !isTrue
+ def isFalse = (
+ (tp1 eq NoType)
+ || (tp2 eq NoType)
+ || (tp1 eq NoPrefix)
+ || (tp2 eq NoPrefix)
+ )
+
+ if (isTrue) TriState.True
+ else if (isFalse) TriState.False
+ else TriState.Unknown
+ }
+
+ private def isSubType1(tp1: Type, tp2: Type, depth: Int): Boolean = typeRelationPreCheck(tp1, tp2) match {
+ case state if state.isKnown => state.booleanValue
+ case _ if typeHasAnnotations(tp1) || typeHasAnnotations(tp2) => annotationsConform(tp1, tp2) && (tp1.withoutAnnotations <:< tp2.withoutAnnotations)
+ case _ => isSubType2(tp1, tp2, depth)
+ }
+
private def isPolySubType(tp1: PolyType, tp2: PolyType): Boolean = {
val PolyType(tparams1, res1) = tp1
val PolyType(tparams2, res2) = tp2
@@ -332,12 +358,13 @@ trait TypeComparers {
/** Does type `tp1` conform to `tp2`? */
private def isSubType2(tp1: Type, tp2: Type, depth: Int): Boolean = {
- if ((tp1 eq tp2) || isErrorOrWildcard(tp1) || isErrorOrWildcard(tp2)) return true
- if ((tp1 eq NoType) || (tp2 eq NoType)) return false
- if (tp1 eq NoPrefix) return (tp2 eq NoPrefix) || tp2.typeSymbol.isPackageClass // !! I do not see how the "isPackageClass" would be warranted by the spec
- if (tp2 eq NoPrefix) return tp1.typeSymbol.isPackageClass
- if (isSingleType(tp1) && isSingleType(tp2) || isConstantType(tp1) && isConstantType(tp2)) return tp1 =:= tp2
- if (tp1.isHigherKinded || tp2.isHigherKinded) return isHKSubType(tp1, tp2, depth)
+ def retry(lhs: Type, rhs: Type) = ((lhs ne tp1) || (rhs ne tp2)) && isSubType(lhs, rhs, depth)
+
+ if (isSingleType(tp1) && isSingleType(tp2) || isConstantType(tp1) && isConstantType(tp2))
+ return (tp1 =:= tp2) || retry(tp1.underlying, tp2)
+
+ if (tp1.isHigherKinded || tp2.isHigherKinded)
+ return isHKSubType(tp1, tp2, depth)
/* First try, on the right:
* - unwrap Annotated types, BoundedWildcardTypes,
diff --git a/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala b/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala
index c35825dcee..bebc419c7c 100644
--- a/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala
+++ b/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala
@@ -794,8 +794,7 @@ private[internal] trait TypeMaps {
}
/** A map to implement the `subst` method. */
- class SubstTypeMap(from: List[Symbol], to: List[Type])
- extends SubstMap(from, to) {
+ class SubstTypeMap(val from: List[Symbol], val to: List[Type]) extends SubstMap(from, to) {
protected def toType(fromtp: Type, tp: Type) = tp
override def mapOver(tree: Tree, giveup: () => Nothing): Tree = {
diff --git a/src/reflect/scala/reflect/internal/util/TriState.scala b/src/reflect/scala/reflect/internal/util/TriState.scala
new file mode 100644
index 0000000000..c7a35d4637
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/util/TriState.scala
@@ -0,0 +1,26 @@
+package scala
+package reflect
+package internal
+package util
+
+import TriState._
+
+/** A simple true/false/unknown value, for those days when
+ * true and false don't quite partition the space.
+ */
+final class TriState private (val value: Int) extends AnyVal {
+ def isKnown = this != Unknown
+ def booleanValue = this match {
+ case True => true
+ case False => false
+ case _ => sys.error("Not a Boolean value")
+ }
+}
+
+object TriState {
+ implicit def booleanToTriState(b: Boolean): TriState = if (b) True else False
+
+ val Unknown = new TriState(-1)
+ val False = new TriState(0)
+ val True = new TriState(1)
+}
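A brief usage sketch of the TriState added above (assuming scala-reflect on the classpath): a Boolean converts implicitly to True/False, and Unknown tells the caller to fall through to its own logic, which is how typeRelationPreCheck in TypeComparers above uses it.

    import scala.reflect.internal.util.TriState

    object TriStateSketch extends App {
      def preCheck(known: Boolean, value: Boolean): TriState =
        if (known) value else TriState.Unknown   // Boolean => TriState via the implicit in the companion

      val r = preCheck(known = true, value = false)
      if (r.isKnown) println(r.booleanValue)     // prints: false
      else println("fall through to the full comparison")
    }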
diff --git a/src/reflect/scala/reflect/internal/util/WeakHashSet.scala b/src/reflect/scala/reflect/internal/util/WeakHashSet.scala
index ef62fa481b..9b792a3f43 100644
--- a/src/reflect/scala/reflect/internal/util/WeakHashSet.scala
+++ b/src/reflect/scala/reflect/internal/util/WeakHashSet.scala
@@ -1,59 +1,430 @@
package scala
package reflect.internal.util
-import scala.collection.mutable
+import java.lang.ref.{WeakReference, ReferenceQueue}
+import scala.annotation.tailrec
import scala.collection.generic.Clearable
-import scala.runtime.AbstractFunction1
+import scala.collection.mutable.{Set => mSet}
-/** A bare-bones implementation of a mutable `Set` that uses weak references
- * to hold the elements.
+/**
+ * A HashSet where the elements are stored weakly. Elements in this set are eligible for GC if no other
+ * hard references are associated with them. Its primary use case is as a canonical reference
+ * identity holder (aka "hash-consing") via findEntryOrUpdate.
*
- * This implementation offers only add/remove/test operations,
- * therefore it does not fulfill the contract of Scala collection sets.
+ * This Set implementation cannot hold null. Any attempt to put a null in it will result in a NullPointerException.
+ *
+ * This set implementation is not, in general, thread safe without external concurrency control. However, it behaves
+ * properly when GC concurrently collects elements in this set.
*/
-class WeakHashSet[T <: AnyRef] extends AbstractFunction1[T, Boolean] with Clearable {
- private val underlying = mutable.HashSet[WeakReferenceWithEquals[T]]()
+final class WeakHashSet[A <: AnyRef](val initialCapacity: Int, val loadFactor: Double) extends Set[A] with Function1[A, Boolean] with mSet[A] {
+
+ import WeakHashSet._
+
+ def this() = this(initialCapacity = WeakHashSet.defaultInitialCapacity, loadFactor = WeakHashSet.defaultLoadFactor)
+
+ type This = WeakHashSet[A]
+
+ /**
+ * queue of Entries that hold elements scheduled for GC
+ * the removeStaleEntries() method works through the queue to remove
+ * stale entries from the table
+ */
+ private[this] val queue = new ReferenceQueue[A]
+
+ /**
+ * the number of elements in this set
+ */
+ private[this] var count = 0
+
+ /**
+ * from a specified initial capacity compute the capacity we'll use as being the next
+ * power of two equal to or greater than the specified initial capacity
+ */
+ private def computeCapacity = {
+ if (initialCapacity < 0) throw new IllegalArgumentException("initial capacity cannot be less than 0");
+ var candidate = 1
+ while (candidate < initialCapacity) {
+ candidate *= 2
+ }
+ candidate
+ }
+
+ /**
+ * the underlying table of entries which is an array of Entry linked lists
+ */
+ private[this] var table = new Array[Entry[A]](computeCapacity)
+
+ /**
+ * the limit at which we'll increase the size of the hash table
+ */
+ var threshhold = computeThreshHold
+
+ private[this] def computeThreshHold: Int = (table.size * loadFactor).ceil.toInt
- /** Add the given element to this set. */
- def +=(elem: T): this.type = {
- underlying += new WeakReferenceWithEquals(elem)
- this
+ /**
+ * find the bucket associated with an elements's hash code
+ */
+ private[this] def bucketFor(hash: Int): Int = {
+ // spread the bits around to try to avoid accidental collisions using the
+ // same algorithm as java.util.HashMap
+ var h = hash
+ h ^= h >>> 20 ^ h >>> 12
+ h ^= h >>> 7 ^ h >>> 4
+
+ // this is finding h % table.length, but takes advantage of the
+ // fact that table length is a power of 2,
+ // if you don't do bit flipping in your head, if table.length
+ // is binary 100000.. (with n 0s) then table.length - 1
+ // is 1111.. with n 1's.
+ // In other words this masks on the last n bits in the hash
+ h & (table.length - 1)
}
- /** Remove the given element from this set. */
- def -=(elem: T): this.type = {
- underlying -= new WeakReferenceWithEquals(elem)
- this
+ /**
+ * remove a single entry from a linked list in a given bucket
+ */
+ private[this] def remove(bucket: Int, prevEntry: Entry[A], entry: Entry[A]) {
+ prevEntry match {
+ case null => table(bucket) = entry.tail
+ case _ => prevEntry.tail = entry.tail
+ }
+ count -= 1
}
- /** Does the given element belong to this set? */
- def contains(elem: T): Boolean =
- underlying.contains(new WeakReferenceWithEquals(elem))
+ /**
+ * remove entries associated with elements that have been gc'ed
+ */
+ private[this] def removeStaleEntries() {
+ def poll(): Entry[A] = queue.poll().asInstanceOf[Entry[A]]
- /** Does the given element belong to this set? */
- def apply(elem: T): Boolean = contains(elem)
+ @tailrec
+ def queueLoop {
+ val stale = poll()
+ if (stale != null) {
+ val bucket = bucketFor(stale.hash)
- /** Return the number of elements in this set, including reclaimed elements. */
- def size = underlying.size
+ @tailrec
+ def linkedListLoop(prevEntry: Entry[A], entry: Entry[A]): Unit = if (stale eq entry) remove(bucket, prevEntry, entry)
+ else if (entry != null) linkedListLoop(entry, entry.tail)
- /** Remove all elements in this set. */
- def clear() = underlying.clear()
-}
+ linkedListLoop(null, table(bucket))
-/** A WeakReference implementation that implements equals and hashCode by
- * delegating to the referent.
- */
-class WeakReferenceWithEquals[T <: AnyRef](ref: T) {
- def get(): T = underlying.get()
+ queueLoop
+ }
+ }
+
+ queueLoop
+ }
+
+ /**
+ * Double the size of the internal table
+ */
+ private[this] def resize() {
+ val oldTable = table
+ table = new Array[Entry[A]](oldTable.size * 2)
+ threshhold = computeThreshHold
+
+ @tailrec
+ def tableLoop(oldBucket: Int): Unit = if (oldBucket < oldTable.size) {
+ @tailrec
+ def linkedListLoop(entry: Entry[A]): Unit = entry match {
+ case null => ()
+ case _ => {
+ val bucket = bucketFor(entry.hash)
+ val oldNext = entry.tail
+ entry.tail = table(bucket)
+ table(bucket) = entry
+ linkedListLoop(oldNext)
+ }
+ }
+ linkedListLoop(oldTable(oldBucket))
+
+ tableLoop(oldBucket + 1)
+ }
+ tableLoop(0)
+ }
+
+ // from scala.reflect.internal.Set, find an element or null if it isn't contained
+ override def findEntry(elem: A): A = elem match {
+ case null => throw new NullPointerException("WeakHashSet cannot hold nulls")
+ case _ => {
+ removeStaleEntries()
+ val hash = elem.hashCode
+ val bucket = bucketFor(hash)
+
+ @tailrec
+ def linkedListLoop(entry: Entry[A]): A = entry match {
+ case null => null.asInstanceOf[A]
+ case _ => {
+ val entryElem = entry.get
+ if (elem == entryElem) entryElem
+ else linkedListLoop(entry.tail)
+ }
+ }
+
+ linkedListLoop(table(bucket))
+ }
+ }
+ // add an element to this set unless it's already in there and return the element
+ def findEntryOrUpdate(elem: A): A = elem match {
+ case null => throw new NullPointerException("WeakHashSet cannot hold nulls")
+ case _ => {
+ removeStaleEntries()
+ val hash = elem.hashCode
+ val bucket = bucketFor(hash)
+ val oldHead = table(bucket)
+
+ def add() = {
+ table(bucket) = new Entry(elem, hash, oldHead, queue)
+ count += 1
+ if (count > threshhold) resize()
+ elem
+ }
+
+ @tailrec
+ def linkedListLoop(entry: Entry[A]): A = entry match {
+ case null => add()
+ case _ => {
+ val entryElem = entry.get
+ if (elem == entryElem) entryElem
+ else linkedListLoop(entry.tail)
+ }
+ }
+
+ linkedListLoop(oldHead)
+ }
+ }
+
+ // add an element to this set unless it's already in there and return this set
+ override def +(elem: A): this.type = elem match {
+ case null => throw new NullPointerException("WeakHashSet cannot hold nulls")
+ case _ => {
+ removeStaleEntries()
+ val hash = elem.hashCode
+ val bucket = bucketFor(hash)
+ val oldHead = table(bucket)
+
+ def add() {
+ table(bucket) = new Entry(elem, hash, oldHead, queue)
+ count += 1
+ if (count > threshhold) resize()
+ }
+
+ @tailrec
+ def linkedListLoop(entry: Entry[A]): Unit = entry match {
+ case null => add()
+ case _ if (elem == entry.get) => ()
+ case _ => linkedListLoop(entry.tail)
+ }
+
+ linkedListLoop(oldHead)
+ this
+ }
+ }
+
+ def +=(elem: A) = this + elem
+
+ // from scala.reflect.internal.Set
+ override def addEntry(x: A) { this += x }
+
+ // remove an element from this set and return this set
+ override def -(elem: A): this.type = elem match {
+ case null => this
+ case _ => {
+ removeStaleEntries()
+ val bucket = bucketFor(elem.hashCode)
+
+
+
+ @tailrec
+ def linkedListLoop(prevEntry: Entry[A], entry: Entry[A]): Unit = entry match {
+ case null => ()
+ case _ if (elem == entry.get) => remove(bucket, prevEntry, entry)
+ case _ => linkedListLoop(entry, entry.tail)
+ }
- override val hashCode = ref.hashCode
+ linkedListLoop(null, table(bucket))
+ this
+ }
+ }
+
+ def -=(elem: A) = this - elem
+
+ // empty this set
+ override def clear(): Unit = {
+ table = new Array[Entry[A]](table.size)
+ threshhold = computeThreshHold
+ count = 0
+
+ // drain the queue - doesn't do anything because we're throwing away all the values anyway
+ @tailrec def queueLoop(): Unit = if (queue.poll() != null) queueLoop()
+ queueLoop()
+ }
+
+ // an empty WeakHashSet with the same configuration as this one
+ override def empty: This = new WeakHashSet[A](initialCapacity, loadFactor)
+
+ // the number of elements in this set
+ override def size: Int = {
+ removeStaleEntries()
+ count
+ }
+
+ override def apply(x: A): Boolean = this contains x
+
+ override def foreach[U](f: A => U): Unit = iterator foreach f
+
+ override def toList(): List[A] = iterator.toList
+
+ // Iterator over all the elements in this set in no particular order
+ override def iterator: Iterator[A] = {
+ removeStaleEntries()
+
+ new Iterator[A] {
+
+ /**
+ * the bucket currently being examined. Initially it's set past the last bucket and will be decremented
+ */
+ private[this] var currentBucket: Int = table.size
+
+ /**
+ * the entry that was last examined
+ */
+ private[this] var entry: Entry[A] = null
+
+ /**
+ * the element that will be the result of the next call to next()
+ */
+ private[this] var lookaheadelement: A = null.asInstanceOf[A]
+
+ @tailrec
+ def hasNext: Boolean = {
+ while (entry == null && currentBucket > 0) {
+ currentBucket -= 1
+ entry = table(currentBucket)
+ }
+
+ if (entry == null) false
+ else {
+ lookaheadelement = entry.get
+ if (lookaheadelement == null) {
+ // element null means the weakref has been cleared since we last did a removeStaleEntries(), move to the next entry
+ entry = entry.tail
+ hasNext
+ } else {
+ true
+ }
+ }
+ }
- override def equals(other: Any): Boolean = other match {
- case wf: WeakReferenceWithEquals[_] =>
- underlying.get() == wf.get()
- case _ =>
- false
+ def next(): A = if (lookaheadelement == null)
+ throw new IndexOutOfBoundsException("next on an empty iterator")
+ else {
+ val result = lookaheadelement
+ lookaheadelement = null.asInstanceOf[A]
+ entry = entry.tail
+ result
+ }
+ }
}
- private val underlying = new java.lang.ref.WeakReference(ref)
+ /**
+ * Diagnostic information about the internals of this set. Not normally
+ * needed by ordinary code, but may be useful for diagnosing performance problems
+ */
+ private[util] class Diagnostics {
+ /**
+ * Verify that the internal structure of this hash set is fully consistent.
+ * Throws an assertion error on any problem. In order for it to be reliable
+ * the entries must be stable. If any are garbage collected during validation
+ * then an assertion may inappropriately fire.
+ */
+ def fullyValidate {
+ var computedCount = 0
+ var bucket = 0
+ while (bucket < table.size) {
+ var entry = table(bucket)
+ while (entry != null) {
+ assert(entry.get != null, s"$entry had a null value, indicating that gc activity was happening during diagnostic validation or that a null value was inserted")
+ computedCount += 1
+ val cachedHash = entry.hash
+ val realHash = entry.get.hashCode
+ assert(cachedHash == realHash, s"for $entry cached hash was $cachedHash but should have been $realHash")
+ val computedBucket = bucketFor(realHash)
+ assert(computedBucket == bucket, s"for $entry the computed bucket was $computedBucket but should have been $bucket")
+
+ entry = entry.tail
+ }
+
+ bucket += 1
+ }
+
+ assert(computedCount == count, s"The computed count was $computedCount but should have been $count")
+ }
+
+ /**
+ * Produces a diagnostic dump of the table that underlies this hash set.
+ */
+ def dump = table.deep
+
+ /**
+ * Number of buckets that hold collisions. Useful for diagnosing performance issues.
+ */
+ def collisionBucketsCount: Int =
+ (table filter (entry => entry != null && entry.tail != null)).size
+
+ /**
+ * Number of buckets that are occupied in this hash table.
+ */
+ def fullBucketsCount: Int =
+ (table filter (entry => entry != null)).size
+
+ /**
+ * Number of buckets in the table
+ */
+ def bucketsCount: Int = table.size
+
+ /**
+ * Number of buckets that don't hold anything
+ */
+ def emptyBucketsCount = bucketsCount - fullBucketsCount
+
+ /**
+ * Number of elements that are in collision. Useful for diagnosing performance issues.
+ */
+ def collisionsCount = size - (fullBucketsCount - collisionBucketsCount)
+
+ /**
+ * A map from a count of elements to the number of buckets with that count
+ */
+ def elementCountDistribution = table map linkedListSize groupBy identity map {case (size, list) => (size, list.size)}
+
+ private def linkedListSize(entry: Entry[A]) = {
+ var e = entry
+ var count = 0
+ while (e != null) {
+ count += 1
+ e = e.tail
+ }
+ count
+ }
+ }
+
+ private[util] def diagnostics = new Diagnostics
}
+
+/**
+ * Companion object for WeakHashSet
+ */
+object WeakHashSet {
+ /**
+ * A single entry in a WeakHashSet. It's a WeakReference plus a cached hash code and
+ * a link to the next Entry in the same bucket
+ */
+ private class Entry[A](element: A, val hash:Int, var tail: Entry[A], queue: ReferenceQueue[A]) extends WeakReference[A](element, queue)
+
+ val defaultInitialCapacity = 16
+ val defaultLoadFactor = .75
+
+ def apply[A <: AnyRef](initialCapacity: Int = WeakHashSet.defaultInitialCapacity, loadFactor: Double = WeakHashSet.defaultLoadFactor) = new WeakHashSet[A](initialCapacity, loadFactor)
}
\ No newline at end of file
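Finally, a small usage sketch of the new WeakHashSet as a hash-consing cache (assuming scala-reflect on the classpath): findEntryOrUpdate returns the canonical, previously stored instance whenever an equal element already exists, which matches the primary use case named in its doc comment.

    import scala.reflect.internal.util.WeakHashSet

    object HashConsSketch extends App {
      final case class Point(x: Int, y: Int)

      val interned = new WeakHashSet[Point]()
      val a = interned.findEntryOrUpdate(Point(1, 2))   // first occurrence: stored and returned
      val b = interned.findEntryOrUpdate(Point(1, 2))   // equal element: the stored instance comes back
      println(a eq b)                                   // expected: true
      println(interned.size)                            // expected: 1 (entries are only weakly held)
    }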