 src/actors/scala/actors/scheduler/ThreadPoolConfig.scala               |   5
 src/compiler/scala/reflect/reify/package.scala                         |   5
 src/compiler/scala/tools/nsc/ast/parser/Parsers.scala                  |   9
 src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala              |   6
 src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala    |   6
 src/compiler/scala/tools/nsc/transform/Erasure.scala                   |   5
 src/compiler/scala/tools/nsc/typechecker/Implicits.scala               |  17
 src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala           |  21
 src/compiler/scala/tools/nsc/typechecker/Typers.scala                  |   6
 src/library/scala/concurrent/impl/Promise.scala                        |  85
 src/library/scala/util/Properties.scala                                |  12
 src/reflect/scala/reflect/internal/Definitions.scala                   |  12
 src/reflect/scala/reflect/internal/SymbolTable.scala                   |  13
 src/reflect/scala/reflect/internal/Types.scala                         |  38
 src/reflect/scala/reflect/internal/tpe/TypeComparers.scala             |  93
 src/reflect/scala/reflect/internal/tpe/TypeMaps.scala                  |   3
 src/reflect/scala/reflect/internal/util/TriState.scala                 |  26
 src/reflect/scala/reflect/internal/util/WeakHashSet.scala              | 447
 test/files/neg/t6138.check                                             |   7
 test/files/neg/t6138.scala                                             |   5
 test/files/neg/t7473.check                                             |   7
 test/files/neg/t7473.scala                                             |   7
 test/files/neg/t7507.check                                             |   4
 test/files/neg/t7507.scala                                             |   7
 test/files/neg/t7509.check                                             |  12
 test/files/neg/t7509.scala                                             |   4
 test/files/pos/lub-dealias-widen.scala                                 |  34
 test/files/pos/t7516/A_1.scala                                         |   9
 test/files/pos/t7516/B_2.scala                                         |   4
 test/files/pos/t7517.scala                                             |  22
 test/files/pos/t7520.scala                                             |  10
 test/files/pos/t7532/A_1.java                                          |   6
 test/files/pos/t7532/B_2.scala                                         |   5
 test/files/pos/t7532b/A_1.scala                                        |   7
 test/files/pos/t7532b/B_2.scala                                        |   8
 test/files/run/WeakHashSetTest.scala                                   | 174
 test/files/run/t7375a.check                                            |   4
 test/files/run/t7375a.scala                                            |  16
 test/files/run/t7375b.check                                            |   4
 test/files/run/t7375b/Macros_1.scala                                   |  18
 test/files/run/t7375b/Test_2.scala                                     |   3
 test/files/run/t7507.scala                                             |  31
 test/pending/neg/t7441.check (renamed from test/files/neg/t7441.check) |   0
 test/pending/neg/t7441.scala (renamed from test/files/neg/t7441.scala) |   0
 test/pending/pos/t7234.scala (renamed from test/files/pos/t7234.scala) |   0
 test/pending/pos/t7234b.scala (renamed from test/files/pos/t7234b.scala) |   0
 test/pending/pos/t7486.scala (renamed from test/files/pos/t7486.scala) |   0
47 files changed, 1031 insertions, 186 deletions
diff --git a/src/actors/scala/actors/scheduler/ThreadPoolConfig.scala b/src/actors/scala/actors/scheduler/ThreadPoolConfig.scala
index ee8776f397..bfd4e7ac40 100644
--- a/src/actors/scala/actors/scheduler/ThreadPoolConfig.scala
+++ b/src/actors/scala/actors/scheduler/ThreadPoolConfig.scala
@@ -42,10 +42,7 @@ private[actors] object ThreadPoolConfig {
(propIsSetTo("actors.enableForkJoin", "true") || {
Debug.info(this+": java.version = "+javaVersion)
Debug.info(this+": java.vm.vendor = "+javaVmVendor)
-
- // on IBM J9 1.6 do not use ForkJoinPool
- // XXX this all needs to go into Properties.
- isJavaAtLeast("1.6") && ((javaVmVendor contains "Oracle") || (javaVmVendor contains "Sun") || (javaVmVendor contains "Apple"))
+ isJavaAtLeast("1.6")
})
catch {
case _: SecurityException => false
diff --git a/src/compiler/scala/reflect/reify/package.scala b/src/compiler/scala/reflect/reify/package.scala
index ae0288186b..d3cae3d123 100644
--- a/src/compiler/scala/reflect/reify/package.scala
+++ b/src/compiler/scala/reflect/reify/package.scala
@@ -46,11 +46,14 @@ package object reify {
def reifyType(global: Global)(typer: global.analyzer.Typer,universe: global.Tree, mirror: global.Tree, tpe: global.Type, concrete: Boolean = false): global.Tree =
mkReifier(global)(typer, universe, mirror, tpe, concrete = concrete).reification.asInstanceOf[global.Tree]
- def reifyRuntimeClass(global: Global)(typer0: global.analyzer.Typer, tpe: global.Type, concrete: Boolean = true): global.Tree = {
+ def reifyRuntimeClass(global: Global)(typer0: global.analyzer.Typer, tpe0: global.Type, concrete: Boolean = true): global.Tree = {
import global._
import definitions._
import analyzer.enclosingMacroPosition
+ // SI-7375
+ val tpe = tpe0.dealiasWiden
+
if (tpe.isSpliceable) {
val classTagInScope = typer0.resolveClassTag(enclosingMacroPosition, tpe, allowMaterialization = false)
if (!classTagInScope.isEmpty) return Select(classTagInScope, nme.runtimeClass)
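Aside (not part of the patch): the SI-7375 fix works because a TypeTag preserves type aliases, so the reifier must look through them before deciding which runtime class to materialize. A minimal sketch of that behaviour, using the public `dealias` of the runtime reflection API as a stand-in for the internal `dealiasWiden`; `DealiasDemo` and `F1` are illustrative names only:

    import scala.reflect.runtime.universe._

    class C1(val n: Int) extends AnyVal

    object DealiasDemo extends App {
      type F1 = C1                // an alias, as in the t7375 tests added below
      val tpe = typeOf[F1]
      println(tpe)                // prints the alias, roughly "DealiasDemo.F1"
      println(tpe.dealias)        // prints the underlying class, "C1"
    }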
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
index 89f933f577..fc532f5d44 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
@@ -1423,8 +1423,9 @@ self =>
* }}}
*/
def postfixExpr(): Tree = {
- val base = opstack
- var top = prefixExpr()
+ val start = in.offset
+ val base = opstack
+ var top = prefixExpr()
while (isIdent) {
top = reduceStack(isExpr = true, base, top, precedence(in.name), leftAssoc = treeInfo.isLeftAssoc(in.name))
@@ -1442,9 +1443,7 @@ self =>
val topinfo = opstack.head
opstack = opstack.tail
val od = stripParens(reduceStack(isExpr = true, base, topinfo.operand, 0, leftAssoc = true))
- return atPos(od.pos.startOrPoint, topinfo.offset) {
- new PostfixSelect(od, topinfo.operator.encode)
- }
+ return makePostfixSelect(start, topinfo.offset, od, topinfo.operator)
}
}
reduceStack(isExpr = true, base, top, 0, leftAssoc = true)
diff --git a/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala b/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala
index f737fcc635..ec4e932aae 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala
@@ -242,6 +242,12 @@ abstract class TreeBuilder {
Assign(lhs, rhs)
}
+ /** Tree for `od op`, start is start0 if od.pos is borked. */
+ def makePostfixSelect(start0: Int, end: Int, od: Tree, op: Name): Tree = {
+ val start = if (od.pos.isDefined) od.pos.startOrPoint else start0
+ atPos(r2p(start, end, end)) { new PostfixSelect(od, op.encode) }
+ }
+
/** A type tree corresponding to (possibly unary) intersection type */
def makeIntersectionTypeTree(tps: List[Tree]): Tree =
if (tps.tail.isEmpty) tps.head
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
index 6948d02fd5..fb927d15d3 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
@@ -491,8 +491,10 @@ abstract class ClassfileParser {
}
}
- val c = if (currentIsTopLevel) pool.getClassSymbol(nameIdx) else clazz
- if (currentIsTopLevel) {
+ val isTopLevel = !(currentClass containsChar '$') // Java class name; *don't* try to use Scala name decoding (SI-7532)
+
+ val c = if (isTopLevel) pool.getClassSymbol(nameIdx) else clazz
+ if (isTopLevel) {
if (c != clazz) {
if ((clazz eq NoSymbol) && (c ne NoSymbol)) clazz = c
else mismatchError(c)
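Context for the check above (not part of the patch): the raw JVM binary name uses '$' to separate a nested class from its owner, so applying Scala name decoding first would turn "R$attr" into "R@tr" (because `@` encodes as `$at`) and misclassify the inner class. A rough sketch of the distinction, with `isTopLevel` as a hypothetical stand-in for `!(currentClass containsChar '$')`:

    object TopLevelCheck {
      def isTopLevel(binaryName: String): Boolean = !binaryName.contains('$')

      def main(args: Array[String]): Unit = {
        println(isTopLevel("R"))       // true: a top-level class
        println(isTopLevel("R$attr"))  // false: the nested class `attr` of `R` (see test t7532)
      }
    }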
diff --git a/src/compiler/scala/tools/nsc/transform/Erasure.scala b/src/compiler/scala/tools/nsc/transform/Erasure.scala
index c9a7cc2e3f..0f65b11e9b 100644
--- a/src/compiler/scala/tools/nsc/transform/Erasure.scala
+++ b/src/compiler/scala/tools/nsc/transform/Erasure.scala
@@ -342,11 +342,6 @@ abstract class Erasure extends AddInterfaces
}
}
- // Each primitive value class has its own getClass for ultra-precise class object typing.
- private lazy val primitiveGetClassMethods = Set[Symbol](Any_getClass, AnyVal_getClass) ++ (
- ScalaValueClasses map (_.tpe member nme.getClass_)
- )
-
// ## requires a little translation
private lazy val poundPoundMethods = Set[Symbol](Any_##, Object_##)
// Methods on Any/Object which we rewrite here while we still know what
diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
index 5622f67459..b53efafdd4 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
@@ -931,22 +931,11 @@ trait Implicits {
if (DivergentImplicitRecovery.sym != null) {
DivergingImplicitExpansionError(tree, pt, DivergentImplicitRecovery.sym)(context)
}
- else if (invalidImplicits.nonEmpty) {
- val sym = invalidImplicits.head
- // We don't even dare look if errors are being buffered
- // !sym.hasFlag(LOCKED) is a hail mary between SI-2206 and SI-7486
- def isSensibleAddendum = !sym.hasFlag(LOCKED) && (pt match {
- case Function1(_, out) => out <:< sym.tpe.finalResultType
- case _ => pt <:< sym.tpe.finalResultType
- })
- // Don't pitch in with this theory unless it looks plausible that the
- // implicit would have helped
+
+ if (invalidImplicits.nonEmpty)
setAddendum(pos, () =>
- if (isSensibleAddendum)
- s"\n Note: implicit $sym is not applicable here because it comes after the application point and it lacks an explicit result type"
- else ""
+ s"\n Note: implicit ${invalidImplicits.head} is not applicable here because it comes after the application point and it lacks an explicit result type"
)
- }
}
best
diff --git a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala
index 03ad127498..8e9933f734 100644
--- a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala
@@ -282,11 +282,13 @@ trait NamesDefaults { self: Analyzer =>
case WildcardStarArg(expr) => expr.tpe
case _ => seqType(arg.tpe)
}
- else
- // Note stabilizing can lead to a non-conformant argument when existentials are involved, e.g. neg/t3507-old.scala, hence the filter.
- gen.stableTypeFor(arg).filter(_ <:< paramTpe).getOrElse(arg.tpe)
- // We have to deconst or types inferred from literal arguments will be Constant(_), e.g. pos/z1730.scala.
- ).deconst
+ else {
+ // TODO In 83c9c764b, we tried to use a stable type here to fix SI-7234. But the resulting TypeTree over a
+ // singleton type without an original TypeTree fails to retypecheck after a resetLocalAttrs (SI-7516),
+ // which is important for (at least) macros.
+ arg.tpe
+ }
+ ).widen // have to widen or types inferred from literal defaults will be singletons
val s = context.owner.newValue(unit.freshTermName("x$"), arg.pos, newFlags = ARTIFACT) setInfo (
if (byName) functionType(Nil, argTpe) else argTpe
)
@@ -328,9 +330,12 @@ trait NamesDefaults { self: Analyzer =>
// type the application without names; put the arguments in definition-site order
val typedApp = doTypedApply(tree, funOnly, reorderArgs(namelessArgs, argPos), mode, pt)
typedApp match {
- // Extract the typed arguments, restore the call-site evaluation order (using
- // ValDef's in the block), change the arguments to these local values.
- case Apply(expr, typedArgs) if !(typedApp :: typedArgs).exists(_.isErrorTyped) => // bail out with erroneous args, see SI-7238
+ case Apply(expr, typedArgs) if (typedApp :: typedArgs).exists(_.isErrorTyped) =>
+ setError(tree) // bail out with an erroneous Apply *or* erroneous arguments, see SI-7238, SI-7509
+ case Apply(expr, typedArgs) =>
+ // Extract the typed arguments, restore the call-site evaluation order (using
+ // ValDef's in the block), change the arguments to these local values.
+
// typedArgs: definition-site order
val formals = formalTypes(expr.tpe.paramTypes, typedArgs.length, removeByName = false, removeRepeated = false)
// valDefs: call-site order
diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
index c9aa27cf49..ec6b78f2de 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
@@ -602,10 +602,8 @@ trait Typers extends Adaptations with Tags {
// type of the qualifier is inaccessible, we can cause private types to
// escape scope here, e.g. pos/t1107. I'm not sure how to properly handle
// this so for now it requires the type symbol be public.
- def isGetClassCall = (
- (sym == Any_getClass || sym == Object_getClass)
- && pre.typeSymbol.isPublic
- )
+ def isGetClassCall = isGetClass(sym) && pre.typeSymbol.isPublic
+
def narrowIf(tree: Tree, condition: Boolean) =
if (condition) tree setType singleType(pre, sym) else tree
diff --git a/src/library/scala/concurrent/impl/Promise.scala b/src/library/scala/concurrent/impl/Promise.scala
index 7af70400ef..ffaea8de96 100644
--- a/src/library/scala/concurrent/impl/Promise.scala
+++ b/src/library/scala/concurrent/impl/Promise.scala
@@ -8,11 +8,14 @@
package scala.concurrent.impl
-import scala.concurrent.{ ExecutionContext, CanAwait, OnCompleteRunnable, TimeoutException, ExecutionException }
+import scala.concurrent.{ ExecutionContext, CanAwait, OnCompleteRunnable, TimeoutException, ExecutionException, blocking }
+import scala.concurrent.Future.InternalCallbackExecutor
import scala.concurrent.duration.{ Duration, Deadline, FiniteDuration, NANOSECONDS }
import scala.annotation.tailrec
import scala.util.control.NonFatal
import scala.util.{ Try, Success, Failure }
+import java.io.ObjectInputStream
+import java.util.concurrent.locks.AbstractQueuedSynchronizer
private[concurrent] trait Promise[T] extends scala.concurrent.Promise[T] with scala.concurrent.Future[T] {
def future: this.type = this
@@ -53,69 +56,68 @@ private[concurrent] object Promise {
case t => Failure(t)
}
+ /*
+ * Inspired by: http://gee.cs.oswego.edu/cgi-bin/viewcvs.cgi/jsr166/src/main/java/util/concurrent/locks/AbstractQueuedSynchronizer.java
+ * Written by Doug Lea with assistance from members of JCP JSR-166
+ * Expert Group and released to the public domain, as explained at
+ * http://creativecommons.org/publicdomain/zero/1.0/
+ */
+ private final class CompletionLatch[T] extends AbstractQueuedSynchronizer with (Try[T] => Unit) {
+ override protected def tryAcquireShared(ignored: Int): Int = if (getState != 0) 1 else -1
+ override protected def tryReleaseShared(ignore: Int): Boolean = {
+ setState(1)
+ true
+ }
+ override def apply(ignored: Try[T]): Unit = releaseShared(1)
+ }
+
+
/** Default promise implementation.
*/
class DefaultPromise[T] extends AbstractPromise with Promise[T] { self =>
updateState(null, Nil) // Start at "No callbacks"
- protected final def tryAwait(atMost: Duration): Boolean = {
- @tailrec
- def awaitUnsafe(deadline: Deadline, nextWait: FiniteDuration): Boolean = {
- if (!isCompleted && nextWait > Duration.Zero) {
- val ms = nextWait.toMillis
- val ns = (nextWait.toNanos % 1000000l).toInt // as per object.wait spec
-
- synchronized { if (!isCompleted) wait(ms, ns) }
-
- awaitUnsafe(deadline, deadline.timeLeft)
- } else
- isCompleted
- }
- @tailrec
- def awaitUnbounded(): Boolean = {
- if (isCompleted) true
- else {
- synchronized { if (!isCompleted) wait() }
- awaitUnbounded()
- }
- }
-
+ protected final def tryAwait(atMost: Duration): Boolean = if (!isCompleted) {
import Duration.Undefined
+ import scala.concurrent.Future.InternalCallbackExecutor
atMost match {
- case u if u eq Undefined => throw new IllegalArgumentException("cannot wait for Undefined period")
- case Duration.Inf => awaitUnbounded()
- case Duration.MinusInf => isCompleted
- case f: FiniteDuration => if (f > Duration.Zero) awaitUnsafe(f.fromNow, f) else isCompleted
+ case e if e eq Undefined => throw new IllegalArgumentException("cannot wait for Undefined period")
+ case Duration.Inf =>
+ val l = new CompletionLatch[T]()
+ onComplete(l)(InternalCallbackExecutor)
+ l.acquireSharedInterruptibly(1)
+ case Duration.MinusInf => // Drop out
+ case f: FiniteDuration =>
+ if (f > Duration.Zero) {
+ val l = new CompletionLatch[T]()
+ onComplete(l)(InternalCallbackExecutor)
+ l.tryAcquireSharedNanos(1, f.toNanos)
+ }
}
- }
+
+ isCompleted
+ } else true // Already completed
@throws(classOf[TimeoutException])
@throws(classOf[InterruptedException])
def ready(atMost: Duration)(implicit permit: CanAwait): this.type =
- if (isCompleted || tryAwait(atMost)) this
+ if (tryAwait(atMost)) this
else throw new TimeoutException("Futures timed out after [" + atMost + "]")
@throws(classOf[Exception])
def result(atMost: Duration)(implicit permit: CanAwait): T =
- ready(atMost).value.get match {
- case Failure(e) => throw e
- case Success(r) => r
- }
+ ready(atMost).value.get.get // ready throws TimeoutException if timeout so value.get is safe here
def value: Option[Try[T]] = getState match {
case c: Try[_] => Some(c.asInstanceOf[Try[T]])
case _ => None
}
- override def isCompleted: Boolean = getState match { // Cheaper than boxing result into Option due to "def value"
- case _: Try[_] => true
- case _ => false
- }
+ override def isCompleted: Boolean = getState.isInstanceOf[Try[_]]
def tryComplete(value: Try[T]): Boolean = {
val resolved = resolveTry(value)
- (try {
- @tailrec
+ @tailrec
def tryComplete(v: Try[T]): List[CallbackRunnable[T]] = {
getState match {
case raw: List[_] =>
@@ -124,10 +126,7 @@ private[concurrent] object Promise {
case _ => null
}
}
- tryComplete(resolved)
- } finally {
- synchronized { notifyAll() } //Notify any evil blockers
- }) match {
+ tryComplete(resolved) match {
case null => false
case rs if rs.isEmpty => true
case rs => rs.foreach(r => r.executeWithValue(resolved)); true
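For readers unfamiliar with AbstractQueuedSynchronizer, here is a self-contained sketch (not from the patch) of the one-shot latch idea the new CompletionLatch builds on: state 0 means "not completed", releaseShared flips it to 1, and acquireSharedInterruptibly blocks until that happens. `OneShotLatch` and `LatchDemo` are illustrative names only:

    import java.util.concurrent.locks.AbstractQueuedSynchronizer

    final class OneShotLatch extends AbstractQueuedSynchronizer {
      override protected def tryAcquireShared(ignored: Int): Int =
        if (getState != 0) 1 else -1          // acquire succeeds only once released
      override protected def tryReleaseShared(ignored: Int): Boolean = {
        setState(1)                           // one-way transition: 0 -> 1
        true
      }
      def open(): Unit  = releaseShared(1)
      def await(): Unit = acquireSharedInterruptibly(1)
    }

    object LatchDemo extends App {
      val latch = new OneShotLatch
      new Thread(new Runnable {
        def run(): Unit = { Thread.sleep(100); latch.open() }
      }).start()
      latch.await()                           // blocks until open() is called
      println("completed")
    }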
diff --git a/src/library/scala/util/Properties.scala b/src/library/scala/util/Properties.scala
index 73469a955d..2240dde360 100644
--- a/src/library/scala/util/Properties.scala
+++ b/src/library/scala/util/Properties.scala
@@ -122,8 +122,7 @@ private[scala] trait PropertiesTrait {
*/
def lineSeparator = propOrElse("line.separator", "\n")
- /** Various well-known properties.
- */
+ /* Various well-known properties. */
def javaClassPath = propOrEmpty("java.class.path")
def javaHome = propOrEmpty("java.home")
def javaVendor = propOrEmpty("java.vendor")
@@ -139,10 +138,13 @@ private[scala] trait PropertiesTrait {
def userHome = propOrEmpty("user.home")
def userName = propOrEmpty("user.name")
- /** Some derived values.
- */
+ /* Some derived values. */
+ /** Returns `true` iff the underlying operating system is a version of Microsoft Windows. */
def isWin = osName startsWith "Windows"
- def isMac = javaVendor startsWith "Apple"
+ // See http://mail.openjdk.java.net/pipermail/macosx-port-dev/2012-November/005148.html for
+ // the reason why we don't follow developer.apple.com/library/mac/#technotes/tn2002/tn2110.
+ /** Returns `true` iff the underlying operating system is a version of Apple Mac OS X. */
+ def isMac = osName startsWith "Mac OS X"
// This is looking for javac, tools.jar, etc.
// Tries JDK_HOME first, then the more common but likely jre JAVA_HOME,
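A small check of the signals involved (not part of the patch): on OpenJDK-based Mac builds java.vendor no longer identifies Apple, which is why the detection above switches to os.name. `OsCheck` is an illustrative name only:

    object OsCheck extends App {
      val osName = System.getProperty("os.name", "")
      println(osName startsWith "Windows")    // what isWin tests
      println(osName startsWith "Mac OS X")   // what the new isMac tests
    }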
diff --git a/src/reflect/scala/reflect/internal/Definitions.scala b/src/reflect/scala/reflect/internal/Definitions.scala
index 5892053b5e..4f2b7e2642 100644
--- a/src/reflect/scala/reflect/internal/Definitions.scala
+++ b/src/reflect/scala/reflect/internal/Definitions.scala
@@ -111,8 +111,10 @@ trait Definitions extends api.StandardDefinitions {
/** Is symbol a numeric value class? */
def isNumericValueClass(sym: Symbol) = ScalaNumericValueClasses contains sym
- def isGetClass(sym: Symbol) =
- (sym.name == nme.getClass_) && flattensToEmpty(sym.paramss)
+ def isGetClass(sym: Symbol) = (
+ sym.name == nme.getClass_ // this condition is for performance only; this is called from `Typer#stabilize`.
+ && getClassMethods(sym)
+ )
lazy val UnitClass = valueClassSymbol(tpnme.Unit)
lazy val ByteClass = valueClassSymbol(tpnme.Byte)
@@ -747,6 +749,12 @@ trait Definitions extends api.StandardDefinitions {
lazy val Any_isInstanceOf = newT1NullaryMethod(AnyClass, nme.isInstanceOf_, FINAL)(_ => BooleanTpe)
lazy val Any_asInstanceOf = newT1NullaryMethod(AnyClass, nme.asInstanceOf_, FINAL)(_.typeConstructor)
+ lazy val primitiveGetClassMethods = Set[Symbol](Any_getClass, AnyVal_getClass) ++ (
+ ScalaValueClasses map (_.tpe member nme.getClass_)
+ )
+
+ lazy val getClassMethods: Set[Symbol] = primitiveGetClassMethods + Object_getClass
+
// A type function from T => Class[U], used to determine the return
// type of getClass calls. The returned type is:
//
diff --git a/src/reflect/scala/reflect/internal/SymbolTable.scala b/src/reflect/scala/reflect/internal/SymbolTable.scala
index 7f9811220f..2ae9f81a09 100644
--- a/src/reflect/scala/reflect/internal/SymbolTable.scala
+++ b/src/reflect/scala/reflect/internal/SymbolTable.scala
@@ -307,27 +307,20 @@ abstract class SymbolTable extends macros.Universe
}
object perRunCaches {
- import java.lang.ref.WeakReference
import scala.collection.generic.Clearable
// Weak references so the garbage collector will take care of
// letting us know when a cache is really out of commission.
- private val caches = mutable.HashSet[WeakReference[Clearable]]()
+ private val caches = WeakHashSet[Clearable]()
def recordCache[T <: Clearable](cache: T): T = {
- caches += new WeakReference(cache)
+ caches += cache
cache
}
def clearAll() = {
debuglog("Clearing " + caches.size + " caches.")
- caches foreach { ref =>
- val cache = ref.get()
- if (cache == null)
- caches -= ref
- else
- cache.clear()
- }
+ caches foreach (_.clear)
}
def newWeakMap[K, V]() = recordCache(mutable.WeakHashMap[K, V]())
diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala
index f27e6c979a..d942c71619 100644
--- a/src/reflect/scala/reflect/internal/Types.scala
+++ b/src/reflect/scala/reflect/internal/Types.scala
@@ -107,6 +107,18 @@ trait Types
protected val enableTypeVarExperimentals = settings.Xexperimental.value
+ /** Caching the most recent map has a 75-90% hit rate. */
+ private object substTypeMapCache {
+ private[this] var cached: SubstTypeMap = new SubstTypeMap(Nil, Nil)
+
+ def apply(from: List[Symbol], to: List[Type]): SubstTypeMap = {
+ if ((cached.from ne from) || (cached.to ne to))
+ cached = new SubstTypeMap(from, to)
+
+ cached
+ }
+ }
+
/** The current skolemization level, needed for the algorithms
* in isSameType, isSubType that do constraint solving under a prefix.
*/
@@ -698,8 +710,7 @@ trait Types
* symbols `from` in this type.
*/
def subst(from: List[Symbol], to: List[Type]): Type =
- if (from.isEmpty) this
- else new SubstTypeMap(from, to) apply this
+ if (from.isEmpty) this else substTypeMapCache(from, to)(this)
/** Substitute symbols `to` for occurrences of symbols `from` in this type.
*
@@ -1066,6 +1077,14 @@ trait Types
continue = false
val bcs0 = baseClasses
var bcs = bcs0
+ // omit PRIVATE LOCALS unless selector class is contained in class owning the def.
+ def admitPrivateLocal(owner: Symbol): Boolean = {
+ val selectorClass = this match {
+ case tt: ThisType => tt.sym // SI-7507 the first base class is not necessarily the selector class.
+ case _ => bcs0.head
+ }
+ selectorClass.hasTransOwner(owner)
+ }
while (!bcs.isEmpty) {
val decls = bcs.head.info.decls
var entry = decls.lookupEntry(name)
@@ -1075,10 +1094,10 @@ trait Types
if ((flags & required) == required) {
val excl = flags & excluded
if (excl == 0L &&
- (// omit PRIVATE LOCALS unless selector class is contained in class owning the def.
+ (
(bcs eq bcs0) ||
(flags & PrivateLocal) != PrivateLocal ||
- (bcs0.head.hasTransOwner(bcs.head)))) {
+ admitPrivateLocal(bcs.head))) {
if (name.isTypeName || (stableOnly && sym.isStable && !sym.hasVolatileType)) {
if (Statistics.canEnable) Statistics.popTimer(typeOpsStack, start)
return sym
@@ -3119,10 +3138,9 @@ trait Types
sameLength(typeArgs, tp.typeArgs) && {
val lhs = if (isLowerBound) tp.typeArgs else typeArgs
val rhs = if (isLowerBound) typeArgs else tp.typeArgs
- // this is a higher-kinded type var with same arity as tp.
- // side effect: adds the type constructor itself as a bound
- addBound(tp.typeConstructor)
- isSubArgs(lhs, rhs, params, AnyDepth)
+ // This is a higher-kinded type var with same arity as tp.
+ // If so (see SI-7517), side effect: adds the type constructor itself as a bound.
+ isSubArgs(lhs, rhs, params, AnyDepth) && { addBound(tp.typeConstructor); true }
}
}
// The type with which we can successfully unify can be hidden
@@ -3669,13 +3687,13 @@ trait Types
// Hash consing --------------------------------------------------------------
private val initialUniquesCapacity = 4096
- private var uniques: util.HashSet[Type] = _
+ private var uniques: util.WeakHashSet[Type] = _
private var uniqueRunId = NoRunId
protected def unique[T <: Type](tp: T): T = {
if (Statistics.canEnable) Statistics.incCounter(rawTypeCount)
if (uniqueRunId != currentRunId) {
- uniques = util.HashSet[Type]("uniques", initialUniquesCapacity)
+ uniques = util.WeakHashSet[Type](initialUniquesCapacity)
perRunCaches.recordCache(uniques)
uniqueRunId = currentRunId
}
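The substTypeMapCache above is a single-entry cache keyed on reference identity of the argument lists, so a hit costs only two `eq` checks. A generic sketch of the same pattern (illustrative only; `LastCallCache` is not part of the patch):

    // Remembers only the most recent (a, b) -> mk(a, b) result.
    final class LastCallCache[A <: AnyRef, B <: AnyRef, R](mk: (A, B) => R) {
      private[this] var lastA: A = _
      private[this] var lastB: B = _
      private[this] var lastR: R = _

      def apply(a: A, b: B): R = {
        if ((lastA ne a) || (lastB ne b)) {
          lastA = a; lastB = b; lastR = mk(a, b)
        }
        lastR
      }
    }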
diff --git a/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala b/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala
index 711a94d7bd..63f17dff34 100644
--- a/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala
+++ b/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala
@@ -4,7 +4,7 @@ package internal
package tpe
import scala.collection.{ mutable }
-import util.Statistics
+import util.{ Statistics, TriState }
import scala.annotation.tailrec
trait TypeComparers {
@@ -118,30 +118,20 @@ trait TypeComparers {
// if (subsametypeRecursions == 0) undoLog.clear()
}
- private def isSameType1(tp1: Type, tp2: Type): Boolean = {
- if ((tp1 eq tp2) ||
- (tp1 eq ErrorType) || (tp1 eq WildcardType) ||
- (tp2 eq ErrorType) || (tp2 eq WildcardType))
- true
- else if ((tp1 eq NoType) || (tp2 eq NoType))
- false
- else if (tp1 eq NoPrefix) // !! I do not see how this would be warranted by the spec
- tp2.typeSymbol.isPackageClass
- else if (tp2 eq NoPrefix) // !! I do not see how this would be warranted by the spec
- tp1.typeSymbol.isPackageClass
- else if (tp1.isInstanceOf[AnnotatedType] || tp2.isInstanceOf[AnnotatedType])
- annotationsConform(tp1, tp2) && annotationsConform(tp2, tp1) && (tp1.withoutAnnotations =:= tp2.withoutAnnotations)
- else {
- // We flush out any AnnotatedTypes before calling isSameType2 because
- // unlike most other subclasses of Type, we have to allow for equivalence of any
- // combination of { tp1, tp2 } { is, is not } an AnnotatedType - this because the
- // logic of "annotationsConform" is arbitrary and unknown.
- isSameType2(tp1, tp2) || {
- val tp1n = normalizePlus(tp1)
- val tp2n = normalizePlus(tp2)
- ((tp1n ne tp1) || (tp2n ne tp2)) && isSameType(tp1n, tp2n)
- }
- }
+ // @pre: at least one argument has annotations
+ private def sameAnnotatedTypes(tp1: Type, tp2: Type) = (
+ annotationsConform(tp1, tp2)
+ && annotationsConform(tp2, tp1)
+ && (tp1.withoutAnnotations =:= tp2.withoutAnnotations)
+ )
+ // We flush out any AnnotatedTypes before calling isSameType2 because
+ // unlike most other subclasses of Type, we have to allow for equivalence of any
+ // combination of { tp1, tp2 } { is, is not } an AnnotatedType - this because the
+ // logic of "annotationsConform" is arbitrary and unknown.
+ private def isSameType1(tp1: Type, tp2: Type): Boolean = typeRelationPreCheck(tp1, tp2) match {
+ case state if state.isKnown => state.booleanValue
+ case _ if typeHasAnnotations(tp1) || typeHasAnnotations(tp2) => sameAnnotatedTypes(tp1, tp2)
+ case _ => isSameType2(tp1, tp2)
}
private def isSameHKTypes(tp1: Type, tp2: Type) = (
@@ -186,6 +176,8 @@ trait TypeComparers {
}
def isSameType2(tp1: Type, tp2: Type): Boolean = {
+ def retry(lhs: Type, rhs: Type) = ((lhs ne tp1) || (rhs ne tp2)) && isSameType(lhs, rhs)
+
/* Here we highlight those unfortunate type-like constructs which
* are hidden bundles of mutable state, cruising the type system picking
* up any type constraints naive enough to get into their hot rods.
@@ -236,6 +228,7 @@ trait TypeComparers {
|| sameSingletonType
|| mutateNonTypeConstructs(tp1, tp2)
|| mutateNonTypeConstructs(tp2, tp1)
+ || retry(normalizePlus(tp1), normalizePlus(tp2))
)
}
@@ -276,12 +269,12 @@ trait TypeComparers {
else
try {
pendingSubTypes += p
- isSubType2(tp1, tp2, depth)
+ isSubType1(tp1, tp2, depth)
} finally {
pendingSubTypes -= p
}
} else {
- isSubType2(tp1, tp2, depth)
+ isSubType1(tp1, tp2, depth)
}
} finally if (!result) undoLog.undoTo(before)
@@ -294,6 +287,39 @@ trait TypeComparers {
// if (subsametypeRecursions == 0) undoLog.clear()
}
+ /** Check whether the subtype or type equivalence relationship
+ * between the arguments is predetermined. Returns a tri-state
+ * value: True means the arguments are always sub/same types,
+ * False means they never are, and Unknown means the caller
+ * will have to figure things out.
+ */
+ private def typeRelationPreCheck(tp1: Type, tp2: Type): TriState = {
+ def isTrue = (
+ (tp1 eq tp2)
+ || isErrorOrWildcard(tp1)
+ || isErrorOrWildcard(tp2)
+ || (tp1 eq NoPrefix) && tp2.typeSymbol.isPackageClass // !! I do not see how this would be warranted by the spec
+ || (tp2 eq NoPrefix) && tp1.typeSymbol.isPackageClass // !! I do not see how this would be warranted by the spec
+ )
+ // isFalse, assuming !isTrue
+ def isFalse = (
+ (tp1 eq NoType)
+ || (tp2 eq NoType)
+ || (tp1 eq NoPrefix)
+ || (tp2 eq NoPrefix)
+ )
+
+ if (isTrue) TriState.True
+ else if (isFalse) TriState.False
+ else TriState.Unknown
+ }
+
+ private def isSubType1(tp1: Type, tp2: Type, depth: Int): Boolean = typeRelationPreCheck(tp1, tp2) match {
+ case state if state.isKnown => state.booleanValue
+ case _ if typeHasAnnotations(tp1) || typeHasAnnotations(tp2) => annotationsConform(tp1, tp2) && (tp1.withoutAnnotations <:< tp2.withoutAnnotations)
+ case _ => isSubType2(tp1, tp2, depth)
+ }
+
private def isPolySubType(tp1: PolyType, tp2: PolyType): Boolean = {
val PolyType(tparams1, res1) = tp1
val PolyType(tparams2, res2) = tp2
@@ -332,12 +358,13 @@ trait TypeComparers {
/** Does type `tp1` conform to `tp2`? */
private def isSubType2(tp1: Type, tp2: Type, depth: Int): Boolean = {
- if ((tp1 eq tp2) || isErrorOrWildcard(tp1) || isErrorOrWildcard(tp2)) return true
- if ((tp1 eq NoType) || (tp2 eq NoType)) return false
- if (tp1 eq NoPrefix) return (tp2 eq NoPrefix) || tp2.typeSymbol.isPackageClass // !! I do not see how the "isPackageClass" would be warranted by the spec
- if (tp2 eq NoPrefix) return tp1.typeSymbol.isPackageClass
- if (isSingleType(tp1) && isSingleType(tp2) || isConstantType(tp1) && isConstantType(tp2)) return tp1 =:= tp2
- if (tp1.isHigherKinded || tp2.isHigherKinded) return isHKSubType(tp1, tp2, depth)
+ def retry(lhs: Type, rhs: Type) = ((lhs ne tp1) || (rhs ne tp2)) && isSubType(lhs, rhs, depth)
+
+ if (isSingleType(tp1) && isSingleType(tp2) || isConstantType(tp1) && isConstantType(tp2))
+ return (tp1 =:= tp2) || retry(tp1.underlying, tp2)
+
+ if (tp1.isHigherKinded || tp2.isHigherKinded)
+ return isHKSubType(tp1, tp2, depth)
/* First try, on the right:
* - unwrap Annotated types, BoundedWildcardTypes,
diff --git a/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala b/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala
index c35825dcee..bebc419c7c 100644
--- a/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala
+++ b/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala
@@ -794,8 +794,7 @@ private[internal] trait TypeMaps {
}
/** A map to implement the `subst` method. */
- class SubstTypeMap(from: List[Symbol], to: List[Type])
- extends SubstMap(from, to) {
+ class SubstTypeMap(val from: List[Symbol], val to: List[Type]) extends SubstMap(from, to) {
protected def toType(fromtp: Type, tp: Type) = tp
override def mapOver(tree: Tree, giveup: () => Nothing): Tree = {
diff --git a/src/reflect/scala/reflect/internal/util/TriState.scala b/src/reflect/scala/reflect/internal/util/TriState.scala
new file mode 100644
index 0000000000..c7a35d4637
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/util/TriState.scala
@@ -0,0 +1,26 @@
+package scala
+package reflect
+package internal
+package util
+
+import TriState._
+
+/** A simple true/false/unknown value, for those days when
+ * true and false don't quite partition the space.
+ */
+final class TriState private (val value: Int) extends AnyVal {
+ def isKnown = this != Unknown
+ def booleanValue = this match {
+ case True => true
+ case False => false
+ case _ => sys.error("Not a Boolean value")
+ }
+}
+
+object TriState {
+ implicit def booleanToTriState(b: Boolean): TriState = if (b) True else False
+
+ val Unknown = new TriState(-1)
+ val False = new TriState(0)
+ val True = new TriState(1)
+}
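A usage sketch for the new TriState (not in the patch): the implicit conversion lets a Boolean flow out when the answer is known, while Unknown defers to the caller, which is how typeRelationPreCheck in TypeComparers.scala above uses it. `quickCompare` is a hypothetical example, not compiler code:

    import scala.reflect.internal.util.TriState

    object TriStateDemo extends App {
      // a cheap pre-check that sometimes cannot decide
      def quickCompare(x: Int, y: Int): TriState =
        if (x == y) true                       // via the implicit booleanToTriState
        else if (x < 0 || y < 0) false
        else TriState.Unknown

      val r = quickCompare(1, 1)
      if (r.isKnown) println(r.booleanValue)   // prints: true
      else println("fall through to the expensive comparison")
    }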
diff --git a/src/reflect/scala/reflect/internal/util/WeakHashSet.scala b/src/reflect/scala/reflect/internal/util/WeakHashSet.scala
index ef62fa481b..9b792a3f43 100644
--- a/src/reflect/scala/reflect/internal/util/WeakHashSet.scala
+++ b/src/reflect/scala/reflect/internal/util/WeakHashSet.scala
@@ -1,59 +1,430 @@
package scala
package reflect.internal.util
-import scala.collection.mutable
+import java.lang.ref.{WeakReference, ReferenceQueue}
+import scala.annotation.tailrec
import scala.collection.generic.Clearable
-import scala.runtime.AbstractFunction1
+import scala.collection.mutable.{Set => mSet}
-/** A bare-bones implementation of a mutable `Set` that uses weak references
- * to hold the elements.
+/**
+ * A HashSet where the elements are stored weakly. Elements in this set are eligible for GC if no other
+ * hard references are associated with them. Its primary use case is as a canonical reference
+ * identity holder (aka "hash-consing") via findEntryOrUpdate
*
- * This implementation offers only add/remove/test operations,
- * therefore it does not fulfill the contract of Scala collection sets.
+ * This Set implementation cannot hold null. Any attempt to put a null in it will result in a NullPointerException
+ *
+ * This set implementation is not in general thread-safe without external concurrency control. However, it behaves
+ * properly when GC concurrently collects elements in this set.
*/
-class WeakHashSet[T <: AnyRef] extends AbstractFunction1[T, Boolean] with Clearable {
- private val underlying = mutable.HashSet[WeakReferenceWithEquals[T]]()
+final class WeakHashSet[A <: AnyRef](val initialCapacity: Int, val loadFactor: Double) extends Set[A] with Function1[A, Boolean] with mSet[A] {
+
+ import WeakHashSet._
+
+ def this() = this(initialCapacity = WeakHashSet.defaultInitialCapacity, loadFactor = WeakHashSet.defaultLoadFactor)
+
+ type This = WeakHashSet[A]
+
+ /**
+ * queue of Entries that hold elements scheduled for GC
+ * the removeStaleEntries() method works through the queue to remove
+ * stale entries from the table
+ */
+ private[this] val queue = new ReferenceQueue[A]
+
+ /**
+ * the number of elements in this set
+ */
+ private[this] var count = 0
+
+ /**
+ * from a specified initial capacity compute the capacity we'll use as being the next
+ * power of two equal to or greater than the specified initial capacity
+ */
+ private def computeCapacity = {
+ if (initialCapacity < 0) throw new IllegalArgumentException("initial capacity cannot be less than 0");
+ var candidate = 1
+ while (candidate < initialCapacity) {
+ candidate *= 2
+ }
+ candidate
+ }
+
+ /**
+ * the underlying table of entries which is an array of Entry linked lists
+ */
+ private[this] var table = new Array[Entry[A]](computeCapacity)
+
+ /**
+ * the limit at which we'll increase the size of the hash table
+ */
+ var threshhold = computeThreshHold
+
+ private[this] def computeThreshHold: Int = (table.size * loadFactor).ceil.toInt
- /** Add the given element to this set. */
- def +=(elem: T): this.type = {
- underlying += new WeakReferenceWithEquals(elem)
- this
+ /**
+ * find the bucket associated with an element's hash code
+ */
+ private[this] def bucketFor(hash: Int): Int = {
+ // spread the bits around to try to avoid accidental collisions using the
+ // same algorithm as java.util.HashMap
+ var h = hash
+ h ^= h >>> 20 ^ h >>> 12
+ h ^= h >>> 7 ^ h >>> 4
+
+ // this is finding h % table.length, but takes advantage of the
+ // fact that table length is a power of 2,
+ // if you don't do bit flipping in your head, if table.length
+ // is binary 100000.. (with n 0s) then table.length - 1
+ // is 1111.. with n 1's.
+ // In other words this masks on the last n bits in the hash
+ h & (table.length - 1)
}
- /** Remove the given element from this set. */
- def -=(elem: T): this.type = {
- underlying -= new WeakReferenceWithEquals(elem)
- this
+ /**
+ * remove a single entry from a linked list in a given bucket
+ */
+ private[this] def remove(bucket: Int, prevEntry: Entry[A], entry: Entry[A]) {
+ prevEntry match {
+ case null => table(bucket) = entry.tail
+ case _ => prevEntry.tail = entry.tail
+ }
+ count -= 1
}
- /** Does the given element belong to this set? */
- def contains(elem: T): Boolean =
- underlying.contains(new WeakReferenceWithEquals(elem))
+ /**
+ * remove entries associated with elements that have been gc'ed
+ */
+ private[this] def removeStaleEntries() {
+ def poll(): Entry[A] = queue.poll().asInstanceOf[Entry[A]]
- /** Does the given element belong to this set? */
- def apply(elem: T): Boolean = contains(elem)
+ @tailrec
+ def queueLoop {
+ val stale = poll()
+ if (stale != null) {
+ val bucket = bucketFor(stale.hash)
- /** Return the number of elements in this set, including reclaimed elements. */
- def size = underlying.size
+ @tailrec
+ def linkedListLoop(prevEntry: Entry[A], entry: Entry[A]): Unit = if (stale eq entry) remove(bucket, prevEntry, entry)
+ else if (entry != null) linkedListLoop(entry, entry.tail)
- /** Remove all elements in this set. */
- def clear() = underlying.clear()
-}
+ linkedListLoop(null, table(bucket))
-/** A WeakReference implementation that implements equals and hashCode by
- * delegating to the referent.
- */
-class WeakReferenceWithEquals[T <: AnyRef](ref: T) {
- def get(): T = underlying.get()
+ queueLoop
+ }
+ }
+
+ queueLoop
+ }
+
+ /**
+ * Double the size of the internal table
+ */
+ private[this] def resize() {
+ val oldTable = table
+ table = new Array[Entry[A]](oldTable.size * 2)
+ threshhold = computeThreshHold
+
+ @tailrec
+ def tableLoop(oldBucket: Int): Unit = if (oldBucket < oldTable.size) {
+ @tailrec
+ def linkedListLoop(entry: Entry[A]): Unit = entry match {
+ case null => ()
+ case _ => {
+ val bucket = bucketFor(entry.hash)
+ val oldNext = entry.tail
+ entry.tail = table(bucket)
+ table(bucket) = entry
+ linkedListLoop(oldNext)
+ }
+ }
+ linkedListLoop(oldTable(oldBucket))
+
+ tableLoop(oldBucket + 1)
+ }
+ tableLoop(0)
+ }
+
+ // from scala.reflect.internal.Set, find an element or null if it isn't contained
+ override def findEntry(elem: A): A = elem match {
+ case null => throw new NullPointerException("WeakHashSet cannot hold nulls")
+ case _ => {
+ removeStaleEntries()
+ val hash = elem.hashCode
+ val bucket = bucketFor(hash)
+
+ @tailrec
+ def linkedListLoop(entry: Entry[A]): A = entry match {
+ case null => null.asInstanceOf[A]
+ case _ => {
+ val entryElem = entry.get
+ if (elem == entryElem) entryElem
+ else linkedListLoop(entry.tail)
+ }
+ }
+
+ linkedListLoop(table(bucket))
+ }
+ }
+ // add an element to this set unless it's already in there and return the element
+ def findEntryOrUpdate(elem: A): A = elem match {
+ case null => throw new NullPointerException("WeakHashSet cannot hold nulls")
+ case _ => {
+ removeStaleEntries()
+ val hash = elem.hashCode
+ val bucket = bucketFor(hash)
+ val oldHead = table(bucket)
+
+ def add() = {
+ table(bucket) = new Entry(elem, hash, oldHead, queue)
+ count += 1
+ if (count > threshhold) resize()
+ elem
+ }
+
+ @tailrec
+ def linkedListLoop(entry: Entry[A]): A = entry match {
+ case null => add()
+ case _ => {
+ val entryElem = entry.get
+ if (elem == entryElem) entryElem
+ else linkedListLoop(entry.tail)
+ }
+ }
+
+ linkedListLoop(oldHead)
+ }
+ }
+
+ // add an element to this set unless it's already in there and return this set
+ override def +(elem: A): this.type = elem match {
+ case null => throw new NullPointerException("WeakHashSet cannot hold nulls")
+ case _ => {
+ removeStaleEntries()
+ val hash = elem.hashCode
+ val bucket = bucketFor(hash)
+ val oldHead = table(bucket)
+
+ def add() {
+ table(bucket) = new Entry(elem, hash, oldHead, queue)
+ count += 1
+ if (count > threshhold) resize()
+ }
+
+ @tailrec
+ def linkedListLoop(entry: Entry[A]): Unit = entry match {
+ case null => add()
+ case _ if (elem == entry.get) => ()
+ case _ => linkedListLoop(entry.tail)
+ }
+
+ linkedListLoop(oldHead)
+ this
+ }
+ }
+
+ def +=(elem: A) = this + elem
+
+ // from scala.reflect.internal.Set
+ override def addEntry(x: A) { this += x }
+
+ // remove an element from this set and return this set
+ override def -(elem: A): this.type = elem match {
+ case null => this
+ case _ => {
+ removeStaleEntries()
+ val bucket = bucketFor(elem.hashCode)
+
+
+
+ @tailrec
+ def linkedListLoop(prevEntry: Entry[A], entry: Entry[A]): Unit = entry match {
+ case null => ()
+ case _ if (elem == entry.get) => remove(bucket, prevEntry, entry)
+ case _ => linkedListLoop(entry, entry.tail)
+ }
- override val hashCode = ref.hashCode
+ linkedListLoop(null, table(bucket))
+ this
+ }
+ }
+
+ def -=(elem: A) = this - elem
+
+ // empty this set
+ override def clear(): Unit = {
+ table = new Array[Entry[A]](table.size)
+ threshhold = computeThreshHold
+ count = 0
+
+ // drain the queue - doesn't do anything because we're throwing away all the values anyway
+ @tailrec def queueLoop(): Unit = if (queue.poll() != null) queueLoop()
+ queueLoop()
+ }
+
+ // a fresh, empty set with the same configuration as this one
+ override def empty: This = new WeakHashSet[A](initialCapacity, loadFactor)
+
+ // the number of elements in this set
+ override def size: Int = {
+ removeStaleEntries()
+ count
+ }
+
+ override def apply(x: A): Boolean = this contains x
+
+ override def foreach[U](f: A => U): Unit = iterator foreach f
+
+ override def toList(): List[A] = iterator.toList
+
+ // Iterator over all the elements in this set in no particular order
+ override def iterator: Iterator[A] = {
+ removeStaleEntries()
+
+ new Iterator[A] {
+
+ /**
+ * the bucket currently being examined. Initially it's set past the last bucket and will be decremented
+ */
+ private[this] var currentBucket: Int = table.size
+
+ /**
+ * the entry that was last examined
+ */
+ private[this] var entry: Entry[A] = null
+
+ /**
+ * the element that will be the result of the next call to next()
+ */
+ private[this] var lookaheadelement: A = null.asInstanceOf[A]
+
+ @tailrec
+ def hasNext: Boolean = {
+ while (entry == null && currentBucket > 0) {
+ currentBucket -= 1
+ entry = table(currentBucket)
+ }
+
+ if (entry == null) false
+ else {
+ lookaheadelement = entry.get
+ if (lookaheadelement == null) {
+ // element null means the weakref has been cleared since we last did a removeStaleEntries(), move to the next entry
+ entry = entry.tail
+ hasNext
+ } else {
+ true
+ }
+ }
+ }
- override def equals(other: Any): Boolean = other match {
- case wf: WeakReferenceWithEquals[_] =>
- underlying.get() == wf.get()
- case _ =>
- false
+ def next(): A = if (lookaheadelement == null)
+ throw new IndexOutOfBoundsException("next on an empty iterator")
+ else {
+ val result = lookaheadelement
+ lookaheadelement = null.asInstanceOf[A]
+ entry = entry.tail
+ result
+ }
+ }
}
- private val underlying = new java.lang.ref.WeakReference(ref)
+ /**
+ * Diagnostic information about the internals of this set. Not normally
+ * needed by ordinary code, but may be useful for diagnosing performance problems
+ */
+ private[util] class Diagnostics {
+ /**
+ * Verify that the internal structure of this hash set is fully consistent.
+ * Throws an assertion error on any problem. In order for it to be reliable
+ * the entries must be stable. If any are garbage collected during validation
+ * then an assertion may inappropriately fire.
+ */
+ def fullyValidate {
+ var computedCount = 0
+ var bucket = 0
+ while (bucket < table.size) {
+ var entry = table(bucket)
+ while (entry != null) {
+ assert(entry.get != null, s"$entry had a null value indicating that gc activity was happening during diagnostic validation or that a null value was inserted")
+ computedCount += 1
+ val cachedHash = entry.hash
+ val realHash = entry.get.hashCode
+ assert(cachedHash == realHash, s"for $entry cached hash was $cachedHash but should have been $realHash")
+ val computedBucket = bucketFor(realHash)
+ assert(computedBucket == bucket, s"for $entry the computed bucket was $computedBucket but should have been $bucket")
+
+ entry = entry.tail
+ }
+
+ bucket += 1
+ }
+
+ assert(computedCount == count, s"The computed count was $computedCount but should have been $count")
+ }
+
+ /**
+ * Produces a diagnostic dump of the table that underlies this hash set.
+ */
+ def dump = table.deep
+
+ /**
+ * Number of buckets that hold collisions. Useful for diagnosing performance issues.
+ */
+ def collisionBucketsCount: Int =
+ (table filter (entry => entry != null && entry.tail != null)).size
+
+ /**
+ * Number of buckets that are occupied in this hash table.
+ */
+ def fullBucketsCount: Int =
+ (table filter (entry => entry != null)).size
+
+ /**
+ * Number of buckets in the table
+ */
+ def bucketsCount: Int = table.size
+
+ /**
+ * Number of buckets that don't hold anything
+ */
+ def emptyBucketsCount = bucketsCount - fullBucketsCount
+
+ /**
+ * Number of elements that are in collision. Useful for diagnosing performance issues.
+ */
+ def collisionsCount = size - (fullBucketsCount - collisionBucketsCount)
+
+ /**
+ * A map from a count of elements to the number of buckets with that count
+ */
+ def elementCountDistribution = table map linkedListSize groupBy identity map {case (size, list) => (size, list.size)}
+
+ private def linkedListSize(entry: Entry[A]) = {
+ var e = entry
+ var count = 0
+ while (e != null) {
+ count += 1
+ e = e.tail
+ }
+ count
+ }
+ }
+
+ private[util] def diagnostics = new Diagnostics
}
+
+/**
+ * Companion object for WeakHashSet
+ */
+object WeakHashSet {
+ /**
+ * A single entry in a WeakHashSet. It's a WeakReference plus a cached hash code and
+ * a link to the next Entry in the same bucket
+ */
+ private class Entry[A](element: A, val hash:Int, var tail: Entry[A], queue: ReferenceQueue[A]) extends WeakReference[A](element, queue)
+
+ val defaultInitialCapacity = 16
+ val defaultLoadFactor = .75
+
+ def apply[A <: AnyRef](initialCapacity: Int = WeakHashSet.defaultInitialCapacity, loadFactor: Double = WeakHashSet.defaultLoadFactor) = new WeakHashSet[A](initialCapacity, loadFactor)
+} \ No newline at end of file
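As a reminder of the primary use case named in the class comment (hash-consing), a minimal sketch, not part of the patch: findEntryOrUpdate returns the canonical, already-stored instance for equal values, which is how the `uniques` table in Types.scala uses this set. `HashConsDemo` is an illustrative name only:

    import scala.reflect.internal.util.WeakHashSet

    object HashConsDemo extends App {
      val interned = new WeakHashSet[String]()
      val a = new String("hello")                   // two equal but distinct instances
      val b = new String("hello")
      println(interned.findEntryOrUpdate(a) eq a)   // true: a becomes the canonical copy
      println(interned.findEntryOrUpdate(b) eq a)   // true: b collapses onto a
    }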
diff --git a/test/files/neg/t6138.check b/test/files/neg/t6138.check
new file mode 100644
index 0000000000..8fd9978248
--- /dev/null
+++ b/test/files/neg/t6138.check
@@ -0,0 +1,7 @@
+t6138.scala:4: error: ambiguous reference to overloaded definition,
+both method getClass in object definitions of type (s: Int)Any
+and method getClass in object definitions of type (s: String)Any
+match argument types (Nothing)
+ getClass(???): String
+ ^
+one error found
diff --git a/test/files/neg/t6138.scala b/test/files/neg/t6138.scala
new file mode 100644
index 0000000000..2f45a46b1c
--- /dev/null
+++ b/test/files/neg/t6138.scala
@@ -0,0 +1,5 @@
+object definitions {
+ def getClass(s: String): Any = ???
+ def getClass(s: Int): Any = ???
+ getClass(???): String
+}
diff --git a/test/files/neg/t7473.check b/test/files/neg/t7473.check
new file mode 100644
index 0000000000..bc8c29d463
--- /dev/null
+++ b/test/files/neg/t7473.check
@@ -0,0 +1,7 @@
+t7473.scala:6: error: '<-' expected but '=' found.
+ (for (x = Option(i); if x == j) yield 42) toList
+ ^
+t7473.scala:6: error: illegal start of simple expression
+ (for (x = Option(i); if x == j) yield 42) toList
+ ^
+two errors found
diff --git a/test/files/neg/t7473.scala b/test/files/neg/t7473.scala
new file mode 100644
index 0000000000..593231d5f2
--- /dev/null
+++ b/test/files/neg/t7473.scala
@@ -0,0 +1,7 @@
+
+object Foo {
+ val i,j = 3
+ //for (x = Option(i); if x == j) yield 42 //t7473.scala:4: error: '<-' expected but '=' found.
+ // evil postfix!
+ (for (x = Option(i); if x == j) yield 42) toList
+}
diff --git a/test/files/neg/t7507.check b/test/files/neg/t7507.check
new file mode 100644
index 0000000000..d402869fd4
--- /dev/null
+++ b/test/files/neg/t7507.check
@@ -0,0 +1,4 @@
+t7507.scala:6: error: value bippy in trait Cake cannot be accessed in Cake
+ locally(bippy)
+ ^
+one error found
diff --git a/test/files/neg/t7507.scala b/test/files/neg/t7507.scala
new file mode 100644
index 0000000000..1b4756d955
--- /dev/null
+++ b/test/files/neg/t7507.scala
@@ -0,0 +1,7 @@
+trait Cake extends Slice {
+ private[this] val bippy = ()
+}
+
+trait Slice { self: Cake =>
+ locally(bippy)
+}
diff --git a/test/files/neg/t7509.check b/test/files/neg/t7509.check
new file mode 100644
index 0000000000..eaa6303cf5
--- /dev/null
+++ b/test/files/neg/t7509.check
@@ -0,0 +1,12 @@
+t7509.scala:3: error: inferred type arguments [Int] do not conform to method crash's type parameter bounds [R <: AnyRef]
+ crash(42)
+ ^
+t7509.scala:3: error: type mismatch;
+ found : Int(42)
+ required: R
+ crash(42)
+ ^
+t7509.scala:3: error: could not find implicit value for parameter ev: R
+ crash(42)
+ ^
+three errors found
diff --git a/test/files/neg/t7509.scala b/test/files/neg/t7509.scala
new file mode 100644
index 0000000000..3cba801ea7
--- /dev/null
+++ b/test/files/neg/t7509.scala
@@ -0,0 +1,4 @@
+object NMWE {
+ def crash[R <: AnyRef](f: R)(implicit ev: R): Any = ???
+ crash(42)
+}
diff --git a/test/files/pos/lub-dealias-widen.scala b/test/files/pos/lub-dealias-widen.scala
new file mode 100644
index 0000000000..38854fbc5c
--- /dev/null
+++ b/test/files/pos/lub-dealias-widen.scala
@@ -0,0 +1,34 @@
+import scala.language.higherKinds
+
+sealed trait Path {
+ type EncodeFunc
+ type Route[R] = List[String] => R
+
+ def >>(f: Route[Int]): Sitelet[EncodeFunc] = ???
+}
+
+case object PAny extends Path {
+ type EncodeFunc = List[String] => String
+}
+
+case class PLit[Next <: Path]() extends Path {
+ type EncodeFunc = Next#EncodeFunc
+}
+
+trait Sitelet[EncodeFunc] { self =>
+ def &[G <: H, H >: EncodeFunc](that: Sitelet[G]): Sitelet[H] = ???
+}
+
+object Test {
+ val r: Sitelet[Int => (Int => String)] = ???
+
+ val p2: PLit[PAny.type] = ???
+ val r2 /*: Sitelet[List[String] => String] */ // annotate type and it compiles with 2.10.0
+ = p2 >> { (xs: List[String]) => 0 }
+
+ // This works after https://github.com/scala/scala/commit/a06d31f6a
+ // Before: error: inferred type arguments [List[String] => String,List[String] => String]
+ // do not conform to method &'s type parameter bounds
+ // [G <: H,H >: Int => (Int => String)]
+ val s = r & r2
+} \ No newline at end of file
diff --git a/test/files/pos/t7516/A_1.scala b/test/files/pos/t7516/A_1.scala
new file mode 100644
index 0000000000..3bba19966d
--- /dev/null
+++ b/test/files/pos/t7516/A_1.scala
@@ -0,0 +1,9 @@
+import scala.reflect._,macros._, scala.language.experimental.macros
+
+object A {
+ def impl[T: c.WeakTypeTag](c: Context)(t: c.Expr[T]): c.Expr[List[T]] = {
+ val r = c.universe.reify { List(t.splice) }
+ c.Expr[List[T]]( c.resetLocalAttrs(r.tree) )
+ }
+ def demo[T](t: T): List[T] = macro impl[T]
+}
diff --git a/test/files/pos/t7516/B_2.scala b/test/files/pos/t7516/B_2.scala
new file mode 100644
index 0000000000..1b8531bc85
--- /dev/null
+++ b/test/files/pos/t7516/B_2.scala
@@ -0,0 +1,4 @@
+object B {
+ final case class CV(p: Int = 3, g: Int = 2)
+ A.demo { val d = 4; CV(g = d); "a" }
+}
diff --git a/test/files/pos/t7517.scala b/test/files/pos/t7517.scala
new file mode 100644
index 0000000000..7ce4c6b13e
--- /dev/null
+++ b/test/files/pos/t7517.scala
@@ -0,0 +1,22 @@
+trait Box[ K[A[x]] ]
+
+object Box {
+ // type constructor composition
+ sealed trait ∙[A[_], B[_]] { type l[T] = A[B[T]] }
+
+ // composes type constructors inside K
+ type SplitBox[K[A[x]], B[x]] = Box[ ({ type l[A[x]] = K[ (A ∙ B)#l] })#l ]
+
+ def split[ K[A[x]], B[x] ](base: Box[K]): SplitBox[K,B] = ???
+
+ class Composed[B[_], L[A[x]] ] {
+ val box: Box[L] = ???
+
+ type Split[ A[x] ] = L[ (A ∙ B)#l ]
+ val a: Box[Split] = Box.split(box)
+
+ //Either of these work:
+ val a1: Box[Split] = Box.split[L,B](box)
+ val a2: Box[ ({ type l[A[x]] = L[ (A ∙ B)#l ] })#l ] = Box.split(box)
+ }
+} \ No newline at end of file
diff --git a/test/files/pos/t7520.scala b/test/files/pos/t7520.scala
new file mode 100644
index 0000000000..747f5278e5
--- /dev/null
+++ b/test/files/pos/t7520.scala
@@ -0,0 +1,10 @@
+class A {
+ val x: Singleton with this.type = this
+ val y: this.type = x
+}
+
+class B {
+ val x = ""
+ val xs: x.type with Singleton = x
+ val y: x.type = xs
+}
diff --git a/test/files/pos/t7532/A_1.java b/test/files/pos/t7532/A_1.java
new file mode 100644
index 0000000000..1ade76cc70
--- /dev/null
+++ b/test/files/pos/t7532/A_1.java
@@ -0,0 +1,6 @@
+class R {
+ public class attr { // Will have the bytecode name `R$attr`, not to be confused with `R@tr`!
+ }
+ public static class attr1 {
+ }
+}
diff --git a/test/files/pos/t7532/B_2.scala b/test/files/pos/t7532/B_2.scala
new file mode 100644
index 0000000000..ee7ce7751f
--- /dev/null
+++ b/test/files/pos/t7532/B_2.scala
@@ -0,0 +1,5 @@
+object Test {
+ val r = new R
+ new r.attr() // Was: error while loading attr, class file '.../t7532-pos.obj/R$attr.class' has location not matching its contents: contains class
+ new R.attr1
+} \ No newline at end of file
diff --git a/test/files/pos/t7532b/A_1.scala b/test/files/pos/t7532b/A_1.scala
new file mode 100644
index 0000000000..e8f9540609
--- /dev/null
+++ b/test/files/pos/t7532b/A_1.scala
@@ -0,0 +1,7 @@
+package pack
+class R {
+ class attr // Will have the bytecode name `R$attr`, not to be confused with `R@tr`!
+ class `@`
+}
+
+class `@` \ No newline at end of file
diff --git a/test/files/pos/t7532b/B_2.scala b/test/files/pos/t7532b/B_2.scala
new file mode 100644
index 0000000000..1555a5daa7
--- /dev/null
+++ b/test/files/pos/t7532b/B_2.scala
@@ -0,0 +1,8 @@
+import pack._
+
+object Test {
+ val r = new R
+ new r.attr()
+ new r.`@`
+ new `@`
+} \ No newline at end of file
diff --git a/test/files/run/WeakHashSetTest.scala b/test/files/run/WeakHashSetTest.scala
new file mode 100644
index 0000000000..3c8f380150
--- /dev/null
+++ b/test/files/run/WeakHashSetTest.scala
@@ -0,0 +1,174 @@
+object Test {
+ def main(args: Array[String]) {
+ val test = scala.reflect.internal.util.WeakHashSetTest
+ test.checkEmpty
+ test.checkPlusEquals
+ test.checkPlusEqualsCollisions
+ test.checkRehashing
+ test.checkRehashCollisions
+ test.checkFindOrUpdate
+ test.checkMinusEquals
+ test.checkMinusEqualsCollisions
+ test.checkClear
+ test.checkIterator
+ test.checkIteratorCollisions
+
+ // This test is commented out because it relies on gc behavior which isn't reliable enough in an automated environment
+ // test.checkRemoveUnreferencedObjects
+ }
+}
+
+// put the main test object in the same package as WeakHashSet because
+// it uses the package private "diagnostics" method
+package scala.reflect.internal.util {
+
+ object WeakHashSetTest {
+ // a class guaranteed to provide hash collisions
+ case class Collider(x : String) extends Comparable[Collider] with Serializable {
+ override def hashCode = 0
+ def compareTo(y : Collider) = this.x compareTo y.x
+ }
+
+ // basic emptiness check
+ def checkEmpty {
+ val hs = new WeakHashSet[String]()
+ assert(hs.size == 0)
+ hs.diagnostics.fullyValidate
+ }
+
+ // make sure += works
+ def checkPlusEquals {
+ val hs = new WeakHashSet[String]()
+ val elements = List("hello", "goodbye")
+ elements foreach (hs += _)
+ assert(hs.size == 2)
+ assert(hs contains "hello")
+ assert(hs contains "goodbye")
+ hs.diagnostics.fullyValidate
+ }
+
+ // make sure += works when there are collisions
+ def checkPlusEqualsCollisions {
+ val hs = new WeakHashSet[Collider]()
+ val elements = List("hello", "goodbye") map Collider
+ elements foreach (hs += _)
+ assert(hs.size == 2)
+ assert(hs contains Collider("hello"))
+ assert(hs contains Collider("goodbye"))
+ hs.diagnostics.fullyValidate
+ }
+
+ // add a large number of elements to force rehashing and then validate
+ def checkRehashing {
+ val size = 200
+ val hs = new WeakHashSet[String]()
+ val elements = (0 until size).toList map ("a" + _)
+ elements foreach (hs += _)
+ elements foreach {i => assert(hs contains i)}
+ hs.diagnostics.fullyValidate
+ }
+
+ // make sure the set stays correct after rehashing forced by colliding elements
+ def checkRehashCollisions {
+ val size = 200
+ val hs = new WeakHashSet[Collider]()
+ val elements = (0 until size).toList map {x => Collider("a" + x)}
+ elements foreach (hs += _)
+ elements foreach {i => assert(hs contains i)}
+ hs.diagnostics.fullyValidate
+ }
+
+ // test that unreferenced objects are removed
+ // not run in an automated environment because gc behavior can't be relied on
+ def checkRemoveUnreferencedObjects {
+ val size = 200
+ val hs = new WeakHashSet[Collider]()
+ val elements = (0 until size).toList map {x => Collider("a" + x)}
+ elements foreach (hs += _)
+ // don't throw the following into a retained collection so gc
+ // can remove them
+ for (i <- 0 until size) {
+ hs += Collider("b" + i)
+ }
+ System.gc()
+ Thread.sleep(1000)
+ assert(hs.size == 200)
+ elements foreach {i => assert(hs contains i)}
+ for (i <- 0 until size) {
+ assert(!(hs contains Collider("b" + i)))
+ }
+ hs.diagnostics.fullyValidate
+ }
+
+ // make sure findEntryOrUpdate returns the originally entered element
+ def checkFindOrUpdate {
+ val size = 200
+ val hs = new WeakHashSet[Collider]()
+ val elements = (0 until size).toList map {x => Collider("a" + x)}
+ elements foreach {x => assert(hs findEntryOrUpdate x eq x)}
+ for (i <- 0 until size) {
+ // when we do a lookup the result should be the same reference we
+ // originally put in
+ assert(hs findEntryOrUpdate(Collider("a" + i)) eq elements(i))
+ }
+ hs.diagnostics.fullyValidate
+ }
+
+ // check -= functionality
+ def checkMinusEquals {
+ val hs = new WeakHashSet[String]()
+ val elements = List("hello", "goodbye")
+ elements foreach (hs += _)
+ hs -= "goodbye"
+ assert(hs.size == 1)
+ assert(hs contains "hello")
+ assert(!(hs contains "goodbye"))
+ hs.diagnostics.fullyValidate
+ }
+
+ // check -= when there are collisions
+ def checkMinusEqualsCollisions {
+ val hs = new WeakHashSet[Collider]
+ val elements = List(Collider("hello"), Collider("goodbye"))
+ elements foreach (hs += _)
+ hs -= Collider("goodbye")
+ assert(hs.size == 1)
+ assert(hs contains Collider("hello"))
+ assert(!(hs contains Collider("goodbye")))
+ hs -= Collider("hello")
+ assert(hs.size == 0)
+ assert(!(hs contains Collider("hello")))
+ hs.diagnostics.fullyValidate
+ }
+
+ // check that the clear method actually cleans everything
+ def checkClear {
+ val size = 200
+ val hs = new WeakHashSet[String]()
+ val elements = (0 until size).toList map ("a" + _)
+ elements foreach (hs += _)
+ hs.clear()
+ assert(hs.size == 0)
+ elements foreach {i => assert(!(hs contains i))}
+ hs.diagnostics.fullyValidate
+ }
+
+ // check that the iterator covers all the contents
+ def checkIterator {
+ val hs = new WeakHashSet[String]()
+ val elements = (0 until 20).toList map ("a" + _)
+ elements foreach (hs += _)
+ assert(hs.iterator.toList.sorted == elements.sorted)
+ hs.diagnostics.fullyValidate
+ }
+
+ // check that the iterator covers all the contents even when there is a collision
+ def checkIteratorCollisions {
+ val hs = new WeakHashSet[Collider]
+ val elements = (0 until 20).toList map {x => Collider("a" + x)}
+ elements foreach (hs += _)
+ assert(hs.iterator.toList.sorted == elements.sorted)
+ hs.diagnostics.fullyValidate
+ }
+ }
+}
diff --git a/test/files/run/t7375a.check b/test/files/run/t7375a.check
new file mode 100644
index 0000000000..a0a15dfb2f
--- /dev/null
+++ b/test/files/run/t7375a.check
@@ -0,0 +1,4 @@
+C1
+C2
+C1
+C2
diff --git a/test/files/run/t7375a.scala b/test/files/run/t7375a.scala
new file mode 100644
index 0000000000..e46ad08f63
--- /dev/null
+++ b/test/files/run/t7375a.scala
@@ -0,0 +1,16 @@
+import scala.reflect.ClassTag
+
+class C1(val n: Int) extends AnyVal
+class C2(val n: Int) extends AnyRef
+
+object Test {
+ type F1 = C1
+ type F2 = C2
+
+ def main(args: Array[String]): Unit = {
+ println(implicitly[ClassTag[C1]])
+ println(implicitly[ClassTag[C2]])
+ println(implicitly[ClassTag[F1]])
+ println(implicitly[ClassTag[F2]])
+ }
+}
diff --git a/test/files/run/t7375b.check b/test/files/run/t7375b.check
new file mode 100644
index 0000000000..d7578e28ba
--- /dev/null
+++ b/test/files/run/t7375b.check
@@ -0,0 +1,4 @@
+Predef.this.classOf[C1]
+Predef.this.classOf[C2]
+Predef.this.classOf[C1]
+Predef.this.classOf[C2]
diff --git a/test/files/run/t7375b/Macros_1.scala b/test/files/run/t7375b/Macros_1.scala
new file mode 100644
index 0000000000..70e79cc2b4
--- /dev/null
+++ b/test/files/run/t7375b/Macros_1.scala
@@ -0,0 +1,18 @@
+import language.experimental.macros
+import scala.reflect.macros.Context
+
+class C1(val n: Int) extends AnyVal
+class C2(val n: Int) extends AnyRef
+
+object Macros {
+ type F1 = C1
+ type F2 = C2
+
+ def foo = macro impl
+ def impl(c: Context) = {
+ import c.universe._
+ def test[T: c.TypeTag] = reify(println(c.literal(c.reifyRuntimeClass(c.typeOf[T]).toString).splice)).tree
+ def tests = Block(List(test[C1], test[C2], test[F1], test[F2]), Literal(Constant(())))
+ c.Expr[Unit](tests)
+ }
+} \ No newline at end of file
diff --git a/test/files/run/t7375b/Test_2.scala b/test/files/run/t7375b/Test_2.scala
new file mode 100644
index 0000000000..acfddae942
--- /dev/null
+++ b/test/files/run/t7375b/Test_2.scala
@@ -0,0 +1,3 @@
+object Test extends App {
+ Macros.foo
+} \ No newline at end of file
diff --git a/test/files/run/t7507.scala b/test/files/run/t7507.scala
new file mode 100644
index 0000000000..6c1959ddac
--- /dev/null
+++ b/test/files/run/t7507.scala
@@ -0,0 +1,31 @@
+trait Cake extends Slice
+
+// Minimization
+trait Slice { self: Cake => // must have self type that extends `Slice`
+ private[this] val bippy = () // must be private[this]
+ locally(bippy)
+}
+
+// Originally reported bug:
+trait Cake1 extends Slice1
+trait Slice1 { self: Cake1 =>
+ import java.lang.String // any import will do!
+ val Tuple2(x, y) = ((1, 2))
+}
+
+
+// Nesting
+trait Cake3 extends Outer.Slice3
+
+// Minimization
+object Outer {
+ private[this] val bippy = ()
+ trait Slice3 { self: Cake3 =>
+ locally(bippy)
+ }
+}
+
+object Test extends App {
+ val s1 = new Cake1 {}
+ assert((s1.x, s1.y) == (1, 2), (s1.x, s1.y))
+}
diff --git a/test/files/neg/t7441.check b/test/pending/neg/t7441.check
index f259457197..f259457197 100644
--- a/test/files/neg/t7441.check
+++ b/test/pending/neg/t7441.check
diff --git a/test/files/neg/t7441.scala b/test/pending/neg/t7441.scala
index dad7421e3f..dad7421e3f 100644
--- a/test/files/neg/t7441.scala
+++ b/test/pending/neg/t7441.scala
diff --git a/test/files/pos/t7234.scala b/test/pending/pos/t7234.scala
index 59a233d835..59a233d835 100644
--- a/test/files/pos/t7234.scala
+++ b/test/pending/pos/t7234.scala
diff --git a/test/files/pos/t7234b.scala b/test/pending/pos/t7234b.scala
index fee98e87a8..fee98e87a8 100644
--- a/test/files/pos/t7234b.scala
+++ b/test/pending/pos/t7234b.scala
diff --git a/test/files/pos/t7486.scala b/test/pending/pos/t7486.scala
index 6dd7f4c4ac..6dd7f4c4ac 100644
--- a/test/files/pos/t7486.scala
+++ b/test/pending/pos/t7486.scala