-rw-r--r--  src/compiler/scala/tools/nsc/CompileServer.scala | 44
-rw-r--r--  src/compiler/scala/tools/nsc/interpreter/IMain.scala | 3
-rw-r--r--  src/compiler/scala/tools/nsc/transform/CleanUp.scala | 45
-rw-r--r--  src/compiler/scala/tools/nsc/transform/Erasure.scala | 22
-rw-r--r--  src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala | 12
-rw-r--r--  src/compiler/scala/tools/nsc/transform/LambdaLift.scala | 86
-rw-r--r--  src/compiler/scala/tools/nsc/transform/Mixin.scala | 19
-rw-r--r--  src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/transform/UnCurry.scala | 121
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala | 17
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/RefChecks.scala | 11
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Typers.scala | 8
-rw-r--r--  src/compiler/scala/tools/util/SocketServer.scala | 4
-rw-r--r--  src/library/scala/Array.scala | 10
-rw-r--r--  src/library/scala/collection/mutable/HashSet.scala | 2
-rw-r--r--  src/library/scala/collection/parallel/mutable/ParArray.scala | 3
-rw-r--r--  src/library/scala/collection/parallel/mutable/ParHashSet.scala | 2
-rw-r--r--  src/library/scala/concurrent/BatchingExecutor.scala | 117
-rw-r--r--  src/library/scala/concurrent/Future.scala | 6
-rw-r--r--  src/partest/scala/tools/partest/PartestTask.scala | 2
-rw-r--r--  src/partest/scala/tools/partest/javaagent/ASMTransformer.java | 13
-rw-r--r--  src/partest/scala/tools/partest/javaagent/ProfilerVisitor.java | 13
-rw-r--r--  src/partest/scala/tools/partest/javaagent/ProfilingAgent.java | 2
-rw-r--r--  src/partest/scala/tools/partest/nest/SBTRunner.scala | 2
-rw-r--r--  src/reflect/scala/reflect/internal/Definitions.scala | 25
-rw-r--r--  src/reflect/scala/reflect/internal/TreeInfo.scala | 14
-rw-r--r--  src/reflect/scala/reflect/internal/Types.scala | 6
-rw-r--r--  src/reflect/scala/reflect/macros/Attachments.scala | 14
-rw-r--r--  test/files/instrumented/t6611.check | 1
-rw-r--r--  test/files/instrumented/t6611.scala | 35
-rw-r--r--  test/files/jvm/scala-concurrent-tck.scala | 7
-rw-r--r--  test/files/neg/t6040.check | 4
-rw-r--r--  test/files/neg/t6231.check | 6
-rw-r--r--  test/files/neg/t6231.scala | 15
-rw-r--r--  test/files/neg/t6443c.check | 7
-rw-r--r--  test/files/neg/t6443c.scala | 21
-rw-r--r--  test/files/neg/t6567.check | 7
-rw-r--r--  test/files/neg/t6567.flags | 1
-rw-r--r--  test/files/neg/t6567.scala | 11
-rw-r--r--  test/files/neg/t6666.check | 40
-rw-r--r--  test/files/neg/t6666.scala | 132
-rw-r--r--  test/files/neg/t6902.check | 10
-rw-r--r--  test/files/neg/t6902.flags | 1
-rw-r--r--  test/files/neg/t6902.scala | 23
-rw-r--r--  test/files/neg/t6952.check | 13
-rw-r--r--  test/files/neg/t6952.scala | 4
-rw-r--r--  test/files/pos/t6976/Exts_1.scala | 10
-rw-r--r--  test/files/pos/t6976/ImplicitBug_1.scala | 27
-rw-r--r--  test/files/pos/t6976/ImplicitBug_2.scala | 7
-rw-r--r--  test/files/pos/t6994.flags | 1
-rw-r--r--  test/files/pos/t6994.scala | 8
-rw-r--r--  test/files/pos/t7011.flags | 1
-rw-r--r--  test/files/pos/t7011.scala | 7
-rw-r--r--  test/files/run/reify_magicsymbols.check | 2
-rw-r--r--  test/files/run/t6011c.scala | 13
-rw-r--r--  test/files/run/t6028.check | 2
-rw-r--r--  test/files/run/t6135.scala | 13
-rw-r--r--  test/files/run/t6434.check | 10
-rw-r--r--  test/files/run/t6434.scala | 8
-rw-r--r--  test/files/run/t6439.check | 66
-rw-r--r--  test/files/run/t6439.scala | 22
-rw-r--r--  test/files/run/t6443-by-name.check | 3
-rw-r--r--  test/files/run/t6443-by-name.scala | 18
-rw-r--r--  test/files/run/t6443-varargs.check | 1
-rw-r--r--  test/files/run/t6443-varargs.scala | 16
-rw-r--r--  test/files/run/t6443.scala | 15
-rw-r--r--  test/files/run/t6443b.scala | 16
-rw-r--r--  test/files/run/t6611.scala | 61
-rw-r--r--  test/files/run/t6637.check | 1
-rw-r--r--  test/files/run/t6637.scala | 8
-rw-r--r--  test/files/run/t6863.scala | 114
-rw-r--r--  test/files/run/t6969.check | 1
-rw-r--r--  test/files/run/t6969.scala | 28
-rw-r--r--  test/files/run/t6987.check | 1
-rw-r--r--  test/files/run/t6987.scala | 43
-rwxr-xr-x  test/partest | 8
77 files changed, 1365 insertions, 131 deletions
diff --git a/src/compiler/scala/tools/nsc/CompileServer.scala b/src/compiler/scala/tools/nsc/CompileServer.scala
index c23c1e6154..7a0a072bb8 100644
--- a/src/compiler/scala/tools/nsc/CompileServer.scala
+++ b/src/compiler/scala/tools/nsc/CompileServer.scala
@@ -92,10 +92,11 @@ class StandardCompileServer extends SocketServer {
val args = input.split("\0", -1).toList
val newSettings = new FscSettings(fscError)
- this.verbose = newSettings.verbose.value
val command = newOfflineCompilerCommand(args, newSettings)
+ this.verbose = newSettings.verbose.value
info("Settings after normalizing paths: " + newSettings)
+ if (!command.files.isEmpty) info("Input files after normalizing paths: " + (command.files mkString ","))
printMemoryStats()
// Update the idle timeout if given
@@ -173,11 +174,22 @@ object CompileServer extends StandardCompileServer {
/** A directory holding redirected output */
private lazy val redirectDir = (compileSocket.tmpDir / "output-redirects").createDirectory()
- private def redirect(setter: PrintStream => Unit, filename: String) {
- setter(new PrintStream((redirectDir / filename).createFile().bufferedOutput()))
- }
-
- def main(args: Array[String]) {
+ private def createRedirect(filename: String) =
+ new PrintStream((redirectDir / filename).createFile().bufferedOutput())
+
+ def main(args: Array[String]) =
+ execute(() => (), args)
+
+ /**
+ * Used for internal testing. The callback is called upon
+ * server start, notifying the caller that the server is
+ * ready to run. WARNING: the callback runs in the
+ * server's thread, blocking the server from doing any work
+ * until the callback is finished. Callbacks should be kept
+ * simple and clients should not try to interact with the
+ * server while the callback is processing.
+ */
+ def execute(startupCallback : () => Unit, args: Array[String]) {
val debug = args contains "-v"
if (debug) {
@@ -185,14 +197,16 @@ object CompileServer extends StandardCompileServer {
echo("Redirect dir is " + redirectDir)
}
- redirect(System.setOut, "scala-compile-server-out.log")
- redirect(System.setErr, "scala-compile-server-err.log")
- System.err.println("...starting server on socket "+port+"...")
- System.err.flush()
- compileSocket setPort port
- run()
-
- compileSocket deletePort port
- sys exit 0
+ Console.withErr(createRedirect("scala-compile-server-err.log")) {
+ Console.withOut(createRedirect("scala-compile-server-out.log")) {
+ Console.err.println("...starting server on socket "+port+"...")
+ Console.err.flush()
+ compileSocket setPort port
+ startupCallback()
+ run()
+
+ compileSocket deletePort port
+ }
+ }
}
}
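
The new `execute` entry point above exists for internal testing: the startup callback fires on the server thread once the port is registered. A rough sketch of a test driver follows (hypothetical object name and thread setup; it assumes the object is reachable as scala.tools.nsc.CompileServer, as the file path suggests):

  import java.util.concurrent.CountDownLatch
  import scala.tools.nsc.CompileServer

  object CompileServerSmokeTest {
    def main(args: Array[String]): Unit = {
      val ready = new CountDownLatch(1)
      val serverThread = new Thread(new Runnable {
        // Per the Scaladoc above, the callback runs on the server's own thread,
        // so it only signals readiness and returns immediately.
        def run(): Unit = CompileServer.execute(() => ready.countDown(), Array("-v"))
      })
      serverThread.setDaemon(true)
      serverThread.start()
      ready.await()
      println("compile server started; a client may now connect")
    }
  }
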
diff --git a/src/compiler/scala/tools/nsc/interpreter/IMain.scala b/src/compiler/scala/tools/nsc/interpreter/IMain.scala
index b46d28dec3..a55f0af116 100644
--- a/src/compiler/scala/tools/nsc/interpreter/IMain.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/IMain.scala
@@ -386,6 +386,7 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
oldReq <- definedNameMap get name.companionName
newSym <- req.definedSymbols get name
oldSym <- oldReq.definedSymbols get name.companionName
+ if Seq(oldSym, newSym).permutations exists { case Seq(s1, s2) => s1.isClass && s2.isModule }
} {
afterTyper(replwarn(s"warning: previously defined $oldSym is not a companion to $newSym."))
replwarn("Companions must be defined together; you may wish to use :paste mode for this.")
@@ -970,7 +971,7 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
// }
lazy val definedSymbols = (
termNames.map(x => x -> applyToResultMember(x, x => x)) ++
- typeNames.map(x => x -> compilerTypeOf(x).typeSymbol)
+ typeNames.map(x => x -> compilerTypeOf(x).typeSymbolDirect)
).toMap[Name, Symbol] withDefaultValue NoSymbol
lazy val typesOfDefinedTerms = mapFrom[Name, Name, Type](termNames)(x => applyToResultMember(x, _.tpe))
diff --git a/src/compiler/scala/tools/nsc/transform/CleanUp.scala b/src/compiler/scala/tools/nsc/transform/CleanUp.scala
index 0c9cb31d58..44510ab0c2 100644
--- a/src/compiler/scala/tools/nsc/transform/CleanUp.scala
+++ b/src/compiler/scala/tools/nsc/transform/CleanUp.scala
@@ -15,6 +15,7 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
import global._
import definitions._
import CODE._
+ import treeInfo.StripCast
/** the following two members override abstract members in Transform */
val phaseName: String = "cleanup"
@@ -206,12 +207,17 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
var reflPoly$Cache: SoftReference[scala.runtime.MethodCache] = new SoftReference(new EmptyMethodCache())
def reflMethod$Method(forReceiver: JClass[_]): JMethod = {
- var method: JMethod = reflPoly$Cache.find(forReceiver)
- if (method != null)
+ var methodCache: MethodCache = reflPoly$Cache.find(forReceiver)
+ if (methodCache eq null) {
+ methodCache = new EmptyMethodCache
+ reflPoly$Cache = new SoftReference(methodCache)
+ }
+ var method: JMethod = methodCache.find(forReceiver)
+ if (method ne null)
return method
else {
method = ScalaRunTime.ensureAccessible(forReceiver.getMethod("xyz", reflParams$Cache))
- reflPoly$Cache = new SoftReference(reflPoly$Cache.get.add(forReceiver, method))
+ reflPoly$Cache = new SoftReference(methodCache.add(forReceiver, method))
return method
}
}
@@ -228,16 +234,22 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
def getPolyCache = gen.mkCast(fn(REF(reflPolyCacheSym), nme.get), MethodCacheClass.tpe)
addStaticMethodToClass((reflMethodSym, forReceiverSym) => {
+ val methodCache = reflMethodSym.newVariable(mkTerm("methodCache"), ad.pos) setInfo MethodCacheClass.tpe
val methodSym = reflMethodSym.newVariable(mkTerm("method"), ad.pos) setInfo MethodClass.tpe
BLOCK(
- IF (getPolyCache OBJ_EQ NULL) THEN (REF(reflPolyCacheSym) === mkNewPolyCache) ENDIF,
- VAL(methodSym) === ((getPolyCache DOT methodCache_find)(REF(forReceiverSym))) ,
- IF (REF(methodSym) OBJ_!= NULL) .
+ VAR(methodCache) === getPolyCache,
+ IF (REF(methodCache) OBJ_EQ NULL) THEN BLOCK(
+ REF(methodCache) === NEW(TypeTree(EmptyMethodCacheClass.tpe)),
+ REF(reflPolyCacheSym) === gen.mkSoftRef(REF(methodCache))
+ ) ENDIF,
+
+ VAR(methodSym) === (REF(methodCache) DOT methodCache_find)(REF(forReceiverSym)),
+ IF (REF(methodSym) OBJ_NE NULL) .
THEN (Return(REF(methodSym)))
ELSE {
def methodSymRHS = ((REF(forReceiverSym) DOT Class_getMethod)(LIT(method), REF(reflParamsCacheSym)))
- def cacheRHS = ((getPolyCache DOT methodCache_add)(REF(forReceiverSym), REF(methodSym)))
+ def cacheRHS = ((REF(methodCache) DOT methodCache_add)(REF(forReceiverSym), REF(methodSym)))
BLOCK(
REF(methodSym) === (REF(ensureAccessibleMethod) APPLY (methodSymRHS)),
REF(reflPolyCacheSym) === gen.mkSoftRef(cacheRHS),
@@ -246,6 +258,7 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
}
)
})
+
}
/* ### HANDLING METHODS NORMALLY COMPILED TO OPERATORS ### */
@@ -606,14 +619,16 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
}
transformApply
- // This transform replaces Array(Predef.wrapArray(Array(...)), <tag>)
- // with just Array(...)
- case Apply(appMeth, List(Apply(wrapRefArrayMeth, List(array)), _))
- if (wrapRefArrayMeth.symbol == Predef_wrapRefArray &&
- appMeth.symbol == ArrayModule_overloadedApply.suchThat {
- _.tpe.resultType.dealias.typeSymbol == ObjectClass
- }) =>
- super.transform(array)
+ // Replaces `Array(Predef.wrapArray(ArrayValue(...).$asInstanceOf[...]), <tag>)`
+ // with just `ArrayValue(...).$asInstanceOf[...]`
+ //
+ // See SI-6611; we must *only* do this for literal vararg arrays.
+ case Apply(appMeth, List(Apply(wrapRefArrayMeth, List(arg @ StripCast(ArrayValue(_, _)))), _))
+ if wrapRefArrayMeth.symbol == Predef_wrapRefArray && appMeth.symbol == ArrayModule_genericApply =>
+ super.transform(arg)
+ case Apply(appMeth, List(elem0, Apply(wrapArrayMeth, List(rest @ ArrayValue(elemtpt, _)))))
+ if wrapArrayMeth.symbol == Predef_wrapArray(elemtpt.tpe) && appMeth.symbol == ArrayModule_apply(elemtpt.tpe) =>
+ super.transform(treeCopy.ArrayValue(rest, rest.elemtpt, elem0 :: rest.elems))
case _ =>
super.transform(tree)
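
To make the SI-6611 rewrite above concrete, here is a hedged sketch (ArrayApplyExample is a made-up name): a literal vararg call such as Array("a", "b", "c") now compiles down to a direct array construction instead of copying out of a wrapped vararg array, roughly equivalent to the hand-expanded form below.

  object ArrayApplyExample {
    def main(args: Array[String]): Unit = {
      // Literal vararg call: after cleanup this allocates the result array
      // directly rather than going through Predef.wrapRefArray.
      val direct = Array("a", "b", "c")

      // Roughly what the optimized code amounts to.
      val expanded = {
        val a = new Array[String](3)
        a(0) = "a"; a(1) = "b"; a(2) = "c"
        a
      }

      assert(direct.sameElements(expanded))
    }
  }
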
diff --git a/src/compiler/scala/tools/nsc/transform/Erasure.scala b/src/compiler/scala/tools/nsc/transform/Erasure.scala
index 41aada473a..889d309ba9 100644
--- a/src/compiler/scala/tools/nsc/transform/Erasure.scala
+++ b/src/compiler/scala/tools/nsc/transform/Erasure.scala
@@ -1057,17 +1057,17 @@ abstract class Erasure extends AddInterfaces
Apply(Select(qual, cmpOp), List(gen.mkAttributedQualifier(targ.tpe)))
}
case RefinedType(parents, decls) if (parents.length >= 2) =>
- // Optimization: don't generate isInstanceOf tests if the static type
- // conforms, because it always succeeds. (Or at least it had better.)
- // At this writing the pattern matcher generates some instance tests
- // involving intersections where at least one parent is statically known true.
- // That needs fixing, but filtering the parents here adds an additional
- // level of robustness (in addition to the short term fix.)
- val parentTests = parents filterNot (qual.tpe <:< _)
-
- if (parentTests.isEmpty) Literal(Constant(true))
- else gen.evalOnce(qual, currentOwner, unit) { q =>
- atPos(tree.pos) {
+ gen.evalOnce(qual, currentOwner, unit) { q =>
+ // Optimization: don't generate isInstanceOf tests if the static type
+ // conforms, because it always succeeds. (Or at least it had better.)
+ // At this writing the pattern matcher generates some instance tests
+ // involving intersections where at least one parent is statically known true.
+ // That needs fixing, but filtering the parents here adds an additional
+ // level of robustness (in addition to the short term fix.)
+ val parentTests = parents filterNot (qual.tpe <:< _)
+
+ if (parentTests.isEmpty) Literal(Constant(true))
+ else atPos(tree.pos) {
parentTests map mkIsInstanceOf(q) reduceRight gen.mkAnd
}
}
diff --git a/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala b/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala
index 79e51d5daa..39e16c3f58 100644
--- a/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala
+++ b/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala
@@ -64,14 +64,18 @@ abstract class ExtensionMethods extends Transform with TypingTransformers {
}
}
- /** Return the extension method that corresponds to given instance method `meth`.
- */
+ private def companionModuleForce(sym: Symbol) = {
+ sym.andAlso(_.owner.initialize) // See SI-6976. `companionModule` only calls `rawInfo`. (Why?)
+ sym.companionModule
+ }
+
+ /** Return the extension method that corresponds to given instance method `meth`. */
def extensionMethod(imeth: Symbol): Symbol = atPhase(currentRun.refchecksPhase) {
- val companionInfo = imeth.owner.companionModule.info
+ val companionInfo = companionModuleForce(imeth.owner).info
val candidates = extensionNames(imeth) map (companionInfo.decl(_)) filter (_.exists)
val matching = candidates filter (alt => normalize(alt.tpe, imeth.owner) matches imeth.tpe)
assert(matching.nonEmpty,
- s"no extension method found for $imeth:${imeth.tpe} among ${candidates map (c => c.name+":"+c.tpe)} / ${extensionNames(imeth)}")
+ s"no extension method found for $imeth:${imeth.tpe} among ${candidates.map(c => c.name+":"+c.tpe).toList} / ${extensionNames(imeth).toList}")
matching.head
}
diff --git a/src/compiler/scala/tools/nsc/transform/LambdaLift.scala b/src/compiler/scala/tools/nsc/transform/LambdaLift.scala
index 4a23e65ad2..845843e9d6 100644
--- a/src/compiler/scala/tools/nsc/transform/LambdaLift.scala
+++ b/src/compiler/scala/tools/nsc/transform/LambdaLift.scala
@@ -320,12 +320,24 @@ abstract class LambdaLift extends InfoTransform {
private def memberRef(sym: Symbol) = {
val clazz = sym.owner.enclClass
//Console.println("memberRef from "+currentClass+" to "+sym+" in "+clazz)
- val qual = if (clazz == currentClass) gen.mkAttributedThis(clazz)
- else {
- sym resetFlag(LOCAL | PRIVATE)
- if (clazz.isStaticOwner) gen.mkAttributedQualifier(clazz.thisType)
- else outerPath(outerValue, currentClass.outerClass, clazz)
- }
+ def prematureSelfReference() {
+ val what =
+ if (clazz.isStaticOwner) clazz.fullLocationString
+ else s"the unconstructed `this` of ${clazz.fullLocationString}"
+ val msg = s"Implementation restriction: access of ${sym.fullLocationString} from ${currentClass.fullLocationString}, would require illegal premature access to $what"
+ currentUnit.error(curTree.pos, msg)
+ }
+ val qual =
+ if (clazz == currentClass) gen.mkAttributedThis(clazz)
+ else {
+ sym resetFlag (LOCAL | PRIVATE)
+ if (selfOrSuperCalls exists (_.owner == clazz)) {
+ prematureSelfReference()
+ EmptyTree
+ }
+ else if (clazz.isStaticOwner) gen.mkAttributedQualifier(clazz.thisType)
+ else outerPath(outerValue, currentClass.outerClass, clazz)
+ }
Select(qual, sym) setType sym.tpe
}
@@ -352,6 +364,7 @@ abstract class LambdaLift extends InfoTransform {
copyDefDef(tree)(vparamss = List(vparams ++ freeParams))
case ClassDef(_, _, _, _) =>
+ // SI-6231
// Disabled attempt to to add getters to freeParams
// this does not work yet. Problem is that local symbols need local names
// and references to local symbols need to be transformed into
@@ -369,7 +382,7 @@ abstract class LambdaLift extends InfoTransform {
tree
}
-/* Something like this will be necessary to eliminate the implementation
+/* SI-6231: Something like this will be necessary to eliminate the implementation
* restiction from paramGetter above:
* We need to pass getters to the interface of an implementation class.
private def fixTraitGetters(lifted: List[Tree]): List[Tree] =
@@ -430,28 +443,53 @@ abstract class LambdaLift extends InfoTransform {
/* Creating a constructor argument if one isn't present. */
val constructorArg = rhs match {
case EmptyTree =>
- sym.primaryConstructor.info.paramTypes match {
+ sym.tpe.typeSymbol.primaryConstructor.info.paramTypes match {
case List(tp) => gen.mkZero(tp)
case _ =>
- log("Couldn't determine how to properly construct " + sym)
+ debugwarn("Couldn't determine how to properly construct " + sym)
rhs
}
case arg => arg
}
- /** Wrap expr argument in new *Ref(..) constructor, but make
- * sure that Try expressions stay at toplevel.
+
+ /** Wrap the expr argument in a new *Ref(..) constructor. But try/catch
+ * is a problem because a throw will clear the stack and, after the catch,
+ * we would expect the partially-constructed object to be on the stack
+ * for the call to init. So we recursively
+ * search for "leaf" result expressions where we know it is safe
+ * to put the new *Ref(..) constructor or, if all else fails, transform
+ * an expr to { val temp = expr; new *Ref(temp) }.
+ * The reason we look for try/catch only in captured var definitions
+ * is that other try/catch expressions have already been lifted;
+ * see SI-6863.
+ */
- def refConstr(expr: Tree): Tree = expr match {
+ def refConstr(expr: Tree): Tree = typer.typedPos(expr.pos) {expr match {
+ // very simple expressions can be wrapped in a new *Ref(expr) because they can't have
+ // a try/catch in final expression position.
+ case Ident(_) | Apply(_, _) | Literal(_) | New(_) | Select(_, _) | Throw(_) | Assign(_, _) | ValDef(_, _, _, _) | Return(_) | EmptyTree =>
+ New(sym.tpe, expr)
case Try(block, catches, finalizer) =>
Try(refConstr(block), catches map refConstrCase, finalizer)
+ case Block(stats, expr) =>
+ Block(stats, refConstr(expr))
+ case If(cond, trueBranch, falseBranch) =>
+ If(cond, refConstr(trueBranch), refConstr(falseBranch))
+ case Match(selector, cases) =>
+ Match(selector, cases map refConstrCase)
+ // if we can't figure out what else to do, turn expr into {val temp1 = expr; new *Ref(temp1)} to avoid
+ // any possibility of try/catch in the *Ref constructor. This should be a safe transformation as a default
+ // though it potentially wastes a variable slot. In particular this case handles LabelDefs.
case _ =>
- New(sym.tpe, expr)
- }
+ debuglog("assigning expr to temp: " + (expr.pos))
+ val tempSym = currentOwner.newValue(unit.freshTermName("temp"), expr.pos) setInfo expr.tpe
+ val tempDef = ValDef(tempSym, expr) setPos expr.pos
+ val tempRef = Ident(tempSym) setPos expr.pos
+ Block(tempDef, New(sym.tpe, tempRef))
+ }}
def refConstrCase(cdef: CaseDef): CaseDef =
CaseDef(cdef.pat, cdef.guard, refConstr(cdef.body))
- treeCopy.ValDef(tree, mods, name, tpt1, typer.typedPos(rhs.pos) {
- refConstr(constructorArg)
- })
+
+ treeCopy.ValDef(tree, mods, name, tpt1, refConstr(constructorArg))
} else tree
case Return(Block(stats, value)) =>
Block(stats, treeCopy.Return(tree, value)) setType tree.tpe setPos tree.pos
@@ -495,13 +533,25 @@ abstract class LambdaLift extends InfoTransform {
private def preTransform(tree: Tree) = super.transform(tree) setType lifted(tree.tpe)
+ /** The stack of constructor symbols in which a call to this() or to the super
+ * constructor is active.
+ */
+ private val selfOrSuperCalls = mutable.Stack[Symbol]()
+ @inline private def inSelfOrSuperCall[A](sym: Symbol)(a: => A) = try {
+ selfOrSuperCalls push sym
+ a
+ } finally selfOrSuperCalls.pop()
+
override def transform(tree: Tree): Tree = tree match {
case Select(ReferenceToBoxed(idt), elem) if elem == nme.elem =>
postTransform(preTransform(idt), isBoxedRef = false)
case ReferenceToBoxed(idt) =>
postTransform(preTransform(idt), isBoxedRef = true)
case _ =>
- postTransform(preTransform(tree))
+ def transformTree = postTransform(preTransform(tree))
+ if (treeInfo isSelfOrSuperConstrCall tree)
+ inSelfOrSuperCall(currentOwner)(transformTree)
+ else transformTree
}
/** Transform statements and add lifted definitions to them. */
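
The SI-6863 case that refConstr now handles is a captured mutable local whose initializer is a try/catch. A small self-contained reproduction (names are illustrative only):

  object CapturedVarTryCatch {
    def main(args: Array[String]): Unit = {
      // `msg` is mutable and captured by the closure below, so lambda lift
      // boxes it into an ObjectRef. Because its initializer is a try/catch,
      // the `new ObjectRef(...)` wrapper must be pushed down into the "leaf"
      // expressions rather than wrapped around the whole try.
      var msg = try "ok" catch { case _: Throwable => "failed" }
      val show = () => msg
      msg = msg.toUpperCase
      println(show()) // prints OK
    }
  }
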
diff --git a/src/compiler/scala/tools/nsc/transform/Mixin.scala b/src/compiler/scala/tools/nsc/transform/Mixin.scala
index 57bdaea17a..3cd943aa74 100644
--- a/src/compiler/scala/tools/nsc/transform/Mixin.scala
+++ b/src/compiler/scala/tools/nsc/transform/Mixin.scala
@@ -1215,9 +1215,24 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
// refer to fields in some implementation class via an abstract
// getter in the interface.
val iface = toInterface(sym.owner.tpe).typeSymbol
- val getter = sym getter iface orElse abort("No getter for " + sym + " in " + iface)
+ val ifaceGetter = sym getter iface
+
+ def si6231Restriction() {
+ // See SI-6231 comments in LambdaLift for ideas on how to lift the restriction.
+ val msg = sm"""Implementation restriction: local ${iface.fullLocationString} is unable to automatically capture the
+ |free variable ${sym} on behalf of ${currentClass}. You can manually assign it to a val inside the trait,
+ |and refer that that val in ${currentClass}. For more details, see SI-6231."""
+ reporter.error(tree.pos, msg)
+ }
- typedPos(tree.pos)((qual DOT getter)())
+ if (ifaceGetter == NoSymbol) {
+ if (sym.isParamAccessor) {
+ si6231Restriction()
+ EmptyTree
+ }
+ else abort("No getter for " + sym + " in " + iface)
+ }
+ else typedPos(tree.pos)((qual DOT ifaceGetter)())
case Assign(Apply(lhs @ Select(qual, _), List()), rhs) =>
// assign to fields in some implementation class via an abstract
diff --git a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
index bbab545d9e..64051b56ec 100644
--- a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
+++ b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
@@ -1360,7 +1360,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
debuglog("obtained env: " + e)
e.keySet == env.keySet
} catch {
- case _ =>
+ case _: Throwable =>
debuglog("Could not unify.")
false
}
diff --git a/src/compiler/scala/tools/nsc/transform/UnCurry.scala b/src/compiler/scala/tools/nsc/transform/UnCurry.scala
index 9908bd689e..c07177ec10 100644
--- a/src/compiler/scala/tools/nsc/transform/UnCurry.scala
+++ b/src/compiler/scala/tools/nsc/transform/UnCurry.scala
@@ -603,8 +603,6 @@ abstract class UnCurry extends InfoTransform
}
case ValDef(_, _, _, rhs) =>
if (sym eq NoSymbol) throw new IllegalStateException("Encountered Valdef without symbol: "+ tree + " in "+ unit)
- // a local variable that is mutable and free somewhere later should be lifted
- // as lambda lifting (coming later) will wrap 'rhs' in an Ref object.
if (!sym.owner.isSourceMethod)
withNeedLift(true) { super.transform(tree) }
else
@@ -748,15 +746,22 @@ abstract class UnCurry extends InfoTransform
}
case dd @ DefDef(_, _, _, vparamss0, _, rhs0) =>
- val vparamss1 = vparamss0 match {
- case _ :: Nil => vparamss0
- case _ => vparamss0.flatten :: Nil
- }
+ val (newParamss, newRhs): (List[List[ValDef]], Tree) =
+ if (dependentParamTypeErasure isDependent dd)
+ dependentParamTypeErasure erase dd
+ else {
+ val vparamss1 = vparamss0 match {
+ case _ :: Nil => vparamss0
+ case _ => vparamss0.flatten :: Nil
+ }
+ (vparamss1, rhs0)
+ }
+
val flatdd = copyDefDef(dd)(
- vparamss = vparamss1,
+ vparamss = newParamss,
rhs = nonLocalReturnKeys get dd.symbol match {
- case Some(k) => atPos(rhs0.pos)(nonLocalReturnTry(rhs0, k, dd.symbol))
- case None => rhs0
+ case Some(k) => atPos(newRhs.pos)(nonLocalReturnTry(newRhs, k, dd.symbol))
+ case None => newRhs
}
)
addJavaVarargsForwarders(dd, flatdd)
@@ -782,6 +787,104 @@ abstract class UnCurry extends InfoTransform
}
}
+ /**
+ * When we concatenate parameter lists, formal parameter types that were dependent
+ * on prior parameter values will no longer be correctly scoped.
+ *
+ * For example:
+ *
+ * {{{
+ * def foo(a: A)(b: a.B): a.B = {b; b}
+ * // after uncurry
+ * def foo(a: A, b: a/* NOT IN SCOPE! */.B): a.B = {b; b}
+ * }}}
+ *
+ * This violates the principle that each compiler phase should produce trees that
+ * can be retyped (see [[scala.tools.nsc.typechecker.TreeCheckers]]), and causes
+ * a practical problem in `erasure`: it is not able to correctly determine if
+ * such a signature overrides a corresponding signature in a parent. (SI-6443).
+ *
+ * This transformation erases the dependent method types by:
+ * - Widening the formal parameter type to existentially abstract
+ * over the prior parameters (using `packSymbols`)
+ * - Inserting casts in the method body to cast to the original,
+ * precise type.
+ *
+ * For the example above, this results in:
+ *
+ * {{{
+ * def foo(a: A, b: a.B forSome { val a: A }): a.B = { val b$1 = b.asInstanceOf[a.B]; b$1; b$1 }
+ * }}}
+ */
+ private object dependentParamTypeErasure {
+ sealed abstract class ParamTransform {
+ def param: ValDef
+ }
+ final case class Identity(param: ValDef) extends ParamTransform
+ final case class Packed(param: ValDef, tempVal: ValDef) extends ParamTransform
+
+ def isDependent(dd: DefDef): Boolean =
+ beforeUncurry {
+ val methType = dd.symbol.info
+ methType.isDependentMethodType && mexists(methType.paramss)(_.info exists (_.isImmediatelyDependent))
+ }
+
+ /**
+ * @return (newVparamss, newRhs)
+ */
+ def erase(dd: DefDef): (List[List[ValDef]], Tree) = {
+ import dd.{ vparamss, rhs }
+ val vparamSyms = vparamss flatMap (_ map (_.symbol))
+
+ val paramTransforms: List[ParamTransform] =
+ vparamss.flatten.map { p =>
+ val declaredType = p.symbol.info
+ // existentially abstract over value parameters
+ val packedType = typer.packSymbols(vparamSyms, declaredType)
+ if (packedType =:= declaredType) Identity(p)
+ else {
+ // Change the type of the param symbol
+ p.symbol updateInfo packedType
+
+ // Create a new param tree
+ val newParam: ValDef = copyValDef(p)(tpt = TypeTree(packedType))
+
+ // Within the method body, we'll cast the parameter to the originally
+ // declared type and assign this to a synthetic val. Later, we'll patch
+ // the method body to refer to this, rather than the parameter.
+ val tempVal: ValDef = {
+ val tempValName = unit freshTermName (p.name + "$")
+ val newSym = dd.symbol.newTermSymbol(tempValName, p.pos, SYNTHETIC).setInfo(declaredType)
+ atPos(p.pos)(ValDef(newSym, gen.mkAttributedCast(Ident(p.symbol), declaredType)))
+ }
+ Packed(newParam, tempVal)
+ }
+ }
+
+ val allParams = paramTransforms map (_.param)
+ val (packedParams, tempVals) = paramTransforms.collect {
+ case Packed(param, tempVal) => (param, tempVal)
+ }.unzip
+
+ val rhs1 = localTyper.typedPos(rhs.pos) {
+ // Patch the method body to refer to the temp vals
+ val rhsSubstituted = rhs.substituteSymbols(packedParams map (_.symbol), tempVals map (_.symbol))
+ // The new method body: { val p$1 = p.asInstanceOf[<dependent type>]; ...; <rhsSubstituted> }
+ Block(tempVals, rhsSubstituted)
+ }
+
+ // update the type of the method after uncurry.
+ dd.symbol updateInfo {
+ val GenPolyType(tparams, tp) = dd.symbol.info
+ logResult(s"erased dependent param types for ${dd.symbol.info}") {
+ GenPolyType(tparams, MethodType(allParams map (_.symbol), tp.finalResultType))
+ }
+ }
+ (allParams :: Nil, rhs1)
+ }
+ }
+
+
/* Analyzes repeated params if method is annotated as `varargs`.
* If the repeated params exist, it saves them into the `repeatedParams` map,
* which is used later.
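
For reference, a minimal source-level example of the dependent-method-type case erased above (class and method names are made up for illustration):

  object DependentParamExample {
    class A { type B = Int }

    // Before uncurry the second parameter's type depends on the first one.
    // After the parameter lists are flattened, `erase` widens `a.B` to an
    // existential and casts back in the body, conceptually:
    //   def foo(a: A, b: a.B forSome { val a: A }): Int =
    //     { val b$1 = b.asInstanceOf[a.B]; b$1 }
    def foo(a: A)(b: a.B): a.B = b

    def main(args: Array[String]): Unit = {
      val a = new A
      println(foo(a)(42)) // prints 42
    }
  }
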
diff --git a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
index 4268398081..dc367b11fd 100644
--- a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
@@ -730,7 +730,7 @@ trait ContextErrors {
} catch {
// the code above tries various tricks to detect the relevant portion of the stack trace
// if these tricks fail, just fall back to uninformative, but better than nothing, getMessage
- case NonFatal(ex) =>
+ case NonFatal(ex) => // currently giving a spurious warning, see SI-6994
macroLogVerbose("got an exception when processing a macro generated exception\n" +
"offender = " + stackTraceString(realex) + "\n" +
"error = " + stackTraceString(ex))
diff --git a/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala b/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala
index cdceb2d992..69bbab6e42 100644
--- a/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala
@@ -3244,6 +3244,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
// TODO: make more fine-grained, as we don't always need to jump
def canJump: Boolean
+ /** Should exhaustivity analysis be skipped? */
def unchecked: Boolean
@@ -3477,12 +3478,10 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
case Some(cds) => cds
}
- val allReachable = unchecked || {
- // a switch with duplicate cases yields a verify error,
- // and a switch with duplicate cases and guards cannot soundly be rewritten to an unguarded switch
- // (even though the verify error would disappear, the behaviour would change)
- unreachableCase(caseDefsWithGuards) map (cd => reportUnreachable(cd.body.pos)) isEmpty
- }
+ // a switch with duplicate cases yields a verify error,
+ // and a switch with duplicate cases and guards cannot soundly be rewritten to an unguarded switch
+ // (even though the verify error would disappear, the behaviour would change)
+ val allReachable = unreachableCase(caseDefsWithGuards) map (cd => reportUnreachable(cd.body.pos)) isEmpty
if (!allReachable) Nil
else if (noGuards(caseDefsWithGuards)) {
@@ -3731,10 +3730,10 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
with SymbolicMatchAnalysis
with DPLLSolver { self: TreeMakers =>
override def optimizeCases(prevBinder: Symbol, cases: List[List[TreeMaker]], pt: Type, unchecked: Boolean): (List[List[TreeMaker]], List[Tree]) = {
+ unreachableCase(prevBinder, cases, pt) foreach { caseIndex =>
+ reportUnreachable(cases(caseIndex).last.pos)
+ }
if (!unchecked) {
- unreachableCase(prevBinder, cases, pt) foreach { caseIndex =>
- reportUnreachable(cases(caseIndex).last.pos)
- }
val counterExamples = exhaustive(prevBinder, cases, pt)
if (counterExamples.nonEmpty)
reportMissingCases(prevBinder.pos, counterExamples)
diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
index 7118375b82..969bb8aceb 100644
--- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
@@ -1062,6 +1062,12 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
def apply(tp: Type) = mapOver(tp).normalize
}
+ def checkImplicitViewOptionApply(pos: Position, fn: Tree, args: List[Tree]): Unit = if (settings.lint.value) (fn, args) match {
+ case (tap@TypeApply(fun, targs), List(view: ApplyImplicitView)) if fun.symbol == Option_apply =>
+ unit.warning(pos, s"Suspicious application of an implicit view (${view.fun}) in the argument to Option.apply.") // SI-6567
+ case _ =>
+ }
+
def checkSensible(pos: Position, fn: Tree, args: List[Tree]) = fn match {
case Select(qual, name @ (nme.EQ | nme.NE | nme.eq | nme.ne)) if args.length == 1 =>
def isReferenceOp = name == nme.eq || name == nme.ne
@@ -1563,7 +1569,10 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
case Apply(fn, args) =>
// sensicality should be subsumed by the unreachability/exhaustivity/irrefutability analyses in the pattern matcher
- if (!inPattern) checkSensible(tree.pos, fn, args)
+ if (!inPattern) {
+ checkImplicitViewOptionApply(tree.pos, fn, args)
+ checkSensible(tree.pos, fn, args)
+ }
currentApplication = tree
tree
}
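
The new -Xlint check above targets code like the following sketch, where an implicit view fires inside Option.apply and silently defeats the null check (the types and the view are hypothetical):

  object OptionApplyLintExample {
    class A
    class B
    implicit def a2b(a: A): B = new B

    // Under -Xlint this application now warns (SI-6567): the implicit view
    // a2b is applied to `a` before Option's null check, so a null A yields
    // Some(a2b(null)) rather than None.
    def wrap(a: A): Option[B] = Option(a)

    def main(args: Array[String]): Unit =
      println(wrap(null)) // Some(...), not None
  }
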
diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
index cbcddba487..553583e6b7 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
@@ -1748,8 +1748,8 @@ trait Typers extends Modes with Adaptations with Tags {
*/
def validateParentClasses(parents: List[Tree], selfType: Type) {
val pending = ListBuffer[AbsTypeError]()
- def validateDynamicParent(parent: Symbol) =
- if (parent == DynamicClass) checkFeature(parent.pos, DynamicsFeature)
+ def validateDynamicParent(parent: Symbol, parentPos: Position) =
+ if (parent == DynamicClass) checkFeature(parentPos, DynamicsFeature)
def validateParentClass(parent: Tree, superclazz: Symbol) =
if (!parent.isErrorTyped) {
@@ -1799,7 +1799,7 @@ trait Typers extends Modes with Adaptations with Tags {
if (parents exists (p => p != parent && p.tpe.typeSymbol == psym && !psym.isError))
pending += ParentInheritedTwiceError(parent, psym)
- validateDynamicParent(psym)
+ validateDynamicParent(psym, parent.pos)
}
if (!parents.isEmpty && parents.forall(!_.isErrorTyped)) {
@@ -5367,7 +5367,7 @@ trait Typers extends Modes with Adaptations with Tags {
var block1 = typed(tree.block, pt)
var catches1 = typedCases(tree.catches, ThrowableClass.tpe, pt)
- for (cdef <- catches1 if cdef.guard.isEmpty) {
+ for (cdef <- catches1 if !isPastTyper && cdef.guard.isEmpty) {
def warn(name: Name) = context.warning(cdef.pat.pos, s"This catches all Throwables. If this is really intended, use `case ${name.decoded} : Throwable` to clear this warning.")
def unbound(t: Tree) = t.symbol == null || t.symbol == NoSymbol
cdef.pat match {
diff --git a/src/compiler/scala/tools/util/SocketServer.scala b/src/compiler/scala/tools/util/SocketServer.scala
index d29a370c28..1b06ce2ff2 100644
--- a/src/compiler/scala/tools/util/SocketServer.scala
+++ b/src/compiler/scala/tools/util/SocketServer.scala
@@ -16,8 +16,8 @@ trait CompileOutputCommon {
def verbose: Boolean
def info(msg: String) = if (verbose) echo(msg)
- def echo(msg: String) = Console println msg
- def warn(msg: String) = System.err println msg
+ def echo(msg: String) = {Console println msg; Console.flush}
+ def warn(msg: String) = {Console.err println msg; Console.flush}
def fatal(msg: String) = { warn(msg) ; sys.exit(1) }
}
diff --git a/src/library/scala/Array.scala b/src/library/scala/Array.scala
index 90684b5fdd..b9f51803ec 100644
--- a/src/library/scala/Array.scala
+++ b/src/library/scala/Array.scala
@@ -115,6 +115,8 @@ object Array extends FallbackArrayBuilding {
* @param xs the elements to put in the array
* @return an array containing all elements from xs.
*/
+ // Subject to a compiler optimization in Cleanup.
+ // Array(e0, ..., en) is translated to { val a = new Array(n + 1); a(i) = ei; a }
def apply[T: ClassTag](xs: T*): Array[T] = {
val array = new Array[T](xs.length)
var i = 0
@@ -123,6 +125,7 @@ object Array extends FallbackArrayBuilding {
}
/** Creates an array of `Boolean` objects */
+ // Subject to a compiler optimization in Cleanup, see above.
def apply(x: Boolean, xs: Boolean*): Array[Boolean] = {
val array = new Array[Boolean](xs.length + 1)
array(0) = x
@@ -132,6 +135,7 @@ object Array extends FallbackArrayBuilding {
}
/** Creates an array of `Byte` objects */
+ // Subject to a compiler optimization in Cleanup, see above.
def apply(x: Byte, xs: Byte*): Array[Byte] = {
val array = new Array[Byte](xs.length + 1)
array(0) = x
@@ -141,6 +145,7 @@ object Array extends FallbackArrayBuilding {
}
/** Creates an array of `Short` objects */
+ // Subject to a compiler optimization in Cleanup, see above.
def apply(x: Short, xs: Short*): Array[Short] = {
val array = new Array[Short](xs.length + 1)
array(0) = x
@@ -150,6 +155,7 @@ object Array extends FallbackArrayBuilding {
}
/** Creates an array of `Char` objects */
+ // Subject to a compiler optimization in Cleanup, see above.
def apply(x: Char, xs: Char*): Array[Char] = {
val array = new Array[Char](xs.length + 1)
array(0) = x
@@ -159,6 +165,7 @@ object Array extends FallbackArrayBuilding {
}
/** Creates an array of `Int` objects */
+ // Subject to a compiler optimization in Cleanup, see above.
def apply(x: Int, xs: Int*): Array[Int] = {
val array = new Array[Int](xs.length + 1)
array(0) = x
@@ -168,6 +175,7 @@ object Array extends FallbackArrayBuilding {
}
/** Creates an array of `Long` objects */
+ // Subject to a compiler optimization in Cleanup, see above.
def apply(x: Long, xs: Long*): Array[Long] = {
val array = new Array[Long](xs.length + 1)
array(0) = x
@@ -177,6 +185,7 @@ object Array extends FallbackArrayBuilding {
}
/** Creates an array of `Float` objects */
+ // Subject to a compiler optimization in Cleanup, see above.
def apply(x: Float, xs: Float*): Array[Float] = {
val array = new Array[Float](xs.length + 1)
array(0) = x
@@ -186,6 +195,7 @@ object Array extends FallbackArrayBuilding {
}
/** Creates an array of `Double` objects */
+ // Subject to a compiler optimization in Cleanup, see above.
def apply(x: Double, xs: Double*): Array[Double] = {
val array = new Array[Double](xs.length + 1)
array(0) = x
diff --git a/src/library/scala/collection/mutable/HashSet.scala b/src/library/scala/collection/mutable/HashSet.scala
index 74f2a6c762..c60e363f8f 100644
--- a/src/library/scala/collection/mutable/HashSet.scala
+++ b/src/library/scala/collection/mutable/HashSet.scala
@@ -88,7 +88,7 @@ extends AbstractSet[A]
}
private def readObject(in: java.io.ObjectInputStream) {
- init(in, x => x)
+ init(in, x => ())
}
/** Toggles whether a size map is used to track hash map statistics.
diff --git a/src/library/scala/collection/parallel/mutable/ParArray.scala b/src/library/scala/collection/parallel/mutable/ParArray.scala
index e4c8e5fae2..0a4f30131f 100644
--- a/src/library/scala/collection/parallel/mutable/ParArray.scala
+++ b/src/library/scala/collection/parallel/mutable/ParArray.scala
@@ -469,7 +469,6 @@ self =>
Array.copy(arr, i, targetarr, 0, until - i)
pac.buff.size = pac.buff.size + until - i
pac.buff.lastPtr.size = until - i
- pac
} otherwise {
copy2builder_quick(cb, arr, until, i)
i = until
@@ -531,7 +530,6 @@ self =>
val targetarr: Array[Any] = pac.lastbuff.internalArray.asInstanceOf[Array[Any]]
reverse2combiner_quick(targetarr, arr, 0, i, until)
pac.lastbuff.setInternalSize(sz)
- pac
} otherwise {
cb.ifIs[UnrolledParArrayCombiner[T]] {
pac =>
@@ -542,7 +540,6 @@ self =>
reverse2combiner_quick(targetarr, arr, 0, i, until)
pac.buff.size = pac.buff.size + sz
pac.buff.lastPtr.size = sz
- pac
} otherwise super.reverse2combiner(cb)
}
cb
diff --git a/src/library/scala/collection/parallel/mutable/ParHashSet.scala b/src/library/scala/collection/parallel/mutable/ParHashSet.scala
index 3b1278f3be..57fab57348 100644
--- a/src/library/scala/collection/parallel/mutable/ParHashSet.scala
+++ b/src/library/scala/collection/parallel/mutable/ParHashSet.scala
@@ -85,7 +85,7 @@ extends ParSet[T]
}
private def readObject(in: java.io.ObjectInputStream) {
- init(in, x => x)
+ init(in, x => ())
}
import scala.collection.DebugUtils._
diff --git a/src/library/scala/concurrent/BatchingExecutor.scala b/src/library/scala/concurrent/BatchingExecutor.scala
new file mode 100644
index 0000000000..a0d7aaea47
--- /dev/null
+++ b/src/library/scala/concurrent/BatchingExecutor.scala
@@ -0,0 +1,117 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.concurrent
+
+import java.util.concurrent.Executor
+import scala.annotation.tailrec
+
+/**
+ * Mixin trait for an Executor
+ * which groups multiple nested `Runnable.run()` calls
+ * into a single Runnable passed to the original
+ * Executor. This can be a useful optimization
+ * because it bypasses the original context's task
+ * queue and keeps related (nested) code on a single
+ * thread which may improve CPU affinity. However,
+ * if tasks passed to the Executor are blocking
+ * or expensive, this optimization can prevent work-stealing
+ * and make performance worse. Also, some ExecutionContext
+ * may be fast enough natively that this optimization just
+ * adds overhead.
+ * The default ExecutionContext.global is already batching
+ * or fast enough not to benefit from it; while
+ * `fromExecutor` and `fromExecutorService` do NOT add
+ * this optimization since they don't know whether the underlying
+ * executor will benefit from it.
+ * A batching executor can create deadlocks if code does
+ * not use `scala.concurrent.blocking` when it should,
+ * because tasks created within other tasks will block
+ * on the outer task completing.
+ * This executor may run tasks in any order, including LIFO order.
+ * There are no ordering guarantees.
+ *
+ * WARNING: The underlying Executor's execute-method must not execute the submitted Runnable
+ * in the calling thread synchronously. It must enqueue/handoff the Runnable.
+ */
+private[concurrent] trait BatchingExecutor extends Executor {
+
+ // invariant: if "_tasksLocal.get ne null" then we are inside Batch.run; if it is null, we are outside
+ private val _tasksLocal = new ThreadLocal[List[Runnable]]()
+
+ private class Batch(val initial: List[Runnable]) extends Runnable with BlockContext {
+ private var parentBlockContext: BlockContext = _
+ // this method runs in the delegate ExecutionContext's thread
+ override def run(): Unit = {
+ require(_tasksLocal.get eq null)
+
+ val prevBlockContext = BlockContext.current
+ BlockContext.withBlockContext(this) {
+ try {
+ parentBlockContext = prevBlockContext
+
+ @tailrec def processBatch(batch: List[Runnable]): Unit = batch match {
+ case Nil => ()
+ case head :: tail =>
+ _tasksLocal set tail
+ try {
+ head.run()
+ } catch {
+ case t: Throwable =>
+ // if one task throws, move the
+ // remaining tasks to another thread
+ // so we can throw the exception
+ // up to the invoking executor
+ val remaining = _tasksLocal.get
+ _tasksLocal set Nil
+ unbatchedExecute(new Batch(remaining)) //TODO what if this submission fails?
+ throw t // rethrow
+ }
+ processBatch(_tasksLocal.get) // since head.run() can add entries, always do _tasksLocal.get here
+ }
+
+ processBatch(initial)
+ } finally {
+ _tasksLocal.remove()
+ parentBlockContext = null
+ }
+ }
+ }
+
+ override def blockOn[T](thunk: => T)(implicit permission: CanAwait): T = {
+ // if we know there will be blocking, we don't want to keep tasks queued up because it could deadlock.
+ {
+ val tasks = _tasksLocal.get
+ _tasksLocal set Nil
+ if ((tasks ne null) && tasks.nonEmpty)
+ unbatchedExecute(new Batch(tasks))
+ }
+
+ // now delegate the blocking to the previous BC
+ require(parentBlockContext ne null)
+ parentBlockContext.blockOn(thunk)
+ }
+ }
+
+ protected def unbatchedExecute(r: Runnable): Unit
+
+ override def execute(runnable: Runnable): Unit = {
+ if (batchable(runnable)) { // If we can batch the runnable
+ _tasksLocal.get match {
+ case null => unbatchedExecute(new Batch(List(runnable))) // If we aren't in batching mode yet, enqueue batch
+ case some => _tasksLocal.set(runnable :: some) // If we are already in batching mode, add to batch
+ }
+ } else unbatchedExecute(runnable) // If not batchable, just delegate to underlying
+ }
+
+ /** Override this to define which runnables will be batched. */
+ def batchable(runnable: Runnable): Boolean = runnable match {
+ case _: OnCompleteRunnable => true
+ case _ => false
+ }
+}
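
The batching matters for deeply nested future callbacks. The new tck test later in this commit exercises it roughly as in this sketch, which assumes the default global execution context:

  import scala.concurrent.{ Future, Promise }
  import scala.concurrent.ExecutionContext.Implicits.global

  object NestedCallbacks {
    def main(args: Array[String]): Unit = {
      // Build a long chain of flatMap callbacks hanging off a single promise.
      // With the internal callback executor batching nested runnables,
      // completing the promise no longer risks a StackOverflowError.
      val promise = Promise[Int]()
      (0 to 10000).map(Future(_)).foldLeft(promise.future)((f1, f2) => f2.flatMap(_ => f1))
      promise.success(-1)
      println("completed without overflowing the stack")
    }
  }
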
diff --git a/src/library/scala/concurrent/Future.scala b/src/library/scala/concurrent/Future.scala
index 4b9e74708d..36f3be341f 100644
--- a/src/library/scala/concurrent/Future.scala
+++ b/src/library/scala/concurrent/Future.scala
@@ -675,9 +675,9 @@ object Future {
// by just not ever using it itself. scala.concurrent
// doesn't need to create defaultExecutionContext as
// a side effect.
- private[concurrent] object InternalCallbackExecutor extends ExecutionContext {
- override def execute(runnable: Runnable): Unit =
- runnable.run()
+ private[concurrent] object InternalCallbackExecutor extends ExecutionContext with BatchingExecutor {
+ override protected def unbatchedExecute(r: Runnable): Unit =
+ r.run()
override def reportFailure(t: Throwable): Unit =
throw new IllegalStateException("problem in scala.concurrent internal callback", t)
}
diff --git a/src/partest/scala/tools/partest/PartestTask.scala b/src/partest/scala/tools/partest/PartestTask.scala
index d9f2bfe765..0199400ada 100644
--- a/src/partest/scala/tools/partest/PartestTask.scala
+++ b/src/partest/scala/tools/partest/PartestTask.scala
@@ -355,7 +355,7 @@ class PartestTask extends Task with CompilationPathProperty {
javacmd foreach (x => antFileManager.JAVACMD = x.getAbsolutePath)
javaccmd foreach (x => antFileManager.JAVAC_CMD = x.getAbsolutePath)
- scalacArgsFlat foreach (antFileManager.SCALAC_OPTS = _)
+ scalacArgsFlat foreach (antFileManager.SCALAC_OPTS ++= _)
timeout foreach (antFileManager.timeout = _)
type TFSet = (Array[File], String, String)
diff --git a/src/partest/scala/tools/partest/javaagent/ASMTransformer.java b/src/partest/scala/tools/partest/javaagent/ASMTransformer.java
index 494a5a99be..878c8613d5 100644
--- a/src/partest/scala/tools/partest/javaagent/ASMTransformer.java
+++ b/src/partest/scala/tools/partest/javaagent/ASMTransformer.java
@@ -26,9 +26,18 @@ public class ASMTransformer implements ClassFileTransformer {
className.startsWith("instrumented/"));
}
- public byte[] transform(ClassLoader loader, String className, Class<?> classBeingRedefined, ProtectionDomain protectionDomain, byte[] classfileBuffer) {
+ public byte[] transform(final ClassLoader classLoader, final String className, Class<?> classBeingRedefined, ProtectionDomain protectionDomain, byte[] classfileBuffer) {
if (shouldTransform(className)) {
- ClassWriter writer = new ClassWriter(ClassWriter.COMPUTE_FRAMES | ClassWriter.COMPUTE_MAXS);
+ ClassWriter writer = new ClassWriter(ClassWriter.COMPUTE_MAXS) {
+ @Override protected String getCommonSuperClass(final String type1, final String type2) {
+ // Since we are not recomputing the stack frame map, this should never be called. We override it because the
+ // default implementation uses reflection and might try to load the class that we are
+ // currently processing. That leads to weird results like swallowed exceptions and classes not being
+ // transformed.
+ throw new RuntimeException("Unexpected call to getCommonSuperClass(" + type1 + ", " + type2 +
+ ") while transforming " + className);
+ }
+ };
ProfilerVisitor visitor = new ProfilerVisitor(writer);
ClassReader reader = new ClassReader(classfileBuffer);
reader.accept(visitor, 0);
diff --git a/src/partest/scala/tools/partest/javaagent/ProfilerVisitor.java b/src/partest/scala/tools/partest/javaagent/ProfilerVisitor.java
index ac83f66506..8306327b14 100644
--- a/src/partest/scala/tools/partest/javaagent/ProfilerVisitor.java
+++ b/src/partest/scala/tools/partest/javaagent/ProfilerVisitor.java
@@ -33,6 +33,19 @@ public class ProfilerVisitor extends ClassVisitor implements Opcodes {
// only instrument non-abstract methods
if((access & ACC_ABSTRACT) == 0) {
assert(className != null);
+ /* The following instructions do not modify the compressed stack frame map, so
+ * we don't need to worry about recalculating it. Specifically,
+ * let's quote "ASM 4.0, A Java bytecode engineering library" guide (p. 40):
+ *
+ * In order to save space, a compiled method does not contain one frame per
+ * instruction: in fact it contains only the frames for the instructions
+ * that correspond to jump targets or exception handlers, or that follow
+ * unconditional jump instructions. Indeed the other frames can be easily
+ * and quickly inferred from these ones.
+ *
+ * The instructions below just load constants and call a method, so according
+ * to the definition above they do not contribute to the compressed stack frame map.
+ */
mv.visitLdcInsn(className);
mv.visitLdcInsn(name);
mv.visitLdcInsn(desc);
diff --git a/src/partest/scala/tools/partest/javaagent/ProfilingAgent.java b/src/partest/scala/tools/partest/javaagent/ProfilingAgent.java
index c2e4dc69f4..3b18987040 100644
--- a/src/partest/scala/tools/partest/javaagent/ProfilingAgent.java
+++ b/src/partest/scala/tools/partest/javaagent/ProfilingAgent.java
@@ -20,6 +20,6 @@ public class ProfilingAgent {
// and the test-case itself won't be loaded yet. We rely here on the fact that ASMTransformer does
// not depend on Scala library. In case our assumptions are wrong we can always insert call to
// inst.retransformClasses.
- inst.addTransformer(new ASMTransformer(), true);
+ inst.addTransformer(new ASMTransformer(), false);
}
}
diff --git a/src/partest/scala/tools/partest/nest/SBTRunner.scala b/src/partest/scala/tools/partest/nest/SBTRunner.scala
index b0ce6579ac..20f9c701d5 100644
--- a/src/partest/scala/tools/partest/nest/SBTRunner.scala
+++ b/src/partest/scala/tools/partest/nest/SBTRunner.scala
@@ -46,7 +46,7 @@ object SBTRunner extends DirectRunner {
case x => sys.error("Unknown command line options: " + x)
}
val config = parseArgs(args, CommandLineOptions())
- fileManager.SCALAC_OPTS = config.scalacOptions
+ fileManager.SCALAC_OPTS ++= config.scalacOptions
fileManager.CLASSPATH = config.classpath getOrElse sys.error("No classpath set")
def findClasspath(jar: String, name: String): Option[String] = {
diff --git a/src/reflect/scala/reflect/internal/Definitions.scala b/src/reflect/scala/reflect/internal/Definitions.scala
index 2a7b55cb5a..4269b65297 100644
--- a/src/reflect/scala/reflect/internal/Definitions.scala
+++ b/src/reflect/scala/reflect/internal/Definitions.scala
@@ -337,12 +337,13 @@ trait Definitions extends api.StandardDefinitions {
lazy val PredefModule = requiredModule[scala.Predef.type]
lazy val PredefModuleClass = PredefModule.moduleClass
- def Predef_classOf = getMemberMethod(PredefModule, nme.classOf)
- def Predef_identity = getMemberMethod(PredefModule, nme.identity)
- def Predef_conforms = getMemberMethod(PredefModule, nme.conforms)
- def Predef_wrapRefArray = getMemberMethod(PredefModule, nme.wrapRefArray)
- def Predef_??? = getMemberMethod(PredefModule, nme.???)
- def Predef_implicitly = getMemberMethod(PredefModule, nme.implicitly)
+ def Predef_classOf = getMemberMethod(PredefModule, nme.classOf)
+ def Predef_identity = getMemberMethod(PredefModule, nme.identity)
+ def Predef_conforms = getMemberMethod(PredefModule, nme.conforms)
+ def Predef_wrapRefArray = getMemberMethod(PredefModule, nme.wrapRefArray)
+ def Predef_wrapArray(tp: Type) = getMemberMethod(PredefModule, wrapArrayMethodName(tp))
+ def Predef_??? = getMemberMethod(PredefModule, nme.???)
+ def Predef_implicitly = getMemberMethod(PredefModule, nme.implicitly)
/** Is `sym` a member of Predef with the given name?
* Note: DON't replace this by sym == Predef_conforms/etc, as Predef_conforms is a `def`
@@ -466,6 +467,8 @@ trait Definitions extends api.StandardDefinitions {
// arrays and their members
lazy val ArrayModule = requiredModule[scala.Array.type]
lazy val ArrayModule_overloadedApply = getMemberMethod(ArrayModule, nme.apply)
+ def ArrayModule_genericApply = ArrayModule_overloadedApply.suchThat(_.paramss.flatten.last.tpe.typeSymbol == ClassTagClass) // [T: ClassTag](xs: T*): Array[T]
+ def ArrayModule_apply(tp: Type) = ArrayModule_overloadedApply.suchThat(_.tpe.resultType =:= arrayType(tp)) // (p1: AnyVal1, ps: AnyVal1*): Array[AnyVal1]
lazy val ArrayClass = getRequiredClass("scala.Array") // requiredClass[scala.Array[_]]
lazy val Array_apply = getMemberMethod(ArrayClass, nme.apply)
lazy val Array_update = getMemberMethod(ArrayClass, nme.update)
@@ -536,10 +539,12 @@ trait Definitions extends api.StandardDefinitions {
lazy val ScalaLongSignatureAnnotation = requiredClass[scala.reflect.ScalaLongSignature]
// Option classes
- lazy val OptionClass: ClassSymbol = requiredClass[Option[_]]
- lazy val SomeClass: ClassSymbol = requiredClass[Some[_]]
- lazy val NoneModule: ModuleSymbol = requiredModule[scala.None.type]
- lazy val SomeModule: ModuleSymbol = requiredModule[scala.Some.type]
+ lazy val OptionClass: ClassSymbol = requiredClass[Option[_]]
+ lazy val OptionModule: ModuleSymbol = requiredModule[scala.Option.type]
+ lazy val Option_apply = getMemberMethod(OptionModule, nme.apply)
+ lazy val SomeClass: ClassSymbol = requiredClass[Some[_]]
+ lazy val NoneModule: ModuleSymbol = requiredModule[scala.None.type]
+ lazy val SomeModule: ModuleSymbol = requiredModule[scala.Some.type]
def compilerTypeFromTag(tt: ApiUniverse # WeakTypeTag[_]): Type = tt.in(rootMirror).tpe
def compilerSymbolFromTag(tt: ApiUniverse # WeakTypeTag[_]): Symbol = tt.in(rootMirror).tpe.typeSymbol
diff --git a/src/reflect/scala/reflect/internal/TreeInfo.scala b/src/reflect/scala/reflect/internal/TreeInfo.scala
index 8908036442..e1a18570b2 100644
--- a/src/reflect/scala/reflect/internal/TreeInfo.scala
+++ b/src/reflect/scala/reflect/internal/TreeInfo.scala
@@ -234,6 +234,20 @@ abstract class TreeInfo {
tree
}
+ /** Strips layers of `.asInstanceOf[T]` / `_.$asInstanceOf[T]()` from an expression */
+ def stripCast(tree: Tree): Tree = tree match {
+ case TypeApply(sel @ Select(inner, _), _) if isCastSymbol(sel.symbol) =>
+ stripCast(inner)
+ case Apply(TypeApply(sel @ Select(inner, _), _), Nil) if isCastSymbol(sel.symbol) =>
+ stripCast(inner)
+ case t =>
+ t
+ }
+
+ object StripCast {
+ def unapply(tree: Tree): Some[Tree] = Some(stripCast(tree))
+ }
+
/** Is tree a self or super constructor call? */
def isSelfOrSuperConstrCall(tree: Tree) = {
// stripNamedApply for SI-3584: adaptToImplicitMethod in Typers creates a special context
diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala
index c2637e6967..dbc00edb1a 100644
--- a/src/reflect/scala/reflect/internal/Types.scala
+++ b/src/reflect/scala/reflect/internal/Types.scala
@@ -2481,8 +2481,10 @@ trait Types extends api.Types { self: SymbolTable =>
// from (T1, T2) => R.
targs match {
case in :: out :: Nil if !isTupleType(in) =>
- // A => B => C should be (A => B) => C or A => (B => C)
- val in_s = if (isFunctionType(in)) "(" + in + ")" else "" + in
+ // A => B => C should be (A => B) => C or A => (B => C).
+ // Also, if A is by-name, then we want (=> A) => B, because => is right-associative and => A => B
+ // would mean => (A => B), which is a different type.
+ val in_s = if (isFunctionType(in) || isByNameParamType(in)) "(" + in + ")" else "" + in
val out_s = if (isFunctionType(out)) "(" + out + ")" else "" + out
in_s + " => " + out_s
case xs =>
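
The printing change is easiest to see with a runtime-reflection one-liner (a sketch only; scala-reflect must be on the classpath):

  import scala.reflect.runtime.universe._

  object ByNameFunctionPrinting {
    def main(args: Array[String]): Unit = {
      // With the fix this prints "(=> Int) => Int"; previously it printed
      // the ambiguous "=> Int => Int", which reads as => (Int => Int).
      println(typeOf[(=> Int) => Int])
    }
  }
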
diff --git a/src/reflect/scala/reflect/macros/Attachments.scala b/src/reflect/scala/reflect/macros/Attachments.scala
index a77cebf415..eeb87fafcc 100644
--- a/src/reflect/scala/reflect/macros/Attachments.scala
+++ b/src/reflect/scala/reflect/macros/Attachments.scala
@@ -44,17 +44,19 @@ abstract class Attachments { self =>
* Replaces an existing payload of the same type, if exists.
*/
def update[T: ClassTag](attachment: T): Attachments { type Pos = self.Pos } =
- new NonemptyAttachments(this.pos, remove[T].all + attachment)
+ new NonemptyAttachments[Pos](this.pos, remove[T].all + attachment)
/** Creates a copy of this attachment with the payload of the given class type `T` removed. */
def remove[T: ClassTag]: Attachments { type Pos = self.Pos } = {
val newAll = all filterNot matchesTag[T]
if (newAll.isEmpty) pos.asInstanceOf[Attachments { type Pos = self.Pos }]
- else new NonemptyAttachments(this.pos, newAll)
+ else new NonemptyAttachments[Pos](this.pos, newAll)
}
+}
- private class NonemptyAttachments(override val pos: Pos, override val all: Set[Any]) extends Attachments {
- type Pos = self.Pos
- def withPos(newPos: Pos) = new NonemptyAttachments(newPos, all)
- }
+// SI-7018: This used to be an inner class of `Attachments`, but that led to a memory leak in the
+// IDE via $outer pointers.
+private final class NonemptyAttachments[P >: Null](override val pos: P, override val all: Set[Any]) extends Attachments {
+ type Pos = P
+ def withPos(newPos: Pos) = new NonemptyAttachments(newPos, all)
}
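For context, the retention pattern being avoided is generic to inner classes; sketched below with illustrative names (Holder/StandaloneInner are not part of scala-reflect):

// An inner class instance silently retains its enclosing instance through the
// synthetic $outer field, so a long-lived Inner value keeps bigPayload reachable.
class Holder(bigPayload: Array[Byte]) {
  class Inner(val data: Set[Any])
}

// Hoisting the class to the top level and passing the needed state explicitly
// (here the position as a type parameter P) removes the $outer edge, which is
// the shape of the NonemptyAttachments change above.
final class StandaloneInner[P](val pos: P, val data: Set[Any])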
diff --git a/test/files/instrumented/t6611.check b/test/files/instrumented/t6611.check
new file mode 100644
index 0000000000..5cd691e93a
--- /dev/null
+++ b/test/files/instrumented/t6611.check
@@ -0,0 +1 @@
+Method call statistics:
diff --git a/test/files/instrumented/t6611.scala b/test/files/instrumented/t6611.scala
new file mode 100644
index 0000000000..4c52f8a5ef
--- /dev/null
+++ b/test/files/instrumented/t6611.scala
@@ -0,0 +1,35 @@
+import scala.tools.partest.instrumented.Instrumentation._
+
+object Test {
+ def main(args: Array[String]) {
+ startProfiling()
+
+ // tests optimization in Cleanup for varargs reference arrays
+ Array("")
+
+
+ Array(true)
+ Array(true, false)
+ Array(1: Byte)
+ Array(1: Byte, 2: Byte)
+ Array(1: Short)
+ Array(1: Short, 2: Short)
+ Array(1)
+ Array(1, 2)
+ Array(1L)
+ Array(1L, 2L)
+ Array(1d)
+ Array(1d, 2d)
+ Array(1f)
+ Array(1f, 2f)
+
+ /* Not currently optimized:
+ Array[Int](1, 2) etc
+ Array(())
+ Array((), ())
+ */
+
+ stopProfiling()
+ printStatistics()
+ }
+}
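The instrumented test counts method calls, so the point is that the listed varargs forms should compile down to direct array construction instead of routing through wrapper/builder calls. Roughly, as a sketch of the intent rather than the actual trees the compiler emits:

object VarargsLoweringSketch extends App {
  val viaApply = Array(1, 2, 3)                                   // source form
  val direct   = { val a = new Array[Int](3); a(0) = 1; a(1) = 2; a(2) = 3; a }
  assert(viaApply sameElements direct)                            // same contents either way
  println(direct.mkString(","))
}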
diff --git a/test/files/jvm/scala-concurrent-tck.scala b/test/files/jvm/scala-concurrent-tck.scala
index b529bca38a..b2b4183564 100644
--- a/test/files/jvm/scala-concurrent-tck.scala
+++ b/test/files/jvm/scala-concurrent-tck.scala
@@ -134,6 +134,12 @@ trait FutureCallbacks extends TestBase {
assert(false)
}
}
+
+ def testThatNestedCallbacksDoNotYieldStackOverflow(): Unit = {
+ val promise = Promise[Int]
+ (0 to 10000).map(Future(_)).foldLeft(promise.future)((f1, f2) => f2.flatMap(i => f1))
+ promise.success(-1)
+ }
testOnSuccess()
testOnSuccessWhenCompleted()
@@ -143,6 +149,7 @@ trait FutureCallbacks extends TestBase {
// testOnFailureWhenSpecialThrowable(6, new scala.util.control.ControlThrowable { })
//TODO: this test is currently problematic, because NonFatal does not match InterruptedException
//testOnFailureWhenSpecialThrowable(7, new InterruptedException)
+ testThatNestedCallbacksDoNotYieldStackOverflow()
testOnFailureWhenTimeoutException()
}
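The new case checks that a long chain of nested flatMap callbacks completes without growing the stack. A standalone version of the same shape, assuming only the global execution context:

import scala.concurrent._
import scala.concurrent.duration._
import ExecutionContext.Implicits.global

object NestedCallbacksSketch extends App {
  val promise = Promise[Int]()
  val chained = (0 to 10000).map(Future(_)).foldLeft(promise.future)((f1, f2) => f2.flatMap(_ => f1))
  promise.success(-1)
  println(Await.result(chained, 1.minute))   // completes with -1 instead of overflowing
}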
diff --git a/test/files/neg/t6040.check b/test/files/neg/t6040.check
index f6757f97e3..f91df0c46d 100644
--- a/test/files/neg/t6040.check
+++ b/test/files/neg/t6040.check
@@ -1,7 +1,9 @@
-error: extension of type scala.Dynamic needs to be enabled
+t6040.scala:1: error: extension of type scala.Dynamic needs to be enabled
by making the implicit value language.dynamics visible.
This can be achieved by adding the import clause 'import scala.language.dynamics'
or by setting the compiler option -language:dynamics.
See the Scala docs for value scala.language.dynamics for a discussion
why the feature needs to be explicitly enabled.
+class X extends Dynamic
+ ^
one error found
diff --git a/test/files/neg/t6231.check b/test/files/neg/t6231.check
new file mode 100644
index 0000000000..b27961d393
--- /dev/null
+++ b/test/files/neg/t6231.check
@@ -0,0 +1,6 @@
+t6231.scala:4: error: Implementation restriction: local trait Bug$X$1 is unable to automatically capture the
+free variable value ev$1 on behalf of anonymous class anonfun$qux$1. You can manually assign it to a val inside the trait,
+and refer that that val in anonymous class anonfun$qux$1. For more details, see SI-6231.
+ def qux = { () => ev }
+ ^
+one error found
diff --git a/test/files/neg/t6231.scala b/test/files/neg/t6231.scala
new file mode 100644
index 0000000000..1e5b4e0e1a
--- /dev/null
+++ b/test/files/neg/t6231.scala
@@ -0,0 +1,15 @@
+object Bug {
+ def bar(ev: Any) = {
+ trait X {
+ def qux = { () => ev }
+ }
+ new X {}.qux()
+
+ // workaround
+ trait Y {
+ val ev2 = ev // manually capture `ev` so that `ev2` is added to the trait interface.
+ def qux = { () => ev2 }
+ }
+ }
+}
+
diff --git a/test/files/neg/t6443c.check b/test/files/neg/t6443c.check
new file mode 100644
index 0000000000..7cf8d23f4b
--- /dev/null
+++ b/test/files/neg/t6443c.check
@@ -0,0 +1,7 @@
+t6443c.scala:16: error: double definition:
+method foo:(d: B.D)(a: Any)(d2: d.type)Unit and
+method foo:(d: B.D)(a: Any, d2: d.type)Unit at line 11
+have same type after erasure: (d: B.D, a: Object, d2: B.D)Unit
+ def foo(d: D)(a: Any)(d2: d.type): Unit = ()
+ ^
+one error found
diff --git a/test/files/neg/t6443c.scala b/test/files/neg/t6443c.scala
new file mode 100644
index 0000000000..817224e043
--- /dev/null
+++ b/test/files/neg/t6443c.scala
@@ -0,0 +1,21 @@
+trait A {
+ type D >: Null <: C
+ def foo(d: D)(a: Any, d2: d.type): Unit
+ trait C {
+ def bar: Unit = foo(null)(null, null)
+ }
+}
+object B extends A {
+ class D extends C
+
+ def foo(d: D)(a: Any, d2: d.type): Unit = () // Bridge method required here!
+
+ // No bridge method should be added, but we'll be happy enough if
+ // the "same type after erasure" error kicks in before the duplicated
+ // bridge causes a problem.
+ def foo(d: D)(a: Any)(d2: d.type): Unit = ()
+}
+
+object Test extends App {
+ new B.D().bar
+}
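The collision reported in the .check file is the ordinary erasure rule at work: once the dependent types are erased, both overloads of foo take (B.D, Object, B.D). The same effect with plain generics, as a simpler sketch:

// Both methods erase to foo(List)Unit, so only one can exist; uncommenting the
// second triggers the same "have same type after erasure" error.
class ErasureClashSketch {
  def foo(xs: List[Int]): Unit = ()
  // def foo(xs: List[String]): Unit = ()
}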
diff --git a/test/files/neg/t6567.check b/test/files/neg/t6567.check
new file mode 100644
index 0000000000..4c513e64cd
--- /dev/null
+++ b/test/files/neg/t6567.check
@@ -0,0 +1,7 @@
+t6567.scala:8: error: Suspicious application of an implicit view (Test.this.a2b) in the argument to Option.apply.
+ Option[B](a)
+ ^
+t6567.scala:10: error: Suspicious application of an implicit view (Test.this.a2b) in the argument to Option.apply.
+ val b: Option[B] = Option(a)
+ ^
+two errors found
diff --git a/test/files/neg/t6567.flags b/test/files/neg/t6567.flags
new file mode 100644
index 0000000000..e93641e931
--- /dev/null
+++ b/test/files/neg/t6567.flags
@@ -0,0 +1 @@
+-Xlint -Xfatal-warnings
\ No newline at end of file
diff --git a/test/files/neg/t6567.scala b/test/files/neg/t6567.scala
new file mode 100644
index 0000000000..650e5e39ae
--- /dev/null
+++ b/test/files/neg/t6567.scala
@@ -0,0 +1,11 @@
+class A
+class B
+
+object Test {
+ val a: A = null
+ implicit def a2b(a: A) = new B
+
+ Option[B](a)
+
+ val b: Option[B] = Option(a)
+}
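The -Xlint warning flags the silent application of a2b inside the argument of Option.apply. A sketch of the unambiguous spellings it nudges you toward (A2/B2 and the helper names are illustrative):

import scala.language.implicitConversions

class A2
class B2
object ExplicitViewSketch {
  implicit def a2b(a: A2): B2 = new B2
  val a: A2 = new A2
  val explicitCall: Option[B2] = Option(a2b(a))      // the conversion is now visible
  val convertLater: Option[B2] = Option(a).map(a2b)  // build Option[A2] first, then map
}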
diff --git a/test/files/neg/t6666.check b/test/files/neg/t6666.check
new file mode 100644
index 0000000000..d0378173ea
--- /dev/null
+++ b/test/files/neg/t6666.check
@@ -0,0 +1,40 @@
+t6666.scala:23: error: Implementation restriction: access of method x$2 in object O1 from anonymous class 2, would require illegal premature access to object O1
+ F.byname(x)
+ ^
+t6666.scala:30: error: Implementation restriction: access of value x$3 in object O2 from anonymous class 3, would require illegal premature access to object O2
+ F.byname(x)
+ ^
+t6666.scala:37: error: Implementation restriction: access of method x$4 in object O3 from anonymous class 4, would require illegal premature access to object O3
+ F.hof(() => x)
+ ^
+t6666.scala:50: error: Implementation restriction: access of method x$6 in class C1 from anonymous class 7, would require illegal premature access to the unconstructed `this` of class C1
+ F.byname(x)
+ ^
+t6666.scala:54: error: Implementation restriction: access of value x$7 in class C2 from anonymous class 8, would require illegal premature access to the unconstructed `this` of class C2
+ F.byname(x)
+ ^
+t6666.scala:58: error: Implementation restriction: access of method x$8 in class C3 from anonymous class 9, would require illegal premature access to the unconstructed `this` of class C3
+ F.hof(() => x)
+ ^
+t6666.scala:62: error: Implementation restriction: access of method x$9 in class C4 from object Nested$5, would require illegal premature access to the unconstructed `this` of class C4
+ object Nested { def xx = x}
+ ^
+t6666.scala:68: error: Implementation restriction: access of method x$10 in class C5 from object Nested$6, would require illegal premature access to the unconstructed `this` of class C5
+ object Nested { def xx = x}
+ ^
+t6666.scala:83: error: Implementation restriction: access of method x$12 in class C11 from anonymous class 12, would require illegal premature access to the unconstructed `this` of class C11
+ F.byname(x)
+ ^
+t6666.scala:102: error: Implementation restriction: access of method x$13 in class C13 from anonymous class 13, would require illegal premature access to the unconstructed `this` of class C13
+ F.hof(() => x)
+ ^
+t6666.scala:111: error: Implementation restriction: access of method x$14 in class C14 from object Nested$7, would require illegal premature access to the unconstructed `this` of class C14
+ object Nested { def xx = x}
+ ^
+t6666.scala:122: error: Implementation restriction: access of method x$15 in class C15 from object Nested$8, would require illegal premature access to the unconstructed `this` of class C15
+ object Nested { def xx = x}
+ ^
+t6666.scala:131: error: Implementation restriction: access of method foo$1 in class COuter from class CInner$1, would require illegal premature access to the unconstructed `this` of class COuter
+ class CInner extends C({foo})
+ ^
+13 errors found
diff --git a/test/files/neg/t6666.scala b/test/files/neg/t6666.scala
new file mode 100644
index 0000000000..d37ffaf141
--- /dev/null
+++ b/test/files/neg/t6666.scala
@@ -0,0 +1,132 @@
+class C(a: Any)
+object F {
+ def byname(a: => Any) = println(a)
+ def hof(a: () => Any) = println(a())
+}
+
+class COkay extends C(0) {
+ def this(a: Any) {
+ this()
+ def x = "".toString
+ F.byname(x)
+ }
+}
+
+//
+// The thunk's apply method accesses the MODULE$
+// field before it is set.
+//
+// 0: getstatic #23; //Field O1$.MODULE$:LO1$;
+// 3: invokevirtual #26; //Method O1$.O1$$x$1:()Ljava/lang/String;
+object O1 extends C({
+ def x = "".toString
+ F.byname(x)
+})
+
+// java.lang.NullPointerException
+// at O2$$anonfun$$init$$1.apply(<console>:11)
+object O2 extends C({
+ lazy val x = "".toString
+ F.byname(x)
+})
+
+// java.lang.NullPointerException
+// at O3$$anonfun$$init$$1.apply(<console>:11)
+object O3 extends C({
+ def x = "".toString
+ F.hof(() => x)
+})
+
+// Okay, the nested classes don't get an outer pointer passed,
+// just an extra param for `x: String`.
+object O6 extends C({
+ val x = "".toString
+ F.byname(x); F.hof(() => x); (new { val xx = x }.xx)
+})
+
+
+class C1 extends C({
+ def x = "".toString
+ F.byname(x)
+})
+class C2 extends C({
+ lazy val x = "".toString
+ F.byname(x)
+})
+class C3 extends C({
+ def x = "".toString
+ F.hof(() => x)
+})
+class C4 extends C({
+ def x = "".toString
+ object Nested { def xx = x}
+ Nested.xx
+})
+class C5 extends C({
+ def x = "".toString
+ val y = {
+ object Nested { def xx = x}
+ Nested.xx
+ }
+})
+
+// okay, for same reason as O6
+class C6 extends C({
+ val x = "".toString
+ F.byname(x); F.hof(() => x); (new { val xx = x }.xx)
+})
+
+class C11(a: Any) {
+ def this() = {
+ this({
+ def x = "".toString
+ F.byname(x)
+ })
+ }
+}
+
+// Crashes earlier in lazyVals.
+// class C12(a: Any) {
+// def this() = {
+// this({
+// lazy val x = "".toString
+// F.byname(x)
+// })
+// }
+// }
+
+class C13(a: Any) {
+ def this() = {
+ this({
+ def x = "".toString
+ F.hof(() => x)
+ })
+ }
+}
+
+class C14(a: Any) {
+ def this() = {
+ this({
+ def x = "".toString
+ object Nested { def xx = x}
+ Nested.xx
+ })
+ }
+}
+
+class C15(a: Any) {
+ def this() = {
+ this({
+ def x = "".toString
+ val y = {
+ object Nested { def xx = x}
+ Nested.xx
+ }
+ })
+ }
+}
+
+class COuter extends C({
+ def foo = 0
+ class CInner extends C({foo})
+})
\ No newline at end of file
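The commented stack traces above show what the new error prevents: the closures run while the enclosing object or instance is still being constructed. The same hazard in its classic, still-expressible form (a sketch, unrelated to the compiler internals):

abstract class Greeter {
  def name: String
  println("hello, " + name.length)          // runs during Greeter's construction
}
class Late extends Greeter {
  val name = "world"                        // not yet assigned when Greeter's body runs
}
object PrematureInitSketch extends App {
  try new Late
  catch { case _: NullPointerException => println("NPE: field observed before initialization") }
}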
diff --git a/test/files/neg/t6902.check b/test/files/neg/t6902.check
new file mode 100644
index 0000000000..8ad7fd37f9
--- /dev/null
+++ b/test/files/neg/t6902.check
@@ -0,0 +1,10 @@
+t6902.scala:4: error: unreachable code
+ case Some(b) => 3 // no warning was emitted
+ ^
+t6902.scala:9: error: unreachable code
+ case Some(b) => 3 // no warning was emitted
+ ^
+t6902.scala:21: error: unreachable code
+ case 1 => 3 // crash
+ ^
+three errors found
diff --git a/test/files/neg/t6902.flags b/test/files/neg/t6902.flags
new file mode 100644
index 0000000000..e8fb65d50c
--- /dev/null
+++ b/test/files/neg/t6902.flags
@@ -0,0 +1 @@
+-Xfatal-warnings
\ No newline at end of file
diff --git a/test/files/neg/t6902.scala b/test/files/neg/t6902.scala
new file mode 100644
index 0000000000..ce5ff8b6fb
--- /dev/null
+++ b/test/files/neg/t6902.scala
@@ -0,0 +1,23 @@
+object Test {
+ Some(Some(1)) collect {
+ case Some(a) => 2
+ case Some(b) => 3 // no warning was emitted
+ }
+
+ (Some(1): @ unchecked) match {
+ case Some(a) => 2
+ case Some(b) => 3 // no warning was emitted
+ }
+
+ // A variation of SI-6011, which eluded the fix
+ // in 2.10.0.
+ //
+ // duplicate keys in SWITCH, can't pick arbitrarily one of them to evict, see SI-6011.
+ // at scala.reflect.internal.SymbolTable.abort(SymbolTable.scala:50)
+ // at scala.tools.nsc.Global.abort(Global.scala:249)
+ // at scala.tools.nsc.backend.jvm.GenASM$JPlainBuilder$jcode$.emitSWITCH(GenASM.scala:1850)
+ ((1: Byte): @unchecked @annotation.switch) match {
+ case 1 => 2
+ case 1 => 3 // crash
+ }
+}
diff --git a/test/files/neg/t6952.check b/test/files/neg/t6952.check
new file mode 100644
index 0000000000..f1e1881404
--- /dev/null
+++ b/test/files/neg/t6952.check
@@ -0,0 +1,13 @@
+t6952.scala:2: error: extension of type scala.Dynamic needs to be enabled
+by making the implicit value language.dynamics visible.
+This can be achieved by adding the import clause 'import scala.language.dynamics'
+or by setting the compiler option -language:dynamics.
+See the Scala docs for value scala.language.dynamics for a discussion
+why the feature needs to be explicitly enabled.
+trait B extends Dynamic
+ ^
+t6952.scala:3: error: extension of type scala.Dynamic needs to be enabled
+by making the implicit value language.dynamics visible.
+trait C extends A with Dynamic
+ ^
+two errors found
diff --git a/test/files/neg/t6952.scala b/test/files/neg/t6952.scala
new file mode 100644
index 0000000000..257ea3be68
--- /dev/null
+++ b/test/files/neg/t6952.scala
@@ -0,0 +1,4 @@
+trait A
+trait B extends Dynamic
+trait C extends A with Dynamic
+trait D extends B
diff --git a/test/files/pos/t6976/Exts_1.scala b/test/files/pos/t6976/Exts_1.scala
new file mode 100644
index 0000000000..9b3a69edd9
--- /dev/null
+++ b/test/files/pos/t6976/Exts_1.scala
@@ -0,0 +1,10 @@
+object Exts {
+ implicit class AnyExts[T](val o: T) extends AnyVal {
+ def moo = "moo!"
+ }
+}
+
+trait Exts {
+ import language.implicitConversions
+ implicit def AnyExts[T](o: T) = Exts.AnyExts(o)
+}
diff --git a/test/files/pos/t6976/ImplicitBug_1.scala b/test/files/pos/t6976/ImplicitBug_1.scala
new file mode 100644
index 0000000000..c9031bab2e
--- /dev/null
+++ b/test/files/pos/t6976/ImplicitBug_1.scala
@@ -0,0 +1,27 @@
+// This one is weird and nasty. Not sure if this is a scalac or an sbt
+// (tried with 0.12 & 0.12.2-RC2) bug.
+//
+// A level of indirection is required to trigger this bug.
+// Exts seems to need to be defined in separate file.
+//
+// Steps to reproduce:
+// 1. sbt clean
+// 2. sbt run (it works)
+// 3. Comment A & uncomment B.
+// 4. sbt run (it fails)
+// 5. Switch it back & sbt run. It still fails.
+//
+// In this project sbt clean helps. However in a large project where this
+// bug was found compiler crashed even after doing sbt clean. The only
+// way to work around this was to reference the Exts object explicitly (C) in
+// the source file using its implicit classes.
+
+// Let's suppose this is a mega-trait combining all sorts of helper
+// functionality.
+trait Support extends Exts
+
+object ImplicitsBug extends App with Support { // A
+// object ImplicitsBug extends App with Exts { // B
+ //Exts // C) this reference helped in the large project.
+ println(3.moo)
+}
diff --git a/test/files/pos/t6976/ImplicitBug_2.scala b/test/files/pos/t6976/ImplicitBug_2.scala
new file mode 100644
index 0000000000..2fea5e2993
--- /dev/null
+++ b/test/files/pos/t6976/ImplicitBug_2.scala
@@ -0,0 +1,7 @@
+trait Support extends Exts
+
+// object ImplicitsBug extends App with Support { // A
+object ImplicitsBug extends App with Exts { // B
+ //Exts // C) this reference helped in the large project.
+ println(3.moo)
+}
diff --git a/test/files/pos/t6994.flags b/test/files/pos/t6994.flags
new file mode 100644
index 0000000000..e8fb65d50c
--- /dev/null
+++ b/test/files/pos/t6994.flags
@@ -0,0 +1 @@
+-Xfatal-warnings
\ No newline at end of file
diff --git a/test/files/pos/t6994.scala b/test/files/pos/t6994.scala
new file mode 100644
index 0000000000..d707196423
--- /dev/null
+++ b/test/files/pos/t6994.scala
@@ -0,0 +1,8 @@
+object Test {
+ object NF {
+ def unapply(t: Throwable): Option[Throwable] = None
+ }
+ val x = (try { None } catch { case NF(ex) => None }) getOrElse 0
+ // Was emitting a spurious warning post typer:
+ // "This catches all Throwables. If this is really intended, use `case ex6 : Throwable` to clear this warning."
+}
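The warning being silenced is meant for genuinely catch-all handlers; an extractor that inspects the Throwable should not trip it. For comparison, the same shape written with the standard-library extractor NonFatal (a runnable sketch):

import scala.util.control.NonFatal

object ExtractorCatchSketch extends App {
  val x = (try { sys.error("boom"); Some(1) } catch { case NonFatal(_) => None }) getOrElse 0
  println(x)   // 0: the handler matched via an extractor, not via a bare wildcard
}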
diff --git a/test/files/pos/t7011.flags b/test/files/pos/t7011.flags
new file mode 100644
index 0000000000..a4c161553e
--- /dev/null
+++ b/test/files/pos/t7011.flags
@@ -0,0 +1 @@
+-Ydebug -Xfatal-warnings
\ No newline at end of file
diff --git a/test/files/pos/t7011.scala b/test/files/pos/t7011.scala
new file mode 100644
index 0000000000..539f662bc0
--- /dev/null
+++ b/test/files/pos/t7011.scala
@@ -0,0 +1,7 @@
+object bar {
+ def foo {
+ lazy val x = 42
+
+ {()=>x}
+ }
+}
\ No newline at end of file
diff --git a/test/files/run/reify_magicsymbols.check b/test/files/run/reify_magicsymbols.check
index e2aa46a364..c9d892d793 100644
--- a/test/files/run/reify_magicsymbols.check
+++ b/test/files/run/reify_magicsymbols.check
@@ -10,4 +10,4 @@ List[Null]
List[Nothing]
AnyRef{def foo(x: Int): Int}
Int* => Unit
-=> Int => Unit
+(=> Int) => Unit
diff --git a/test/files/run/t6011c.scala b/test/files/run/t6011c.scala
new file mode 100644
index 0000000000..0647e3f81a
--- /dev/null
+++ b/test/files/run/t6011c.scala
@@ -0,0 +1,13 @@
+object Test extends App {
+ // A variation of SI-6011, which eluded the fix
+ // in 2.10.0.
+ //
+ // duplicate keys in SWITCH, can't pick arbitrarily one of them to evict, see SI-6011.
+ // at scala.reflect.internal.SymbolTable.abort(SymbolTable.scala:50)
+ // at scala.tools.nsc.Global.abort(Global.scala:249)
+ // at scala.tools.nsc.backend.jvm.GenASM$JPlainBuilder$jcode$.emitSWITCH(GenASM.scala:1850)
+ ((1: Byte): @unchecked @annotation.switch) match {
+ case 1 => 2
+ case 1 => 3 // crash
+ }
+}
diff --git a/test/files/run/t6028.check b/test/files/run/t6028.check
index 34f4b22134..79deaacf3a 100644
--- a/test/files/run/t6028.check
+++ b/test/files/run/t6028.check
@@ -15,7 +15,7 @@ package <empty> {
}
};
def bar(barParam: Int): Object = {
- @volatile var MethodLocalObject$module: runtime.VolatileObjectRef = new runtime.VolatileObjectRef(<empty>);
+ @volatile var MethodLocalObject$module: runtime.VolatileObjectRef = new runtime.VolatileObjectRef(null);
T.this.MethodLocalObject$1(barParam, MethodLocalObject$module)
};
def tryy(tryyParam: Int): Function0 = {
diff --git a/test/files/run/t6135.scala b/test/files/run/t6135.scala
new file mode 100644
index 0000000000..c0f8f3fd1d
--- /dev/null
+++ b/test/files/run/t6135.scala
@@ -0,0 +1,13 @@
+object Test extends App {
+ class A { class V }
+
+ abstract class B[S] {
+ def foo(t: S, a: A)(v: a.V)
+ }
+
+ val b1 = new B[String] {
+ def foo(t: String, a: A)(v: a.V) = () // Bridge method required here!
+ }
+
+ b1.foo("", null)(null)
+}
diff --git a/test/files/run/t6434.check b/test/files/run/t6434.check
new file mode 100644
index 0000000000..f898b6b781
--- /dev/null
+++ b/test/files/run/t6434.check
@@ -0,0 +1,10 @@
+Type in expressions to have them evaluated.
+Type :help for more information.
+
+scala> def f(x: => Int): Int = x
+f: (x: => Int)Int
+
+scala> f _
+res0: (=> Int) => Int = <function1>
+
+scala>
diff --git a/test/files/run/t6434.scala b/test/files/run/t6434.scala
new file mode 100644
index 0000000000..e4a4579613
--- /dev/null
+++ b/test/files/run/t6434.scala
@@ -0,0 +1,8 @@
+import scala.tools.partest.ReplTest
+
+object Test extends ReplTest {
+ def code =
+"""def f(x: => Int): Int = x
+f _
+"""
+}
diff --git a/test/files/run/t6439.check b/test/files/run/t6439.check
new file mode 100644
index 0000000000..178ea739f5
--- /dev/null
+++ b/test/files/run/t6439.check
@@ -0,0 +1,66 @@
+Type in expressions to have them evaluated.
+Type :help for more information.
+
+scala>
+
+scala> class A
+defined class A
+
+scala> object A // warn
+defined module A
+warning: previously defined class A is not a companion to object A.
+Companions must be defined together; you may wish to use :paste mode for this.
+
+scala> trait B
+defined trait B
+
+scala> object B // warn
+defined module B
+warning: previously defined trait B is not a companion to object B.
+Companions must be defined together; you may wish to use :paste mode for this.
+
+scala> object C
+defined module C
+
+scala> object Bippy
+defined module Bippy
+
+scala> class C // warn
+defined class C
+warning: previously defined object C is not a companion to class C.
+Companions must be defined together; you may wish to use :paste mode for this.
+
+scala> class D
+defined class D
+
+scala> def D = 0 // no warn
+D: Int
+
+scala> val D = 0 // no warn
+D: Int = 0
+
+scala> object E
+defined module E
+
+scala> var E = 0 // no warn
+E: Int = 0
+
+scala> object F
+defined module F
+
+scala> type F = Int // no warn
+defined type alias F
+
+scala> :power
+** Power User mode enabled - BEEP WHIR GYVE **
+** :phase has been set to 'typer'. **
+** scala.tools.nsc._ has been imported **
+** global._, definitions._ also imported **
+** Try :help, :vals, power.<tab> **
+
+scala> intp("F") // this now works as a result of changing .typeSymbol to .typeSymbolDirect in IMain#Request#definedSymbols
+res0: $r.intp.global.Symbol = type F
+
+scala>
+
+scala>
diff --git a/test/files/run/t6439.scala b/test/files/run/t6439.scala
new file mode 100644
index 0000000000..70a2dbafaf
--- /dev/null
+++ b/test/files/run/t6439.scala
@@ -0,0 +1,22 @@
+import scala.tools.partest.ReplTest
+
+object Test extends ReplTest {
+ def code = """
+class A
+object A // warn
+trait B
+object B // warn
+object C
+object Bippy
+class C // warn
+class D
+def D = 0 // no warn
+val D = 0 // no warn
+object E
+var E = 0 // no warn
+object F
+type F = Int // no warn
+:power
+intp("F") // this now works as a result of changing .typeSymbol to .typeSymbolDirect in IMain#Request#definedSymbols
+ """
+}
diff --git a/test/files/run/t6443-by-name.check b/test/files/run/t6443-by-name.check
new file mode 100644
index 0000000000..6f98fa4a28
--- /dev/null
+++ b/test/files/run/t6443-by-name.check
@@ -0,0 +1,3 @@
+1
+foo
+foo
diff --git a/test/files/run/t6443-by-name.scala b/test/files/run/t6443-by-name.scala
new file mode 100644
index 0000000000..bfd9bf9791
--- /dev/null
+++ b/test/files/run/t6443-by-name.scala
@@ -0,0 +1,18 @@
+object Test {
+
+ def main(args: Array[String]) {
+ def foo = {println("foo"); 0}
+ lazyDep(X)(foo)
+ }
+
+ trait T {
+ type U
+ }
+ object X extends T { type U = Int }
+
+ def lazyDep(t: T)(u: => t.U) {
+ println("1")
+ u
+ u
+ }
+}
diff --git a/test/files/run/t6443-varargs.check b/test/files/run/t6443-varargs.check
new file mode 100644
index 0000000000..257cc5642c
--- /dev/null
+++ b/test/files/run/t6443-varargs.check
@@ -0,0 +1 @@
+foo
diff --git a/test/files/run/t6443-varargs.scala b/test/files/run/t6443-varargs.scala
new file mode 100644
index 0000000000..9cbae3e99c
--- /dev/null
+++ b/test/files/run/t6443-varargs.scala
@@ -0,0 +1,16 @@
+object Test {
+
+ def main(args: Array[String]) {
+ def foo = {println("foo"); 0}
+ lazyDep(X)(foo)
+ }
+
+ trait T {
+ type U
+ }
+ object X extends T { type U = Int }
+
+ def lazyDep(t: T)(us: t.U*) {
+ List(us: _*)
+ }
+}
diff --git a/test/files/run/t6443.scala b/test/files/run/t6443.scala
new file mode 100644
index 0000000000..67fe2cab22
--- /dev/null
+++ b/test/files/run/t6443.scala
@@ -0,0 +1,15 @@
+class Base
+class Derived extends Base
+
+trait A {
+ def foo(d: String)(d2: d.type): Base
+ val s = ""
+ def bar: Unit = foo(s)(s)
+}
+object B extends A {
+ def foo(d: String)(d2: d.type): D forSome { type D <: S; type S <: Derived } = {d2.isEmpty; null} // Bridge method required here!
+}
+
+object Test extends App {
+ B.bar
+}
diff --git a/test/files/run/t6443b.scala b/test/files/run/t6443b.scala
new file mode 100644
index 0000000000..9320b1dcfe
--- /dev/null
+++ b/test/files/run/t6443b.scala
@@ -0,0 +1,16 @@
+trait A {
+ type D >: Null <: C
+ def foo(d: D)(d2: d.type): Unit
+ trait C {
+ def bar: Unit = foo(null)(null)
+ }
+}
+object B extends A {
+ class D extends C
+
+ def foo(d: D)(d2: d.type): Unit = () // Bridge method required here!
+}
+
+object Test extends App {
+ new B.D().bar
+}
diff --git a/test/files/run/t6611.scala b/test/files/run/t6611.scala
new file mode 100644
index 0000000000..0947a48f90
--- /dev/null
+++ b/test/files/run/t6611.scala
@@ -0,0 +1,61 @@
+object Test extends App {
+ locally {
+ val a = Array("1")
+ val a2 = Array(a: _*)
+ assert(a ne a2)
+ }
+
+ locally {
+ val a = Array("1": Object)
+ val a2 = Array[Object](a: _*)
+ assert(a ne a2)
+ }
+
+ locally {
+ val a = Array(true)
+ val a2 = Array[Boolean](a: _*)
+ assert(a ne a2)
+ }
+
+ locally {
+ val a = Array(1: Short)
+ val a2 = Array[Short](a: _*)
+ assert(a ne a2)
+ }
+
+ locally {
+ val a = Array(1: Byte)
+ val a2 = Array[Byte](a: _*)
+ assert(a ne a2)
+ }
+
+ locally {
+ val a = Array(1)
+ val a2 = Array[Int](a: _*)
+ assert(a ne a2)
+ }
+
+ locally {
+ val a = Array(1L)
+ val a2 = Array[Long](a: _*)
+ assert(a ne a2)
+ }
+
+ locally {
+ val a = Array(1f)
+ val a2 = Array[Float](a: _*)
+ assert(a ne a2)
+ }
+
+ locally {
+ val a = Array(1d)
+ val a2 = Array[Double](a: _*)
+ assert(a ne a2)
+ }
+
+ locally {
+ val a = Array(())
+ val a2 = Array[Unit](a: _*)
+ assert(a ne a2)
+ }
+}
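The `ne` assertions matter because arrays are mutable: if Array(a: _*) were lowered to return `a` itself, writes through the "copy" would become visible through the original. A small sketch of that observable difference:

object FreshCopySketch extends App {
  val a  = Array(1, 2, 3)
  val a2 = Array(a: _*)     // must be a distinct array
  a2(0) = 99
  println(a(0))             // still 1 because a2 does not alias a
}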
diff --git a/test/files/run/t6637.check b/test/files/run/t6637.check
new file mode 100644
index 0000000000..9766475a41
--- /dev/null
+++ b/test/files/run/t6637.check
@@ -0,0 +1 @@
+ok
diff --git a/test/files/run/t6637.scala b/test/files/run/t6637.scala
new file mode 100644
index 0000000000..d3c380370b
--- /dev/null
+++ b/test/files/run/t6637.scala
@@ -0,0 +1,8 @@
+
+object Test extends App {
+ try {
+ class A ; class B ; List().head.isInstanceOf[A with B]
+ } catch {
+ case _ :java.util.NoSuchElementException => println("ok")
+ }
+}
diff --git a/test/files/run/t6863.scala b/test/files/run/t6863.scala
new file mode 100644
index 0000000000..d77adb6af4
--- /dev/null
+++ b/test/files/run/t6863.scala
@@ -0,0 +1,114 @@
+/** Make sure that when a variable is captured its initialization expression is handled properly */
+object Test {
+ def lazyVal() = {
+ // internally lazy vals become vars which are initialized with "_", so they need to be tested just like vars do
+ lazy val x = "42"
+ assert({ () => x }.apply == "42")
+ }
+ def ident() = {
+ val y = "42"
+ var x = y
+ assert({ () => x }.apply == "42")
+ }
+ def apply() = {
+ def y(x : Int) = x.toString
+ var x = y(42)
+ assert({ () => x }.apply == "42")
+ }
+ def literal() = {
+ var x = "42"
+ assert({ () => x }.apply == "42")
+ }
+ def `new`() = {
+ var x = new String("42")
+ assert({ () => x }.apply == "42")
+ }
+ def select() = {
+ object Foo{val bar = "42"}
+ var x = Foo.bar
+ assert({ () => x }.apply == "42")
+ }
+ def `throw`() = {
+ var x = if (true) "42" else throw new Exception("42")
+ assert({ () => x }.apply == "42")
+ }
+ def assign() = {
+ var y = 1
+ var x = y = 42
+ assert({ () => x}.apply == ())
+ }
+ def valDef() = {
+ var x = {val y = 42}
+ assert({ () => x}.apply == ())
+ }
+ def `return`(): String = {
+ var x = if (true) return "42" else ()
+ assert({ () => x}.apply == ())
+ "42"
+ }
+ def tryFinally() = {
+ var x = try { "42" } finally ()
+ assert({ () => x }.apply == "42")
+ }
+ def tryCatch() = {
+ var x = try { "42" } catch { case _ => "43" }
+ assert({ () => x }.apply == "42")
+ }
+ def `if`() = {
+ var x = if (true) ()
+ assert({ () => x }.apply == ())
+ }
+ def ifElse() = {
+ var x = if(true) "42" else "43"
+ assert({ () => x }.apply == "42")
+ }
+ def matchCase() = {
+ var x = 100 match {
+ case 100 => "42"
+ case _ => "43"
+ }
+ assert({ () => x }.apply == "42")
+ }
+ def block() = {
+ var x = {
+ val y = 42
+ "42"
+ }
+ assert({ () => x }.apply == "42")
+ }
+ def labelDef() = {
+ var x = 100 match {
+ case 100 => try "42" finally ()
+ }
+ assert({ () => x }.apply == "42")
+ }
+ def nested() = {
+ var x = {
+ val y = 42
+ if(true) try "42" catch {case _ => "43"}
+ else "44"
+ }
+ assert({ () => x }.apply == "42")
+ }
+ def main(args: Array[String]) {
+ lazyVal()
+ ident()
+ apply()
+ literal()
+ `new`()
+ select()
+ `throw`()
+ assign()
+ valDef()
+ `return`()
+ tryFinally()
+ tryCatch()
+ ifElse()
+ `if`()
+ matchCase()
+ block()
+ labelDef()
+ nested()
+ }
+}
+
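Background for the cases above, as a hedged sketch: a local var captured by a closure is boxed into a runtime Ref-style cell, so its initializer has to be rewritten to store into the box; each method in the test exercises one shape of initializer.

object CapturedVarSketch extends App {
  var x = "42"              // captured below, so it lives in a shared box
  val f = () => x           // the closure reads through the same box
  x = "43"
  println(f())              // prints 43: writer and reader share one cell
}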
diff --git a/test/files/run/t6969.check b/test/files/run/t6969.check
new file mode 100644
index 0000000000..78297812c9
--- /dev/null
+++ b/test/files/run/t6969.check
@@ -0,0 +1 @@
+All threads completed.
diff --git a/test/files/run/t6969.scala b/test/files/run/t6969.scala
new file mode 100644
index 0000000000..8cfc28c1e5
--- /dev/null
+++ b/test/files/run/t6969.scala
@@ -0,0 +1,28 @@
+object Test {
+ private type Clearable = { def clear(): Unit }
+ private def choke() = {
+ try new Array[Object]((Runtime.getRuntime().maxMemory min Int.MaxValue).toInt)
+ catch {
+ case _: OutOfMemoryError => // what do you mean, out of memory?
+ case t: Throwable => println(t)
+ }
+ }
+ private def f(x: Clearable) = x.clear()
+ class Choker(id: Int) extends Thread {
+ private def g(iteration: Int) = {
+ val map = scala.collection.mutable.Map[Int, Int](1 -> 2)
+ try f(map) catch { case t: NullPointerException => println(s"Failed at $id/$iteration") ; throw t }
+ choke()
+ }
+ override def run() {
+ 1 to 50 foreach g
+ }
+ }
+
+ def main(args: Array[String]): Unit = {
+ val threads = 1 to 3 map (id => new Choker(id))
+ threads foreach (_.start())
+ threads foreach (_.join())
+ println("All threads completed.")
+ }
+}
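The structural call x.clear() is compiled to a cached reflective method lookup, and the test provokes memory pressure so that a cleared cache entry must be repopulated rather than dereferenced as null. The call shape on its own, as a runnable sketch:

import scala.language.reflectiveCalls

object StructuralCallSketch extends App {
  type Clearable = { def clear(): Unit }
  def wipe(x: Clearable): Unit = x.clear()       // dispatched via reflective lookup
  val m = scala.collection.mutable.Map(1 -> 2)
  wipe(m)
  println(m.isEmpty)                             // true
}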
diff --git a/test/files/run/t6987.check b/test/files/run/t6987.check
new file mode 100644
index 0000000000..86fc96c679
--- /dev/null
+++ b/test/files/run/t6987.check
@@ -0,0 +1 @@
+got successful verbose results!
diff --git a/test/files/run/t6987.scala b/test/files/run/t6987.scala
new file mode 100644
index 0000000000..37e91d61ae
--- /dev/null
+++ b/test/files/run/t6987.scala
@@ -0,0 +1,43 @@
+import java.io._
+import tools.nsc.{CompileClient, CompileServer}
+import java.util.concurrent.{CountDownLatch, TimeUnit}
+
+object Test extends App {
+ val startupLatch = new CountDownLatch(1)
+ // we have to explicitly launch our server because when the client launches a server it uses
+ // the "scala" shell command meaning whatever version of scala (and whatever version of libraries)
+ // happens to be in the path gets used
+ val t = new Thread(new Runnable {
+ def run() = {
+ CompileServer.execute(() => startupLatch.countDown(), Array[String]())
+ }
+ })
+ t setDaemon true
+ t.start()
+ if (!startupLatch.await(2, TimeUnit.MINUTES))
+ sys error "Timeout waiting for server to start"
+
+ val baos = new ByteArrayOutputStream()
+ val ps = new PrintStream(baos)
+
+ val success = (scala.Console withOut ps) {
+ // shut down the server via the client using the verbose flag
+ CompileClient.process(Array("-shutdown", "-verbose"))
+ }
+
+ // now make sure we got success and a verbose result
+ val msg = baos.toString()
+
+ if (success) {
+ if (msg contains "Settings after normalizing paths") {
+ println("got successful verbose results!")
+ } else {
+ println("did not get the string expected, full results were:")
+ println(msg)
+ }
+ } else {
+ println("got a failure. Full results were:")
+ println(msg)
+ }
+ scala.Console.flush
+}
diff --git a/test/partest b/test/partest
index 8352f8a946..dd57137b21 100755
--- a/test/partest
+++ b/test/partest
@@ -74,10 +74,10 @@ if $cygwin; then
EXT_CLASSPATH=`cygpath --path --$format "$EXT_CLASSPATH"`
fi
-# last arg wins, so if JAVA_OPTS already contains -Xmx or -Xms the
-# supplied argument will be used.
-JAVA_OPTS="-Xmx1024M -Xms64M $JAVA_OPTS"
-[ -n "$SCALAC_OPTS" ] || SCALAC_OPTS="-deprecation"
+# last arg wins, so if JAVA_OPTS already contains one of these options
+# the supplied argument will be used.
+# At this writing, test/partest --all is reported to require 108m of permgen.
+JAVA_OPTS="-Xmx1024M -Xms64M -XX:MaxPermSize=128M $JAVA_OPTS"
partestDebugStr=""
if [ ! -z "${PARTEST_DEBUG}" ] ; then