author    Lukas Rytz <lukas.rytz@gmail.com>    2015-04-21 10:15:16 +0200
committer Lukas Rytz <lukas.rytz@gmail.com>    2015-04-21 10:15:16 +0200
commit    2586e6c65c698e30eb467ef0d4a6cd731fb5650f (patch)
tree      c9db3f5ddcfb4071d622444a850111f1009e82dc /src
parent    15b1637e543ce22056919b85e76eaa3dc48f64bc (diff)
parent    555f8f09c90b7014f153488e442f2d107b18b6e5 (diff)
Merge commit '555f8f0' into merge/2.11-to-2.12-apr-21
Diffstat (limited to 'src')
-rwxr-xr-x  src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala            | 129
-rwxr-xr-x  src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala       |  24
-rw-r--r--  src/compiler/scala/tools/nsc/settings/ScalaSettings.scala              |  12
-rw-r--r--  src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala    |  22
-rw-r--r--  src/compiler/scala/tools/nsc/transform/AddInterfaces.scala             |   2
-rw-r--r--  src/compiler/scala/tools/nsc/transform/Delambdafy.scala                | 115
-rw-r--r--  src/compiler/scala/tools/nsc/transform/Erasure.scala                   |  10
-rw-r--r--  src/compiler/scala/tools/nsc/transform/LambdaLift.scala                |  14
-rw-r--r--  src/compiler/scala/tools/nsc/transform/UnCurry.scala                   |  18
-rw-r--r--  src/compiler/scala/tools/nsc/transform/patmat/Logic.scala              |  41
-rw-r--r--  src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala      |  86
-rw-r--r--  src/compiler/scala/tools/nsc/transform/patmat/Solving.scala            |  79
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Typers.scala                  |   9
-rw-r--r--  src/partest-extras/scala/tools/partest/ParserTest.scala                |  21
-rw-r--r--  src/reflect/scala/reflect/internal/Symbols.scala                       |   1
-rw-r--r--  src/repl/scala/tools/nsc/interpreter/JavapClass.scala                  |  34
16 files changed, 397 insertions, 220 deletions
diff --git a/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala b/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala
index 96939e616c..52b8a51a79 100755
--- a/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala
@@ -6,6 +6,7 @@
package scala.tools.nsc
package ast.parser
+import scala.annotation.tailrec
import scala.collection.mutable
import mutable.{ Buffer, ArrayBuffer, ListBuffer }
import scala.util.control.ControlThrowable
@@ -172,20 +173,19 @@ trait MarkupParsers {
}
def appendText(pos: Position, ts: Buffer[Tree], txt: String): Unit = {
- def append(t: String) = ts append handle.text(pos, t)
-
- if (preserveWS) append(txt)
- else {
+ def append(text: String): Unit = {
+ val tree = handle.text(pos, text)
+ ts append tree
+ }
+ val clean = if (preserveWS) txt else {
val sb = new StringBuilder()
-
txt foreach { c =>
if (!isSpace(c)) sb append c
else if (sb.isEmpty || !isSpace(sb.last)) sb append ' '
}
-
- val trimmed = sb.toString.trim
- if (!trimmed.isEmpty) append(trimmed)
+ sb.toString.trim
}
+ if (!clean.isEmpty) append(clean)
}
/** adds entity/character to ts as side-effect
@@ -216,44 +216,75 @@ trait MarkupParsers {
if (xCheckEmbeddedBlock) ts append xEmbeddedExpr
else appendText(p, ts, xText)
- /** Returns true if it encounters an end tag (without consuming it),
- * appends trees to ts as side-effect.
+ /** At an open angle-bracket, detects an end tag
+ * or consumes CDATA, comment, PI or element.
+ * Trees are appended to `ts` as a side-effect.
+ * @return true if an end tag (without consuming it)
*/
- private def content_LT(ts: ArrayBuffer[Tree]): Boolean = {
- if (ch == '/')
- return true // end tag
-
- val toAppend = ch match {
- case '!' => nextch() ; if (ch =='[') xCharData else xComment // CDATA or Comment
- case '?' => nextch() ; xProcInstr // PI
- case _ => element // child node
+ private def content_LT(ts: ArrayBuffer[Tree]): Boolean =
+ (ch == '/') || {
+ val toAppend = ch match {
+ case '!' => nextch() ; if (ch =='[') xCharData else xComment // CDATA or Comment
+ case '?' => nextch() ; xProcInstr // PI
+ case _ => element // child node
+ }
+ ts append toAppend
+ false
}
- ts append toAppend
- false
- }
-
def content: Buffer[Tree] = {
val ts = new ArrayBuffer[Tree]
- while (true) {
- if (xEmbeddedBlock)
+ val coalescing = settings.XxmlSettings.isCoalescing
+ @tailrec def loopContent(): Unit =
+ if (xEmbeddedBlock) {
ts append xEmbeddedExpr
- else {
+ loopContent()
+ } else {
tmppos = o2p(curOffset)
ch match {
- // end tag, cdata, comment, pi or child node
- case '<' => nextch() ; if (content_LT(ts)) return ts
- // either the character '{' or an embedded scala block }
- case '{' => content_BRACE(tmppos, ts) // }
- // EntityRef or CharRef
- case '&' => content_AMP(ts)
- case SU => return ts
- // text content - here xEmbeddedBlock might be true
- case _ => appendText(tmppos, ts, xText)
+ case '<' => // end tag, cdata, comment, pi or child node
+ nextch()
+ if (!content_LT(ts)) loopContent()
+ case '{' => // } literal brace or embedded Scala block
+ content_BRACE(tmppos, ts)
+ loopContent()
+ case '&' => // EntityRef or CharRef
+ content_AMP(ts)
+ loopContent()
+ case SU => ()
+ case _ => // text content - here xEmbeddedBlock might be true
+ appendText(tmppos, ts, xText)
+ loopContent()
}
}
+ // merge text sections and strip attachments
+ def coalesce(): ArrayBuffer[Tree] = {
+ def copy() = {
+ val buf = new ArrayBuffer[Tree]
+ var acc = new StringBuilder
+ var pos: Position = NoPosition
+ def emit() = if (acc.nonEmpty) {
+ appendText(pos, buf, acc.toString)
+ acc.clear()
+ }
+ for (t <- ts)
+ t.attachments.get[handle.TextAttache] match {
+ case Some(ta) =>
+ if (acc.isEmpty) pos = ta.pos
+ acc append ta.text
+ case _ =>
+ emit()
+ buf += t
+ }
+ emit()
+ buf
+ }
+ val res = if (ts.count(_.hasAttachment[handle.TextAttache]) > 1) copy() else ts
+ for (t <- res) t.removeAttachment[handle.TextAttache]
+ res
}
- unreachable
+ loopContent()
+ if (coalescing) coalesce() else ts
}
/** '<' element ::= xmlTag1 '>' { xmlExpr | '{' simpleExpr '}' } ETag
@@ -289,20 +320,16 @@ trait MarkupParsers {
private def xText: String = {
assert(!xEmbeddedBlock, "internal error: encountered embedded block")
val buf = new StringBuilder
- def done = buf.toString
-
- while (ch != SU) {
- if (ch == '}') {
- if (charComingAfter(nextch()) == '}') nextch()
- else errorBraces()
- }
-
- buf append ch
- nextch()
- if (xCheckEmbeddedBlock || ch == '<' || ch == '&')
- return done
- }
- done
+ if (ch != SU)
+ do {
+ if (ch == '}') {
+ if (charComingAfter(nextch()) == '}') nextch()
+ else errorBraces()
+ }
+ buf append ch
+ nextch()
+ } while (!(ch == SU || xCheckEmbeddedBlock || ch == '<' || ch == '&'))
+ buf.toString
}
/** Some try/catch/finally logic used by xLiteral and xLiteralPattern. */
@@ -344,12 +371,12 @@ trait MarkupParsers {
tmppos = o2p(curOffset) // Iuli: added this line, as it seems content_LT uses tmppos when creating trees
content_LT(ts)
- // parse more XML ?
+ // parse more XML?
if (charComingAfter(xSpaceOpt()) == '<') {
do {
xSpaceOpt()
nextch()
- ts append element
+ content_LT(ts)
} while (charComingAfter(xSpaceOpt()) == '<')
handle.makeXMLseq(r2p(start, start, curOffset), ts)
}
diff --git a/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala b/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala
index d2a999cdec..67241ef639 100755
--- a/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala
@@ -36,6 +36,7 @@ abstract class SymbolicXMLBuilder(p: Parsers#Parser, preserveWS: Boolean) {
val _MetaData: NameType = "MetaData"
val _NamespaceBinding: NameType = "NamespaceBinding"
val _NodeBuffer: NameType = "NodeBuffer"
+ val _PCData: NameType = "PCData"
val _PrefixedAttribute: NameType = "PrefixedAttribute"
val _ProcInstr: NameType = "ProcInstr"
val _Text: NameType = "Text"
@@ -46,6 +47,7 @@ abstract class SymbolicXMLBuilder(p: Parsers#Parser, preserveWS: Boolean) {
private object xmlterms extends TermNames {
val _Null: NameType = "Null"
val __Elem: NameType = "Elem"
+ val _PCData: NameType = "PCData"
val __Text: NameType = "Text"
val _buf: NameType = "$buf"
val _md: NameType = "$md"
@@ -55,10 +57,15 @@ abstract class SymbolicXMLBuilder(p: Parsers#Parser, preserveWS: Boolean) {
val _xml: NameType = "xml"
}
- import xmltypes.{_Comment, _Elem, _EntityRef, _Group, _MetaData, _NamespaceBinding, _NodeBuffer,
- _PrefixedAttribute, _ProcInstr, _Text, _Unparsed, _UnprefixedAttribute}
+ import xmltypes.{
+ _Comment, _Elem, _EntityRef, _Group, _MetaData, _NamespaceBinding, _NodeBuffer,
+ _PCData, _PrefixedAttribute, _ProcInstr, _Text, _Unparsed, _UnprefixedAttribute
+ }
+
+ import xmlterms.{ _Null, __Elem, __Text, _buf, _md, _plus, _scope, _tmpscope, _xml }
- import xmlterms.{_Null, __Elem, __Text, _buf, _md, _plus, _scope, _tmpscope, _xml}
+ /** Attachment for trees deriving from text nodes (Text, CData, entities). Used for coalescing. */
+ case class TextAttache(pos: Position, text: String)
// convenience methods
private def LL[A](x: A*): List[List[A]] = List(List(x:_*))
@@ -108,16 +115,21 @@ abstract class SymbolicXMLBuilder(p: Parsers#Parser, preserveWS: Boolean) {
final def entityRef(pos: Position, n: String) =
atPos(pos)( New(_scala_xml_EntityRef, LL(const(n))) )
+ private def coalescing = settings.XxmlSettings.isCoalescing
+
// create scala.xml.Text here <: scala.xml.Node
final def text(pos: Position, txt: String): Tree = atPos(pos) {
- if (isPattern) makeTextPat(const(txt))
- else makeText1(const(txt))
+ val t = if (isPattern) makeTextPat(const(txt)) else makeText1(const(txt))
+ if (coalescing) t updateAttachment TextAttache(pos, txt) else t
}
def makeTextPat(txt: Tree) = Apply(_scala_xml__Text, List(txt))
def makeText1(txt: Tree) = New(_scala_xml_Text, LL(txt))
def comment(pos: Position, text: String) = atPos(pos)( Comment(const(text)) )
- def charData(pos: Position, txt: String) = atPos(pos)( makeText1(const(txt)) )
+ def charData(pos: Position, txt: String) = if (coalescing) text(pos, txt) else atPos(pos) {
+ if (isPattern) Apply(_scala_xml(xmlterms._PCData), List(const(txt)))
+ else New(_scala_xml(_PCData), LL(const(txt)))
+ }
def procInstr(pos: Position, target: String, txt: String) =
atPos(pos)( ProcInstr(const(target), const(txt)) )
diff --git a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
index 03fd0976e5..630276e412 100644
--- a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
+++ b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
@@ -139,6 +139,18 @@ trait ScalaSettings extends AbsScalaSettings
val XnoPatmatAnalysis = BooleanSetting ("-Xno-patmat-analysis", "Don't perform exhaustivity/unreachability analysis. Also, ignore @switch annotation.")
val XfullLubs = BooleanSetting ("-Xfull-lubs", "Retains pre 2.10 behavior of less aggressive truncation of least upper bounds.")
+ // XML parsing options
+ object XxmlSettings extends MultiChoiceEnumeration {
+ val coalescing = Choice("coalescing", "Convert PCData to Text and coalesce sibling nodes")
+ def isCoalescing = (Xxml contains coalescing) || (!isScala212 && !Xxml.isSetByUser)
+ }
+ val Xxml = MultiChoiceSetting(
+ name = "-Xxml",
+ helpArg = "property",
+ descr = "Configure XML parsing",
+ domain = XxmlSettings
+ )
+
/** Compatibility stubs for options whose value name did
* not previously match the option name.
*/
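For illustration, a minimal sketch of what the new -Xxml:coalescing choice changes for user code, assuming the scala-xml library is on the classpath; the expected child shapes in the comments are an assumption drawn from the Choice description and isCoalescing above, not verified output:

    import scala.xml.Node

    object CoalescingDemo {
      // Parsed with coalescing (the pre-2.12 default per isCoalescing above), the
      // CDATA section should be converted to Text and merged with the preceding
      // "hello " into a single child; without coalescing it stays a separate PCData node.
      val mixed: Node = <a>hello <![CDATA[world]]></a>

      def main(args: Array[String]): Unit =
        println(mixed.child.map(_.getClass.getSimpleName))
    }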
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
index 1ffa064b78..994bcd8359 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
@@ -15,6 +15,7 @@ import scala.collection.mutable.{ ListBuffer, ArrayBuffer }
import scala.annotation.switch
import scala.reflect.internal.{ JavaAccFlags }
import scala.reflect.internal.pickling.{PickleBuffer, ByteCodecs}
+import scala.reflect.io.NoAbstractFile
import scala.tools.nsc.io.AbstractFile
import scala.tools.nsc.util.ClassFileLookup
@@ -1022,11 +1023,18 @@ abstract class ClassfileParser {
val sflags = jflags.toScalaFlags
val owner = ownerForFlags(jflags)
val scope = getScope(jflags)
- val innerClass = owner.newClass(name.toTypeName, NoPosition, sflags) setInfo completer
- val innerModule = owner.newModule(name.toTermName, NoPosition, sflags) setInfo completer
+ def newStub(name: Name) =
+ owner.newStubSymbol(name, s"Class file for ${entry.externalName} not found").setFlag(JAVA)
- innerModule.moduleClass setInfo loaders.moduleClassLoader
- List(innerClass, innerModule.moduleClass) foreach (_.associatedFile = file)
+ val (innerClass, innerModule) = if (file == NoAbstractFile) {
+ (newStub(name.toTypeName), newStub(name.toTermName))
+ } else {
+ val cls = owner.newClass(name.toTypeName, NoPosition, sflags) setInfo completer
+ val mod = owner.newModule(name.toTermName, NoPosition, sflags) setInfo completer
+ mod.moduleClass setInfo loaders.moduleClassLoader
+ List(cls, mod.moduleClass) foreach (_.associatedFile = file)
+ (cls, mod)
+ }
scope enter innerClass
scope enter innerModule
@@ -1046,10 +1054,8 @@ abstract class ClassfileParser {
for (entry <- innerClasses.entries) {
// create a new class member for immediate inner classes
if (entry.outerName == currentClass) {
- val file = classFileLookup.findClassFile(entry.externalName.toString) getOrElse {
- throw new AssertionError(s"Class file for ${entry.externalName} not found")
- }
- enterClassAndModule(entry, file)
+ val file = classFileLookup.findClassFile(entry.externalName.toString)
+ enterClassAndModule(entry, file.getOrElse(NoAbstractFile))
}
}
}
diff --git a/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala b/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala
index 3591372bbe..79776485de 100644
--- a/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala
+++ b/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala
@@ -207,7 +207,7 @@ abstract class AddInterfaces extends InfoTransform { self: Erasure =>
}
def transformMixinInfo(tp: Type): Type = tp match {
- case ClassInfoType(parents, decls, clazz) =>
+ case ClassInfoType(parents, decls, clazz) if clazz.isPackageClass || !clazz.isJavaDefined =>
if (clazz.needsImplClass)
implClass(clazz setFlag lateINTERFACE) // generate an impl class
diff --git a/src/compiler/scala/tools/nsc/transform/Delambdafy.scala b/src/compiler/scala/tools/nsc/transform/Delambdafy.scala
index 94e88589f5..2d33b35241 100644
--- a/src/compiler/scala/tools/nsc/transform/Delambdafy.scala
+++ b/src/compiler/scala/tools/nsc/transform/Delambdafy.scala
@@ -15,13 +15,12 @@ import scala.collection.mutable.LinkedHashMap
* Currently Uncurry is responsible for that transformation.
*
* From a lambda, Delambdafy will create
- * 1) a static forwarder at the top level of the class that contained the lambda
- * 2) a new top level class that
+ * 1) a new top level class that
a) has fields and a constructor taking the captured environment (including possibly the "this"
* reference)
- * b) an apply method that calls the static forwarder
+ * b) an apply method that calls the target method
* c) if needed a bridge method for the apply method
- * 3) an instantiation of the newly created class which replaces the lambda
+ * 2) an instantiation of the newly created class which replaces the lambda
*
* TODO the main work left to be done is to plug into specialization. Primarily that means choosing a
* specialized FunctionN trait instead of the generic FunctionN trait as a parent and creating the
@@ -76,36 +75,25 @@ abstract class Delambdafy extends Transform with TypingTransformers with ast.Tre
referrers
}
- val accessorMethods = mutable.ArrayBuffer[Tree]()
-
- // the result of the transformFunction method. A class definition for the lambda, an expression
- // insantiating the lambda class, and an accessor method for the lambda class to be able to
- // call the implementation
- case class TransformedFunction(lambdaClassDef: ClassDef, newExpr: Tree, accessorMethod: Tree)
+ // the result of the transformFunction method.
+ sealed abstract class TransformedFunction
+ // A class definition for the lambda, an expression insantiating the lambda class
+ case class DelambdafyAnonClass(lambdaClassDef: ClassDef, newExpr: Tree) extends TransformedFunction
// here's the main entry point of the transform
override def transform(tree: Tree): Tree = tree match {
// the main thing we care about is lambdas
case fun @ Function(_, _) =>
- // a lambda beccomes a new class, an instantiation expression, and an
- // accessor method
- val TransformedFunction(lambdaClassDef, newExpr, accessorMethod) = transformFunction(fun)
- // we'll add accessor methods to the current template later
- accessorMethods += accessorMethod
- val pkg = lambdaClassDef.symbol.owner
-
- // we'll add the lambda class to the package later
- lambdaClassDefs(pkg) = lambdaClassDef :: lambdaClassDefs(pkg)
-
- super.transform(newExpr)
- // when we encounter a template (basically the thing that holds body of a class/trait)
- // we need to updated it to include newly created accessor methods after transforming it
- case Template(_, _, _) =>
- try {
- // during this call accessorMethods will be populated from the Function case
- val Template(parents, self, body) = super.transform(tree)
- Template(parents, self, body ++ accessorMethods)
- } finally accessorMethods.clear()
+ transformFunction(fun) match {
+ case DelambdafyAnonClass(lambdaClassDef, newExpr) =>
+ // a lambda beccomes a new class, an instantiation expression
+ val pkg = lambdaClassDef.symbol.owner
+
+ // we'll add the lambda class to the package later
+ lambdaClassDefs(pkg) = lambdaClassDef :: lambdaClassDefs(pkg)
+
+ super.transform(newExpr)
+ }
case _ => super.transform(tree)
}
@@ -120,8 +108,7 @@ abstract class Delambdafy extends Transform with TypingTransformers with ast.Tre
private def optionSymbol(sym: Symbol): Option[Symbol] = if (sym.exists) Some(sym) else None
- // turns a lambda into a new class def, a New expression instantiating that class, and an
- // accessor method fo the body of the lambda
+ // turns a lambda into a new class def, a New expression instantiating that class
private def transformFunction(originalFunction: Function): TransformedFunction = {
val functionTpe = originalFunction.tpe
val targs = functionTpe.typeArgs
@@ -132,46 +119,16 @@ abstract class Delambdafy extends Transform with TypingTransformers with ast.Tre
// passed into the constructor of the anonymous function class
val captures = FreeVarTraverser.freeVarsOf(originalFunction)
- /**
- * Creates the apply method for the anonymous subclass of FunctionN
- */
- def createAccessorMethod(thisProxy: Symbol, fun: Function): DefDef = {
- val target = targetMethod(fun)
- if (!thisProxy.exists) {
- target setFlag STATIC
- }
- val params = ((optionSymbol(thisProxy) map {proxy:Symbol => ValDef(proxy)}) ++ (target.paramss.flatten map ValDef.apply)).toList
-
- val methSym = oldClass.newMethod(unit.freshTermName(nme.accessor.toString() + "$"), target.pos, FINAL | BRIDGE | SYNTHETIC | PROTECTED | STATIC)
+ val target = targetMethod(originalFunction)
+ target.makeNotPrivate(target.owner)
+ if (!thisReferringMethods.contains(target))
+ target setFlag STATIC
- val paramSyms = params map {param => methSym.newSyntheticValueParam(param.symbol.tpe, param.name) }
-
- params zip paramSyms foreach { case (valdef, sym) => valdef.symbol = sym }
- params foreach (_.symbol.owner = methSym)
-
- val methodType = MethodType(paramSyms, restpe)
- methSym setInfo methodType
-
- oldClass.info.decls enter methSym
-
- val body = localTyper.typed {
- val newTarget = Select(if (thisProxy.exists) gen.mkAttributedRef(paramSyms(0)) else gen.mkAttributedThis(oldClass), target)
- val newParams = paramSyms drop (if (thisProxy.exists) 1 else 0) map Ident
- Apply(newTarget, newParams)
- } setPos fun.pos
- val methDef = DefDef(methSym, List(params), body)
-
- // Have to repack the type to avoid mismatches when existentials
- // appear in the result - see SI-4869.
- // TODO probably don't need packedType
- methDef.tpt setType localTyper.packedType(body, methSym)
- methDef
- }
/**
* Creates the apply method for the anonymous subclass of FunctionN
*/
- def createApplyMethod(newClass: Symbol, fun: Function, accessor: DefDef, thisProxy: Symbol): DefDef = {
+ def createApplyMethod(newClass: Symbol, fun: Function, thisProxy: Symbol): DefDef = {
val methSym = newClass.newMethod(nme.apply, fun.pos, FINAL | SYNTHETIC)
val params = fun.vparams map (_.duplicate)
@@ -187,8 +144,12 @@ abstract class Delambdafy extends Transform with TypingTransformers with ast.Tre
newClass.info.decls enter methSym
val Apply(_, oldParams) = fun.body
+ val qual = if (thisProxy.exists)
+ Select(gen.mkAttributedThis(newClass), thisProxy)
+ else
+ gen.mkAttributedThis(oldClass) // sort of a lie, EmptyTree.<static method> would be more honest, but the backend chokes on that.
- val body = localTyper typed Apply(Select(gen.mkAttributedThis(oldClass), accessor.symbol), (optionSymbol(thisProxy) map {tp => Select(gen.mkAttributedThis(newClass), tp)}).toList ++ oldParams)
+ val body = localTyper typed Apply(Select(qual, target), oldParams)
body.substituteSymbols(fun.vparams map (_.symbol), params map (_.symbol))
body changeOwner (fun.symbol -> methSym)
@@ -271,18 +232,16 @@ abstract class Delambdafy extends Transform with TypingTransformers with ast.Tre
// the Optional proxy that will hold a reference to the 'this'
// object used by the lambda, if any. NoSymbol if there is no this proxy
val thisProxy = {
- val target = targetMethod(originalFunction)
- if (thisReferringMethods contains target) {
+ if (target.hasFlag(STATIC))
+ NoSymbol
+ else {
val sym = lambdaClass.newVariable(nme.FAKE_LOCAL_THIS, originalFunction.pos, SYNTHETIC)
- sym.info = oldClass.tpe
- sym
- } else NoSymbol
+ sym.setInfo(oldClass.tpe)
+ }
}
val decapturify = new DeCapturifyTransformer(captureProxies2, unit, oldClass, lambdaClass, originalFunction.symbol.pos, thisProxy)
- val accessorMethod = createAccessorMethod(thisProxy, originalFunction)
-
val decapturedFunction = decapturify.transform(originalFunction).asInstanceOf[Function]
val members = (optionSymbol(thisProxy).toList ++ (captureProxies2 map (_._2))) map {member =>
@@ -294,7 +253,7 @@ abstract class Delambdafy extends Transform with TypingTransformers with ast.Tre
val constr = createConstructor(lambdaClass, members)
// apply method with same arguments and return type as original lambda.
- val applyMethodDef = createApplyMethod(lambdaClass, decapturedFunction, accessorMethod, thisProxy)
+ val applyMethodDef = createApplyMethod(lambdaClass, decapturedFunction, thisProxy)
val bridgeMethod = createBridgeMethod(lambdaClass, originalFunction, applyMethodDef)
@@ -312,10 +271,10 @@ abstract class Delambdafy extends Transform with TypingTransformers with ast.Tre
val body = members ++ List(constr, applyMethodDef) ++ bridgeMethod
// TODO if member fields are private this complains that they're not accessible
- (localTyper.typedPos(decapturedFunction.pos)(ClassDef(lambdaClass, body)).asInstanceOf[ClassDef], thisProxy, accessorMethod)
+ (localTyper.typedPos(decapturedFunction.pos)(ClassDef(lambdaClass, body)).asInstanceOf[ClassDef], thisProxy)
}
- val (anonymousClassDef, thisProxy, accessorMethod) = makeAnonymousClass
+ val (anonymousClassDef, thisProxy) = makeAnonymousClass
pkg.info.decls enter anonymousClassDef.symbol
@@ -327,7 +286,7 @@ abstract class Delambdafy extends Transform with TypingTransformers with ast.Tre
val typedNewStat = localTyper.typedPos(originalFunction.pos)(newStat)
- TransformedFunction(anonymousClassDef, typedNewStat, accessorMethod)
+ DelambdafyAnonClass(anonymousClassDef, typedNewStat)
}
/**
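To make the updated scaladoc concrete, here is a hand-written approximation of the shape Delambdafy now produces for a lambda like `x => x + captured`; all names are illustrative, not actual compiler output:

    import scala.runtime.AbstractFunction1

    // The captured environment arrives through the constructor, and apply calls
    // the lifted target method directly (no accessor/static forwarder any more).
    class AnonFunSketch(captured: Int) extends AbstractFunction1[Int, Int] {
      def apply(x: Int): Int = TargetHolder.targetMethod(x, captured)
    }

    object TargetHolder {
      // stand-in for the method UnCurry lifted the lambda body into
      def targetMethod(x: Int, captured: Int): Int = x + captured
      def mkClosure(captured: Int): Int => Int = new AnonFunSketch(captured)
    }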
diff --git a/src/compiler/scala/tools/nsc/transform/Erasure.scala b/src/compiler/scala/tools/nsc/transform/Erasure.scala
index 2f0afe79b6..84be9c6e95 100644
--- a/src/compiler/scala/tools/nsc/transform/Erasure.scala
+++ b/src/compiler/scala/tools/nsc/transform/Erasure.scala
@@ -188,14 +188,16 @@ abstract class Erasure extends AddInterfaces
/* Drop redundant types (ones which are implemented by some other parent) from the immediate parents.
* This is important on Android because there is otherwise an interface explosion.
*/
- def minimizeParents(parents: List[Type]): List[Type] = {
- var rest = parents
- var leaves = collection.mutable.ListBuffer.empty[Type]
+ def minimizeParents(parents: List[Type]): List[Type] = if (parents.isEmpty) parents else {
+ def isInterfaceOrTrait(sym: Symbol) = sym.isInterface || sym.isTrait
+
+ var rest = parents.tail
+ var leaves = collection.mutable.ListBuffer.empty[Type] += parents.head
while(rest.nonEmpty) {
val candidate = rest.head
val nonLeaf = leaves exists { t => t.typeSymbol isSubClass candidate.typeSymbol }
if(!nonLeaf) {
- leaves = leaves filterNot { t => candidate.typeSymbol isSubClass t.typeSymbol }
+ leaves = leaves filterNot { t => isInterfaceOrTrait(t.typeSymbol) && (candidate.typeSymbol isSubClass t.typeSymbol) }
leaves += candidate
}
rest = rest.tail
diff --git a/src/compiler/scala/tools/nsc/transform/LambdaLift.scala b/src/compiler/scala/tools/nsc/transform/LambdaLift.scala
index 5e2fe21eec..d1be1558b9 100644
--- a/src/compiler/scala/tools/nsc/transform/LambdaLift.scala
+++ b/src/compiler/scala/tools/nsc/transform/LambdaLift.scala
@@ -376,7 +376,7 @@ abstract class LambdaLift extends InfoTransform {
private def addFreeArgs(pos: Position, sym: Symbol, args: List[Tree]) = {
free get sym match {
- case Some(fvs) => args ++ (fvs.toList map (fv => atPos(pos)(proxyRef(fv))))
+ case Some(fvs) => addFree(sym, free = fvs.toList map (fv => atPos(pos)(proxyRef(fv))), original = args)
case _ => args
}
}
@@ -388,9 +388,9 @@ abstract class LambdaLift extends InfoTransform {
case DefDef(_, _, _, vparams :: _, _, _) =>
val addParams = cloneSymbols(ps).map(_.setFlag(PARAM))
sym.updateInfo(
- lifted(MethodType(sym.info.params ::: addParams, sym.info.resultType)))
+ lifted(MethodType(addFree(sym, free = addParams, original = sym.info.params), sym.info.resultType)))
- copyDefDef(tree)(vparamss = List(vparams ++ freeParams))
+ copyDefDef(tree)(vparamss = List(addFree(sym, free = freeParams, original = vparams)))
case ClassDef(_, _, _, _) =>
// SI-6231
// Disabled attempt to to add getters to freeParams
@@ -571,4 +571,12 @@ abstract class LambdaLift extends InfoTransform {
}
} // class LambdaLifter
+ private def addFree[A](sym: Symbol, free: List[A], original: List[A]): List[A] = {
+ val prependFree = (
+ !sym.isConstructor // this condition is redundant for now. It will be needed if we remove the second condition in 2.12.x
+ && (settings.Ydelambdafy.value == "method" && sym.isDelambdafyTarget) // SI-8359 Makes the lambda body a viable as the target MethodHandle for a call to LambdaMetafactory
+ )
+ if (prependFree) free ::: original
+ else original ::: free
+ }
}
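A sketch of the parameter ordering addFree chooses, with made-up method names; per the SI-8359 comment, free (captured) values are prepended only for delambdafy target methods so the lifted body can serve directly as the MethodHandle target:

    object AddFreeOrderingSketch {
      // delambdafy target: captured value prepended to the original parameters
      def anonfunLifted(captured: Int, x: Int): Int = x + captured
      // ordinary lifted method: captured value appended after the originals
      def ordinaryLifted(x: Int, captured: Int): Int = x + captured
    }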
diff --git a/src/compiler/scala/tools/nsc/transform/UnCurry.scala b/src/compiler/scala/tools/nsc/transform/UnCurry.scala
index 3330fbcae2..6484d96a52 100644
--- a/src/compiler/scala/tools/nsc/transform/UnCurry.scala
+++ b/src/compiler/scala/tools/nsc/transform/UnCurry.scala
@@ -71,6 +71,14 @@ abstract class UnCurry extends InfoTransform
private val noApply = mutable.HashSet[Tree]()
private val newMembers = mutable.Map[Symbol, mutable.Buffer[Tree]]()
+ private lazy val forceSpecializationInfoTransformOfFunctionN: Unit = {
+ if (currentRun.specializePhase != NoPhase) { // be robust in case of -Ystop-after:uncurry
+ exitingSpecialize {
+ FunctionClass.seq.foreach(cls => cls.info)
+ }
+ }
+ }
+
/** Add a new synthetic member for `currentOwner` */
private def addNewMember(t: Tree): Unit =
newMembers.getOrElseUpdate(currentOwner, mutable.Buffer()) += t
@@ -221,8 +229,16 @@ abstract class UnCurry extends InfoTransform
def mkMethod(owner: Symbol, name: TermName, additionalFlags: FlagSet = NoFlags): DefDef =
gen.mkMethodFromFunction(localTyper)(fun, owner, name, additionalFlags)
- val canUseDelamdafyMethod = (inConstructorFlag == 0) // Avoiding synthesizing code prone to SI-6666, SI-8363 by using old-style lambda translation
+ def isSpecialized = {
+ forceSpecializationInfoTransformOfFunctionN
+ val specialized = specializeTypes.specializedType(fun.tpe)
+ !(specialized =:= fun.tpe)
+ }
+ def canUseDelamdafyMethod = (
+ (inConstructorFlag == 0) // Avoiding synthesizing code prone to SI-6666, SI-8363 by using old-style lambda translation
+ && !isSpecialized // DelambdafyTransformer currently only emits generic FunctionN-s, use the old style in the meantime
+ )
if (inlineFunctionExpansion || !canUseDelamdafyMethod) {
val parents = addSerializable(abstractFunctionForFunctionType(fun.tpe))
val anonClass = fun.symbol.owner newAnonymousFunctionClass(fun.pos, inConstructorFlag) addAnnotation SerialVersionUIDAnnotation
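To make the new guard concrete: the classification comments below are assumptions drawn from isSpecialized and canUseDelamdafyMethod above, not compiler output. Lambdas whose function type has a specialized variant keep the old anonymous-class expansion for now; purely generic ones are eligible for the method-based translation:

    object DelambdafyGuardSketch {
      val keepsAnonClass: Int => Int       = _ + 1      // Function1[Int, Int] is specialized
      val usesDelambdafy: String => String = _.reverse  // generic Function1 only
    }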
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala b/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala
index 4ea569c8e6..cef22d7d6b 100644
--- a/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala
+++ b/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala
@@ -10,6 +10,7 @@ package tools.nsc.transform.patmat
import scala.language.postfixOps
import scala.collection.mutable
import scala.reflect.internal.util.{NoPosition, Position, Statistics, HashSet}
+import scala.tools.nsc.Global
trait Logic extends Debugging {
import PatternMatchingStats._
@@ -90,6 +91,8 @@ trait Logic extends Debugging {
// compute the domain and return it (call registerNull first!)
def domainSyms: Option[Set[Sym]]
+ def groupedDomains: List[Set[Sym]]
+
// the symbol for this variable being equal to its statically known type
// (only available if registerEquality has been called for that type before)
def symForStaticTp: Option[Sym]
@@ -118,6 +121,9 @@ trait Logic extends Debugging {
final case class Not(a: Prop) extends Prop
+ // mutually exclusive (i.e., not more than one symbol is set)
+ final case class AtMostOne(ops: List[Sym]) extends Prop
+
case object True extends Prop
case object False extends Prop
@@ -192,7 +198,8 @@ trait Logic extends Debugging {
case Not(negated) => negationNormalFormNot(negated)
case True
| False
- | (_: Sym) => p
+ | (_: Sym)
+ | (_: AtMostOne) => p
}
def simplifyProp(p: Prop): Prop = p match {
@@ -252,6 +259,7 @@ trait Logic extends Debugging {
case Not(a) => apply(a)
case Eq(a, b) => applyVar(a); applyConst(b)
case s: Sym => applySymbol(s)
+ case AtMostOne(ops) => ops.foreach(applySymbol)
case _ =>
}
def applyVar(x: Var): Unit = {}
@@ -374,7 +382,23 @@ trait Logic extends Debugging {
// when sym is true, what must hold...
implied foreach (impliedSym => addAxiom(Or(Not(sym), impliedSym)))
// ... and what must not?
- excluded foreach (excludedSym => addAxiom(Or(Not(sym), Not(excludedSym))))
+ excluded foreach {
+ excludedSym =>
+ val related = Set(sym, excludedSym)
+ val exclusive = v.groupedDomains.exists {
+ domain => related subsetOf domain.toSet
+ }
+
+ // TODO: populate `v.exclusiveDomains` with `Set`s from the start, and optimize to:
+ // val exclusive = v.exclusiveDomains.exists { inDomain => inDomain(sym) && inDomain(excludedSym) }
+ if (!exclusive)
+ addAxiom(Or(Not(sym), Not(excludedSym)))
+ }
+ }
+
+ // all symbols in a domain are mutually exclusive
+ v.groupedDomains.foreach {
+ syms => if (syms.size > 1) addAxiom(AtMostOne(syms.toList))
}
}
@@ -449,7 +473,9 @@ trait ScalaLogic extends Interface with Logic with TreeAndTypeAnalysis {
// once we go to run-time checks (on Const's), convert them to checkable types
// TODO: there seems to be bug for singleton domains (variable does not show up in model)
lazy val domain: Option[Set[Const]] = {
- val subConsts = enumerateSubtypes(staticTp).map{ tps =>
+ val subConsts =
+ enumerateSubtypes(staticTp, grouped = false)
+ .headOption.map { tps =>
tps.toSet[Type].map{ tp =>
val domainC = TypeConst(tp)
registerEquality(domainC)
@@ -467,6 +493,15 @@ trait ScalaLogic extends Interface with Logic with TreeAndTypeAnalysis {
observed(); allConsts
}
+ lazy val groupedDomains: List[Set[Sym]] = {
+ val subtypes = enumerateSubtypes(staticTp, grouped = true)
+ subtypes.map {
+ subTypes =>
+ val syms = subTypes.flatMap(tpe => symForEqualsTo.get(TypeConst(tpe))).toSet
+ if (mayBeNull) syms + symForEqualsTo(NullConst) else syms
+ }.filter(_.nonEmpty)
+ }
+
// populate equalitySyms
// don't care about the result, but want only one fresh symbol per distinct constant c
def registerEquality(c: Const): Unit = {ensureCanModify(); symForEqualsTo getOrElseUpdate(c, Sym(this, c))}
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala
index cecb5c37be..a11906ace1 100644
--- a/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala
+++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala
@@ -95,58 +95,84 @@ trait TreeAndTypeAnalysis extends Debugging {
val typer: Typer
// TODO: domain of other feasibly enumerable built-in types (char?)
- def enumerateSubtypes(tp: Type): Option[List[Type]] =
+ def enumerateSubtypes(tp: Type, grouped: Boolean): List[List[Type]] =
tp.typeSymbol match {
// TODO case _ if tp.isTupleType => // recurse into component types?
- case UnitClass =>
- Some(List(UnitTpe))
- case BooleanClass =>
- Some(ConstantTrue :: ConstantFalse :: Nil)
+ case UnitClass if !grouped =>
+ List(List(UnitTpe))
+ case BooleanClass if !grouped =>
+ List(ConstantTrue :: ConstantFalse :: Nil)
// TODO case _ if tp.isTupleType => // recurse into component types
- case modSym: ModuleClassSymbol =>
- Some(List(tp))
+ case modSym: ModuleClassSymbol if !grouped =>
+ List(List(tp))
case sym: RefinementClassSymbol =>
- val parentSubtypes: List[Option[List[Type]]] = tp.parents.map(parent => enumerateSubtypes(parent))
- if (parentSubtypes exists (_.isDefined))
+ val parentSubtypes = tp.parents.flatMap(parent => enumerateSubtypes(parent, grouped))
+ if (parentSubtypes exists (_.nonEmpty)) {
// If any of the parents is enumerable, then the refinement type is enumerable.
- Some(
- // We must only include subtypes of the parents that conform to `tp`.
- // See neg/virtpatmat_exhaust_compound.scala for an example.
- parentSubtypes flatMap (_.getOrElse(Nil)) filter (_ <:< tp)
- )
- else None
+ // We must only include subtypes of the parents that conform to `tp`.
+ // See neg/virtpatmat_exhaust_compound.scala for an example.
+ parentSubtypes map (_.filter(_ <:< tp))
+ }
+ else Nil
// make sure it's not a primitive, else (5: Byte) match { case 5 => ... } sees no Byte
case sym if sym.isSealed =>
- val subclasses = debug.patmatResult(s"enum $sym sealed, subclasses")(
- // symbols which are both sealed and abstract need not be covered themselves, because
- // all of their children must be and they cannot otherwise be created.
- sym.sealedDescendants.toList
- sortBy (_.sealedSortName)
- filterNot (x => x.isSealed && x.isAbstractClass && !isPrimitiveValueClass(x))
- )
val tpApprox = typer.infer.approximateAbstracts(tp)
val pre = tpApprox.prefix
- Some(debug.patmatResult(s"enum sealed tp=$tp, tpApprox=$tpApprox as") {
- // valid subtypes are turned into checkable types, as we are entering the realm of the dynamic
- subclasses flatMap { sym =>
+ def filterChildren(children: List[Symbol]): List[Type] = {
+ children flatMap { sym =>
// have to filter out children which cannot match: see ticket #3683 for an example
// compare to the fully known type `tp` (modulo abstract types),
// so that we can rule out stuff like: sealed trait X[T]; class XInt extends X[Int] --> XInt not valid when enumerating X[String]
// however, must approximate abstract types in
- val memberType = nestedMemberType(sym, pre, tpApprox.typeSymbol.owner)
- val subTp = appliedType(memberType, sym.typeParams.map(_ => WildcardType))
+ val memberType = nestedMemberType(sym, pre, tpApprox.typeSymbol.owner)
+ val subTp = appliedType(memberType, sym.typeParams.map(_ => WildcardType))
val subTpApprox = typer.infer.approximateAbstracts(subTp) // TODO: needed?
// debug.patmat("subtp"+(subTpApprox <:< tpApprox, subTpApprox, tpApprox))
if (subTpApprox <:< tpApprox) Some(checkableType(subTp))
else None
}
- })
+ }
+
+ if(grouped) {
+ def enumerateChildren(sym: Symbol) = {
+ sym.children.toList
+ .sortBy(_.sealedSortName)
+ .filterNot(x => x.isSealed && x.isAbstractClass && !isPrimitiveValueClass(x))
+ }
+
+ // enumerate only direct subclasses,
+ // subclasses of subclasses are enumerated in the next iteration
+ // and added to a new group
+ def groupChildren(wl: List[Symbol],
+ acc: List[List[Type]]): List[List[Type]] = wl match {
+ case hd :: tl =>
+ val children = enumerateChildren(hd)
+ groupChildren(tl ++ children, acc :+ filterChildren(children))
+ case Nil => acc
+ }
+
+ groupChildren(sym :: Nil, Nil)
+ } else {
+ val subclasses = debug.patmatResult(s"enum $sym sealed, subclasses")(
+ // symbols which are both sealed and abstract need not be covered themselves, because
+ // all of their children must be and they cannot otherwise be created.
+ sym.sealedDescendants.toList
+ sortBy (_.sealedSortName)
+ filterNot (x => x.isSealed && x.isAbstractClass && !isPrimitiveValueClass(x))
+ )
+
+ List(debug.patmatResult(s"enum sealed tp=$tp, tpApprox=$tpApprox as") {
+ // valid subtypes are turned into checkable types, as we are entering the realm of the dynamic
+ filterChildren(subclasses)
+ })
+ }
+
case sym =>
debug.patmat("enum unsealed "+ ((tp, sym, sym.isSealed, isPrimitiveValueClass(sym))))
- None
+ Nil
}
// approximate a type to the static type that is fully checkable at run time,
@@ -176,7 +202,7 @@ trait TreeAndTypeAnalysis extends Debugging {
def uncheckableType(tp: Type): Boolean = {
val checkable = (
(isTupleType(tp) && tupleComponents(tp).exists(tp => !uncheckableType(tp)))
- || enumerateSubtypes(tp).nonEmpty)
+ || enumerateSubtypes(tp, grouped = false).nonEmpty)
// if (!checkable) debug.patmat("deemed uncheckable: "+ tp)
!checkable
}
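A small sealed hierarchy to read the grouped enumeration against; the results sketched in the comments are an assumption based on groupChildren above, not verified compiler output:

    sealed trait Fruit
    case object Apple  extends Fruit
    case object Orange extends Fruit

    // enumerateSubtypes(Fruit, grouped = false) ~> List(List(Apple.type, Orange.type))
    // enumerateSubtypes(Fruit, grouped = true)  ~> one group per level of direct children,
    //   so still List(List(Apple.type, Orange.type)) here; deeper sealed levels would
    //   contribute additional groups.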
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/Solving.scala b/src/compiler/scala/tools/nsc/transform/patmat/Solving.scala
index c43f1b6209..9710c5c66b 100644
--- a/src/compiler/scala/tools/nsc/transform/patmat/Solving.scala
+++ b/src/compiler/scala/tools/nsc/transform/patmat/Solving.scala
@@ -65,11 +65,22 @@ trait Solving extends Logic {
def size = symbols.size
}
+ def cnfString(f: Array[Clause]): String
+
final case class Solvable(cnf: Cnf, symbolMapping: SymbolMapping) {
def ++(other: Solvable) = {
require(this.symbolMapping eq other.symbolMapping)
Solvable(cnf ++ other.cnf, symbolMapping)
}
+
+ override def toString: String = {
+ "Solvable\nLiterals:\n" +
+ (for {
+ (lit, sym) <- symbolMapping.symForVar.toSeq.sortBy(_._1)
+ } yield {
+ s"$lit -> $sym"
+ }).mkString("\n") + "Cnf:\n" + cnfString(cnf)
+ }
}
trait CnfBuilder {
@@ -140,20 +151,23 @@ trait Solving extends Logic {
def apply(p: Prop): Solvable = {
- def convert(p: Prop): Lit = {
+ def convert(p: Prop): Option[Lit] = {
p match {
case And(fv) =>
- and(fv.map(convert))
+ Some(and(fv.flatMap(convert)))
case Or(fv) =>
- or(fv.map(convert))
+ Some(or(fv.flatMap(convert)))
case Not(a) =>
- not(convert(a))
+ convert(a).map(not)
case sym: Sym =>
- convertSym(sym)
+ Some(convertSym(sym))
case True =>
- constTrue
+ Some(constTrue)
case False =>
- constFalse
+ Some(constFalse)
+ case AtMostOne(ops) =>
+ atMostOne(ops)
+ None
case _: Eq =>
throw new MatchError(p)
}
@@ -199,8 +213,57 @@ trait Solving extends Logic {
// no need for auxiliary variable
def not(a: Lit): Lit = -a
+ /**
+ * This encoding adds 3n-4 variables auxiliary variables
+ * to encode that at most 1 symbol can be set.
+ * See also "Towards an Optimal CNF Encoding of Boolean Cardinality Constraints"
+ * http://www.carstensinz.de/papers/CP-2005.pdf
+ */
+ def atMostOne(ops: List[Sym]) {
+ (ops: @unchecked) match {
+ case hd :: Nil => convertSym(hd)
+ case x1 :: tail =>
+ // sequential counter: 3n-4 clauses
+ // pairwise encoding: n*(n-1)/2 clauses
+ // thus pays off only if n > 5
+ if (ops.lengthCompare(5) > 0) {
+
+ @inline
+ def /\(a: Lit, b: Lit) = addClauseProcessed(clause(a, b))
+
+ val (mid, xn :: Nil) = tail.splitAt(tail.size - 1)
+
+ // 1 <= x1,...,xn <==>
+ //
+ // (!x1 \/ s1) /\ (!xn \/ !sn-1) /\
+ //
+ // /\
+ // / \ (!xi \/ si) /\ (!si-1 \/ si) /\ (!xi \/ !si-1)
+ // 1 < i < n
+ val s1 = newLiteral()
+ /\(-convertSym(x1), s1)
+ val snMinus = mid.foldLeft(s1) {
+ case (siMinus, sym) =>
+ val xi = convertSym(sym)
+ val si = newLiteral()
+ /\(-xi, si)
+ /\(-siMinus, si)
+ /\(-xi, -siMinus)
+ si
+ }
+ /\(-convertSym(xn), -snMinus)
+ } else {
+ ops.map(convertSym).combinations(2).foreach {
+ case a :: b :: Nil =>
+ addClauseProcessed(clause(-a, -b))
+ case _ =>
+ }
+ }
+ }
+ }
+
// add intermediate variable since we want the formula to be SAT!
- addClauseProcessed(clause(convert(p)))
+ addClauseProcessed(convert(p).toSet)
Solvable(buildCnf, symbolMapping)
}
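A quick arithmetic check of the clause counts quoted in the comment above (pairwise n*(n-1)/2 versus sequential counter 3n-4); the crossover at n = 6 is what the `lengthCompare(5) > 0` guard encodes. Standalone sketch, not compiler code:

    object AtMostOneCost {
      def pairwise(n: Int): Int   = n * (n - 1) / 2
      def sequential(n: Int): Int = 3 * n - 4

      def main(args: Array[String]): Unit =
        (2 to 8).foreach { n =>
          println(s"n=$n  pairwise=${pairwise(n)}  sequential=${sequential(n)}")
        }
      // n=5: 10 vs 11 (pairwise cheaper), n=6: 15 vs 14 (sequential counter cheaper)
    }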
diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
index 2c8cc897f7..010b924ba0 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
@@ -4868,10 +4868,11 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
(// this -> Foo.this
if (sym.isThisSym)
typed1(This(sym.owner) setPos tree.pos, mode, pt)
- // Inferring classOf type parameter from expected type. Otherwise an
- // actual call to the stubbed classOf method is generated, returning null.
- else if (isPredefClassOf(sym) && pt.typeSymbol == ClassClass && pt.typeArgs.nonEmpty)
- typedClassOf(tree, TypeTree(pt.typeArgs.head))
+ else if (isPredefClassOf(sym) && pt.typeSymbol == ClassClass && pt.typeArgs.nonEmpty) {
+ // Inferring classOf type parameter from expected type. Otherwise an
+ // actual call to the stubbed classOf method is generated, returning null.
+ typedClassOf(tree, TypeTree(pt.typeArgs.head).setPos(tree.pos.focus))
+ }
else {
val pre1 = if (sym.isTopLevel) sym.owner.thisType else if (qual == EmptyTree) NoPrefix else qual.tpe
val tree1 = if (qual == EmptyTree) tree else atPos(tree.pos)(Select(atPos(tree.pos.focusStart)(qual), name))
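In user-code terms, the special case above is what lets a bare classOf pick up its type argument from the expected type; a one-line example (assuming the usual Predef import):

    object ClassOfInference {
      val c: Class[String] = classOf   // inferred as classOf[String] from the expected type
    }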
diff --git a/src/partest-extras/scala/tools/partest/ParserTest.scala b/src/partest-extras/scala/tools/partest/ParserTest.scala
new file mode 100644
index 0000000000..e4c92e3dc3
--- /dev/null
+++ b/src/partest-extras/scala/tools/partest/ParserTest.scala
@@ -0,0 +1,21 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2014 LAMP/EPFL
+ */
+
+package scala.tools.partest
+
+/** A class for testing parser output.
+ * Just supply the `code` and update the check file.
+ */
+abstract class ParserTest extends DirectTest {
+
+ override def extraSettings: String = "-usejavacp -Ystop-after:parser -Xprint:parser"
+
+ override def show(): Unit = {
+ // redirect err to out, for logging
+ val prevErr = System.err
+ System.setErr(System.out)
+ compile()
+ System.setErr(prevErr)
+ }
+}
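A hypothetical test using the new harness; the file path and code value are made up, only the `code` member comes from DirectTest's contract:

    // e.g. test/files/run/xml-parse/Test.scala (hypothetical)
    import scala.tools.partest.ParserTest

    object Test extends ParserTest {
      override def code = """class C { def xml = <a>hello <b/> world</a> }"""
    }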
diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala
index ea2a3df7d5..6adfb4b104 100644
--- a/src/reflect/scala/reflect/internal/Symbols.scala
+++ b/src/reflect/scala/reflect/internal/Symbols.scala
@@ -794,6 +794,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
final def isAnonymousFunction = isSynthetic && (name containsName tpnme.ANON_FUN_NAME)
final def isDelambdafyFunction = isSynthetic && (name containsName tpnme.DELAMBDAFY_LAMBDA_CLASS_NAME)
+ final def isDelambdafyTarget = isSynthetic && isMethod && (name containsName tpnme.ANON_FUN_NAME)
final def isDefinedInPackage = effectiveOwner.isPackageClass
final def needsFlatClasses = phase.flatClasses && rawowner != NoSymbol && !rawowner.isPackageClass
diff --git a/src/repl/scala/tools/nsc/interpreter/JavapClass.scala b/src/repl/scala/tools/nsc/interpreter/JavapClass.scala
index c80b94bf89..1ccade2172 100644
--- a/src/repl/scala/tools/nsc/interpreter/JavapClass.scala
+++ b/src/repl/scala/tools/nsc/interpreter/JavapClass.scala
@@ -8,6 +8,7 @@ package tools.nsc
package interpreter
import java.lang.{ ClassLoader => JavaClassLoader, Iterable => JIterable }
+import scala.tools.asm.Opcodes
import scala.tools.nsc.util.ScalaClassLoader
import java.io.{ ByteArrayInputStream, CharArrayWriter, FileNotFoundException, PrintWriter, StringWriter, Writer }
import java.util.{ Locale }
@@ -758,32 +759,19 @@ object JavapClass {
import scala.tools.asm.ClassReader
import scala.tools.asm.Opcodes.INVOKESTATIC
import scala.tools.asm.tree.{ ClassNode, MethodInsnNode }
- // the accessor methods invoked statically by the apply of the given closure class
- def accesses(s: String): Seq[(String, String)] = {
- val accessor = """accessor\$\d+""".r
+ def callees(s: String): List[(String, String)] = {
loader classReader s withMethods { ms =>
- ms filter (_.name == "apply") flatMap (_.instructions.toArray.collect {
- case i: MethodInsnNode if i.getOpcode == INVOKESTATIC && when(i.name) { case accessor(_*) => true } => (i.owner, i.name)
- })
+ val nonBridgeApplyMethods = ms filter (_.name == "apply") filter (n => (n.access & Opcodes.ACC_BRIDGE) == 0)
+ val instructions = nonBridgeApplyMethods flatMap (_.instructions.toArray)
+ instructions.collect {
+ case i: MethodInsnNode => (i.owner, i.name)
+ }.toList
}
}
- // get the k.$anonfun for the accessor k.m
- def anonOf(k: String, m: String): String = {
- val res =
- loader classReader k withMethods { ms =>
- ms filter (_.name == m) flatMap (_.instructions.toArray.collect {
- case i: MethodInsnNode if i.getOpcode == INVOKESTATIC && i.name.startsWith("$anonfun") => i.name
- })
- }
- assert(res.size == 1)
- res.head
- }
- // the lambdas invoke accessors that call the anonfuns of interest. Filter k on the k#$anonfuns.
- val ack = accesses(lambda)
- assert(ack.size == 1) // There can be only one.
- ack.head match {
- case (k, _) if target.isModule && !(k endsWith "$") => None
- case (k, m) => Some(s"${k}#${anonOf(k, m)}")
+ callees(lambda) match {
+ case (k, _) :: Nil if target.isModule && !(k endsWith "$") => None
+ case (k, m) :: _ => Some(s"${k}#${m}")
+ case _ => None
}
}
/** Translate the supplied targets to patterns for anonfuns.