From 1e7a990e028dffc5fa8ac07a02ff3f79bde24749 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Wed, 4 Mar 2015 10:53:04 +1000 Subject: SI-8359 Adjust parameter order of accessor method in Delambdafy Under `-Ydelambdafy:method`, a public, static accessor method is created to expose the private method containing the body of the lambda. Currently this accessor method has its parameters in the same order as those of the lambda body method. What is this order? There are three categories of parameters: 1. lambda parameters 2. captured parameters (added by lambdalift) 3. self parameters (added to lambda bodies that end up in trait impl classes by mixin, and added unconditionally to the static accessor method.) These are currently emitted in order #3, #1, #2. Here are examples of the current behaviour: BEFORE (trait): ``` % cat sandbox/test.scala && scalac-hash v2.11.5 -Ydelambdafy:method sandbox/test.scala && javap -private -classpath . 'Test$class' trait Member; class Capture; trait LambdaParam trait Test { def member: Member def foo { val local = new Capture (arg: LambdaParam) => "" + arg + member + local } } Compiled from "test.scala" public abstract class Test$class { public static void foo(Test); private static final java.lang.String $anonfun$1(Test, LambdaParam, Capture); public static void $init$(Test); public static final java.lang.String accessor$1(Test, LambdaParam, Capture); } ``` BEFORE (class): ``` % cat sandbox/test.scala && scalac-hash v2.11.5 -Ydelambdafy:method sandbox/test.scala && javap -private -classpath . Test trait Member; class Capture; trait LambdaParam abstract class Test { def member: Member def foo { val local = new Capture (arg: LambdaParam) => "" + arg + member + local } } Compiled from "test.scala" public abstract class Test { public abstract Member member(); public void foo(); private final java.lang.String $anonfun$1(LambdaParam, Capture); public Test(); public static final java.lang.String accessor$1(Test, LambdaParam, Capture); } ``` Contrasting the class case with Java: ``` % cat sandbox/Test.java && javac -d . sandbox/Test.java && javap -private -classpath . Test public abstract class Test { public static class Member {}; public static class Capture {}; public static class LambaParam {}; public static interface I { public abstract Object c(LambaParam arg); } public abstract Member member(); public void test() { Capture local = new Capture(); I i1 = (LambaParam arg) -> "" + member() + local; } } Compiled from "Test.java" public abstract class Test { public Test(); public abstract Test$Member member(); public void test(); private java.lang.Object lambda$test$0(Test$Capture, Test$LambaParam); } ``` We can see that in Java 8 lambda parameters come after captures. If we want to use Java's LambdaMetafactory to spin up our anonymous FunctionN subclasses on the fly, our ordering must change. I can see three options for change: 1. Adjust `LambdaLift` to always prepend captured parameters, rather than appending them. I think we could leave `Mixin` as it is; it already prepends the self parameter. This would result in a parameter ordering, in terms of the list above: #3, #2, #1. 2. More conservatively, do this just for methods known to hold lambda bodies. This might avoid needlessly breaking code that has come to depend on our binary encoding. 3. Adjust the parameters of the accessor method only. The body of this method can permute params before calling the lambda body method. This commit implements option #2. 
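To make the ordering constraint concrete, here is a minimal, self-contained sketch (not part of this commit) of driving `LambdaMetafactory` by hand, condensed from the pending test added below. The object and method names (`CaptureOrderSketch`, `accessor`) are invented for illustration, it targets `java.util.function.Function` rather than a Scala `FunctionN`, and it needs Java 8 to run. The point is that the captured value is a parameter of `invokedType` and is therefore prepended to the implementation method's parameter list, so the accessor must accept captures before the lambda's own parameters:

```scala
import java.lang.invoke._
import java.util.function.{Function => JFunction}

object CaptureOrderSketch {
  // Stand-in for the generated static accessor: captured value first, then the lambda's parameter.
  def accessor(capture: String, arg: String): String = capture + " " + arg.reverse

  def main(args: Array[String]): Unit = {
    val lookup       = MethodHandles.lookup()
    val samType      = MethodType.methodType(classOf[AnyRef], classOf[AnyRef])          // apply, erased
    val instantiated = MethodType.methodType(classOf[String], classOf[String])          // apply, instantiated
    val implType     = MethodType.methodType(classOf[String], classOf[String], classOf[String])
    val invokedType  = MethodType.methodType(classOf[JFunction[_, _]], classOf[String]) // captures in, interface out

    // Relies on the static forwarder scalac emits for a top-level object.
    val target = lookup.findStatic(Class.forName("CaptureOrderSketch"), "accessor", implType)
    val site   = LambdaMetafactory.metafactory(lookup, "apply", invokedType, samType, target, instantiated)
    val f      = site.getTarget.invokeWithArguments("captured").asInstanceOf[JFunction[String, String]]
    println(f.apply("abc")) // prints: captured cba
  }
}
```

If the accessor instead took the lambda parameter first and a differently typed capture second, `metafactory` would reject the handle with a `LambdaConversionException`, which is why the accessor's parameter order matters.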
I also prototyped #1, and found it worked so long as I limited it to non-constructors, to sidestep the need to make corresponding changes elsewhere in the compiler to avoid the crasher shown in the enclosed test case, which was minimized from a bootstrap failure from an earlier version of this patch. We would need to defer option #1 to 2.12 in any case, as some of these lifted methods are made public by the optimizer, and we must leave the signatures alone to comply with MiMa. I've included a test that shows this all in action. However, that is currently disabled, as we don't have a partest category for tests that require Java 8. --- test/files/pos/t8359-closelim-crash.flags | 1 + test/files/pos/t8359-closelim-crash.scala | 23 ++++++++++ .../pending/run/delambdafy-lambdametafactory.scala | 50 ++++++++++++++++++++++ 3 files changed, 74 insertions(+) create mode 100644 test/files/pos/t8359-closelim-crash.flags create mode 100644 test/files/pos/t8359-closelim-crash.scala create mode 100644 test/pending/run/delambdafy-lambdametafactory.scala (limited to 'test') diff --git a/test/files/pos/t8359-closelim-crash.flags b/test/files/pos/t8359-closelim-crash.flags new file mode 100644 index 0000000000..49d036a887 --- /dev/null +++ b/test/files/pos/t8359-closelim-crash.flags @@ -0,0 +1 @@ +-optimize diff --git a/test/files/pos/t8359-closelim-crash.scala b/test/files/pos/t8359-closelim-crash.scala new file mode 100644 index 0000000000..1413694d10 --- /dev/null +++ b/test/files/pos/t8359-closelim-crash.scala @@ -0,0 +1,23 @@ +package test + +// This is a minimization of code that crashed the compiler during bootstrapping +// in the first iteration of https://github.com/scala/scala/pull/4373, the PR +// that adjusted the order of free and declared params in LambdaLift. + +// Was: +// java.lang.AssertionError: assertion failed: +// Record Record(<$anon: Function1>,Map(value a$1 -> Deref(LocalVar(value b)))) does not contain a field value b$1 +// at scala.tools.nsc.Global.assert(Global.scala:262) +// at scala.tools.nsc.backend.icode.analysis.CopyPropagation$copyLattice$State.getFieldNonRecordValue(CopyPropagation.scala:113) +// at scala.tools.nsc.backend.icode.analysis.CopyPropagation$copyLattice$State.getFieldNonRecordValue(CopyPropagation.scala:122) +// at scala.tools.nsc.backend.opt.ClosureElimination$ClosureElim$$anonfun$analyzeMethod$1$$anonfun$apply$2.replaceFieldAccess$1(ClosureElimination.scala:124) +class Typer { + def bar(a: Boolean, b: Boolean): Unit = { + @inline + def baz(): Unit = { + ((_: Any) => (Typer.this, a, b)).apply("") + } + ((_: Any) => baz()).apply("") + } +} + diff --git a/test/pending/run/delambdafy-lambdametafactory.scala b/test/pending/run/delambdafy-lambdametafactory.scala new file mode 100644 index 0000000000..daea8a39fe --- /dev/null +++ b/test/pending/run/delambdafy-lambdametafactory.scala @@ -0,0 +1,50 @@ +// +// Tests that the static accessor method for lambda bodies +// (generated under -Ydelambdafy:method) are compatible with +// Java 8's LambdaMetafactory. +// +import java.lang.invoke._ + +class C { + def test1: Unit = { + (x: String) => x.reverse + } + def test2: Unit = { + val capture1 = "capture1" + (x: String) => capture1 + " " + x.reverse + } + def test3: Unit = { + (x: String) => C.this + " " + x.reverse + } +} +trait T { + def test4: Unit = { + (x: String) => x.reverse + } +} + +// A functional interface. 
Function1 contains abstract methods that are filled in by mixin +trait Function1ish[A, B] { + def apply(a: A): B +} + +object Test { + def lambdaFactory[A, B](hostClass: Class[_], instantiatedParam: Class[A], instantiatedRet: Class[B], accessorName: String, + capturedParams: Array[(Class[_], AnyRef)] = Array()) = { + val caller = MethodHandles.lookup + val methodType = MethodType.methodType(classOf[AnyRef], Array[Class[_]](classOf[AnyRef])) + val instantiatedMethodType = MethodType.methodType(instantiatedRet, Array[Class[_]](instantiatedParam)) + val (capturedParamTypes, captured) = capturedParams.unzip + val targetMethodType = MethodType.methodType(instantiatedRet, capturedParamTypes :+ instantiatedParam) + val invokedType = MethodType.methodType(classOf[Function1ish[_, _]], capturedParamTypes) + val target = caller.findStatic(hostClass, accessorName, targetMethodType) + val site = LambdaMetafactory.metafactory(caller, "apply", invokedType, methodType, target, instantiatedMethodType) + site.getTarget.invokeWithArguments(captured: _*).asInstanceOf[Function1ish[A, B]] + } + def main(args: Array[String]) { + println(lambdaFactory(classOf[C], classOf[String], classOf[String], "accessor$1").apply("abc")) + println(lambdaFactory(classOf[C], classOf[String], classOf[String], "accessor$2", Array(classOf[String] -> "capture1")).apply("abc")) + println(lambdaFactory(classOf[C], classOf[String], classOf[String], "accessor$3", Array(classOf[C] -> new C)).apply("abc")) + println(lambdaFactory(Class.forName("T$class"), classOf[String], classOf[String], "accessor$4", Array(classOf[T] -> new T{})).apply("abc")) + } +} -- cgit v1.2.3 From bbd693ae44bab4be2be7930641f8ca2bf27c962c Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Mon, 2 Mar 2015 15:48:25 +1000 Subject: SI-7741 Tread more lightly during classfile parsing 1. Avoid forcing info of non-Scala interface members This avoids parsing the ostensibly malformed class definitions that correspond to a Groovy lambda defined in an interface. 2. Be more tolerant of absent inner classfiles Taking a leaf out of javac's book (see transcript below), we can use stub symbols for InnerClass entries that don't have corresponding class files on the compilation classpath. This will limit failures to code that directly refers to the inner class, rather than any code that simply refers to the enclosing class. It seems that groovyc has a habit of emitting incongruous bytecode in this regard. But this change seems generally useful. ``` % cat sandbox/{Test,Client}.java public class Test { public class Inner {} } public class Client { public Test.Inner x() { return null; } } % javac -d . sandbox/Test.java && javac -classpath . sandbox/Client.java % javac -d . sandbox/Test.java && rm 'Test$Inner.class' && javac -classpath . sandbox/Client.java sandbox/Client.java:2: error: cannot access Inner public Test.Inner x() { return null; } ^ class file for Test$Inner not found 1 error % cat sandbox/{Test,Client}.java public class Test { public class Inner {} } public class Client { public Test.NeverExisted x() { return null; } } % javac -classpath . sandbox/Client.java sandbox/Client.java:2: error: cannot find symbol public Test.NeverExisted x() { return null; } ^ symbol: class NeverExisted location: class Test 1 error % cat sandbox/{Test,Client}.java public class Test { public class Inner {} } public class Client { public Test x() { return null; } } topic/groovy-interop ~/code/scala2 javac -d . sandbox/Test.java && rm 'Test$Inner.class' && javac -classpath . 
sandbox/Client.java # allowed ``` --- .../nsc/symtab/classfile/ClassfileParser.scala | 22 +- .../scala/tools/nsc/transform/AddInterfaces.scala | 2 +- test/files/run/t7741a/GroovyInterface$1Dump.java | 222 +++++++++++++++++++++ test/files/run/t7741a/GroovyInterfaceDump.java | 51 +++++ test/files/run/t7741a/Test.scala | 47 +++++ test/files/run/t7741b.check | 3 + test/files/run/t7741b/HasInner.java | 3 + test/files/run/t7741b/Test.scala | 29 +++ 8 files changed, 370 insertions(+), 9 deletions(-) create mode 100644 test/files/run/t7741a/GroovyInterface$1Dump.java create mode 100644 test/files/run/t7741a/GroovyInterfaceDump.java create mode 100644 test/files/run/t7741a/Test.scala create mode 100644 test/files/run/t7741b.check create mode 100644 test/files/run/t7741b/HasInner.java create mode 100644 test/files/run/t7741b/Test.scala (limited to 'test') diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala index 4d08be3c24..602d18a651 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala @@ -15,6 +15,7 @@ import scala.collection.mutable.{ ListBuffer, ArrayBuffer } import scala.annotation.switch import scala.reflect.internal.{ JavaAccFlags } import scala.reflect.internal.pickling.{PickleBuffer, ByteCodecs} +import scala.reflect.io.NoAbstractFile import scala.tools.nsc.io.AbstractFile import scala.tools.nsc.util.ClassFileLookup @@ -1022,11 +1023,18 @@ abstract class ClassfileParser { val sflags = jflags.toScalaFlags val owner = ownerForFlags(jflags) val scope = getScope(jflags) - val innerClass = owner.newClass(name.toTypeName, NoPosition, sflags) setInfo completer - val innerModule = owner.newModule(name.toTermName, NoPosition, sflags) setInfo completer + def newStub(name: Name) = + owner.newStubSymbol(name, s"Class file for ${entry.externalName} not found").setFlag(JAVA) - innerModule.moduleClass setInfo loaders.moduleClassLoader - List(innerClass, innerModule.moduleClass) foreach (_.associatedFile = file) + val (innerClass, innerModule) = if (file == NoAbstractFile) { + (newStub(name.toTypeName), newStub(name.toTermName)) + } else { + val cls = owner.newClass(name.toTypeName, NoPosition, sflags) setInfo completer + val mod = owner.newModule(name.toTermName, NoPosition, sflags) setInfo completer + mod.moduleClass setInfo loaders.moduleClassLoader + List(cls, mod.moduleClass) foreach (_.associatedFile = file) + (cls, mod) + } scope enter innerClass scope enter innerModule @@ -1046,10 +1054,8 @@ abstract class ClassfileParser { for (entry <- innerClasses.entries) { // create a new class member for immediate inner classes if (entry.outerName == currentClass) { - val file = classFileLookup.findClassFile(entry.externalName.toString) getOrElse { - throw new AssertionError(s"Class file for ${entry.externalName} not found") - } - enterClassAndModule(entry, file) + val file = classFileLookup.findClassFile(entry.externalName.toString) + enterClassAndModule(entry, file.getOrElse(NoAbstractFile)) } } } diff --git a/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala b/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala index f786ffb8f3..443e07ef3f 100644 --- a/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala +++ b/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala @@ -207,7 +207,7 @@ abstract class AddInterfaces extends InfoTransform { self: Erasure => } def transformMixinInfo(tp: 
Type): Type = tp match { - case ClassInfoType(parents, decls, clazz) => + case ClassInfoType(parents, decls, clazz) if clazz.isPackageClass || !clazz.isJavaDefined => if (clazz.needsImplClass) implClass(clazz setFlag lateINTERFACE) // generate an impl class diff --git a/test/files/run/t7741a/GroovyInterface$1Dump.java b/test/files/run/t7741a/GroovyInterface$1Dump.java new file mode 100644 index 0000000000..0c0eab3f1b --- /dev/null +++ b/test/files/run/t7741a/GroovyInterface$1Dump.java @@ -0,0 +1,222 @@ +import java.util.*; +import scala.tools.asm.*; + +// generated with +// git clone alewando/scala_groovy_interop +// SCALA_HOME=... GROOVY_HOME=... ant +// cd /code/scala2 +// java -classpath build/asm/classes:/Users/jason/code/scala_groovy_interop/classes:/code/scala2/build/pack/lib/scala-library.jar:/usr/local/Cellar/groovy/2.4.1/libexec/embeddable/groovy-all-2.4.1.jar scala.tools.asm.util.ASMifier 'GroovyInterface$1' +// java -classpath build/asm/classes:/Users/jason/code/scala_groovy_interop/classes:/code/scala2/build/pack/lib/scala-library.jar:/usr/local/Cellar/groovy/2.4.1/libexec/embeddable/groovy-all-2.4.1.jar scala.tools.asm.util.ASMifier 'GroovyInterface$1' +public class GroovyInterface$1Dump implements Opcodes { + + public static byte[] dump () throws Exception { + + ClassWriter cw = new ClassWriter(0); + FieldVisitor fv; + MethodVisitor mv; + AnnotationVisitor av0; + + cw.visit(V1_5, ACC_SUPER + ACC_SYNTHETIC, "GroovyInterface$1", null, "java/lang/Object", new String[] {}); + + cw.visitInnerClass("GroovyInterface$1", "GroovyInterface", "1", ACC_SYNTHETIC); + + { + fv = cw.visitField(ACC_STATIC + ACC_SYNTHETIC, "$class$GroovyInterface", "Ljava/lang/Class;", null, null); + fv.visitEnd(); + } + { + fv = cw.visitField(ACC_PRIVATE + ACC_STATIC + ACC_SYNTHETIC, "$staticClassInfo", "Lorg/codehaus/groovy/reflection/ClassInfo;", null, null); + fv.visitEnd(); + } + { + fv = cw.visitField(ACC_PUBLIC + ACC_STATIC + ACC_TRANSIENT + ACC_SYNTHETIC, "__$stMC", "Z", null, null); + fv.visitEnd(); + } + { + fv = cw.visitField(ACC_PRIVATE + ACC_TRANSIENT + ACC_SYNTHETIC, "metaClass", "Lgroovy/lang/MetaClass;", null, null); + fv.visitEnd(); + } + { + fv = cw.visitField(ACC_PRIVATE + ACC_STATIC + ACC_SYNTHETIC, "$callSiteArray", "Ljava/lang/ref/SoftReference;", null, null); + fv.visitEnd(); + } + { + mv = cw.visitMethod(ACC_PUBLIC, "", "()V", null, null); + mv.visitCode(); + mv.visitVarInsn(ALOAD, 0); + mv.visitMethodInsn(INVOKESPECIAL, "java/lang/Object", "", "()V", false); + mv.visitMethodInsn(INVOKESTATIC, "GroovyInterface$1", "$getCallSiteArray", "()[Lorg/codehaus/groovy/runtime/callsite/CallSite;", false); + mv.visitVarInsn(ASTORE, 1); + mv.visitVarInsn(ALOAD, 0); + mv.visitMethodInsn(INVOKEVIRTUAL, "GroovyInterface$1", "$getStaticMetaClass", "()Lgroovy/lang/MetaClass;", false); + mv.visitVarInsn(ASTORE, 2); + mv.visitVarInsn(ALOAD, 2); + mv.visitVarInsn(ALOAD, 0); + mv.visitInsn(SWAP); + mv.visitFieldInsn(PUTFIELD, "GroovyInterface$1", "metaClass", "Lgroovy/lang/MetaClass;"); + mv.visitVarInsn(ALOAD, 2); + mv.visitInsn(POP); + mv.visitInsn(RETURN); + mv.visitMaxs(2, 3); + mv.visitEnd(); + } + { + mv = cw.visitMethod(ACC_PROTECTED + ACC_SYNTHETIC, "$getStaticMetaClass", "()Lgroovy/lang/MetaClass;", null, null); + mv.visitCode(); + mv.visitVarInsn(ALOAD, 0); + mv.visitMethodInsn(INVOKEVIRTUAL, "java/lang/Object", "getClass", "()Ljava/lang/Class;", false); + mv.visitLdcInsn(Type.getType("LGroovyInterface$1;")); + Label l0 = new Label(); + mv.visitJumpInsn(IF_ACMPEQ, l0); + mv.visitVarInsn(ALOAD, 
0); + mv.visitMethodInsn(INVOKESTATIC, "org/codehaus/groovy/runtime/ScriptBytecodeAdapter", "initMetaClass", "(Ljava/lang/Object;)Lgroovy/lang/MetaClass;", false); + mv.visitInsn(ARETURN); + mv.visitLabel(l0); + mv.visitFieldInsn(GETSTATIC, "GroovyInterface$1", "$staticClassInfo", "Lorg/codehaus/groovy/reflection/ClassInfo;"); + mv.visitVarInsn(ASTORE, 1); + mv.visitVarInsn(ALOAD, 1); + Label l1 = new Label(); + mv.visitJumpInsn(IFNONNULL, l1); + mv.visitVarInsn(ALOAD, 0); + mv.visitMethodInsn(INVOKEVIRTUAL, "java/lang/Object", "getClass", "()Ljava/lang/Class;", false); + mv.visitMethodInsn(INVOKESTATIC, "org/codehaus/groovy/reflection/ClassInfo", "getClassInfo", "(Ljava/lang/Class;)Lorg/codehaus/groovy/reflection/ClassInfo;", false); + mv.visitInsn(DUP); + mv.visitVarInsn(ASTORE, 1); + mv.visitFieldInsn(PUTSTATIC, "GroovyInterface$1", "$staticClassInfo", "Lorg/codehaus/groovy/reflection/ClassInfo;"); + mv.visitLabel(l1); + mv.visitVarInsn(ALOAD, 1); + mv.visitMethodInsn(INVOKEVIRTUAL, "org/codehaus/groovy/reflection/ClassInfo", "getMetaClass", "()Lgroovy/lang/MetaClass;", false); + mv.visitInsn(ARETURN); + mv.visitMaxs(2, 2); + mv.visitEnd(); + } + { + mv = cw.visitMethod(ACC_PUBLIC + ACC_SYNTHETIC, "getMetaClass", "()Lgroovy/lang/MetaClass;", null, null); + mv.visitCode(); + mv.visitVarInsn(ALOAD, 0); + mv.visitFieldInsn(GETFIELD, "GroovyInterface$1", "metaClass", "Lgroovy/lang/MetaClass;"); + mv.visitInsn(DUP); + Label l0 = new Label(); + mv.visitJumpInsn(IFNULL, l0); + mv.visitInsn(ARETURN); + mv.visitLabel(l0); + mv.visitInsn(POP); + mv.visitVarInsn(ALOAD, 0); + mv.visitInsn(DUP); + mv.visitMethodInsn(INVOKEVIRTUAL, "GroovyInterface$1", "$getStaticMetaClass", "()Lgroovy/lang/MetaClass;", false); + mv.visitFieldInsn(PUTFIELD, "GroovyInterface$1", "metaClass", "Lgroovy/lang/MetaClass;"); + mv.visitVarInsn(ALOAD, 0); + mv.visitFieldInsn(GETFIELD, "GroovyInterface$1", "metaClass", "Lgroovy/lang/MetaClass;"); + mv.visitInsn(ARETURN); + mv.visitMaxs(2, 1); + mv.visitEnd(); + } + { + mv = cw.visitMethod(ACC_PUBLIC + ACC_SYNTHETIC, "setMetaClass", "(Lgroovy/lang/MetaClass;)V", null, null); + mv.visitCode(); + mv.visitVarInsn(ALOAD, 0); + mv.visitVarInsn(ALOAD, 1); + mv.visitFieldInsn(PUTFIELD, "GroovyInterface$1", "metaClass", "Lgroovy/lang/MetaClass;"); + mv.visitInsn(RETURN); + mv.visitMaxs(2, 2); + mv.visitEnd(); + } + { + mv = cw.visitMethod(ACC_PUBLIC + ACC_SYNTHETIC, "invokeMethod", "(Ljava/lang/String;Ljava/lang/Object;)Ljava/lang/Object;", null, null); + mv.visitCode(); + mv.visitVarInsn(ALOAD, 0); + mv.visitMethodInsn(INVOKEVIRTUAL, "GroovyInterface$1", "getMetaClass", "()Lgroovy/lang/MetaClass;", false); + mv.visitVarInsn(ALOAD, 0); + mv.visitVarInsn(ALOAD, 1); + mv.visitVarInsn(ALOAD, 2); + mv.visitMethodInsn(INVOKEINTERFACE, "groovy/lang/MetaClass", "invokeMethod", "(Ljava/lang/Object;Ljava/lang/String;Ljava/lang/Object;)Ljava/lang/Object;", true); + mv.visitInsn(ARETURN); + mv.visitMaxs(4, 3); + mv.visitEnd(); + } + { + mv = cw.visitMethod(ACC_PUBLIC + ACC_SYNTHETIC, "getProperty", "(Ljava/lang/String;)Ljava/lang/Object;", null, null); + mv.visitCode(); + mv.visitVarInsn(ALOAD, 0); + mv.visitMethodInsn(INVOKEVIRTUAL, "GroovyInterface$1", "getMetaClass", "()Lgroovy/lang/MetaClass;", false); + mv.visitVarInsn(ALOAD, 0); + mv.visitVarInsn(ALOAD, 1); + mv.visitMethodInsn(INVOKEINTERFACE, "groovy/lang/MetaClass", "getProperty", "(Ljava/lang/Object;Ljava/lang/String;)Ljava/lang/Object;", true); + mv.visitInsn(ARETURN); + mv.visitMaxs(3, 2); + mv.visitEnd(); + } + { + mv = 
cw.visitMethod(ACC_PUBLIC + ACC_SYNTHETIC, "setProperty", "(Ljava/lang/String;Ljava/lang/Object;)V", null, null); + mv.visitCode(); + mv.visitVarInsn(ALOAD, 0); + mv.visitMethodInsn(INVOKEVIRTUAL, "GroovyInterface$1", "getMetaClass", "()Lgroovy/lang/MetaClass;", false); + mv.visitVarInsn(ALOAD, 0); + mv.visitVarInsn(ALOAD, 1); + mv.visitVarInsn(ALOAD, 2); + mv.visitMethodInsn(INVOKEINTERFACE, "groovy/lang/MetaClass", "setProperty", "(Ljava/lang/Object;Ljava/lang/String;Ljava/lang/Object;)V", true); + mv.visitInsn(RETURN); + mv.visitMaxs(4, 3); + mv.visitEnd(); + } + { + mv = cw.visitMethod(ACC_STATIC, "", "()V", null, null); + mv.visitCode(); + mv.visitLdcInsn(Type.getType("LGroovyInterface;")); + mv.visitVarInsn(ASTORE, 0); + mv.visitVarInsn(ALOAD, 0); + mv.visitFieldInsn(PUTSTATIC, "GroovyInterface$1", "$class$GroovyInterface", "Ljava/lang/Class;"); + mv.visitVarInsn(ALOAD, 0); + mv.visitInsn(POP); + mv.visitInsn(RETURN); + mv.visitMaxs(1, 1); + mv.visitEnd(); + } + { + mv = cw.visitMethod(ACC_PRIVATE + ACC_STATIC + ACC_SYNTHETIC, "$createCallSiteArray", "()Lorg/codehaus/groovy/runtime/callsite/CallSiteArray;", null, null); + mv.visitCode(); + mv.visitLdcInsn(new Integer(0)); + mv.visitTypeInsn(ANEWARRAY, "java/lang/String"); + mv.visitVarInsn(ASTORE, 0); + mv.visitTypeInsn(NEW, "org/codehaus/groovy/runtime/callsite/CallSiteArray"); + mv.visitInsn(DUP); + mv.visitLdcInsn(Type.getType("LGroovyInterface$1;")); + mv.visitVarInsn(ALOAD, 0); + mv.visitMethodInsn(INVOKESPECIAL, "org/codehaus/groovy/runtime/callsite/CallSiteArray", "", "(Ljava/lang/Class;[Ljava/lang/String;)V", false); + mv.visitInsn(ARETURN); + mv.visitMaxs(4, 1); + mv.visitEnd(); + } + { + mv = cw.visitMethod(ACC_PUBLIC + ACC_STATIC + ACC_SYNTHETIC, "$getCallSiteArray", "()[Lorg/codehaus/groovy/runtime/callsite/CallSite;", null, null); + mv.visitCode(); + mv.visitFieldInsn(GETSTATIC, "GroovyInterface$1", "$callSiteArray", "Ljava/lang/ref/SoftReference;"); + Label l0 = new Label(); + mv.visitJumpInsn(IFNULL, l0); + mv.visitFieldInsn(GETSTATIC, "GroovyInterface$1", "$callSiteArray", "Ljava/lang/ref/SoftReference;"); + mv.visitMethodInsn(INVOKEVIRTUAL, "java/lang/ref/SoftReference", "get", "()Ljava/lang/Object;", false); + mv.visitTypeInsn(CHECKCAST, "org/codehaus/groovy/runtime/callsite/CallSiteArray"); + mv.visitInsn(DUP); + mv.visitVarInsn(ASTORE, 0); + Label l1 = new Label(); + mv.visitJumpInsn(IFNONNULL, l1); + mv.visitLabel(l0); + mv.visitMethodInsn(INVOKESTATIC, "GroovyInterface$1", "$createCallSiteArray", "()Lorg/codehaus/groovy/runtime/callsite/CallSiteArray;", false); + mv.visitVarInsn(ASTORE, 0); + mv.visitTypeInsn(NEW, "java/lang/ref/SoftReference"); + mv.visitInsn(DUP); + mv.visitVarInsn(ALOAD, 0); + mv.visitMethodInsn(INVOKESPECIAL, "java/lang/ref/SoftReference", "", "(Ljava/lang/Object;)V", false); + mv.visitFieldInsn(PUTSTATIC, "GroovyInterface$1", "$callSiteArray", "Ljava/lang/ref/SoftReference;"); + mv.visitLabel(l1); + mv.visitVarInsn(ALOAD, 0); + mv.visitFieldInsn(GETFIELD, "org/codehaus/groovy/runtime/callsite/CallSiteArray", "array", "[Lorg/codehaus/groovy/runtime/callsite/CallSite;"); + mv.visitInsn(ARETURN); + mv.visitMaxs(3, 1); + mv.visitEnd(); + } + cw.visitEnd(); + + return cw.toByteArray(); + } +} diff --git a/test/files/run/t7741a/GroovyInterfaceDump.java b/test/files/run/t7741a/GroovyInterfaceDump.java new file mode 100644 index 0000000000..87c09e272f --- /dev/null +++ b/test/files/run/t7741a/GroovyInterfaceDump.java @@ -0,0 +1,51 @@ +import java.util.*; +import scala.tools.asm.*; + +// generated 
with +// git clone alewando/scala_groovy_interop +// SCALA_HOME=... GROOVY_HOME=... ant +// cd /code/scala2 +// java -classpath build/asm/classes:/Users/jason/code/scala_groovy_interop/classes:/code/scala2/build/pack/lib/scala-library.jar:/usr/local/Cellar/groovy/2.4.1/libexec/embeddable/groovy-all-2.4.1.jar scala.tools.asm.util.ASMifier 'GroovyInterface$1' +// java -classpath build/asm/classes:/Users/jason/code/scala_groovy_interop/classes:/code/scala2/build/pack/lib/scala-library.jar:/usr/local/Cellar/groovy/2.4.1/libexec/embeddable/groovy-all-2.4.1.jar scala.tools.asm.util.ASMifier 'GroovyInterface$1' +public class GroovyInterfaceDump implements Opcodes { + + public static byte[] dump () throws Exception { + + ClassWriter cw = new ClassWriter(0); + FieldVisitor fv; + MethodVisitor mv; + AnnotationVisitor av0; + + cw.visit(V1_5, ACC_PUBLIC + ACC_ABSTRACT + ACC_INTERFACE, "GroovyInterface", null, "java/lang/Object", null); + + cw.visitInnerClass("GroovyInterface$1", "GroovyInterface", "1", ACC_SYNTHETIC); + + cw.visitInnerClass("GroovyInterface$__clinit__closure1", null, null, 0); + + { + fv = cw.visitField(ACC_PUBLIC + ACC_FINAL + ACC_STATIC, "closure", "Ljava/lang/Object;", null, null); + fv.visitEnd(); + } + { + mv = cw.visitMethod(ACC_STATIC, "", "()V", null, null); + mv.visitCode(); + mv.visitTypeInsn(NEW, "GroovyInterface$__clinit__closure1"); + mv.visitInsn(DUP); + mv.visitFieldInsn(GETSTATIC, "GroovyInterface$1", "$class$GroovyInterface", "Ljava/lang/Class;"); + mv.visitFieldInsn(GETSTATIC, "GroovyInterface$1", "$class$GroovyInterface", "Ljava/lang/Class;"); + mv.visitMethodInsn(INVOKESPECIAL, "GroovyInterface$__clinit__closure1", "", "(Ljava/lang/Object;Ljava/lang/Object;)V", false); + mv.visitVarInsn(ASTORE, 0); + mv.visitVarInsn(ALOAD, 0); + mv.visitFieldInsn(PUTSTATIC, "GroovyInterface", "closure", "Ljava/lang/Object;"); + mv.visitVarInsn(ALOAD, 0); + mv.visitInsn(POP); + mv.visitInsn(RETURN); + mv.visitMaxs(4, 1); + mv.visitEnd(); + } + cw.visitEnd(); + + return cw.toByteArray(); + } +} + diff --git a/test/files/run/t7741a/Test.scala b/test/files/run/t7741a/Test.scala new file mode 100644 index 0000000000..a75cb6c9eb --- /dev/null +++ b/test/files/run/t7741a/Test.scala @@ -0,0 +1,47 @@ +import java.io.{ByteArrayInputStream, FileOutputStream, BufferedOutputStream} +import java.util + +import java.io.File + +import scala.tools.partest.DirectTest + +object Test extends DirectTest { + + def code = "" + + override def show(): Unit = { + + val class1: Array[Byte] = GroovyInterfaceDump.dump() + val class2: Array[Byte] = GroovyInterface$1Dump.dump() + def writeFile(contents: Array[Byte], f: java.io.File): Unit = { + val out = new BufferedOutputStream(new FileOutputStream(f)) + try { + out.write(contents) + } finally out.close() + } + + val outdir = testOutput.jfile + + // interface GroovyInterface { + // + // // This is the line that causes scalac to choke. + // // It results in a GroovyInterface$1 class, which is a non-static inner class but it's constructor does not + // // include the implicit parameter that is the immediate enclosing instance. 
+ // // See http://jira.codehaus.org/browse/GROOVY-7312 + // // + // // Scalac error: + // // [scalac] error: error while loading 1, class file '..../scala_groovy_interop/classes/com/example/groovy/GroovyInterface$1.class' is broken + // // [scalac] (class java.util.NoSuchElementException/head of empty list) + // final static def closure = { x -> "banana" } + // + // } + writeFile(GroovyInterfaceDump.dump(), new File(outdir, "GroovyInterface.class")) + writeFile(GroovyInterface$1Dump.dump(), new File(outdir, "GroovyInterface$1.class")) + compileCode("object Test { def foo(g: GroovyInterface) = g.toString }") + } + + def compileCode(code: String) = { + val classpath = List(sys.props("partest.lib"), testOutput.path) mkString sys.props("path.separator") + compileString(newCompiler("-cp", classpath, "-d", testOutput.path))(code) + } +} diff --git a/test/files/run/t7741b.check b/test/files/run/t7741b.check new file mode 100644 index 0000000000..a19e54aa3a --- /dev/null +++ b/test/files/run/t7741b.check @@ -0,0 +1,3 @@ +1. Don't refer to Inner +2. Refering to Inner +pos: NoPosition Class file for HasInner$Inner not found ERROR diff --git a/test/files/run/t7741b/HasInner.java b/test/files/run/t7741b/HasInner.java new file mode 100644 index 0000000000..a1d0d0d81a --- /dev/null +++ b/test/files/run/t7741b/HasInner.java @@ -0,0 +1,3 @@ +class HasInner { + class Inner {} +} diff --git a/test/files/run/t7741b/Test.scala b/test/files/run/t7741b/Test.scala new file mode 100644 index 0000000000..569ae6b679 --- /dev/null +++ b/test/files/run/t7741b/Test.scala @@ -0,0 +1,29 @@ +import java.io.File + +import scala.tools.partest.StoreReporterDirectTest + +object Test extends StoreReporterDirectTest { + + def code = "" + + override def show(): Unit = { + deleteClass("HasInner$Inner") + println("1. Don't refer to Inner") + compileCode("class Test { def test(x: HasInner) = x }") + assert(filteredInfos.isEmpty, filteredInfos) + println("2. Refering to Inner") + compileCode("class Test { def test(x: HasInner#Inner) = x }") + println(filteredInfos.mkString("\n")) + } + + def deleteClass(name: String) { + val classFile = new File(testOutput.path, name + ".class") + assert(classFile.exists) + assert(classFile.delete()) + } + + def compileCode(code: String) = { + val classpath = List(sys.props("partest.lib"), testOutput.path) mkString sys.props("path.separator") + compileString(newCompiler("-cp", classpath, "-d", testOutput.path))(code) + } +} -- cgit v1.2.3 From e9bfa33c1eef822eb9d3d0f471f4bbbe390e1e65 Mon Sep 17 00:00:00 2001 From: Antoine Gourlay Date: Wed, 25 Mar 2015 19:26:36 +0100 Subject: SI-9239 fix java generic signature when traits extend classes MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Given the following code: class C1[T] trait T1[T] extends C1[T] class C2[T] extends T1[T] The generic signature of C2 changed after ced3ca8 from "C1[T], T1[T]" to "Object, T1[T]", even though C1 was still marked as a superclass in the bytecode... C1 was removed because a subclass (T1) appeared later on, and it was possible for a trait to cause a class to be evicted. It turns out that if a class A appears in "parents", it *always* has to stay in the signature: if a trait later in the list inherits from some class, that class has to be a superclass of A (per SLS §5.1), or A itself, so in any case, the most specific superclass is A, so A should stay in the signature. 
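To see the effect from the Scala side, here is a hedged sketch (not part of this patch): it assumes the `C1`/`T1`/`C2` definitions above are compiled and on the classpath, and the object name is invented. Core reflection reads the superclass back out of the generic signature:

```scala
object ShowSuperSignature {
  def main(args: Array[String]): Unit = {
    // With the fix the Signature attribute keeps C1, so this should print something like "C1<T>";
    // with the regression it reported "class java.lang.Object" instead.
    println(classOf[C2[_]].getGenericSuperclass)
  }
}
```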
Thus minimizeParents now only allows traits/interfaces to be removed from the list (the refactoring in 7552739, moving from mixinClasses to parents, makes this much easier to fix). This didn't happen for non-generic signatures because there are two separate fields in the classfile for this (one for the superclass, one for interfaces), so interfaces were processed on their own. Thus non-parametrized classes were not affected. Anything that used erased types at runtime was also fine (like isInstanceOf checks), and anything that used ScalaSignature was also unaffected. Maybe that's why so few things broke... See the test for how this affects Java (hint: badly). This changes very few things when building scala itself, and the changes seem sensible: --- sandbox/lib/scala-library.jar#!scala/runtime/NonLocalReturnControl.class +++ build/pack/lib/scala-library.jar#!scala/runtime/NonLocalReturnControl.class - Generic Signature: Ljava/lang/Object;Lscala/util/control/ControlThrowable; + Generic Signature: Ljava/lang/Throwable;Lscala/util/control/ControlThrowable; --- sandbox/lib/scala-library.jar#!scala/collection/mutable/QueueProxy$$anon$1.class +++ build/pack/lib/scala-library.jar#!scala/collection/mutable/QueueProxy$$anon$1.class - Generic Signature: Ljava/lang/Object;Lscala/collection/mutable/QueueProxy; + Generic Signature: Lscala/collection/mutable/Queue;Lscala/collection/mutable/QueueProxy; --- sandbox/lib/scala-library.jar#!scala/collection/mutable/Iterable$.class +++ build/pack/lib/scala-library.jar#!scala/collection/mutable/Iterable$.class - Generic Signature: Ljava/lang/Object;Lscala/collection/generic/TraversableFactory; + Generic Signature: Lscala/collection/generic/GenTraversableFactory;Lscala/collection/generic/TraversableFactory; --- sandbox/lib/scala-library.jar#!scala/collection/immutable/Traversable$.class +++ build/pack/lib/scala-library.jar#!scala/collection/immutable/Traversable$.class - Generic Signature: Ljava/lang/Object;Lscala/collection/generic/TraversableFactory; + Generic Signature: Lscala/collection/generic/GenTraversableFactory;Lscala/collection/generic/TraversableFactory; --- sandbox/lib/scala-library.jar#!scala/collection/immutable/Iterable$.class +++ build/pack/lib/scala-library.jar#!scala/collection/immutable/Iterable$.class - Generic Signature: Ljava/lang/Object;Lscala/collection/generic/TraversableFactory; + Generic Signature: Lscala/collection/generic/GenTraversableFactory;Lscala/collection/generic/TraversableFactory; --- sandbox/lib/scala-library.jar#!scala/collection/mutable/Traversable$.class +++ build/pack/lib/scala-library.jar#!scala/collection/mutable/Traversable$.class - Generic Signature: Ljava/lang/Object;Lscala/collection/generic/TraversableFactory; + Generic Signature: Lscala/collection/generic/GenTraversableFactory;Lscala/collection/generic/TraversableFactory; --- sandbox/lib/scala-library.jar#!scala/throws.class +++ build/pack/lib/scala-library.jar#!scala/throws.class - Generic Signature: Ljava/lang/Object;Lscala/annotation/StaticAnnotation; + Generic Signature: Lscala/annotation/Annotation;Lscala/annotation/StaticAnnotation; --- sandbox/lib/scala-library.jar#!scala/collection/mutable/StackProxy$$anon$1.class +++ build/pack/lib/scala-library.jar#!scala/collection/mutable/StackProxy$$anon$1.class - Generic Signature: Ljava/lang/Object;Lscala/collection/mutable/StackProxy; + Generic Signature: Lscala/collection/mutable/Stack;Lscala/collection/mutable/StackProxy; --- sandbox/lib/scala-library.jar#!scala/collection/convert/Wrappers$IterableWrapper.class 
+++ build/pack/lib/scala-library.jar#!scala/collection/convert/Wrappers$IterableWrapper.class - Generic Signature: Ljava/lang/Object;Lscala/collection/convert/Wrappers$IterableWrapperTrait;Lscala/Product;Lscala/Serializable; + Generic Signature: Ljava/util/AbstractCollection;Lscala/collection/convert/Wrappers$IterableWrapperTrait;Lscala/Product;Lscala/Serializable; --- sandbox/lib/scala-library.jar#!scala/collection/Iterable$.class +++ build/pack/lib/scala-library.jar#!scala/collection/Iterable$.class - Generic Signature: Ljava/lang/Object;Lscala/collection/generic/TraversableFactory; + Generic Signature: Lscala/collection/generic/GenTraversableFactory;Lscala/collection/generic/TraversableFactory; --- sandbox/lib/scala-library.jar#!scala/collection/Traversable$.class +++ build/pack/lib/scala-library.jar#!scala/collection/Traversable$.class - Generic Signature: Ljava/lang/Object;Lscala/collection/generic/TraversableFactory; + Generic Signature: Lscala/collection/generic/GenTraversableFactory;Lscala/collection/generic/TraversableFactory; --- sandbox/lib/scala-library.jar#!scala/collection/parallel/AdaptiveWorkStealingForkJoinTasks$WrappedTask.class +++ build/pack/lib/scala-library.jar#!scala/collection/parallel/AdaptiveWorkStealingForkJoinTasks$WrappedTask.class - Generic Signature: Ljava/lang/Object;Lscala/collection/parallel/ForkJoinTasks$WrappedTask;Lscala/collection/parallel/AdaptiveWorkStealingTasks$WrappedTask; + Generic Signature: Lscala/concurrent/forkjoin/RecursiveAction;Lscala/collection/parallel/ForkJoinTasks$WrappedTask;Lscala/collection/parallel/AdaptiveWorkStealingTasks$WrappedTask; --- src/compiler/scala/tools/nsc/transform/Erasure.scala | 10 ++++++---- test/files/pos/t9239/Declaration.scala | 3 +++ test/files/pos/t9239/Usage.java | 15 +++++++++++++++ 3 files changed, 24 insertions(+), 4 deletions(-) create mode 100644 test/files/pos/t9239/Declaration.scala create mode 100644 test/files/pos/t9239/Usage.java (limited to 'test') diff --git a/src/compiler/scala/tools/nsc/transform/Erasure.scala b/src/compiler/scala/tools/nsc/transform/Erasure.scala index 5c72bb3258..438d39bc86 100644 --- a/src/compiler/scala/tools/nsc/transform/Erasure.scala +++ b/src/compiler/scala/tools/nsc/transform/Erasure.scala @@ -188,14 +188,16 @@ abstract class Erasure extends AddInterfaces /* Drop redundant types (ones which are implemented by some other parent) from the immediate parents. * This is important on Android because there is otherwise an interface explosion. 
*/ - def minimizeParents(parents: List[Type]): List[Type] = { - var rest = parents - var leaves = collection.mutable.ListBuffer.empty[Type] + def minimizeParents(parents: List[Type]): List[Type] = if (parents.isEmpty) parents else { + def isInterfaceOrTrait(sym: Symbol) = sym.isInterface || sym.isTrait + + var rest = parents.tail + var leaves = collection.mutable.ListBuffer.empty[Type] += parents.head while(rest.nonEmpty) { val candidate = rest.head val nonLeaf = leaves exists { t => t.typeSymbol isSubClass candidate.typeSymbol } if(!nonLeaf) { - leaves = leaves filterNot { t => candidate.typeSymbol isSubClass t.typeSymbol } + leaves = leaves filterNot { t => isInterfaceOrTrait(t.typeSymbol) && (candidate.typeSymbol isSubClass t.typeSymbol) } leaves += candidate } rest = rest.tail diff --git a/test/files/pos/t9239/Declaration.scala b/test/files/pos/t9239/Declaration.scala new file mode 100644 index 0000000000..452dcc1e77 --- /dev/null +++ b/test/files/pos/t9239/Declaration.scala @@ -0,0 +1,3 @@ +class Foo[A] +trait Bar[A] extends Foo[A] +class Baz[A] extends Bar[A] diff --git a/test/files/pos/t9239/Usage.java b/test/files/pos/t9239/Usage.java new file mode 100644 index 0000000000..d1e3fb0c3e --- /dev/null +++ b/test/files/pos/t9239/Usage.java @@ -0,0 +1,15 @@ +/** + * Used to fail with: + * + * Usage.java:5: error: incompatible types: Baz cannot be converted to Foo + * foo(f); + * ^ + */ +public class Usage { + public Usage() { + Baz f = null; + foo(f); + } + + public void foo(Foo f) { }; +} -- cgit v1.2.3 From d44a86f432a7f9ca250b014acdeab02ac9f2c304 Mon Sep 17 00:00:00 2001 From: Gerard Basler Date: Thu, 12 Mar 2015 01:47:47 +0100 Subject: Patmat: efficient reasoning about mutual exclusion Faster analysis of wide (but relatively flat) class hierarchies by using a more efficient encoding of mutual exclusion. The old CNF encoding for mutually exclusive symbols of a domain added a quadratic number of clauses to the formula to satisfy. E.g. if a domain has the symbols `a`, `b` and `c` then the clauses ``` !a \/ !b /\ !a \/ !c /\ !b \/ !c ``` were added. The first line prevents `a` and `b` from both being true at the same time, etc. There's a simple, more efficient encoding that can be used instead: consider a comparator circuit in hardware that checks that, out of `n` signals, at most 1 is true. Such a circuit can be built in the form of a sequential counter and thus requires only 3n-4 additional clauses [1]. A comprehensible comparison of different encodings can be found in [2]. 
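As a concrete illustration (an editorial example, not part of the commit message): for a domain with four symbols `x1 ... x4`, the sequential counter introduces auxiliary signals `s1 ... s3` ("a true symbol has already been seen") and emits 3*4-4 = 8 clauses, matching the construction in the Solving.scala change below:

```
(!x1 \/ s1)                                /\
(!x2 \/ s2) /\ (!s1 \/ s2) /\ (!x2 \/ !s1) /\
(!x3 \/ s3) /\ (!s2 \/ s3) /\ (!x3 \/ !s2) /\
(!x4 \/ !s3)
```

For a domain this small, the pairwise encoding above needs only 4*3/2 = 6 clauses, which is why the implementation keeps the pairwise form for small domains and switches to the counter only once it pays off (n > 5).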
[1]: http://www.carstensinz.de/papers/CP-2005.pdf [2]: http://www.wv.inf.tu-dresden.de/Publications/2013/report-13-04.pdf --- .../scala/tools/nsc/transform/patmat/Logic.scala | 41 +- .../tools/nsc/transform/patmat/MatchAnalysis.scala | 86 ++- .../scala/tools/nsc/transform/patmat/Solving.scala | 79 +- test/files/neg/patmatexhaust-huge.check | 7 + test/files/neg/patmatexhaust-huge.flags | 1 + test/files/neg/patmatexhaust-huge.scala | 806 +++++++++++++++++++++ .../tools/nsc/transform/patmat/SolvingTest.scala | 58 +- 7 files changed, 1034 insertions(+), 44 deletions(-) create mode 100644 test/files/neg/patmatexhaust-huge.check create mode 100644 test/files/neg/patmatexhaust-huge.flags create mode 100644 test/files/neg/patmatexhaust-huge.scala (limited to 'test') diff --git a/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala b/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala index 4ea569c8e6..cef22d7d6b 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala @@ -10,6 +10,7 @@ package tools.nsc.transform.patmat import scala.language.postfixOps import scala.collection.mutable import scala.reflect.internal.util.{NoPosition, Position, Statistics, HashSet} +import scala.tools.nsc.Global trait Logic extends Debugging { import PatternMatchingStats._ @@ -90,6 +91,8 @@ trait Logic extends Debugging { // compute the domain and return it (call registerNull first!) def domainSyms: Option[Set[Sym]] + def groupedDomains: List[Set[Sym]] + // the symbol for this variable being equal to its statically known type // (only available if registerEquality has been called for that type before) def symForStaticTp: Option[Sym] @@ -118,6 +121,9 @@ trait Logic extends Debugging { final case class Not(a: Prop) extends Prop + // mutually exclusive (i.e., not more than one symbol is set) + final case class AtMostOne(ops: List[Sym]) extends Prop + case object True extends Prop case object False extends Prop @@ -192,7 +198,8 @@ trait Logic extends Debugging { case Not(negated) => negationNormalFormNot(negated) case True | False - | (_: Sym) => p + | (_: Sym) + | (_: AtMostOne) => p } def simplifyProp(p: Prop): Prop = p match { @@ -252,6 +259,7 @@ trait Logic extends Debugging { case Not(a) => apply(a) case Eq(a, b) => applyVar(a); applyConst(b) case s: Sym => applySymbol(s) + case AtMostOne(ops) => ops.foreach(applySymbol) case _ => } def applyVar(x: Var): Unit = {} @@ -374,7 +382,23 @@ trait Logic extends Debugging { // when sym is true, what must hold... implied foreach (impliedSym => addAxiom(Or(Not(sym), impliedSym))) // ... and what must not? 
- excluded foreach (excludedSym => addAxiom(Or(Not(sym), Not(excludedSym)))) + excluded foreach { + excludedSym => + val related = Set(sym, excludedSym) + val exclusive = v.groupedDomains.exists { + domain => related subsetOf domain.toSet + } + + // TODO: populate `v.exclusiveDomains` with `Set`s from the start, and optimize to: + // val exclusive = v.exclusiveDomains.exists { inDomain => inDomain(sym) && inDomain(excludedSym) } + if (!exclusive) + addAxiom(Or(Not(sym), Not(excludedSym))) + } + } + + // all symbols in a domain are mutually exclusive + v.groupedDomains.foreach { + syms => if (syms.size > 1) addAxiom(AtMostOne(syms.toList)) } } @@ -449,7 +473,9 @@ trait ScalaLogic extends Interface with Logic with TreeAndTypeAnalysis { // once we go to run-time checks (on Const's), convert them to checkable types // TODO: there seems to be bug for singleton domains (variable does not show up in model) lazy val domain: Option[Set[Const]] = { - val subConsts = enumerateSubtypes(staticTp).map{ tps => + val subConsts = + enumerateSubtypes(staticTp, grouped = false) + .headOption.map { tps => tps.toSet[Type].map{ tp => val domainC = TypeConst(tp) registerEquality(domainC) @@ -467,6 +493,15 @@ trait ScalaLogic extends Interface with Logic with TreeAndTypeAnalysis { observed(); allConsts } + lazy val groupedDomains: List[Set[Sym]] = { + val subtypes = enumerateSubtypes(staticTp, grouped = true) + subtypes.map { + subTypes => + val syms = subTypes.flatMap(tpe => symForEqualsTo.get(TypeConst(tpe))).toSet + if (mayBeNull) syms + symForEqualsTo(NullConst) else syms + }.filter(_.nonEmpty) + } + // populate equalitySyms // don't care about the result, but want only one fresh symbol per distinct constant c def registerEquality(c: Const): Unit = {ensureCanModify(); symForEqualsTo getOrElseUpdate(c, Sym(this, c))} diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala index cecb5c37be..a11906ace1 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala @@ -95,58 +95,84 @@ trait TreeAndTypeAnalysis extends Debugging { val typer: Typer // TODO: domain of other feasibly enumerable built-in types (char?) - def enumerateSubtypes(tp: Type): Option[List[Type]] = + def enumerateSubtypes(tp: Type, grouped: Boolean): List[List[Type]] = tp.typeSymbol match { // TODO case _ if tp.isTupleType => // recurse into component types? - case UnitClass => - Some(List(UnitTpe)) - case BooleanClass => - Some(ConstantTrue :: ConstantFalse :: Nil) + case UnitClass if !grouped => + List(List(UnitTpe)) + case BooleanClass if !grouped => + List(ConstantTrue :: ConstantFalse :: Nil) // TODO case _ if tp.isTupleType => // recurse into component types - case modSym: ModuleClassSymbol => - Some(List(tp)) + case modSym: ModuleClassSymbol if !grouped => + List(List(tp)) case sym: RefinementClassSymbol => - val parentSubtypes: List[Option[List[Type]]] = tp.parents.map(parent => enumerateSubtypes(parent)) - if (parentSubtypes exists (_.isDefined)) + val parentSubtypes = tp.parents.flatMap(parent => enumerateSubtypes(parent, grouped)) + if (parentSubtypes exists (_.nonEmpty)) { // If any of the parents is enumerable, then the refinement type is enumerable. - Some( - // We must only include subtypes of the parents that conform to `tp`. - // See neg/virtpatmat_exhaust_compound.scala for an example. 
- parentSubtypes flatMap (_.getOrElse(Nil)) filter (_ <:< tp) - ) - else None + // We must only include subtypes of the parents that conform to `tp`. + // See neg/virtpatmat_exhaust_compound.scala for an example. + parentSubtypes map (_.filter(_ <:< tp)) + } + else Nil // make sure it's not a primitive, else (5: Byte) match { case 5 => ... } sees no Byte case sym if sym.isSealed => - val subclasses = debug.patmatResult(s"enum $sym sealed, subclasses")( - // symbols which are both sealed and abstract need not be covered themselves, because - // all of their children must be and they cannot otherwise be created. - sym.sealedDescendants.toList - sortBy (_.sealedSortName) - filterNot (x => x.isSealed && x.isAbstractClass && !isPrimitiveValueClass(x)) - ) val tpApprox = typer.infer.approximateAbstracts(tp) val pre = tpApprox.prefix - Some(debug.patmatResult(s"enum sealed tp=$tp, tpApprox=$tpApprox as") { - // valid subtypes are turned into checkable types, as we are entering the realm of the dynamic - subclasses flatMap { sym => + def filterChildren(children: List[Symbol]): List[Type] = { + children flatMap { sym => // have to filter out children which cannot match: see ticket #3683 for an example // compare to the fully known type `tp` (modulo abstract types), // so that we can rule out stuff like: sealed trait X[T]; class XInt extends X[Int] --> XInt not valid when enumerating X[String] // however, must approximate abstract types in - val memberType = nestedMemberType(sym, pre, tpApprox.typeSymbol.owner) - val subTp = appliedType(memberType, sym.typeParams.map(_ => WildcardType)) + val memberType = nestedMemberType(sym, pre, tpApprox.typeSymbol.owner) + val subTp = appliedType(memberType, sym.typeParams.map(_ => WildcardType)) val subTpApprox = typer.infer.approximateAbstracts(subTp) // TODO: needed? // debug.patmat("subtp"+(subTpApprox <:< tpApprox, subTpApprox, tpApprox)) if (subTpApprox <:< tpApprox) Some(checkableType(subTp)) else None } - }) + } + + if(grouped) { + def enumerateChildren(sym: Symbol) = { + sym.children.toList + .sortBy(_.sealedSortName) + .filterNot(x => x.isSealed && x.isAbstractClass && !isPrimitiveValueClass(x)) + } + + // enumerate only direct subclasses, + // subclasses of subclasses are enumerated in the next iteration + // and added to a new group + def groupChildren(wl: List[Symbol], + acc: List[List[Type]]): List[List[Type]] = wl match { + case hd :: tl => + val children = enumerateChildren(hd) + groupChildren(tl ++ children, acc :+ filterChildren(children)) + case Nil => acc + } + + groupChildren(sym :: Nil, Nil) + } else { + val subclasses = debug.patmatResult(s"enum $sym sealed, subclasses")( + // symbols which are both sealed and abstract need not be covered themselves, because + // all of their children must be and they cannot otherwise be created. 
+ sym.sealedDescendants.toList + sortBy (_.sealedSortName) + filterNot (x => x.isSealed && x.isAbstractClass && !isPrimitiveValueClass(x)) + ) + + List(debug.patmatResult(s"enum sealed tp=$tp, tpApprox=$tpApprox as") { + // valid subtypes are turned into checkable types, as we are entering the realm of the dynamic + filterChildren(subclasses) + }) + } + case sym => debug.patmat("enum unsealed "+ ((tp, sym, sym.isSealed, isPrimitiveValueClass(sym)))) - None + Nil } // approximate a type to the static type that is fully checkable at run time, @@ -176,7 +202,7 @@ trait TreeAndTypeAnalysis extends Debugging { def uncheckableType(tp: Type): Boolean = { val checkable = ( (isTupleType(tp) && tupleComponents(tp).exists(tp => !uncheckableType(tp))) - || enumerateSubtypes(tp).nonEmpty) + || enumerateSubtypes(tp, grouped = false).nonEmpty) // if (!checkable) debug.patmat("deemed uncheckable: "+ tp) !checkable } diff --git a/src/compiler/scala/tools/nsc/transform/patmat/Solving.scala b/src/compiler/scala/tools/nsc/transform/patmat/Solving.scala index c43f1b6209..9710c5c66b 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/Solving.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/Solving.scala @@ -65,11 +65,22 @@ trait Solving extends Logic { def size = symbols.size } + def cnfString(f: Array[Clause]): String + final case class Solvable(cnf: Cnf, symbolMapping: SymbolMapping) { def ++(other: Solvable) = { require(this.symbolMapping eq other.symbolMapping) Solvable(cnf ++ other.cnf, symbolMapping) } + + override def toString: String = { + "Solvable\nLiterals:\n" + + (for { + (lit, sym) <- symbolMapping.symForVar.toSeq.sortBy(_._1) + } yield { + s"$lit -> $sym" + }).mkString("\n") + "Cnf:\n" + cnfString(cnf) + } } trait CnfBuilder { @@ -140,20 +151,23 @@ trait Solving extends Logic { def apply(p: Prop): Solvable = { - def convert(p: Prop): Lit = { + def convert(p: Prop): Option[Lit] = { p match { case And(fv) => - and(fv.map(convert)) + Some(and(fv.flatMap(convert))) case Or(fv) => - or(fv.map(convert)) + Some(or(fv.flatMap(convert))) case Not(a) => - not(convert(a)) + convert(a).map(not) case sym: Sym => - convertSym(sym) + Some(convertSym(sym)) case True => - constTrue + Some(constTrue) case False => - constFalse + Some(constFalse) + case AtMostOne(ops) => + atMostOne(ops) + None case _: Eq => throw new MatchError(p) } @@ -199,8 +213,57 @@ trait Solving extends Logic { // no need for auxiliary variable def not(a: Lit): Lit = -a + /** + * This encoding adds 3n-4 variables auxiliary variables + * to encode that at most 1 symbol can be set. 
+ * See also "Towards an Optimal CNF Encoding of Boolean Cardinality Constraints" + * http://www.carstensinz.de/papers/CP-2005.pdf + */ + def atMostOne(ops: List[Sym]) { + (ops: @unchecked) match { + case hd :: Nil => convertSym(hd) + case x1 :: tail => + // sequential counter: 3n-4 clauses + // pairwise encoding: n*(n-1)/2 clauses + // thus pays off only if n > 5 + if (ops.lengthCompare(5) > 0) { + + @inline + def /\(a: Lit, b: Lit) = addClauseProcessed(clause(a, b)) + + val (mid, xn :: Nil) = tail.splitAt(tail.size - 1) + + // 1 <= x1,...,xn <==> + // + // (!x1 \/ s1) /\ (!xn \/ !sn-1) /\ + // + // /\ + // / \ (!xi \/ si) /\ (!si-1 \/ si) /\ (!xi \/ !si-1) + // 1 < i < n + val s1 = newLiteral() + /\(-convertSym(x1), s1) + val snMinus = mid.foldLeft(s1) { + case (siMinus, sym) => + val xi = convertSym(sym) + val si = newLiteral() + /\(-xi, si) + /\(-siMinus, si) + /\(-xi, -siMinus) + si + } + /\(-convertSym(xn), -snMinus) + } else { + ops.map(convertSym).combinations(2).foreach { + case a :: b :: Nil => + addClauseProcessed(clause(-a, -b)) + case _ => + } + } + } + } + // add intermediate variable since we want the formula to be SAT! - addClauseProcessed(clause(convert(p))) + addClauseProcessed(convert(p).toSet) Solvable(buildCnf, symbolMapping) } diff --git a/test/files/neg/patmatexhaust-huge.check b/test/files/neg/patmatexhaust-huge.check new file mode 100644 index 0000000000..66dbd42ef3 --- /dev/null +++ b/test/files/neg/patmatexhaust-huge.check @@ -0,0 +1,7 @@ +patmatexhaust-huge.scala:404: warning: match may not be exhaustive. +It would fail on the following inputs: C392, C397 + def f(c: C): Int = c match { + ^ +error: No warnings can be incurred under -Xfatal-warnings. +one warning found +one error found diff --git a/test/files/neg/patmatexhaust-huge.flags b/test/files/neg/patmatexhaust-huge.flags new file mode 100644 index 0000000000..591a950f83 --- /dev/null +++ b/test/files/neg/patmatexhaust-huge.flags @@ -0,0 +1 @@ +-Xfatal-warnings -unchecked -Ypatmat-exhaust-depth off \ No newline at end of file diff --git a/test/files/neg/patmatexhaust-huge.scala b/test/files/neg/patmatexhaust-huge.scala new file mode 100644 index 0000000000..8f87655b7a --- /dev/null +++ b/test/files/neg/patmatexhaust-huge.scala @@ -0,0 +1,806 @@ +sealed trait C +case object C1 extends C +case object C2 extends C +case object C3 extends C +case object C4 extends C +case object C5 extends C +case object C6 extends C +case object C7 extends C +case object C8 extends C +case object C9 extends C +case object C10 extends C +case object C11 extends C +case object C12 extends C +case object C13 extends C +case object C14 extends C +case object C15 extends C +case object C16 extends C +case object C17 extends C +case object C18 extends C +case object C19 extends C +case object C20 extends C +case object C21 extends C +case object C22 extends C +case object C23 extends C +case object C24 extends C +case object C25 extends C +case object C26 extends C +case object C27 extends C +case object C28 extends C +case object C29 extends C +case object C30 extends C +case object C31 extends C +case object C32 extends C +case object C33 extends C +case object C34 extends C +case object C35 extends C +case object C36 extends C +case object C37 extends C +case object C38 extends C +case object C39 extends C +case object C40 extends C +case object C41 extends C +case object C42 extends C +case object C43 extends C +case object C44 extends C +case object C45 extends C +case object C46 extends C +case object C47 extends C +case 
object C48 extends C +case object C49 extends C +case object C50 extends C +case object C51 extends C +case object C52 extends C +case object C53 extends C +case object C54 extends C +case object C55 extends C +case object C56 extends C +case object C57 extends C +case object C58 extends C +case object C59 extends C +case object C60 extends C +case object C61 extends C +case object C62 extends C +case object C63 extends C +case object C64 extends C +case object C65 extends C +case object C66 extends C +case object C67 extends C +case object C68 extends C +case object C69 extends C +case object C70 extends C +case object C71 extends C +case object C72 extends C +case object C73 extends C +case object C74 extends C +case object C75 extends C +case object C76 extends C +case object C77 extends C +case object C78 extends C +case object C79 extends C +case object C80 extends C +case object C81 extends C +case object C82 extends C +case object C83 extends C +case object C84 extends C +case object C85 extends C +case object C86 extends C +case object C87 extends C +case object C88 extends C +case object C89 extends C +case object C90 extends C +case object C91 extends C +case object C92 extends C +case object C93 extends C +case object C94 extends C +case object C95 extends C +case object C96 extends C +case object C97 extends C +case object C98 extends C +case object C99 extends C +case object C100 extends C +case object C101 extends C +case object C102 extends C +case object C103 extends C +case object C104 extends C +case object C105 extends C +case object C106 extends C +case object C107 extends C +case object C108 extends C +case object C109 extends C +case object C110 extends C +case object C111 extends C +case object C112 extends C +case object C113 extends C +case object C114 extends C +case object C115 extends C +case object C116 extends C +case object C117 extends C +case object C118 extends C +case object C119 extends C +case object C120 extends C +case object C121 extends C +case object C122 extends C +case object C123 extends C +case object C124 extends C +case object C125 extends C +case object C126 extends C +case object C127 extends C +case object C128 extends C +case object C129 extends C +case object C130 extends C +case object C131 extends C +case object C132 extends C +case object C133 extends C +case object C134 extends C +case object C135 extends C +case object C136 extends C +case object C137 extends C +case object C138 extends C +case object C139 extends C +case object C140 extends C +case object C141 extends C +case object C142 extends C +case object C143 extends C +case object C144 extends C +case object C145 extends C +case object C146 extends C +case object C147 extends C +case object C148 extends C +case object C149 extends C +case object C150 extends C +case object C151 extends C +case object C152 extends C +case object C153 extends C +case object C154 extends C +case object C155 extends C +case object C156 extends C +case object C157 extends C +case object C158 extends C +case object C159 extends C +case object C160 extends C +case object C161 extends C +case object C162 extends C +case object C163 extends C +case object C164 extends C +case object C165 extends C +case object C166 extends C +case object C167 extends C +case object C168 extends C +case object C169 extends C +case object C170 extends C +case object C171 extends C +case object C172 extends C +case object C173 extends C +case object C174 extends C +case object C175 extends C +case object C176 extends C 
+case object C177 extends C +case object C178 extends C +case object C179 extends C +case object C180 extends C +case object C181 extends C +case object C182 extends C +case object C183 extends C +case object C184 extends C +case object C185 extends C +case object C186 extends C +case object C187 extends C +case object C188 extends C +case object C189 extends C +case object C190 extends C +case object C191 extends C +case object C192 extends C +case object C193 extends C +case object C194 extends C +case object C195 extends C +case object C196 extends C +case object C197 extends C +case object C198 extends C +case object C199 extends C +case object C200 extends C +case object C201 extends C +case object C202 extends C +case object C203 extends C +case object C204 extends C +case object C205 extends C +case object C206 extends C +case object C207 extends C +case object C208 extends C +case object C209 extends C +case object C210 extends C +case object C211 extends C +case object C212 extends C +case object C213 extends C +case object C214 extends C +case object C215 extends C +case object C216 extends C +case object C217 extends C +case object C218 extends C +case object C219 extends C +case object C220 extends C +case object C221 extends C +case object C222 extends C +case object C223 extends C +case object C224 extends C +case object C225 extends C +case object C226 extends C +case object C227 extends C +case object C228 extends C +case object C229 extends C +case object C230 extends C +case object C231 extends C +case object C232 extends C +case object C233 extends C +case object C234 extends C +case object C235 extends C +case object C236 extends C +case object C237 extends C +case object C238 extends C +case object C239 extends C +case object C240 extends C +case object C241 extends C +case object C242 extends C +case object C243 extends C +case object C244 extends C +case object C245 extends C +case object C246 extends C +case object C247 extends C +case object C248 extends C +case object C249 extends C +case object C250 extends C +case object C251 extends C +case object C252 extends C +case object C253 extends C +case object C254 extends C +case object C255 extends C +case object C256 extends C +case object C257 extends C +case object C258 extends C +case object C259 extends C +case object C260 extends C +case object C261 extends C +case object C262 extends C +case object C263 extends C +case object C264 extends C +case object C265 extends C +case object C266 extends C +case object C267 extends C +case object C268 extends C +case object C269 extends C +case object C270 extends C +case object C271 extends C +case object C272 extends C +case object C273 extends C +case object C274 extends C +case object C275 extends C +case object C276 extends C +case object C277 extends C +case object C278 extends C +case object C279 extends C +case object C280 extends C +case object C281 extends C +case object C282 extends C +case object C283 extends C +case object C284 extends C +case object C285 extends C +case object C286 extends C +case object C287 extends C +case object C288 extends C +case object C289 extends C +case object C290 extends C +case object C291 extends C +case object C292 extends C +case object C293 extends C +case object C294 extends C +case object C295 extends C +case object C296 extends C +case object C297 extends C +case object C298 extends C +case object C299 extends C +case object C300 extends C +case object C301 extends C +case object C302 extends C +case object C303 extends 
C +case object C304 extends C +case object C305 extends C +case object C306 extends C +case object C307 extends C +case object C308 extends C +case object C309 extends C +case object C310 extends C +case object C311 extends C +case object C312 extends C +case object C313 extends C +case object C314 extends C +case object C315 extends C +case object C316 extends C +case object C317 extends C +case object C318 extends C +case object C319 extends C +case object C320 extends C +case object C321 extends C +case object C322 extends C +case object C323 extends C +case object C324 extends C +case object C325 extends C +case object C326 extends C +case object C327 extends C +case object C328 extends C +case object C329 extends C +case object C330 extends C +case object C331 extends C +case object C332 extends C +case object C333 extends C +case object C334 extends C +case object C335 extends C +case object C336 extends C +case object C337 extends C +case object C338 extends C +case object C339 extends C +case object C340 extends C +case object C341 extends C +case object C342 extends C +case object C343 extends C +case object C344 extends C +case object C345 extends C +case object C346 extends C +case object C347 extends C +case object C348 extends C +case object C349 extends C +case object C350 extends C +case object C351 extends C +case object C352 extends C +case object C353 extends C +case object C354 extends C +case object C355 extends C +case object C356 extends C +case object C357 extends C +case object C358 extends C +case object C359 extends C +case object C360 extends C +case object C361 extends C +case object C362 extends C +case object C363 extends C +case object C364 extends C +case object C365 extends C +case object C366 extends C +case object C367 extends C +case object C368 extends C +case object C369 extends C +case object C370 extends C +case object C371 extends C +case object C372 extends C +case object C373 extends C +case object C374 extends C +case object C375 extends C +case object C376 extends C +case object C377 extends C +case object C378 extends C +case object C379 extends C +case object C380 extends C +case object C381 extends C +case object C382 extends C +case object C383 extends C +case object C384 extends C +case object C385 extends C +case object C386 extends C +case object C387 extends C +case object C388 extends C +case object C389 extends C +case object C390 extends C +case object C391 extends C +case object C392 extends C +case object C393 extends C +case object C394 extends C +case object C395 extends C +case object C396 extends C +case object C397 extends C +case object C398 extends C +case object C399 extends C +case object C400 extends C + +object M { + def f(c: C): Int = c match { + case C1 => 1 + case C2 => 2 + case C3 => 3 + case C4 => 4 + case C5 => 5 + case C6 => 6 + case C7 => 7 + case C8 => 8 + case C9 => 9 + case C10 => 10 + case C11 => 11 + case C12 => 12 + case C13 => 13 + case C14 => 14 + case C15 => 15 + case C16 => 16 + case C17 => 17 + case C18 => 18 + case C19 => 19 + case C20 => 20 + case C21 => 21 + case C22 => 22 + case C23 => 23 + case C24 => 24 + case C25 => 25 + case C26 => 26 + case C27 => 27 + case C28 => 28 + case C29 => 29 + case C30 => 30 + case C31 => 31 + case C32 => 32 + case C33 => 33 + case C34 => 34 + case C35 => 35 + case C36 => 36 + case C37 => 37 + case C38 => 38 + case C39 => 39 + case C40 => 40 + case C41 => 41 + case C42 => 42 + case C43 => 43 + case C44 => 44 + case C45 => 45 + case C46 => 46 + case C47 => 47 + case C48 
=> 48 + case C49 => 49 + case C50 => 50 + case C51 => 51 + case C52 => 52 + case C53 => 53 + case C54 => 54 + case C55 => 55 + case C56 => 56 + case C57 => 57 + case C58 => 58 + case C59 => 59 + case C60 => 60 + case C61 => 61 + case C62 => 62 + case C63 => 63 + case C64 => 64 + case C65 => 65 + case C66 => 66 + case C67 => 67 + case C68 => 68 + case C69 => 69 + case C70 => 70 + case C71 => 71 + case C72 => 72 + case C73 => 73 + case C74 => 74 + case C75 => 75 + case C76 => 76 + case C77 => 77 + case C78 => 78 + case C79 => 79 + case C80 => 80 + case C81 => 81 + case C82 => 82 + case C83 => 83 + case C84 => 84 + case C85 => 85 + case C86 => 86 + case C87 => 87 + case C88 => 88 + case C89 => 89 + case C90 => 90 + case C91 => 91 + case C92 => 92 + case C93 => 93 + case C94 => 94 + case C95 => 95 + case C96 => 96 + case C97 => 97 + case C98 => 98 + case C99 => 99 + case C100 => 100 + case C101 => 101 + case C102 => 102 + case C103 => 103 + case C104 => 104 + case C105 => 105 + case C106 => 106 + case C107 => 107 + case C108 => 108 + case C109 => 109 + case C110 => 110 + case C111 => 111 + case C112 => 112 + case C113 => 113 + case C114 => 114 + case C115 => 115 + case C116 => 116 + case C117 => 117 + case C118 => 118 + case C119 => 119 + case C120 => 120 + case C121 => 121 + case C122 => 122 + case C123 => 123 + case C124 => 124 + case C125 => 125 + case C126 => 126 + case C127 => 127 + case C128 => 128 + case C129 => 129 + case C130 => 130 + case C131 => 131 + case C132 => 132 + case C133 => 133 + case C134 => 134 + case C135 => 135 + case C136 => 136 + case C137 => 137 + case C138 => 138 + case C139 => 139 + case C140 => 140 + case C141 => 141 + case C142 => 142 + case C143 => 143 + case C144 => 144 + case C145 => 145 + case C146 => 146 + case C147 => 147 + case C148 => 148 + case C149 => 149 + case C150 => 150 + case C151 => 151 + case C152 => 152 + case C153 => 153 + case C154 => 154 + case C155 => 155 + case C156 => 156 + case C157 => 157 + case C158 => 158 + case C159 => 159 + case C160 => 160 + case C161 => 161 + case C162 => 162 + case C163 => 163 + case C164 => 164 + case C165 => 165 + case C166 => 166 + case C167 => 167 + case C168 => 168 + case C169 => 169 + case C170 => 170 + case C171 => 171 + case C172 => 172 + case C173 => 173 + case C174 => 174 + case C175 => 175 + case C176 => 176 + case C177 => 177 + case C178 => 178 + case C179 => 179 + case C180 => 180 + case C181 => 181 + case C182 => 182 + case C183 => 183 + case C184 => 184 + case C185 => 185 + case C186 => 186 + case C187 => 187 + case C188 => 188 + case C189 => 189 + case C190 => 190 + case C191 => 191 + case C192 => 192 + case C193 => 193 + case C194 => 194 + case C195 => 195 + case C196 => 196 + case C197 => 197 + case C198 => 198 + case C199 => 199 + case C200 => 200 + case C201 => 201 + case C202 => 202 + case C203 => 203 + case C204 => 204 + case C205 => 205 + case C206 => 206 + case C207 => 207 + case C208 => 208 + case C209 => 209 + case C210 => 210 + case C211 => 211 + case C212 => 212 + case C213 => 213 + case C214 => 214 + case C215 => 215 + case C216 => 216 + case C217 => 217 + case C218 => 218 + case C219 => 219 + case C220 => 220 + case C221 => 221 + case C222 => 222 + case C223 => 223 + case C224 => 224 + case C225 => 225 + case C226 => 226 + case C227 => 227 + case C228 => 228 + case C229 => 229 + case C230 => 230 + case C231 => 231 + case C232 => 232 + case C233 => 233 + case C234 => 234 + case C235 => 235 + case C236 => 236 + case C237 => 237 + case C238 => 238 + case C239 => 239 + case C240 => 240 + 
case C241 => 241 + case C242 => 242 + case C243 => 243 + case C244 => 244 + case C245 => 245 + case C246 => 246 + case C247 => 247 + case C248 => 248 + case C249 => 249 + case C250 => 250 + case C251 => 251 + case C252 => 252 + case C253 => 253 + case C254 => 254 + case C255 => 255 + case C256 => 256 + case C257 => 257 + case C258 => 258 + case C259 => 259 + case C260 => 260 + case C261 => 261 + case C262 => 262 + case C263 => 263 + case C264 => 264 + case C265 => 265 + case C266 => 266 + case C267 => 267 + case C268 => 268 + case C269 => 269 + case C270 => 270 + case C271 => 271 + case C272 => 272 + case C273 => 273 + case C274 => 274 + case C275 => 275 + case C276 => 276 + case C277 => 277 + case C278 => 278 + case C279 => 279 + case C280 => 280 + case C281 => 281 + case C282 => 282 + case C283 => 283 + case C284 => 284 + case C285 => 285 + case C286 => 286 + case C287 => 287 + case C288 => 288 + case C289 => 289 + case C290 => 290 + case C291 => 291 + case C292 => 292 + case C293 => 293 + case C294 => 294 + case C295 => 295 + case C296 => 296 + case C297 => 297 + case C298 => 298 + case C299 => 299 + case C300 => 300 + case C301 => 301 + case C302 => 302 + case C303 => 303 + case C304 => 304 + case C305 => 305 + case C306 => 306 + case C307 => 307 + case C308 => 308 + case C309 => 309 + case C310 => 310 + case C311 => 311 + case C312 => 312 + case C313 => 313 + case C314 => 314 + case C315 => 315 + case C316 => 316 + case C317 => 317 + case C318 => 318 + case C319 => 319 + case C320 => 320 + case C321 => 321 + case C322 => 322 + case C323 => 323 + case C324 => 324 + case C325 => 325 + case C326 => 326 + case C327 => 327 + case C328 => 328 + case C329 => 329 + case C330 => 330 + case C331 => 331 + case C332 => 332 + case C333 => 333 + case C334 => 334 + case C335 => 335 + case C336 => 336 + case C337 => 337 + case C338 => 338 + case C339 => 339 + case C340 => 340 + case C341 => 341 + case C342 => 342 + case C343 => 343 + case C344 => 344 + case C345 => 345 + case C346 => 346 + case C347 => 347 + case C348 => 348 + case C349 => 349 + case C350 => 350 + case C351 => 351 + case C352 => 352 + case C353 => 353 + case C354 => 354 + case C355 => 355 + case C356 => 356 + case C357 => 357 + case C358 => 358 + case C359 => 359 + case C360 => 360 + case C361 => 361 + case C362 => 362 + case C363 => 363 + case C364 => 364 + case C365 => 365 + case C366 => 366 + case C367 => 367 + case C368 => 368 + case C369 => 369 + case C370 => 370 + case C371 => 371 + case C372 => 372 + case C373 => 373 + case C374 => 374 + case C375 => 375 + case C376 => 376 + case C377 => 377 + case C378 => 378 + case C379 => 379 + case C380 => 380 + case C381 => 381 + case C382 => 382 + case C383 => 383 + case C384 => 384 + case C385 => 385 + case C386 => 386 + case C387 => 387 + case C388 => 388 + case C389 => 389 + case C390 => 390 + case C391 => 391 +// case C392 => 392 + case C393 => 393 + case C394 => 394 + case C395 => 395 + case C396 => 396 +// case C397 => 397 + case C398 => 398 + case C399 => 399 + case C400 => 400 + } +} diff --git a/test/junit/scala/tools/nsc/transform/patmat/SolvingTest.scala b/test/junit/scala/tools/nsc/transform/patmat/SolvingTest.scala index 1fe7b19056..7bcb90a2ee 100644 --- a/test/junit/scala/tools/nsc/transform/patmat/SolvingTest.scala +++ b/test/junit/scala/tools/nsc/transform/patmat/SolvingTest.scala @@ -52,6 +52,8 @@ object TestSolver extends Logic with Solving { def domainSyms = None + def groupedDomains: List[Set[TestSolver.Sym]] = Nil + def implications = Nil def mayBeNull = false @@ 
-207,11 +209,44 @@ class SolvingTest { import scala.tools.nsc.transform.patmat.TestSolver.TestSolver._ - implicit val Ord: Ordering[TestSolver.TestSolver.Model] = Ordering.by { - _.toSeq.sortBy(_.toString()).toIterable + object SymName { + def unapply(s: Sym): Option[String] = { + val Var(Tree(name)) = s.variable + Some(name) + } + } + + implicit val ModelOrd: Ordering[TestSolver.TestSolver.Model] = Ordering.by { + _.toSeq.sortWith { + case ((sym1, v1), (sym2, v2)) => + val SymName(name1) = sym1 + val SymName(name2) = sym2 + if (name1 < name2) + true + else if (name1 > name2) + false + else + v1 < v2 + }.toIterable + } + + implicit val SolutionOrd: Ordering[TestSolver.TestSolver.Solution] = + Ordering.by(_.model) + + def formatSolution(solution: Solution): String = { + formatModel(solution.model) } - private def sym(name: String) = Sym(Var(Tree(name)), NullConst) + def formatModel(model: Model): String = { + (for { + (SymName(name), value) <- model + } yield { + val v = if (value) "T" else "F" + s"$name -> $v" + }).mkString(", ") + } + + def sym(name: String) = Sym(Var(Tree(name)), NullConst) @Test def testSymCreation() { @@ -553,6 +588,23 @@ class SolvingTest { assertEquals(tseitinNoUnassigned, expansionNoUnassigned) } } + + def pairWiseEncoding(ops: List[Sym]) = { + And(ops.combinations(2).collect { + case a :: b :: Nil => Or(Not(a), Not(b)) + }.toSet[TestSolver.TestSolver.Prop]) + } + + @Test + def testAtMostOne() { + val dummySym = sym("dummy") + val syms = "pqrstu".map(c => sym(c.toString)).toList + // expand unassigned variables + // (otherwise solutions can not be compared) + val expected = TestSolver.TestSolver.findAllModelsFor(propToSolvable(And(dummySym, pairWiseEncoding(syms)))).flatMap(expandUnassigned) + val actual = TestSolver.TestSolver.findAllModelsFor(propToSolvable(And(dummySym, AtMostOne(syms)))).flatMap(expandUnassigned) + assertEquals(expected.toSet, actual.toSet) + } } -- cgit v1.2.3 From 4df81aab315e587d9c7e319c7a2ece0f0f6fbaf3 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Sat, 20 Dec 2014 02:28:44 -0800 Subject: SI-3368 CDATA gets a Node XML Parser uses `scala.xml.PCData`. A compiler flag `-Yxml:coalescing`, analogous to `DocumentBuilderFactory.setCoalescing`, turns `PCData` nodes into `Text` nodes and coalesces sibling text nodes. This change also fixes parse errors such as rejecting a sequence of CDATA sections. A sequence of "top level" nodes are not coalesced. 
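For concreteness, a literal of the shape this affects (the markup below is illustrative only, modelled on the t3368 test added later in this series rather than quoted from it):

```
trait Y {
  // Without the flag, the two CDATA sections become scala.xml.PCData nodes.
  // Under -Yxml:coalescing they become Text and merge with adjacent text,
  // e.g. "start" and "hi & bye" end up in a single Text("starthi & bye").
  def y = <a><b/>start<![CDATA[hi & bye]]><c/>world<d/>stuff<![CDATA[red & black]]></a>
}
```
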
``` scala> startworldstuff res0: scala.xml.Elem = startworldstuff scala> :replay -Yxml:coalescing Replaying: startworldstuff res0: scala.xml.Elem = starthi & byeworldstuffred & black ``` --- .../scala/tools/nsc/ast/parser/MarkupParsers.scala | 129 +++++++++++++-------- .../tools/nsc/ast/parser/SymbolicXMLBuilder.scala | 25 +++- .../scala/tools/nsc/settings/ScalaSettings.scala | 12 ++ .../scala/tools/partest/ParserTest.scala | 21 ++++ test/files/pos/t3368.flags | 1 + test/files/pos/t3368.scala | 5 + test/files/run/t3368.check | 46 ++++++++ test/files/run/t3368.scala | 18 +++ 8 files changed, 200 insertions(+), 57 deletions(-) create mode 100644 src/partest-extras/scala/tools/partest/ParserTest.scala create mode 100644 test/files/pos/t3368.flags create mode 100644 test/files/pos/t3368.scala create mode 100644 test/files/run/t3368.check create mode 100644 test/files/run/t3368.scala (limited to 'test') diff --git a/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala b/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala index 96939e616c..edee1e296d 100755 --- a/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala @@ -6,6 +6,7 @@ package scala.tools.nsc package ast.parser +import scala.annotation.tailrec import scala.collection.mutable import mutable.{ Buffer, ArrayBuffer, ListBuffer } import scala.util.control.ControlThrowable @@ -172,20 +173,19 @@ trait MarkupParsers { } def appendText(pos: Position, ts: Buffer[Tree], txt: String): Unit = { - def append(t: String) = ts append handle.text(pos, t) - - if (preserveWS) append(txt) - else { + def append(text: String): Unit = { + val tree = handle.text(pos, text) + ts append tree + } + val clean = if (preserveWS) txt else { val sb = new StringBuilder() - txt foreach { c => if (!isSpace(c)) sb append c else if (sb.isEmpty || !isSpace(sb.last)) sb append ' ' } - - val trimmed = sb.toString.trim - if (!trimmed.isEmpty) append(trimmed) + sb.toString.trim } + if (!clean.isEmpty) append(clean) } /** adds entity/character to ts as side-effect @@ -216,44 +216,75 @@ trait MarkupParsers { if (xCheckEmbeddedBlock) ts append xEmbeddedExpr else appendText(p, ts, xText) - /** Returns true if it encounters an end tag (without consuming it), - * appends trees to ts as side-effect. + /** At an open angle-bracket, detects an end tag + * or consumes CDATA, comment, PI or element. + * Trees are appended to `ts` as a side-effect. + * @return true if an end tag (without consuming it) */ - private def content_LT(ts: ArrayBuffer[Tree]): Boolean = { - if (ch == '/') - return true // end tag - - val toAppend = ch match { - case '!' => nextch() ; if (ch =='[') xCharData else xComment // CDATA or Comment - case '?' => nextch() ; xProcInstr // PI - case _ => element // child node + private def content_LT(ts: ArrayBuffer[Tree]): Boolean = + (ch == '/') || { + val toAppend = ch match { + case '!' => nextch() ; if (ch =='[') xCharData else xComment // CDATA or Comment + case '?' 
=> nextch() ; xProcInstr // PI + case _ => element // child node + } + ts append toAppend + false } - ts append toAppend - false - } - def content: Buffer[Tree] = { val ts = new ArrayBuffer[Tree] - while (true) { - if (xEmbeddedBlock) + val coalescing = settings.YxmlSettings.isCoalescing + @tailrec def loopContent(): Unit = + if (xEmbeddedBlock) { ts append xEmbeddedExpr - else { + loopContent() + } else { tmppos = o2p(curOffset) ch match { - // end tag, cdata, comment, pi or child node - case '<' => nextch() ; if (content_LT(ts)) return ts - // either the character '{' or an embedded scala block } - case '{' => content_BRACE(tmppos, ts) // } - // EntityRef or CharRef - case '&' => content_AMP(ts) - case SU => return ts - // text content - here xEmbeddedBlock might be true - case _ => appendText(tmppos, ts, xText) + case '<' => // end tag, cdata, comment, pi or child node + nextch() + if (!content_LT(ts)) loopContent() + case '{' => // } literal brace or embedded Scala block + content_BRACE(tmppos, ts) + loopContent() + case '&' => // EntityRef or CharRef + content_AMP(ts) + loopContent() + case SU => () + case _ => // text content - here xEmbeddedBlock might be true + appendText(tmppos, ts, xText) + loopContent() } } + // merge text sections and strip attachments + def coalesce(): ArrayBuffer[Tree] = { + def copy() = { + val buf = new ArrayBuffer[Tree] + var acc = new StringBuilder + var pos: Position = NoPosition + def emit() = if (acc.nonEmpty) { + appendText(pos, buf, acc.toString) + acc.clear() + } + for (t <- ts) + t.attachments.get[handle.TextAttache] match { + case Some(ta) => + if (acc.isEmpty) pos = ta.pos + acc append ta.text + case _ => + emit() + buf += t + } + emit() + buf + } + val res = if (ts.count(_.hasAttachment[handle.TextAttache]) > 1) copy() else ts + for (t <- res) t.removeAttachment[handle.TextAttache] + res } - unreachable + loopContent() + if (coalescing) coalesce() else ts } /** '<' element ::= xmlTag1 '>' { xmlExpr | '{' simpleExpr '}' } ETag @@ -289,20 +320,16 @@ trait MarkupParsers { private def xText: String = { assert(!xEmbeddedBlock, "internal error: encountered embedded block") val buf = new StringBuilder - def done = buf.toString - - while (ch != SU) { - if (ch == '}') { - if (charComingAfter(nextch()) == '}') nextch() - else errorBraces() - } - - buf append ch - nextch() - if (xCheckEmbeddedBlock || ch == '<' || ch == '&') - return done - } - done + if (ch != SU) + do { + if (ch == '}') { + if (charComingAfter(nextch()) == '}') nextch() + else errorBraces() + } + buf append ch + nextch() + } while (!(ch == SU || xCheckEmbeddedBlock || ch == '<' || ch == '&')) + buf.toString } /** Some try/catch/finally logic used by xLiteral and xLiteralPattern. */ @@ -344,12 +371,12 @@ trait MarkupParsers { tmppos = o2p(curOffset) // Iuli: added this line, as it seems content_LT uses tmppos when creating trees content_LT(ts) - // parse more XML ? + // parse more XML? 
if (charComingAfter(xSpaceOpt()) == '<') { do { xSpaceOpt() nextch() - ts append element + content_LT(ts) } while (charComingAfter(xSpaceOpt()) == '<') handle.makeXMLseq(r2p(start, start, curOffset), ts) } diff --git a/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala b/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala index d2a999cdec..90610ab2e6 100755 --- a/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala @@ -36,6 +36,7 @@ abstract class SymbolicXMLBuilder(p: Parsers#Parser, preserveWS: Boolean) { val _MetaData: NameType = "MetaData" val _NamespaceBinding: NameType = "NamespaceBinding" val _NodeBuffer: NameType = "NodeBuffer" + val _PCData: NameType = "PCData" val _PrefixedAttribute: NameType = "PrefixedAttribute" val _ProcInstr: NameType = "ProcInstr" val _Text: NameType = "Text" @@ -46,6 +47,7 @@ abstract class SymbolicXMLBuilder(p: Parsers#Parser, preserveWS: Boolean) { private object xmlterms extends TermNames { val _Null: NameType = "Null" val __Elem: NameType = "Elem" + val _PCData: NameType = "PCData" val __Text: NameType = "Text" val _buf: NameType = "$buf" val _md: NameType = "$md" @@ -55,10 +57,15 @@ abstract class SymbolicXMLBuilder(p: Parsers#Parser, preserveWS: Boolean) { val _xml: NameType = "xml" } - import xmltypes.{_Comment, _Elem, _EntityRef, _Group, _MetaData, _NamespaceBinding, _NodeBuffer, - _PrefixedAttribute, _ProcInstr, _Text, _Unparsed, _UnprefixedAttribute} + import xmltypes.{ + _Comment, _Elem, _EntityRef, _Group, _MetaData, _NamespaceBinding, _NodeBuffer, + _PCData, _PrefixedAttribute, _ProcInstr, _Text, _Unparsed, _UnprefixedAttribute + } + + import xmlterms.{ _Null, __Elem, __Text, _buf, _md, _plus, _scope, _tmpscope, _xml } - import xmlterms.{_Null, __Elem, __Text, _buf, _md, _plus, _scope, _tmpscope, _xml} + /** Attachment for trees deriving from text nodes (Text, CData, entities). Used for coalescing. 
*/ + case class TextAttache(pos: Position, text: String) // convenience methods private def LL[A](x: A*): List[List[A]] = List(List(x:_*)) @@ -108,16 +115,22 @@ abstract class SymbolicXMLBuilder(p: Parsers#Parser, preserveWS: Boolean) { final def entityRef(pos: Position, n: String) = atPos(pos)( New(_scala_xml_EntityRef, LL(const(n))) ) + private def coalescing = settings.YxmlSettings.isCoalescing + // create scala.xml.Text here <: scala.xml.Node final def text(pos: Position, txt: String): Tree = atPos(pos) { - if (isPattern) makeTextPat(const(txt)) - else makeText1(const(txt)) + val t = if (isPattern) makeTextPat(const(txt)) else makeText1(const(txt)) + if (coalescing) t updateAttachment TextAttache(pos, txt) else t } def makeTextPat(txt: Tree) = Apply(_scala_xml__Text, List(txt)) def makeText1(txt: Tree) = New(_scala_xml_Text, LL(txt)) def comment(pos: Position, text: String) = atPos(pos)( Comment(const(text)) ) - def charData(pos: Position, txt: String) = atPos(pos)( makeText1(const(txt)) ) + def charData(pos: Position, txt: String) = atPos(pos) { + val t = if (isPattern) Apply(_scala_xml(xmlterms._PCData), List(const(txt))) + else New(_scala_xml(_PCData), LL(const(txt))) + if (coalescing) t updateAttachment TextAttache(pos, txt) else t + } def procInstr(pos: Position, target: String, txt: String) = atPos(pos)( ProcInstr(const(target), const(txt)) ) diff --git a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala index 03fd0976e5..e10b3bc259 100644 --- a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala +++ b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala @@ -305,6 +305,18 @@ trait ScalaSettings extends AbsScalaSettings ) withPostSetHook { _ => scala.reflect.internal.util.Statistics.enabled = true } } + // XML parsing options (transitional in 2.11) + object YxmlSettings extends MultiChoiceEnumeration { + val coalescing = Value + def isCoalescing = Yxml contains coalescing + } + val Yxml = MultiChoiceSetting( + name = "-Yxml", + helpArg = "property", + descr = "Configure XML parsing", + domain = YxmlSettings + ) + def YstatisticsEnabled = Ystatistics.value.nonEmpty /** Area-specific debug output. diff --git a/src/partest-extras/scala/tools/partest/ParserTest.scala b/src/partest-extras/scala/tools/partest/ParserTest.scala new file mode 100644 index 0000000000..e4c92e3dc3 --- /dev/null +++ b/src/partest-extras/scala/tools/partest/ParserTest.scala @@ -0,0 +1,21 @@ +/* NSC -- new Scala compiler + * Copyright 2005-2014 LAMP/EPFL + */ + +package scala.tools.partest + +/** A class for testing parser output. + * Just supply the `code` and update the check file. + */ +abstract class ParserTest extends DirectTest { + + override def extraSettings: String = "-usejavacp -Ystop-after:parser -Xprint:parser" + + override def show(): Unit = { + // redirect err to out, for logging + val prevErr = System.err + System.setErr(System.out) + compile() + System.setErr(prevErr) + } +} diff --git a/test/files/pos/t3368.flags b/test/files/pos/t3368.flags new file mode 100644 index 0000000000..cb20509902 --- /dev/null +++ b/test/files/pos/t3368.flags @@ -0,0 +1 @@ +-Ystop-after:parser diff --git a/test/files/pos/t3368.scala b/test/files/pos/t3368.scala new file mode 100644 index 0000000000..c8e861a899 --- /dev/null +++ b/test/files/pos/t3368.scala @@ -0,0 +1,5 @@ + +trait X { + // error: in XML literal: name expected, but char '!' 
cannot start a name + def x = +} diff --git a/test/files/run/t3368.check b/test/files/run/t3368.check new file mode 100644 index 0000000000..1d9dd677f6 --- /dev/null +++ b/test/files/run/t3368.check @@ -0,0 +1,46 @@ +[[syntax trees at end of parser]] // newSource1.scala +package { + abstract trait X extends scala.AnyRef { + def $init$() = { + () + }; + def x = { + val $buf = new _root_.scala.xml.NodeBuffer(); + $buf.$amp$plus(new _root_.scala.xml.PCData("hi & bye")); + $buf.$amp$plus(new _root_.scala.xml.PCData("red & black")); + $buf + } + }; + abstract trait Y extends scala.AnyRef { + def $init$() = { + () + }; + def y = { + { + new _root_.scala.xml.Elem(null, "a", _root_.scala.xml.Null, $scope, false, ({ + val $buf = new _root_.scala.xml.NodeBuffer(); + $buf.$amp$plus({ + { + new _root_.scala.xml.Elem(null, "b", _root_.scala.xml.Null, $scope, true) + } + }); + $buf.$amp$plus(new _root_.scala.xml.Text("starthi & bye")); + $buf.$amp$plus({ + { + new _root_.scala.xml.Elem(null, "c", _root_.scala.xml.Null, $scope, true) + } + }); + $buf.$amp$plus(new _root_.scala.xml.Text("world")); + $buf.$amp$plus({ + { + new _root_.scala.xml.Elem(null, "d", _root_.scala.xml.Null, $scope, true) + } + }); + $buf.$amp$plus(new _root_.scala.xml.Text("stuffred & black")); + $buf + }: _*)) + } + } + } +} + diff --git a/test/files/run/t3368.scala b/test/files/run/t3368.scala new file mode 100644 index 0000000000..aaf34b43fb --- /dev/null +++ b/test/files/run/t3368.scala @@ -0,0 +1,18 @@ + +import scala.tools.partest.ParserTest + + +object Test extends ParserTest { + + override def code = """ + trait X { + // error: in XML literal: name expected, but char '!' cannot start a name + def x = + } + trait Y { + def y = startworldstuff + } + """ + + override def extraSettings = s"${super.extraSettings} -Yxml:coalescing" +} -- cgit v1.2.3 From b60de05a2e1257d16b7876856e26feec7bb81870 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Sun, 21 Dec 2014 12:50:51 -0800 Subject: SI-5699 Use ParserTest Update the test slightly to use the rig it inspired. --- test/files/run/t5699.scala | 12 ++---------- 1 file changed, 2 insertions(+), 10 deletions(-) (limited to 'test') diff --git a/test/files/run/t5699.scala b/test/files/run/t5699.scala index ec3b1d26b4..ede6fefe48 100755 --- a/test/files/run/t5699.scala +++ b/test/files/run/t5699.scala @@ -1,7 +1,7 @@ -import scala.tools.partest.DirectTest +import scala.tools.partest.ParserTest import scala.reflect.internal.util.BatchSourceFile -object Test extends DirectTest { +object Test extends ParserTest { // Java code override def code = """ |public @interface MyAnnotation { String value(); } @@ -9,14 +9,6 @@ object Test extends DirectTest { override def extraSettings: String = "-usejavacp -Ystop-after:typer -Xprint:parser" - override def show(): Unit = { - // redirect err to out, for logging - val prevErr = System.err - System.setErr(System.out) - compile() - System.setErr(prevErr) - } - override def newSources(sourceCodes: String*) = { assert(sourceCodes.size == 1) List(new BatchSourceFile("annodef.java", sourceCodes(0))) -- cgit v1.2.3 From 996d066bb3ef0fc91acab18a5502bb649929881a Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Sun, 8 Feb 2015 15:35:43 -0800 Subject: SI-3368 Promote xml option to -Xxml As long as Scala does XML literals, there is probably parsing behavior worth configuring. Therefore, the umbrella option is promoted to `-Xxml`. It was tempting to make it `-XML`, but we resisted. 
--- .../scala/tools/nsc/ast/parser/MarkupParsers.scala | 2 +- .../tools/nsc/ast/parser/SymbolicXMLBuilder.scala | 2 +- .../scala/tools/nsc/settings/ScalaSettings.scala | 24 +++++++++++----------- test/files/run/t3368.scala | 2 +- 4 files changed, 15 insertions(+), 15 deletions(-) (limited to 'test') diff --git a/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala b/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala index edee1e296d..52b8a51a79 100755 --- a/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala @@ -234,7 +234,7 @@ trait MarkupParsers { def content: Buffer[Tree] = { val ts = new ArrayBuffer[Tree] - val coalescing = settings.YxmlSettings.isCoalescing + val coalescing = settings.XxmlSettings.isCoalescing @tailrec def loopContent(): Unit = if (xEmbeddedBlock) { ts append xEmbeddedExpr diff --git a/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala b/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala index 90610ab2e6..99399e363f 100755 --- a/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala @@ -115,7 +115,7 @@ abstract class SymbolicXMLBuilder(p: Parsers#Parser, preserveWS: Boolean) { final def entityRef(pos: Position, n: String) = atPos(pos)( New(_scala_xml_EntityRef, LL(const(n))) ) - private def coalescing = settings.YxmlSettings.isCoalescing + private def coalescing = settings.XxmlSettings.isCoalescing // create scala.xml.Text here <: scala.xml.Node final def text(pos: Position, txt: String): Tree = atPos(pos) { diff --git a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala index e10b3bc259..b783e80db9 100644 --- a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala +++ b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala @@ -139,6 +139,18 @@ trait ScalaSettings extends AbsScalaSettings val XnoPatmatAnalysis = BooleanSetting ("-Xno-patmat-analysis", "Don't perform exhaustivity/unreachability analysis. Also, ignore @switch annotation.") val XfullLubs = BooleanSetting ("-Xfull-lubs", "Retains pre 2.10 behavior of less aggressive truncation of least upper bounds.") + // XML parsing options + object XxmlSettings extends MultiChoiceEnumeration { + val coalescing = Value + def isCoalescing = Xxml contains coalescing + } + val Xxml = MultiChoiceSetting( + name = "-Xxml", + helpArg = "property", + descr = "Configure XML parsing", + domain = XxmlSettings + ) + /** Compatibility stubs for options whose value name did * not previously match the option name. */ @@ -305,18 +317,6 @@ trait ScalaSettings extends AbsScalaSettings ) withPostSetHook { _ => scala.reflect.internal.util.Statistics.enabled = true } } - // XML parsing options (transitional in 2.11) - object YxmlSettings extends MultiChoiceEnumeration { - val coalescing = Value - def isCoalescing = Yxml contains coalescing - } - val Yxml = MultiChoiceSetting( - name = "-Yxml", - helpArg = "property", - descr = "Configure XML parsing", - domain = YxmlSettings - ) - def YstatisticsEnabled = Ystatistics.value.nonEmpty /** Area-specific debug output. 
diff --git a/test/files/run/t3368.scala b/test/files/run/t3368.scala index aaf34b43fb..15acba5099 100644 --- a/test/files/run/t3368.scala +++ b/test/files/run/t3368.scala @@ -14,5 +14,5 @@ object Test extends ParserTest { } """ - override def extraSettings = s"${super.extraSettings} -Yxml:coalescing" + override def extraSettings = s"${super.extraSettings} -Xxml:coalescing" } -- cgit v1.2.3 From ef7b5524a7d6a2da82c32f16632ed2304130f32a Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Wed, 8 Apr 2015 09:49:31 -0700 Subject: SI-3368 Review Verbose option help and test tweak. --- src/compiler/scala/tools/nsc/settings/ScalaSettings.scala | 2 +- test/files/run/t5699.scala | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) (limited to 'test') diff --git a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala index b783e80db9..f217d21c35 100644 --- a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala +++ b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala @@ -141,7 +141,7 @@ trait ScalaSettings extends AbsScalaSettings // XML parsing options object XxmlSettings extends MultiChoiceEnumeration { - val coalescing = Value + val coalescing = Choice("coalescing", "Convert PCData to Text and coalesce sibling nodes") def isCoalescing = Xxml contains coalescing } val Xxml = MultiChoiceSetting( diff --git a/test/files/run/t5699.scala b/test/files/run/t5699.scala index ede6fefe48..409bcd250c 100755 --- a/test/files/run/t5699.scala +++ b/test/files/run/t5699.scala @@ -7,7 +7,7 @@ object Test extends ParserTest { |public @interface MyAnnotation { String value(); } """.stripMargin - override def extraSettings: String = "-usejavacp -Ystop-after:typer -Xprint:parser" + override def extraSettings: String = "-usejavacp -Ystop-after:namer -Xprint:parser" override def newSources(sourceCodes: String*) = { assert(sourceCodes.size == 1) -- cgit v1.2.3 From ea61b3f66007b1a0029a5f22a7de334924390f7c Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Mon, 2 Jun 2014 13:20:58 +0200 Subject: Disable -Ydelambdafy:method for specialized FunctionN The Delambdafy phase generates its `FunctionN` subclasses after the specialization phase. As such, `((x: Int) => x).apply(42)` incurs boxing. This commit falls back to the `-Ydelambdafy:inline` in this case. This is done by running the specialization type map over the type of the function, and seeing if anything changes. To make this work robustly, we first need to ensure that the specialization info transformer has processed all the function types. This is not a fundamental limitation; we could in principle generate the specialized code. A followup change will use `-Ydelambdafy:method` as the basis for invokedymnamic lambdas. As part of that stream of work, we will synthesize specialization-aware lambdas, and remove the fallback to `-Ydelambdafy:inline`. I have updated some tests that intend to test the delambdafy transform to avoid use of specialized function types. 
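For illustration, a plain-Scala example (not part of the patch; the object and value names are invented) of which lambdas the fallback applies to:

```
object SpecializedOrNot {
  def main(args: Array[String]): Unit = {
    // Int => Int has a specialized Function1 variant, so this lambda keeps the
    // old anonymous-class translation even under -Ydelambdafy:method.
    val negate: Int => Int = x => -x
    // String => String has no specialized variant, so this lambda can go
    // through the method-based translation.
    val reverse: String => String = s => s.reverse
    println(negate(21))
    println(reverse("delambdafy"))
  }
}
```
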
--- .../scala/tools/nsc/transform/UnCurry.scala | 18 +++++++- test/files/run/delambdafy-specialized.check | 1 + test/files/run/delambdafy-specialized.flags | 1 + test/files/run/delambdafy-specialized.scala | 6 +++ test/files/run/delambdafy_t6028.check | 51 ++++++++++++++-------- test/files/run/delambdafy_t6028.scala | 10 ++--- test/files/run/delambdafy_t6555.check | 8 ++-- test/files/run/delambdafy_t6555.scala | 2 +- .../run/delambdafy_uncurry_byname_method.check | 6 +-- .../run/delambdafy_uncurry_byname_method.scala | 4 +- test/files/run/repl-javap-lambdas.scala | 4 +- test/files/run/t9097.scala | 2 +- 12 files changed, 76 insertions(+), 37 deletions(-) create mode 100644 test/files/run/delambdafy-specialized.check create mode 100644 test/files/run/delambdafy-specialized.flags create mode 100644 test/files/run/delambdafy-specialized.scala (limited to 'test') diff --git a/src/compiler/scala/tools/nsc/transform/UnCurry.scala b/src/compiler/scala/tools/nsc/transform/UnCurry.scala index 3544dc9966..001fd71bfb 100644 --- a/src/compiler/scala/tools/nsc/transform/UnCurry.scala +++ b/src/compiler/scala/tools/nsc/transform/UnCurry.scala @@ -70,6 +70,14 @@ abstract class UnCurry extends InfoTransform private val noApply = mutable.HashSet[Tree]() private val newMembers = mutable.Map[Symbol, mutable.Buffer[Tree]]() + private lazy val forceSpecializationInfoTransformOfFunctionN: Unit = { + if (currentRun.specializePhase != NoPhase) { // be robust in case of -Ystop-after:uncurry + exitingSpecialize { + FunctionClass.seq.foreach(cls => cls.info) + } + } + } + /** Add a new synthetic member for `currentOwner` */ private def addNewMember(t: Tree): Unit = newMembers.getOrElseUpdate(currentOwner, mutable.Buffer()) += t @@ -220,8 +228,16 @@ abstract class UnCurry extends InfoTransform def mkMethod(owner: Symbol, name: TermName, additionalFlags: FlagSet = NoFlags): DefDef = gen.mkMethodFromFunction(localTyper)(fun, owner, name, additionalFlags) - val canUseDelamdafyMethod = (inConstructorFlag == 0) // Avoiding synthesizing code prone to SI-6666, SI-8363 by using old-style lambda translation + def isSpecialized = { + forceSpecializationInfoTransformOfFunctionN + val specialized = specializeTypes.specializedType(fun.tpe) + !(specialized =:= fun.tpe) + } + def canUseDelamdafyMethod = ( + (inConstructorFlag == 0) // Avoiding synthesizing code prone to SI-6666, SI-8363 by using old-style lambda translation + && !isSpecialized // DelambdafyTransformer currently only emits generic FunctionN-s, use the old style in the meantime + ) if (inlineFunctionExpansion || !canUseDelamdafyMethod) { val parents = addSerializable(abstractFunctionForFunctionType(fun.tpe)) val anonClass = fun.symbol.owner newAnonymousFunctionClass(fun.pos, inConstructorFlag) addAnnotation SerialVersionUIDAnnotation diff --git a/test/files/run/delambdafy-specialized.check b/test/files/run/delambdafy-specialized.check new file mode 100644 index 0000000000..c6903b9e29 --- /dev/null +++ b/test/files/run/delambdafy-specialized.check @@ -0,0 +1 @@ +scala.runtime.AbstractFunction1$mcII$sp diff --git a/test/files/run/delambdafy-specialized.flags b/test/files/run/delambdafy-specialized.flags new file mode 100644 index 0000000000..48b438ddf8 --- /dev/null +++ b/test/files/run/delambdafy-specialized.flags @@ -0,0 +1 @@ +-Ydelambdafy:method diff --git a/test/files/run/delambdafy-specialized.scala b/test/files/run/delambdafy-specialized.scala new file mode 100644 index 0000000000..634d4e490b --- /dev/null +++ b/test/files/run/delambdafy-specialized.scala @@ 
-0,0 +1,6 @@ +object Test { + def main(args: Array[String]): Unit = { + val f = (x: Int) => -x + println(f.getClass.getSuperclass.getName) + } +} diff --git a/test/files/run/delambdafy_t6028.check b/test/files/run/delambdafy_t6028.check index 7bd8cd7202..419e7043a3 100644 --- a/test/files/run/delambdafy_t6028.check +++ b/test/files/run/delambdafy_t6028.check @@ -1,35 +1,35 @@ [[syntax trees at end of lambdalift]] // newSource1.scala package { class T extends Object { - private[this] val classParam: Int = _; - def (classParam: Int): T = { + private[this] val classParam: String = _; + def (classParam: String): T = { T.super.(); () }; - private[this] val field: Int = 0; - def field(): Int = T.this.field; - def foo(methodParam: Int): Function0 = { - val methodLocal: Int = 0; + private[this] val field: String = ""; + def field(): String = T.this.field; + def foo(methodParam: String): Function0 = { + val methodLocal: String = ""; { (() => T.this.$anonfun$1(methodParam, methodLocal)).$asInstanceOf[Function0]() } }; - def bar(barParam: Int): Object = { + def bar(barParam: String): Object = { @volatile var MethodLocalObject$module: runtime.VolatileObjectRef = scala.runtime.VolatileObjectRef.zero(); T.this.MethodLocalObject$1(barParam, MethodLocalObject$module) }; - def tryy(tryyParam: Int): Function0 = { - var tryyLocal: runtime.IntRef = scala.runtime.IntRef.create(0); + def tryy(tryyParam: String): Function0 = { + var tryyLocal: runtime.ObjectRef = scala.runtime.ObjectRef.create(""); { - (() => T.this.$anonfun$2(tryyParam, tryyLocal)).$asInstanceOf[Function0]() + (new <$anon: Function0>(T.this, tryyParam, tryyLocal): Function0) } }; - final private[this] def $anonfun$1(methodParam$1: Int, methodLocal$1: Int): Int = T.this.classParam.+(T.this.field()).+(methodParam$1).+(methodLocal$1); + final private[this] def $anonfun$1(methodParam$1: String, methodLocal$1: String): String = T.this.classParam.+(T.this.field()).+(methodParam$1).+(methodLocal$1); abstract trait MethodLocalTrait$1 extends Object { def $outer(): T }; object MethodLocalObject$2 extends Object with T#MethodLocalTrait$1 { - def ($outer: T, barParam$1: Int): T#MethodLocalObject$2.type = { + def ($outer: T, barParam$1: String): T#MethodLocalObject$2.type = { MethodLocalObject$2.super.(); MethodLocalObject$2.this.$asInstanceOf[T#MethodLocalTrait$1$class]()./*MethodLocalTrait$1$class*/$init$(barParam$1); () @@ -38,19 +38,34 @@ package { def $outer(): T = MethodLocalObject$2.this.$outer; def $outer(): T = MethodLocalObject$2.this.$outer }; - final private[this] def MethodLocalObject$1(barParam$1: Int, MethodLocalObject$module$1: runtime.VolatileObjectRef): T#MethodLocalObject$2.type = { + final private[this] def MethodLocalObject$1(barParam$1: String, MethodLocalObject$module$1: runtime.VolatileObjectRef): T#MethodLocalObject$2.type = { MethodLocalObject$module$1.elem = new T#MethodLocalObject$2.type(T.this, barParam$1); MethodLocalObject$module$1.elem.$asInstanceOf[T#MethodLocalObject$2.type]() }; abstract trait MethodLocalTrait$1$class extends Object with T#MethodLocalTrait$1 { - def /*MethodLocalTrait$1$class*/$init$(barParam$1: Int): Unit = { + def /*MethodLocalTrait$1$class*/$init$(barParam$1: String): Unit = { () }; - scala.this.Predef.print(scala.Int.box(barParam$1)) + scala.this.Predef.print(barParam$1) }; - final private[this] def $anonfun$2(tryyParam$1: Int, tryyLocal$1: runtime.IntRef): Unit = try { - tryyLocal$1.elem = tryyParam$1 - } finally () + @SerialVersionUID(value = 0) final class $anonfun$tryy$1 extends 
scala.runtime.AbstractFunction0$mcV$sp with Serializable { + def ($outer: T, tryyParam$1: String, tryyLocal$1: runtime.ObjectRef): <$anon: Function0> = { + $anonfun$tryy$1.super.(); + () + }; + final def apply(): Unit = $anonfun$tryy$1.this.apply$mcV$sp(); + def apply$mcV$sp(): Unit = try { + $anonfun$tryy$1.this.tryyLocal$1.elem = $anonfun$tryy$1.this.tryyParam$1 + } finally (); + private[this] val $outer: T = _; + def $outer(): T = $anonfun$tryy$1.this.$outer; + final def apply(): Object = { + $anonfun$tryy$1.this.apply(); + scala.runtime.BoxedUnit.UNIT + }; + private[this] val tryyParam$1: String = _; + private[this] val tryyLocal$1: runtime.ObjectRef = _ + } } } diff --git a/test/files/run/delambdafy_t6028.scala b/test/files/run/delambdafy_t6028.scala index 0b7ef48c3d..ca39195310 100644 --- a/test/files/run/delambdafy_t6028.scala +++ b/test/files/run/delambdafy_t6028.scala @@ -5,11 +5,11 @@ object Test extends DirectTest { override def extraSettings: String = "-usejavacp -Ydelambdafy:method -Xprint:lambdalift -d " + testOutput.path - override def code = """class T(classParam: Int) { - | val field: Int = 0 - | def foo(methodParam: Int) = {val methodLocal = 0 ; () => classParam + field + methodParam + methodLocal } - | def bar(barParam: Int) = { trait MethodLocalTrait { print(barParam) }; object MethodLocalObject extends MethodLocalTrait; MethodLocalObject } - | def tryy(tryyParam: Int) = { var tryyLocal = 0; () => try { tryyLocal = tryyParam } finally () } + override def code = """class T(classParam: String) { + | val field: String = "" + | def foo(methodParam: String) = {val methodLocal = "" ; () => classParam + field + methodParam + methodLocal } + | def bar(barParam: String) = { trait MethodLocalTrait { print(barParam) }; object MethodLocalObject extends MethodLocalTrait; MethodLocalObject } + | def tryy(tryyParam: String) = { var tryyLocal = ""; () => try { tryyLocal = tryyParam } finally () } |} |""".stripMargin.trim diff --git a/test/files/run/delambdafy_t6555.check b/test/files/run/delambdafy_t6555.check index 6b174c0d2a..b6ccebde78 100644 --- a/test/files/run/delambdafy_t6555.check +++ b/test/files/run/delambdafy_t6555.check @@ -5,11 +5,11 @@ package { Foo.super.(); () }; - private[this] val f: Int => Int = { - final def $anonfun(param: Int): Int = param; - ((param: Int) => $anonfun(param)) + private[this] val f: String => String = { + final def $anonfun(param: String): String = param; + ((param: String) => $anonfun(param)) }; - def f(): Int => Int = Foo.this.f + def f(): String => String = Foo.this.f } } diff --git a/test/files/run/delambdafy_t6555.scala b/test/files/run/delambdafy_t6555.scala index a1dcfe790c..8d4976e989 100644 --- a/test/files/run/delambdafy_t6555.scala +++ b/test/files/run/delambdafy_t6555.scala @@ -5,7 +5,7 @@ object Test extends DirectTest { override def extraSettings: String = "-usejavacp -Xprint:specialize -Ydelambdafy:method -d " + testOutput.path - override def code = "class Foo { val f = (param: Int) => param } " + override def code = "class Foo { val f = (param: String) => param } " override def show(): Unit = { Console.withErr(System.out) { diff --git a/test/files/run/delambdafy_uncurry_byname_method.check b/test/files/run/delambdafy_uncurry_byname_method.check index cd3edc7d6f..e0f281b1cd 100644 --- a/test/files/run/delambdafy_uncurry_byname_method.check +++ b/test/files/run/delambdafy_uncurry_byname_method.check @@ -5,9 +5,9 @@ package { Foo.super.(); () }; - def bar(x: () => Int): Int = x.apply(); - def foo(): Int = Foo.this.bar({ - final def 
$anonfun(): Int = 1; + def bar(x: () => String): String = x.apply(); + def foo(): String = Foo.this.bar({ + final def $anonfun(): String = ""; (() => $anonfun()) }) } diff --git a/test/files/run/delambdafy_uncurry_byname_method.scala b/test/files/run/delambdafy_uncurry_byname_method.scala index 1adeec8433..0ccc1f2e92 100644 --- a/test/files/run/delambdafy_uncurry_byname_method.scala +++ b/test/files/run/delambdafy_uncurry_byname_method.scala @@ -6,9 +6,9 @@ object Test extends DirectTest { override def extraSettings: String = "-usejavacp -Xprint:uncurry -Ydelambdafy:method -Ystop-after:uncurry -d " + testOutput.path override def code = """class Foo { - | def bar(x: => Int) = x + | def bar(x: => String) = x | - | def foo = bar(1) + | def foo = bar("") |} |""".stripMargin.trim diff --git a/test/files/run/repl-javap-lambdas.scala b/test/files/run/repl-javap-lambdas.scala index 15e5bf6877..c503c99d66 100644 --- a/test/files/run/repl-javap-lambdas.scala +++ b/test/files/run/repl-javap-lambdas.scala @@ -7,8 +7,8 @@ object Test extends JavapTest { def code = """ |object Betty { | List(1,2,3) count (_ % 2 != 0) - | def f = List(1,2,3) filter (_ % 2 != 0) map (_ * 2) - | def g = List(1,2,3) filter (_ % 2 == 0) map (_ * 3) map (_ + 1) + | def f = List(1,2,3) filter ((x: Any) => true) map (x => "m1") + | def g = List(1,2,3) filter ((x: Any) => true) map (x => "m1") map (x => "m2") |} |:javap -fun Betty#g """.stripMargin diff --git a/test/files/run/t9097.scala b/test/files/run/t9097.scala index d2bf55fc44..aa2b23bbac 100644 --- a/test/files/run/t9097.scala +++ b/test/files/run/t9097.scala @@ -15,7 +15,7 @@ object Test extends StoreReporterDirectTest { override def code = """package o |package a { | class C { - | def hihi = List(1,2).map(_ * 2) + | def hihi = List(1,2).map(_ => "") | } |} |package object a { -- cgit v1.2.3 From 4ff3abbe950c0bd703298d7808e1de38894ddac7 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Wed, 15 Apr 2015 22:21:14 +1000 Subject: SI-9273 Avoid unpositioned error for bare classOf A bare identifier `classOf` in a position wth an expected type of `Class[_]` was leading to an unpositioned error. This is due to special treatment of bare `classOf` in `typedIdent` creating an ephemeral, but unpositioned, `TypeTree`. This commit positions that tree and tests that the error is issued at a sensible position. There is still an irregularity between `classOf` and `Predef.classOf`, but that seems esoteric enough to leave alone for now. --- src/compiler/scala/tools/nsc/typechecker/Typers.scala | 9 +++++---- test/files/neg/t9273.check | 10 ++++++++++ test/files/neg/t9273.scala | 9 +++++++++ 3 files changed, 24 insertions(+), 4 deletions(-) create mode 100644 test/files/neg/t9273.check create mode 100644 test/files/neg/t9273.scala (limited to 'test') diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index 391ef9e337..7417c5364e 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -4881,10 +4881,11 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper (// this -> Foo.this if (sym.isThisSym) typed1(This(sym.owner) setPos tree.pos, mode, pt) - // Inferring classOf type parameter from expected type. Otherwise an - // actual call to the stubbed classOf method is generated, returning null. 
- else if (isPredefClassOf(sym) && pt.typeSymbol == ClassClass && pt.typeArgs.nonEmpty) - typedClassOf(tree, TypeTree(pt.typeArgs.head)) + else if (isPredefClassOf(sym) && pt.typeSymbol == ClassClass && pt.typeArgs.nonEmpty) { + // Inferring classOf type parameter from expected type. Otherwise an + // actual call to the stubbed classOf method is generated, returning null. + typedClassOf(tree, TypeTree(pt.typeArgs.head).setPos(tree.pos.focus)) + } else { val pre1 = if (sym.isTopLevel) sym.owner.thisType else if (qual == EmptyTree) NoPrefix else qual.tpe val tree1 = if (qual == EmptyTree) tree else atPos(tree.pos)(Select(atPos(tree.pos.focusStart)(qual), name)) diff --git a/test/files/neg/t9273.check b/test/files/neg/t9273.check new file mode 100644 index 0000000000..1dca63a736 --- /dev/null +++ b/test/files/neg/t9273.check @@ -0,0 +1,10 @@ +t9273.scala:2: error: class type required but ? found + val foo: Class[_] = classOf // error without position, line or file + ^ +t9273.scala:3: error: not found: type X + val foo1: Class[_] = classOf[X] // good error, all info contained + ^ +t9273.scala:7: error: not found: type X + val foo4: Class[_] = Predef.classOf[X] // good error, all info contained + ^ +three errors found diff --git a/test/files/neg/t9273.scala b/test/files/neg/t9273.scala new file mode 100644 index 0000000000..3f99dff17f --- /dev/null +++ b/test/files/neg/t9273.scala @@ -0,0 +1,9 @@ +class MissingLineNumbers { + val foo: Class[_] = classOf // error without position, line or file + val foo1: Class[_] = classOf[X] // good error, all info contained + val foo2 = classOf // Infers T=Nothing + + val foo3: Class[_] = Predef.classOf // Infers T=Nothing. Irregular wrt typedIdent. + val foo4: Class[_] = Predef.classOf[X] // good error, all info contained + val foo5 = Predef.classOf // Infers T=Nothing +} -- cgit v1.2.3 From 498a07fcd8d922545e942b817d3fbd16ef56809b Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Thu, 16 Apr 2015 12:07:02 -0700 Subject: SI-3368 Default to coalescing for 2.11 Preserve current behavior (no PCData nodes, only Text) for 2.11. The coalescing flag is on if enabled or if source level is not 2.12 and no flag was supplied. The subtle change is that adjacent Text nodes are thereby coalesced. This happens in the presence of CData sections, but this is at the discretion of the parser anyway. Also, no PCData nodes are emitted under coalescing, even if there were no sibling text nodes. That is the correct behavior: the general idea is that coalescing mode says, I only want to deal with text. 
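A self-contained sketch of that node-level behaviour (illustrative only, not the compiler's implementation; the names are invented and it assumes the scala-xml module is on the classpath):

```
import scala.xml.{Atom, Node, PCData, Text}

object CoalesceSketch {
  // Coalescing: every text-like node (Text, PCData and other Atoms) becomes a
  // Text node, and adjacent text-like nodes are merged into one.
  def coalesce(kids: Seq[Node]): Seq[Node] =
    kids.foldRight(List.empty[Node]) {
      case (a: Atom[_], (b: Text) :: rest) => new Text(a.text + b.text) :: rest
      case (a: Atom[_], acc)               => new Text(a.text) :: acc
      case (n, acc)                        => n :: acc
    }

  def main(args: Array[String]): Unit = {
    val raw = Seq(new Text("start"), new PCData("hi & bye"), <c/>, new Text("stuff"))
    println(coalesce(raw)) // nodes: Text("starthi & bye"), <c/>, Text("stuff")
  }
}
```
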
--- .../tools/nsc/ast/parser/SymbolicXMLBuilder.scala | 7 +- .../scala/tools/nsc/settings/ScalaSettings.scala | 2 +- test/files/run/t3368-b.check | 89 ++++++++++++++++++++++ test/files/run/t3368-b.scala | 26 +++++++ test/files/run/t3368-c.check | 85 +++++++++++++++++++++ test/files/run/t3368-c.scala | 26 +++++++ test/files/run/t3368-d.check | 89 ++++++++++++++++++++++ test/files/run/t3368-d.scala | 26 +++++++ test/files/run/t3368.check | 43 ++++++++++- test/files/run/t3368.scala | 8 ++ 10 files changed, 394 insertions(+), 7 deletions(-) create mode 100644 test/files/run/t3368-b.check create mode 100644 test/files/run/t3368-b.scala create mode 100644 test/files/run/t3368-c.check create mode 100644 test/files/run/t3368-c.scala create mode 100644 test/files/run/t3368-d.check create mode 100644 test/files/run/t3368-d.scala (limited to 'test') diff --git a/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala b/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala index 99399e363f..67241ef639 100755 --- a/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala @@ -126,10 +126,9 @@ abstract class SymbolicXMLBuilder(p: Parsers#Parser, preserveWS: Boolean) { def makeTextPat(txt: Tree) = Apply(_scala_xml__Text, List(txt)) def makeText1(txt: Tree) = New(_scala_xml_Text, LL(txt)) def comment(pos: Position, text: String) = atPos(pos)( Comment(const(text)) ) - def charData(pos: Position, txt: String) = atPos(pos) { - val t = if (isPattern) Apply(_scala_xml(xmlterms._PCData), List(const(txt))) - else New(_scala_xml(_PCData), LL(const(txt))) - if (coalescing) t updateAttachment TextAttache(pos, txt) else t + def charData(pos: Position, txt: String) = if (coalescing) text(pos, txt) else atPos(pos) { + if (isPattern) Apply(_scala_xml(xmlterms._PCData), List(const(txt))) + else New(_scala_xml(_PCData), LL(const(txt))) } def procInstr(pos: Position, target: String, txt: String) = diff --git a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala index f217d21c35..630276e412 100644 --- a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala +++ b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala @@ -142,7 +142,7 @@ trait ScalaSettings extends AbsScalaSettings // XML parsing options object XxmlSettings extends MultiChoiceEnumeration { val coalescing = Choice("coalescing", "Convert PCData to Text and coalesce sibling nodes") - def isCoalescing = Xxml contains coalescing + def isCoalescing = (Xxml contains coalescing) || (!isScala212 && !Xxml.isSetByUser) } val Xxml = MultiChoiceSetting( name = "-Xxml", diff --git a/test/files/run/t3368-b.check b/test/files/run/t3368-b.check new file mode 100644 index 0000000000..4cbe98c577 --- /dev/null +++ b/test/files/run/t3368-b.check @@ -0,0 +1,89 @@ +[[syntax trees at end of parser]] // newSource1.scala +package { + abstract trait X extends scala.AnyRef { + def $init$() = { + () + }; + def x = { + val $buf = new _root_.scala.xml.NodeBuffer(); + $buf.$amp$plus(new _root_.scala.xml.PCData("hi & bye")); + $buf.$amp$plus(new _root_.scala.xml.PCData("red & black")); + $buf + } + }; + abstract trait Y extends scala.AnyRef { + def $init$() = { + () + }; + def y = { + { + new _root_.scala.xml.Elem(null, "a", _root_.scala.xml.Null, $scope, false, ({ + val $buf = new _root_.scala.xml.NodeBuffer(); + $buf.$amp$plus({ + { + new _root_.scala.xml.Elem(null, "b", _root_.scala.xml.Null, $scope, true) + } + }); + 
$buf.$amp$plus(new _root_.scala.xml.Text("start")); + $buf.$amp$plus(new _root_.scala.xml.PCData("hi & bye")); + $buf.$amp$plus({ + { + new _root_.scala.xml.Elem(null, "c", _root_.scala.xml.Null, $scope, true) + } + }); + $buf.$amp$plus(new _root_.scala.xml.Text("world")); + $buf.$amp$plus({ + { + new _root_.scala.xml.Elem(null, "d", _root_.scala.xml.Null, $scope, true) + } + }); + $buf.$amp$plus(new _root_.scala.xml.Text("stuff")); + $buf.$amp$plus(new _root_.scala.xml.PCData("red & black")); + $buf + }: _*)) + } + } + }; + abstract trait Z extends scala.AnyRef { + def $init$() = { + () + }; + def d = new _root_.scala.xml.PCData("hello, world"); + def e = { + val $buf = new _root_.scala.xml.NodeBuffer(); + $buf.$amp$plus(new _root_.scala.xml.PCData("hello, world")); + $buf.$amp$plus(new _root_.scala.xml.PCData("hello, world")); + $buf + }; + def f = { + { + new _root_.scala.xml.Elem(null, "foo", _root_.scala.xml.Null, $scope, false, ({ + val $buf = new _root_.scala.xml.NodeBuffer(); + $buf.$amp$plus(new _root_.scala.xml.Text("x")); + $buf.$amp$plus(new _root_.scala.xml.PCData("hello, world")); + $buf + }: _*)) + } + }; + def g = { + { + new _root_.scala.xml.Elem(null, "foo", _root_.scala.xml.Null, $scope, false, ({ + val $buf = new _root_.scala.xml.NodeBuffer(); + $buf.$amp$plus(new _root_.scala.xml.PCData("hello, world")); + $buf + }: _*)) + } + }; + def h = { + { + new _root_.scala.xml.Elem(null, "foo", _root_.scala.xml.Null, $scope, false, ({ + val $buf = new _root_.scala.xml.NodeBuffer(); + $buf.$amp$plus(new _root_.scala.xml.PCData("hello, world")); + $buf.$amp$plus(new _root_.scala.xml.PCData("hello, world")); + $buf + }: _*)) + } + } + } +} + diff --git a/test/files/run/t3368-b.scala b/test/files/run/t3368-b.scala new file mode 100644 index 0000000000..108cb9a5ee --- /dev/null +++ b/test/files/run/t3368-b.scala @@ -0,0 +1,26 @@ + +import scala.tools.partest.ParserTest + + +object Test extends ParserTest { + + override def code = """ + trait X { + // error: in XML literal: name expected, but char '!' 
cannot start a name + def x = + } + trait Y { + def y = startworldstuff + } + trait Z { + def d = + def e = // top level not coalesced + def f = x // adjoining text + def g = // text node when coalescing + def h = + } + """ + + // not coalescing + override def extraSettings = s"${super.extraSettings} -Xxml:-coalescing" +} diff --git a/test/files/run/t3368-c.check b/test/files/run/t3368-c.check new file mode 100644 index 0000000000..e0c10cc0dd --- /dev/null +++ b/test/files/run/t3368-c.check @@ -0,0 +1,85 @@ +[[syntax trees at end of parser]] // newSource1.scala +package { + abstract trait X extends scala.AnyRef { + def $init$() = { + () + }; + def x = { + val $buf = new _root_.scala.xml.NodeBuffer(); + $buf.$amp$plus(new _root_.scala.xml.Text("hi & bye")); + $buf.$amp$plus(new _root_.scala.xml.Text("red & black")); + $buf + } + }; + abstract trait Y extends scala.AnyRef { + def $init$() = { + () + }; + def y = { + { + new _root_.scala.xml.Elem(null, "a", _root_.scala.xml.Null, $scope, false, ({ + val $buf = new _root_.scala.xml.NodeBuffer(); + $buf.$amp$plus({ + { + new _root_.scala.xml.Elem(null, "b", _root_.scala.xml.Null, $scope, true) + } + }); + $buf.$amp$plus(new _root_.scala.xml.Text("starthi & bye")); + $buf.$amp$plus({ + { + new _root_.scala.xml.Elem(null, "c", _root_.scala.xml.Null, $scope, true) + } + }); + $buf.$amp$plus(new _root_.scala.xml.Text("world")); + $buf.$amp$plus({ + { + new _root_.scala.xml.Elem(null, "d", _root_.scala.xml.Null, $scope, true) + } + }); + $buf.$amp$plus(new _root_.scala.xml.Text("stuffred & black")); + $buf + }: _*)) + } + } + }; + abstract trait Z extends scala.AnyRef { + def $init$() = { + () + }; + def d = new _root_.scala.xml.Text("hello, world"); + def e = { + val $buf = new _root_.scala.xml.NodeBuffer(); + $buf.$amp$plus(new _root_.scala.xml.Text("hello, world")); + $buf.$amp$plus(new _root_.scala.xml.Text("hello, world")); + $buf + }; + def f = { + { + new _root_.scala.xml.Elem(null, "foo", _root_.scala.xml.Null, $scope, false, ({ + val $buf = new _root_.scala.xml.NodeBuffer(); + $buf.$amp$plus(new _root_.scala.xml.Text("xhello, world")); + $buf + }: _*)) + } + }; + def g = { + { + new _root_.scala.xml.Elem(null, "foo", _root_.scala.xml.Null, $scope, false, ({ + val $buf = new _root_.scala.xml.NodeBuffer(); + $buf.$amp$plus(new _root_.scala.xml.Text("hello, world")); + $buf + }: _*)) + } + }; + def h = { + { + new _root_.scala.xml.Elem(null, "foo", _root_.scala.xml.Null, $scope, false, ({ + val $buf = new _root_.scala.xml.NodeBuffer(); + $buf.$amp$plus(new _root_.scala.xml.Text("hello, worldhello, world")); + $buf + }: _*)) + } + } + } +} + diff --git a/test/files/run/t3368-c.scala b/test/files/run/t3368-c.scala new file mode 100644 index 0000000000..5121794463 --- /dev/null +++ b/test/files/run/t3368-c.scala @@ -0,0 +1,26 @@ + +import scala.tools.partest.ParserTest + + +object Test extends ParserTest { + + override def code = """ + trait X { + // error: in XML literal: name expected, but char '!' cannot start a name + def x = + } + trait Y { + def y = startworldstuff + } + trait Z { + def d = + def e = // top level not coalesced + def f = x // adjoining text + def g = // text node when coalescing + def h = + } + """ + + // default coalescing behavior, whatever that is today. 
+ //override def extraSettings = s"${super.extraSettings} -Xxml:coalescing" +} diff --git a/test/files/run/t3368-d.check b/test/files/run/t3368-d.check new file mode 100644 index 0000000000..4cbe98c577 --- /dev/null +++ b/test/files/run/t3368-d.check @@ -0,0 +1,89 @@ +[[syntax trees at end of parser]] // newSource1.scala +package { + abstract trait X extends scala.AnyRef { + def $init$() = { + () + }; + def x = { + val $buf = new _root_.scala.xml.NodeBuffer(); + $buf.$amp$plus(new _root_.scala.xml.PCData("hi & bye")); + $buf.$amp$plus(new _root_.scala.xml.PCData("red & black")); + $buf + } + }; + abstract trait Y extends scala.AnyRef { + def $init$() = { + () + }; + def y = { + { + new _root_.scala.xml.Elem(null, "a", _root_.scala.xml.Null, $scope, false, ({ + val $buf = new _root_.scala.xml.NodeBuffer(); + $buf.$amp$plus({ + { + new _root_.scala.xml.Elem(null, "b", _root_.scala.xml.Null, $scope, true) + } + }); + $buf.$amp$plus(new _root_.scala.xml.Text("start")); + $buf.$amp$plus(new _root_.scala.xml.PCData("hi & bye")); + $buf.$amp$plus({ + { + new _root_.scala.xml.Elem(null, "c", _root_.scala.xml.Null, $scope, true) + } + }); + $buf.$amp$plus(new _root_.scala.xml.Text("world")); + $buf.$amp$plus({ + { + new _root_.scala.xml.Elem(null, "d", _root_.scala.xml.Null, $scope, true) + } + }); + $buf.$amp$plus(new _root_.scala.xml.Text("stuff")); + $buf.$amp$plus(new _root_.scala.xml.PCData("red & black")); + $buf + }: _*)) + } + } + }; + abstract trait Z extends scala.AnyRef { + def $init$() = { + () + }; + def d = new _root_.scala.xml.PCData("hello, world"); + def e = { + val $buf = new _root_.scala.xml.NodeBuffer(); + $buf.$amp$plus(new _root_.scala.xml.PCData("hello, world")); + $buf.$amp$plus(new _root_.scala.xml.PCData("hello, world")); + $buf + }; + def f = { + { + new _root_.scala.xml.Elem(null, "foo", _root_.scala.xml.Null, $scope, false, ({ + val $buf = new _root_.scala.xml.NodeBuffer(); + $buf.$amp$plus(new _root_.scala.xml.Text("x")); + $buf.$amp$plus(new _root_.scala.xml.PCData("hello, world")); + $buf + }: _*)) + } + }; + def g = { + { + new _root_.scala.xml.Elem(null, "foo", _root_.scala.xml.Null, $scope, false, ({ + val $buf = new _root_.scala.xml.NodeBuffer(); + $buf.$amp$plus(new _root_.scala.xml.PCData("hello, world")); + $buf + }: _*)) + } + }; + def h = { + { + new _root_.scala.xml.Elem(null, "foo", _root_.scala.xml.Null, $scope, false, ({ + val $buf = new _root_.scala.xml.NodeBuffer(); + $buf.$amp$plus(new _root_.scala.xml.PCData("hello, world")); + $buf.$amp$plus(new _root_.scala.xml.PCData("hello, world")); + $buf + }: _*)) + } + } + } +} + diff --git a/test/files/run/t3368-d.scala b/test/files/run/t3368-d.scala new file mode 100644 index 0000000000..5777c1a81e --- /dev/null +++ b/test/files/run/t3368-d.scala @@ -0,0 +1,26 @@ + +import scala.tools.partest.ParserTest + + +object Test extends ParserTest { + + override def code = """ + trait X { + // error: in XML literal: name expected, but char '!' 
cannot start a name + def x = + } + trait Y { + def y = startworldstuff + } + trait Z { + def d = + def e = // top level not coalesced + def f = x // adjoining text + def g = // text node when coalescing + def h = + } + """ + + // default under 2.12 is not coalescing + override def extraSettings = s"${super.extraSettings} -Xsource:212" +} diff --git a/test/files/run/t3368.check b/test/files/run/t3368.check index 1d9dd677f6..e0c10cc0dd 100644 --- a/test/files/run/t3368.check +++ b/test/files/run/t3368.check @@ -6,8 +6,8 @@ package { }; def x = { val $buf = new _root_.scala.xml.NodeBuffer(); - $buf.$amp$plus(new _root_.scala.xml.PCData("hi & bye")); - $buf.$amp$plus(new _root_.scala.xml.PCData("red & black")); + $buf.$amp$plus(new _root_.scala.xml.Text("hi & bye")); + $buf.$amp$plus(new _root_.scala.xml.Text("red & black")); $buf } }; @@ -41,6 +41,45 @@ package { }: _*)) } } + }; + abstract trait Z extends scala.AnyRef { + def $init$() = { + () + }; + def d = new _root_.scala.xml.Text("hello, world"); + def e = { + val $buf = new _root_.scala.xml.NodeBuffer(); + $buf.$amp$plus(new _root_.scala.xml.Text("hello, world")); + $buf.$amp$plus(new _root_.scala.xml.Text("hello, world")); + $buf + }; + def f = { + { + new _root_.scala.xml.Elem(null, "foo", _root_.scala.xml.Null, $scope, false, ({ + val $buf = new _root_.scala.xml.NodeBuffer(); + $buf.$amp$plus(new _root_.scala.xml.Text("xhello, world")); + $buf + }: _*)) + } + }; + def g = { + { + new _root_.scala.xml.Elem(null, "foo", _root_.scala.xml.Null, $scope, false, ({ + val $buf = new _root_.scala.xml.NodeBuffer(); + $buf.$amp$plus(new _root_.scala.xml.Text("hello, world")); + $buf + }: _*)) + } + }; + def h = { + { + new _root_.scala.xml.Elem(null, "foo", _root_.scala.xml.Null, $scope, false, ({ + val $buf = new _root_.scala.xml.NodeBuffer(); + $buf.$amp$plus(new _root_.scala.xml.Text("hello, worldhello, world")); + $buf + }: _*)) + } + } } } diff --git a/test/files/run/t3368.scala b/test/files/run/t3368.scala index 15acba5099..284fed0784 100644 --- a/test/files/run/t3368.scala +++ b/test/files/run/t3368.scala @@ -12,7 +12,15 @@ object Test extends ParserTest { trait Y { def y = startworldstuff } + trait Z { + def d = + def e = // top level not coalesced + def f = x // adjoining text + def g = // text node when coalescing + def h = + } """ + // coalescing override def extraSettings = s"${super.extraSettings} -Xxml:coalescing" } -- cgit v1.2.3 From 1aa97cb81b96cbf1e7b7b2cd42dd5bd3d8e5c6dd Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Fri, 17 Apr 2015 14:55:04 +1000 Subject: Make lambda body public rather than using static accessor Under -Ydelambdafy:method (the basis of the upcoming "indylambda" translation for -target:jvm-1.8), an anonymous function is currently encoded as: 1. a private method containing the lambda's code 2. a public, static accessor method that allows access to 1 from other classes, namely: 3. an anonymous class capturing free variables and implementing the suitable FunctionN interface. In our prototypes of indylambda, we do away with 3, instead deferring creating of this class to JDK8's LambdaMetafactory by way of an invokedynamic instruction at the point of lambda capture. This facility can use a private method as the lambda target; access is mediated by the runtime-provided instance of `java.lang.invoke.MethodHandles.Lookup` that confers the privelages of the lambda capture call site to the generated implementation class. Indeed, The java compiler uses this to emit private lambda body methods. 
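As a rough, hand-written sketch of the JDK facility described above (not the compiler's actual emission; `Greeter`, `greet` and `MetafactoryDemo` are placeholder names standing in for a lambda body method and its capture site), driving `LambdaMetafactory` by hand looks roughly like this:

```scala
import java.lang.invoke._
import java.util.function.{Function => JFunction}

class Greeter(prefix: String) {
  // Stand-in for a lambda body method; under this patch such methods are public
  // (name-mangled, SYNTHETIC), so any Lookup can resolve them.
  def greet(name: String): String = prefix + ", " + name
}

object MetafactoryDemo {
  def main(args: Array[String]): Unit = {
    val lookup = MethodHandles.lookup() // carries the access rights of this call site
    val body   = lookup.findVirtual(classOf[Greeter], "greet",
      MethodType.methodType(classOf[String], classOf[String]))

    // (Greeter) -> Function[String, String]: the receiver is the captured value.
    val callSite = LambdaMetafactory.metafactory(
      lookup,
      "apply",                                                            // SAM method name
      MethodType.methodType(classOf[JFunction[_, _]], classOf[Greeter]),  // captures => SAM type
      MethodType.methodType(classOf[Object], classOf[Object]),            // erased SAM signature
      body,                                                               // the lambda body method
      MethodType.methodType(classOf[String], classOf[String]))            // instantiated signature

    val f = callSite.getTarget
      .invokeWithArguments(new Greeter("hello"))
      .asInstanceOf[JFunction[String, String]]
    println(f("world")) // hello, world
  }
}
```

Here `lookup` confers the privileges of the class that creates it, which is what lets the real scheme target a private lambda body method without a separate public accessor.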
However, there are two Scala-specific factors that make this a little troublesome.

First, Scala's optimizer may want to inline the lambda capture call site into a new enclosing class. In general, this isn't a safe optimization, as `invokedynamic` instructions have call-site specific semantics. But we will rely on the ability to inline like this in order to eliminate closures altogether where possible.

Second, to support lambda deserialization, the Java compiler creates a synthetic method `$deserializeLambda$` in each class that captures them, just to be able to get the suitable access to spin up an anonymous class with access to the private method. It only needs to do this for functional interfaces that extend Serializable, which is the exception rather than the rule. But in Scala, *every* function must support serialization, so blindly copying the Java approach will lead to some code bloat.

I have prototyped a hybrid approach to these challenges: use the private method directly where it is allowed, but fall back to using the accessor method in a generic lambda deserializer or in call sites that have been inlined into a new enclosing class. However, the most straightforward approach is to simply emit the lambda bodies as public (with a mangled name and with the SYNTHETIC flag) and use them in all cases. That is what is done in this commit.

This does move us one step backwards from the goals of SI-7085, but it doesn't seem sensible to incur the inconvenience from locking down one small part of our house when we don't have a plan or the budget to complete that job.

The REPL has some fancy logic to decompile the bodies of lambdas (`:javap -fun C#m`) which needed tweaking to accommodate this change.

I haven't tried to make this backwards compatible with the old encoding as `-Ydelambdafy:method` is still experimental.

--- .../scala/tools/nsc/transform/Delambdafy.scala | 115 +++++++-------------- .../scala/tools/nsc/interpreter/JavapClass.scala | 34 ++---- test/files/run/repl-javap-lambdas.scala | 2 +- 3 files changed, 49 insertions(+), 102 deletions(-) (limited to 'test') diff --git a/src/compiler/scala/tools/nsc/transform/Delambdafy.scala b/src/compiler/scala/tools/nsc/transform/Delambdafy.scala index 94e88589f5..2d33b35241 100644 --- a/src/compiler/scala/tools/nsc/transform/Delambdafy.scala +++ b/src/compiler/scala/tools/nsc/transform/Delambdafy.scala @@ -15,13 +15,12 @@ import scala.collection.mutable.LinkedHashMap * Currently Uncurry is responsible for that transformation. * * From a lambda, Delambdafy will create - * 1) a static forwarder at the top level of the class that contained the lambda - * 2) a new top level class that + * 1) a new top level class that a) has fields and a constructor taking the captured environment (including possibly the "this" * reference) - * b) an apply method that calls the static forwarder + * b) an apply method that calls the target method * c) if needed a bridge method for the apply method - * 3) an instantiation of the newly created class which replaces the lambda + * 2) an instantiation of the newly created class which replaces the lambda * * TODO the main work left to be done is to plug into specialization. Primarily that means choosing a * specialized FunctionN trait instead of the generic FunctionN trait as a parent and creating the @@ -76,36 +75,25 @@ abstract class Delambdafy extends Transform with TypingTransformers with ast.Tre referrers } - val accessorMethods = mutable.ArrayBuffer[Tree]() - - // the result of the transformFunction method. 
A class definition for the lambda, an expression - // insantiating the lambda class, and an accessor method for the lambda class to be able to - // call the implementation - case class TransformedFunction(lambdaClassDef: ClassDef, newExpr: Tree, accessorMethod: Tree) + // the result of the transformFunction method. + sealed abstract class TransformedFunction + // A class definition for the lambda, an expression insantiating the lambda class + case class DelambdafyAnonClass(lambdaClassDef: ClassDef, newExpr: Tree) extends TransformedFunction // here's the main entry point of the transform override def transform(tree: Tree): Tree = tree match { // the main thing we care about is lambdas case fun @ Function(_, _) => - // a lambda beccomes a new class, an instantiation expression, and an - // accessor method - val TransformedFunction(lambdaClassDef, newExpr, accessorMethod) = transformFunction(fun) - // we'll add accessor methods to the current template later - accessorMethods += accessorMethod - val pkg = lambdaClassDef.symbol.owner - - // we'll add the lambda class to the package later - lambdaClassDefs(pkg) = lambdaClassDef :: lambdaClassDefs(pkg) - - super.transform(newExpr) - // when we encounter a template (basically the thing that holds body of a class/trait) - // we need to updated it to include newly created accessor methods after transforming it - case Template(_, _, _) => - try { - // during this call accessorMethods will be populated from the Function case - val Template(parents, self, body) = super.transform(tree) - Template(parents, self, body ++ accessorMethods) - } finally accessorMethods.clear() + transformFunction(fun) match { + case DelambdafyAnonClass(lambdaClassDef, newExpr) => + // a lambda beccomes a new class, an instantiation expression + val pkg = lambdaClassDef.symbol.owner + + // we'll add the lambda class to the package later + lambdaClassDefs(pkg) = lambdaClassDef :: lambdaClassDefs(pkg) + + super.transform(newExpr) + } case _ => super.transform(tree) } @@ -120,8 +108,7 @@ abstract class Delambdafy extends Transform with TypingTransformers with ast.Tre private def optionSymbol(sym: Symbol): Option[Symbol] = if (sym.exists) Some(sym) else None - // turns a lambda into a new class def, a New expression instantiating that class, and an - // accessor method fo the body of the lambda + // turns a lambda into a new class def, a New expression instantiating that class private def transformFunction(originalFunction: Function): TransformedFunction = { val functionTpe = originalFunction.tpe val targs = functionTpe.typeArgs @@ -132,46 +119,16 @@ abstract class Delambdafy extends Transform with TypingTransformers with ast.Tre // passed into the constructor of the anonymous function class val captures = FreeVarTraverser.freeVarsOf(originalFunction) - /** - * Creates the apply method for the anonymous subclass of FunctionN - */ - def createAccessorMethod(thisProxy: Symbol, fun: Function): DefDef = { - val target = targetMethod(fun) - if (!thisProxy.exists) { - target setFlag STATIC - } - val params = ((optionSymbol(thisProxy) map {proxy:Symbol => ValDef(proxy)}) ++ (target.paramss.flatten map ValDef.apply)).toList - - val methSym = oldClass.newMethod(unit.freshTermName(nme.accessor.toString() + "$"), target.pos, FINAL | BRIDGE | SYNTHETIC | PROTECTED | STATIC) + val target = targetMethod(originalFunction) + target.makeNotPrivate(target.owner) + if (!thisReferringMethods.contains(target)) + target setFlag STATIC - val paramSyms = params map {param => 
methSym.newSyntheticValueParam(param.symbol.tpe, param.name) } - - params zip paramSyms foreach { case (valdef, sym) => valdef.symbol = sym } - params foreach (_.symbol.owner = methSym) - - val methodType = MethodType(paramSyms, restpe) - methSym setInfo methodType - - oldClass.info.decls enter methSym - - val body = localTyper.typed { - val newTarget = Select(if (thisProxy.exists) gen.mkAttributedRef(paramSyms(0)) else gen.mkAttributedThis(oldClass), target) - val newParams = paramSyms drop (if (thisProxy.exists) 1 else 0) map Ident - Apply(newTarget, newParams) - } setPos fun.pos - val methDef = DefDef(methSym, List(params), body) - - // Have to repack the type to avoid mismatches when existentials - // appear in the result - see SI-4869. - // TODO probably don't need packedType - methDef.tpt setType localTyper.packedType(body, methSym) - methDef - } /** * Creates the apply method for the anonymous subclass of FunctionN */ - def createApplyMethod(newClass: Symbol, fun: Function, accessor: DefDef, thisProxy: Symbol): DefDef = { + def createApplyMethod(newClass: Symbol, fun: Function, thisProxy: Symbol): DefDef = { val methSym = newClass.newMethod(nme.apply, fun.pos, FINAL | SYNTHETIC) val params = fun.vparams map (_.duplicate) @@ -187,8 +144,12 @@ abstract class Delambdafy extends Transform with TypingTransformers with ast.Tre newClass.info.decls enter methSym val Apply(_, oldParams) = fun.body + val qual = if (thisProxy.exists) + Select(gen.mkAttributedThis(newClass), thisProxy) + else + gen.mkAttributedThis(oldClass) // sort of a lie, EmptyTree. would be more honest, but the backend chokes on that. - val body = localTyper typed Apply(Select(gen.mkAttributedThis(oldClass), accessor.symbol), (optionSymbol(thisProxy) map {tp => Select(gen.mkAttributedThis(newClass), tp)}).toList ++ oldParams) + val body = localTyper typed Apply(Select(qual, target), oldParams) body.substituteSymbols(fun.vparams map (_.symbol), params map (_.symbol)) body changeOwner (fun.symbol -> methSym) @@ -271,18 +232,16 @@ abstract class Delambdafy extends Transform with TypingTransformers with ast.Tre // the Optional proxy that will hold a reference to the 'this' // object used by the lambda, if any. NoSymbol if there is no this proxy val thisProxy = { - val target = targetMethod(originalFunction) - if (thisReferringMethods contains target) { + if (target.hasFlag(STATIC)) + NoSymbol + else { val sym = lambdaClass.newVariable(nme.FAKE_LOCAL_THIS, originalFunction.pos, SYNTHETIC) - sym.info = oldClass.tpe - sym - } else NoSymbol + sym.setInfo(oldClass.tpe) + } } val decapturify = new DeCapturifyTransformer(captureProxies2, unit, oldClass, lambdaClass, originalFunction.symbol.pos, thisProxy) - val accessorMethod = createAccessorMethod(thisProxy, originalFunction) - val decapturedFunction = decapturify.transform(originalFunction).asInstanceOf[Function] val members = (optionSymbol(thisProxy).toList ++ (captureProxies2 map (_._2))) map {member => @@ -294,7 +253,7 @@ abstract class Delambdafy extends Transform with TypingTransformers with ast.Tre val constr = createConstructor(lambdaClass, members) // apply method with same arguments and return type as original lambda. 
- val applyMethodDef = createApplyMethod(lambdaClass, decapturedFunction, accessorMethod, thisProxy) + val applyMethodDef = createApplyMethod(lambdaClass, decapturedFunction, thisProxy) val bridgeMethod = createBridgeMethod(lambdaClass, originalFunction, applyMethodDef) @@ -312,10 +271,10 @@ abstract class Delambdafy extends Transform with TypingTransformers with ast.Tre val body = members ++ List(constr, applyMethodDef) ++ bridgeMethod // TODO if member fields are private this complains that they're not accessible - (localTyper.typedPos(decapturedFunction.pos)(ClassDef(lambdaClass, body)).asInstanceOf[ClassDef], thisProxy, accessorMethod) + (localTyper.typedPos(decapturedFunction.pos)(ClassDef(lambdaClass, body)).asInstanceOf[ClassDef], thisProxy) } - val (anonymousClassDef, thisProxy, accessorMethod) = makeAnonymousClass + val (anonymousClassDef, thisProxy) = makeAnonymousClass pkg.info.decls enter anonymousClassDef.symbol @@ -327,7 +286,7 @@ abstract class Delambdafy extends Transform with TypingTransformers with ast.Tre val typedNewStat = localTyper.typedPos(originalFunction.pos)(newStat) - TransformedFunction(anonymousClassDef, typedNewStat, accessorMethod) + DelambdafyAnonClass(anonymousClassDef, typedNewStat) } /** diff --git a/src/repl/scala/tools/nsc/interpreter/JavapClass.scala b/src/repl/scala/tools/nsc/interpreter/JavapClass.scala index c80b94bf89..1ccade2172 100644 --- a/src/repl/scala/tools/nsc/interpreter/JavapClass.scala +++ b/src/repl/scala/tools/nsc/interpreter/JavapClass.scala @@ -8,6 +8,7 @@ package tools.nsc package interpreter import java.lang.{ ClassLoader => JavaClassLoader, Iterable => JIterable } +import scala.tools.asm.Opcodes import scala.tools.nsc.util.ScalaClassLoader import java.io.{ ByteArrayInputStream, CharArrayWriter, FileNotFoundException, PrintWriter, StringWriter, Writer } import java.util.{ Locale } @@ -758,32 +759,19 @@ object JavapClass { import scala.tools.asm.ClassReader import scala.tools.asm.Opcodes.INVOKESTATIC import scala.tools.asm.tree.{ ClassNode, MethodInsnNode } - // the accessor methods invoked statically by the apply of the given closure class - def accesses(s: String): Seq[(String, String)] = { - val accessor = """accessor\$\d+""".r + def callees(s: String): List[(String, String)] = { loader classReader s withMethods { ms => - ms filter (_.name == "apply") flatMap (_.instructions.toArray.collect { - case i: MethodInsnNode if i.getOpcode == INVOKESTATIC && when(i.name) { case accessor(_*) => true } => (i.owner, i.name) - }) + val nonBridgeApplyMethods = ms filter (_.name == "apply") filter (n => (n.access & Opcodes.ACC_BRIDGE) == 0) + val instructions = nonBridgeApplyMethods flatMap (_.instructions.toArray) + instructions.collect { + case i: MethodInsnNode => (i.owner, i.name) + }.toList } } - // get the k.$anonfun for the accessor k.m - def anonOf(k: String, m: String): String = { - val res = - loader classReader k withMethods { ms => - ms filter (_.name == m) flatMap (_.instructions.toArray.collect { - case i: MethodInsnNode if i.getOpcode == INVOKESTATIC && i.name.startsWith("$anonfun") => i.name - }) - } - assert(res.size == 1) - res.head - } - // the lambdas invoke accessors that call the anonfuns of interest. Filter k on the k#$anonfuns. - val ack = accesses(lambda) - assert(ack.size == 1) // There can be only one. 
- ack.head match { - case (k, _) if target.isModule && !(k endsWith "$") => None - case (k, m) => Some(s"${k}#${anonOf(k, m)}") + callees(lambda) match { + case (k, _) :: Nil if target.isModule && !(k endsWith "$") => None + case (k, m) :: _ => Some(s"${k}#${m}") + case _ => None } } /** Translate the supplied targets to patterns for anonfuns. diff --git a/test/files/run/repl-javap-lambdas.scala b/test/files/run/repl-javap-lambdas.scala index c503c99d66..76a6ec8450 100644 --- a/test/files/run/repl-javap-lambdas.scala +++ b/test/files/run/repl-javap-lambdas.scala @@ -16,7 +16,7 @@ object Test extends JavapTest { // three anonfuns of Betty#g override def yah(res: Seq[String]) = { import PartialFunction.{ cond => when } - val r = """\s*private static final .* \$anonfun\$\d+\(.*""".r + val r = """.*final .* .*\$anonfun\$\d+\(.*""".r def filtered = res filter (when(_) { case r(_*) => true }) 3 == filtered.size } -- cgit v1.2.3
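For reference, the REPL-side change above amounts to collecting the callees of the non-bridge `apply` method. A standalone sketch of that ASM traversal follows (assuming the compiler's shaded `scala.tools.asm` on the classpath; `ApplyCallees` is a made-up name, not part of the patch):

```scala
import scala.collection.JavaConverters._
import scala.tools.asm.{ClassReader, Opcodes}
import scala.tools.asm.tree.{ClassNode, MethodInsnNode, MethodNode}

object ApplyCallees {
  // Owner/name pairs of every method invoked from a non-bridge `apply` method
  // of the given class file -- roughly what the callees helper above computes.
  def callees(classBytes: Array[Byte]): List[(String, String)] = {
    val node = new ClassNode
    new ClassReader(classBytes).accept(node, 0)
    val applies = node.methods.asScala.toList collect {
      case m: MethodNode if m.name == "apply" && (m.access & Opcodes.ACC_BRIDGE) == 0 => m
    }
    applies.flatMap(m => m.instructions.toArray collect {
      case i: MethodInsnNode => (i.owner, i.name)
    })
  }
}
```

Feeding it the bytes of an anonymous function class yields owner/name pairs from which the interesting `$anonfun$...` entry can be picked, much as the pattern match in `JavapClass` does.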