-rw-r--r--build.xml153
-rw-r--r--project/Build.scala89
-rw-r--r--project/Layers.scala32
-rw-r--r--src/actors-migration/scala/actors/MigrationSystem.scala36
-rw-r--r--src/actors-migration/scala/actors/Pattern.scala25
-rw-r--r--src/actors-migration/scala/actors/Props.scala13
-rw-r--r--src/actors-migration/scala/actors/StashingActor.scala255
-rw-r--r--src/actors-migration/scala/actors/Timeout.scala39
-rw-r--r--src/actors/scala/actors/ActorRef.scala119
-rw-r--r--src/actors/scala/actors/ActorTask.scala1
-rw-r--r--src/actors/scala/actors/InternalActor.scala53
-rw-r--r--src/actors/scala/actors/MQueue.scala14
-rw-r--r--src/actors/scala/actors/Reactor.scala10
-rw-r--r--src/actors/scala/actors/ReplyReactor.scala7
-rw-r--r--src/asm/scala/tools/asm/AnnotationVisitor.java (renamed from src/compiler/scala/tools/asm/AnnotationVisitor.java)0
-rw-r--r--src/asm/scala/tools/asm/AnnotationWriter.java (renamed from src/compiler/scala/tools/asm/AnnotationWriter.java)0
-rw-r--r--src/asm/scala/tools/asm/Attribute.java (renamed from src/compiler/scala/tools/asm/Attribute.java)0
-rw-r--r--src/asm/scala/tools/asm/ByteVector.java (renamed from src/compiler/scala/tools/asm/ByteVector.java)0
-rw-r--r--src/asm/scala/tools/asm/ClassReader.java (renamed from src/compiler/scala/tools/asm/ClassReader.java)0
-rw-r--r--src/asm/scala/tools/asm/ClassVisitor.java (renamed from src/compiler/scala/tools/asm/ClassVisitor.java)0
-rw-r--r--src/asm/scala/tools/asm/ClassWriter.java (renamed from src/compiler/scala/tools/asm/ClassWriter.java)0
-rw-r--r--src/asm/scala/tools/asm/CustomAttr.java (renamed from src/compiler/scala/tools/asm/CustomAttr.java)0
-rw-r--r--src/asm/scala/tools/asm/Edge.java (renamed from src/compiler/scala/tools/asm/Edge.java)0
-rw-r--r--src/asm/scala/tools/asm/FieldVisitor.java (renamed from src/compiler/scala/tools/asm/FieldVisitor.java)0
-rw-r--r--src/asm/scala/tools/asm/FieldWriter.java (renamed from src/compiler/scala/tools/asm/FieldWriter.java)0
-rw-r--r--src/asm/scala/tools/asm/Frame.java (renamed from src/compiler/scala/tools/asm/Frame.java)0
-rw-r--r--src/asm/scala/tools/asm/Handle.java (renamed from src/compiler/scala/tools/asm/Handle.java)0
-rw-r--r--src/asm/scala/tools/asm/Handler.java (renamed from src/compiler/scala/tools/asm/Handler.java)0
-rw-r--r--src/asm/scala/tools/asm/Item.java (renamed from src/compiler/scala/tools/asm/Item.java)0
-rw-r--r--src/asm/scala/tools/asm/Label.java (renamed from src/compiler/scala/tools/asm/Label.java)0
-rw-r--r--src/asm/scala/tools/asm/MethodVisitor.java (renamed from src/compiler/scala/tools/asm/MethodVisitor.java)0
-rw-r--r--src/asm/scala/tools/asm/MethodWriter.java (renamed from src/compiler/scala/tools/asm/MethodWriter.java)0
-rw-r--r--src/asm/scala/tools/asm/Opcodes.java (renamed from src/compiler/scala/tools/asm/Opcodes.java)0
-rw-r--r--src/asm/scala/tools/asm/Type.java (renamed from src/compiler/scala/tools/asm/Type.java)0
-rw-r--r--src/asm/scala/tools/asm/signature/SignatureReader.java (renamed from src/compiler/scala/tools/asm/signature/SignatureReader.java)0
-rw-r--r--src/asm/scala/tools/asm/signature/SignatureVisitor.java (renamed from src/compiler/scala/tools/asm/signature/SignatureVisitor.java)0
-rw-r--r--src/asm/scala/tools/asm/signature/SignatureWriter.java (renamed from src/compiler/scala/tools/asm/signature/SignatureWriter.java)0
-rw-r--r--src/asm/scala/tools/asm/tree/AbstractInsnNode.java (renamed from src/compiler/scala/tools/asm/tree/AbstractInsnNode.java)0
-rw-r--r--src/asm/scala/tools/asm/tree/AnnotationNode.java (renamed from src/compiler/scala/tools/asm/tree/AnnotationNode.java)0
-rw-r--r--src/asm/scala/tools/asm/tree/ClassNode.java (renamed from src/compiler/scala/tools/asm/tree/ClassNode.java)0
-rw-r--r--src/asm/scala/tools/asm/tree/FieldInsnNode.java (renamed from src/compiler/scala/tools/asm/tree/FieldInsnNode.java)0
-rw-r--r--src/asm/scala/tools/asm/tree/FieldNode.java (renamed from src/compiler/scala/tools/asm/tree/FieldNode.java)0
-rw-r--r--src/asm/scala/tools/asm/tree/FrameNode.java (renamed from src/compiler/scala/tools/asm/tree/FrameNode.java)0
-rw-r--r--src/asm/scala/tools/asm/tree/IincInsnNode.java (renamed from src/compiler/scala/tools/asm/tree/IincInsnNode.java)0
-rw-r--r--src/asm/scala/tools/asm/tree/InnerClassNode.java (renamed from src/compiler/scala/tools/asm/tree/InnerClassNode.java)0
-rw-r--r--src/asm/scala/tools/asm/tree/InsnList.java (renamed from src/compiler/scala/tools/asm/tree/InsnList.java)0
-rw-r--r--src/asm/scala/tools/asm/tree/InsnNode.java (renamed from src/compiler/scala/tools/asm/tree/InsnNode.java)0
-rw-r--r--src/asm/scala/tools/asm/tree/IntInsnNode.java (renamed from src/compiler/scala/tools/asm/tree/IntInsnNode.java)0
-rw-r--r--src/asm/scala/tools/asm/tree/InvokeDynamicInsnNode.java (renamed from src/compiler/scala/tools/asm/tree/InvokeDynamicInsnNode.java)0
-rw-r--r--src/asm/scala/tools/asm/tree/JumpInsnNode.java (renamed from src/compiler/scala/tools/asm/tree/JumpInsnNode.java)0
-rw-r--r--src/asm/scala/tools/asm/tree/LabelNode.java (renamed from src/compiler/scala/tools/asm/tree/LabelNode.java)0
-rw-r--r--src/asm/scala/tools/asm/tree/LdcInsnNode.java (renamed from src/compiler/scala/tools/asm/tree/LdcInsnNode.java)0
-rw-r--r--src/asm/scala/tools/asm/tree/LineNumberNode.java (renamed from src/compiler/scala/tools/asm/tree/LineNumberNode.java)0
-rw-r--r--src/asm/scala/tools/asm/tree/LocalVariableNode.java (renamed from src/compiler/scala/tools/asm/tree/LocalVariableNode.java)0
-rw-r--r--src/asm/scala/tools/asm/tree/LookupSwitchInsnNode.java (renamed from src/compiler/scala/tools/asm/tree/LookupSwitchInsnNode.java)0
-rw-r--r--src/asm/scala/tools/asm/tree/MethodInsnNode.java (renamed from src/compiler/scala/tools/asm/tree/MethodInsnNode.java)0
-rw-r--r--src/asm/scala/tools/asm/tree/MethodNode.java (renamed from src/compiler/scala/tools/asm/tree/MethodNode.java)0
-rw-r--r--src/asm/scala/tools/asm/tree/MultiANewArrayInsnNode.java (renamed from src/compiler/scala/tools/asm/tree/MultiANewArrayInsnNode.java)0
-rw-r--r--src/asm/scala/tools/asm/tree/TableSwitchInsnNode.java (renamed from src/compiler/scala/tools/asm/tree/TableSwitchInsnNode.java)0
-rw-r--r--src/asm/scala/tools/asm/tree/TryCatchBlockNode.java (renamed from src/compiler/scala/tools/asm/tree/TryCatchBlockNode.java)0
-rw-r--r--src/asm/scala/tools/asm/tree/TypeInsnNode.java (renamed from src/compiler/scala/tools/asm/tree/TypeInsnNode.java)0
-rw-r--r--src/asm/scala/tools/asm/tree/VarInsnNode.java (renamed from src/compiler/scala/tools/asm/tree/VarInsnNode.java)0
-rw-r--r--src/asm/scala/tools/asm/tree/analysis/Analyzer.java (renamed from src/compiler/scala/tools/asm/tree/analysis/Analyzer.java)0
-rw-r--r--src/asm/scala/tools/asm/tree/analysis/AnalyzerException.java (renamed from src/compiler/scala/tools/asm/tree/analysis/AnalyzerException.java)0
-rw-r--r--src/asm/scala/tools/asm/tree/analysis/BasicInterpreter.java (renamed from src/compiler/scala/tools/asm/tree/analysis/BasicInterpreter.java)0
-rw-r--r--src/asm/scala/tools/asm/tree/analysis/BasicValue.java (renamed from src/compiler/scala/tools/asm/tree/analysis/BasicValue.java)0
-rw-r--r--src/asm/scala/tools/asm/tree/analysis/BasicVerifier.java (renamed from src/compiler/scala/tools/asm/tree/analysis/BasicVerifier.java)0
-rw-r--r--src/asm/scala/tools/asm/tree/analysis/Frame.java (renamed from src/compiler/scala/tools/asm/tree/analysis/Frame.java)0
-rw-r--r--src/asm/scala/tools/asm/tree/analysis/Interpreter.java (renamed from src/compiler/scala/tools/asm/tree/analysis/Interpreter.java)0
-rw-r--r--src/asm/scala/tools/asm/tree/analysis/SimpleVerifier.java (renamed from src/compiler/scala/tools/asm/tree/analysis/SimpleVerifier.java)0
-rw-r--r--src/asm/scala/tools/asm/tree/analysis/SmallSet.java (renamed from src/compiler/scala/tools/asm/tree/analysis/SmallSet.java)0
-rw-r--r--src/asm/scala/tools/asm/tree/analysis/SourceInterpreter.java (renamed from src/compiler/scala/tools/asm/tree/analysis/SourceInterpreter.java)0
-rw-r--r--src/asm/scala/tools/asm/tree/analysis/SourceValue.java (renamed from src/compiler/scala/tools/asm/tree/analysis/SourceValue.java)0
-rw-r--r--src/asm/scala/tools/asm/tree/analysis/Subroutine.java (renamed from src/compiler/scala/tools/asm/tree/analysis/Subroutine.java)0
-rw-r--r--src/asm/scala/tools/asm/tree/analysis/Value.java (renamed from src/compiler/scala/tools/asm/tree/analysis/Value.java)0
-rw-r--r--src/asm/scala/tools/asm/util/ASMifiable.java (renamed from src/compiler/scala/tools/asm/util/ASMifiable.java)0
-rw-r--r--src/asm/scala/tools/asm/util/ASMifier.java (renamed from src/compiler/scala/tools/asm/util/ASMifier.java)0
-rw-r--r--src/asm/scala/tools/asm/util/CheckAnnotationAdapter.java (renamed from src/compiler/scala/tools/asm/util/CheckAnnotationAdapter.java)0
-rw-r--r--src/asm/scala/tools/asm/util/CheckClassAdapter.java (renamed from src/compiler/scala/tools/asm/util/CheckClassAdapter.java)0
-rw-r--r--src/asm/scala/tools/asm/util/CheckFieldAdapter.java (renamed from src/compiler/scala/tools/asm/util/CheckFieldAdapter.java)0
-rw-r--r--src/asm/scala/tools/asm/util/CheckMethodAdapter.java (renamed from src/compiler/scala/tools/asm/util/CheckMethodAdapter.java)0
-rw-r--r--src/asm/scala/tools/asm/util/CheckSignatureAdapter.java (renamed from src/compiler/scala/tools/asm/util/CheckSignatureAdapter.java)0
-rw-r--r--src/asm/scala/tools/asm/util/Printer.java (renamed from src/compiler/scala/tools/asm/util/Printer.java)0
-rw-r--r--src/asm/scala/tools/asm/util/SignatureChecker.java (renamed from src/compiler/scala/tools/asm/util/SignatureChecker.java)0
-rw-r--r--src/asm/scala/tools/asm/util/Textifiable.java (renamed from src/compiler/scala/tools/asm/util/Textifiable.java)0
-rw-r--r--src/asm/scala/tools/asm/util/Textifier.java (renamed from src/compiler/scala/tools/asm/util/Textifier.java)0
-rw-r--r--src/asm/scala/tools/asm/util/TraceAnnotationVisitor.java (renamed from src/compiler/scala/tools/asm/util/TraceAnnotationVisitor.java)0
-rw-r--r--src/asm/scala/tools/asm/util/TraceClassVisitor.java (renamed from src/compiler/scala/tools/asm/util/TraceClassVisitor.java)0
-rw-r--r--src/asm/scala/tools/asm/util/TraceFieldVisitor.java (renamed from src/compiler/scala/tools/asm/util/TraceFieldVisitor.java)0
-rw-r--r--src/asm/scala/tools/asm/util/TraceMethodVisitor.java (renamed from src/compiler/scala/tools/asm/util/TraceMethodVisitor.java)0
-rw-r--r--src/asm/scala/tools/asm/util/TraceSignatureVisitor.java (renamed from src/compiler/scala/tools/asm/util/TraceSignatureVisitor.java)0
-rw-r--r--src/build/maven/maven-deploy.xml3
-rw-r--r--src/build/maven/scala-actors-migration-pom.xml66
-rw-r--r--src/build/pack.xml6
-rw-r--r--src/compiler/scala/reflect/internal/Constants.scala1
-rw-r--r--src/compiler/scala/reflect/internal/Definitions.scala2
-rw-r--r--src/compiler/scala/reflect/internal/InfoTransformers.scala4
-rw-r--r--src/compiler/scala/reflect/internal/Phase.scala10
-rw-r--r--src/compiler/scala/reflect/internal/util/Origins.scala86
-rw-r--r--src/compiler/scala/tools/nsc/ast/TreeGen.scala1
-rw-r--r--src/compiler/scala/tools/nsc/ast/Trees.scala4
-rw-r--r--src/compiler/scala/tools/nsc/ast/parser/Parsers.scala2
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/Members.scala20
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala8
-rw-r--r--src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala9
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/page/Index.scala2
-rw-r--r--src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala9
-rw-r--r--src/compiler/scala/tools/nsc/transform/AddInterfaces.scala3
-rw-r--r--src/compiler/scala/tools/nsc/transform/InfoTransform.scala2
-rw-r--r--src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala17
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala38
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Contexts.scala12
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Implicits.scala8
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Infer.scala58
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Namers.scala22
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala29
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala1336
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/RefChecks.scala14
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala67
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Typers.scala17
-rw-r--r--src/library/scala/collection/mutable/FlatHashTable.scala8
-rw-r--r--src/library/scala/collection/mutable/HashTable.scala20
-rw-r--r--src/library/scala/collection/mutable/PriorityQueue.scala26
-rw-r--r--src/library/scala/collection/mutable/StackProxy.scala5
-rw-r--r--src/library/scala/collection/package.scala4
-rw-r--r--src/library/scala/concurrent/Future.scala102
-rw-r--r--src/library/scala/concurrent/Promise.scala14
-rw-r--r--src/library/scala/concurrent/impl/Future.scala2
-rw-r--r--src/library/scala/concurrent/impl/Promise.scala21
-rw-r--r--src/partest/scala/tools/partest/PartestTask.scala11
-rw-r--r--src/partest/scala/tools/partest/nest/AntRunner.scala1
-rw-r--r--src/partest/scala/tools/partest/nest/ConsoleFileManager.scala9
-rw-r--r--src/partest/scala/tools/partest/nest/DirectRunner.scala4
-rw-r--r--src/partest/scala/tools/partest/nest/FileManager.scala1
-rw-r--r--src/partest/scala/tools/partest/nest/ReflectiveRunner.scala4
-rw-r--r--src/partest/scala/tools/partest/nest/SBTRunner.scala2
-rw-r--r--src/partest/scala/tools/partest/nest/Worker.scala1
-rw-r--r--test/files/buildmanager/t2792/t2792.check3
-rw-r--r--test/files/jvm/scala-concurrent-tck.scala121
-rw-r--r--test/files/neg/exhausting.check34
-rw-r--r--test/files/neg/exhausting.flags2
-rw-r--r--test/files/neg/exhausting.scala26
-rw-r--r--test/files/neg/pat_unreachable.flags2
-rw-r--r--test/files/neg/patmatexhaust.check55
-rw-r--r--test/files/neg/patmatexhaust.flags2
-rw-r--r--test/files/neg/patmatexhaust.scala2
-rw-r--r--test/files/neg/sealed-java-enums.check8
-rw-r--r--test/files/neg/sealed-java-enums.flags2
-rw-r--r--test/files/neg/switch.check6
-rw-r--r--test/files/neg/switch.flags2
-rw-r--r--test/files/neg/t2405.check8
-rw-r--r--test/files/neg/t2405.scala10
-rw-r--r--test/files/neg/t3098.check5
-rw-r--r--test/files/neg/t3098.flags2
-rw-r--r--test/files/neg/t3614.check4
-rw-r--r--test/files/neg/t3614.scala3
-rw-r--r--test/files/neg/t3683a.check5
-rw-r--r--test/files/neg/t3683a.flags2
-rw-r--r--test/files/neg/t3692-new.flags2
-rw-r--r--test/files/neg/t3692-old.flags2
-rw-r--r--test/files/neg/t3761-overload-byname.check13
-rw-r--r--test/files/neg/t3761-overload-byname.scala13
-rw-r--r--test/files/neg/t5735.check6
-rw-r--r--test/files/neg/t5735.scala7
-rw-r--r--test/files/neg/t5821.check4
-rw-r--r--test/files/neg/t5821.scala8
-rw-r--r--test/files/neg/t639.check5
-rw-r--r--test/files/neg/t963b.check6
-rw-r--r--test/files/neg/t963b.scala (renamed from test/pending/neg/t963.scala)6
-rw-r--r--test/files/pos/arrays3.scala11
-rw-r--r--test/files/pos/exhaust_alternatives.flags1
-rw-r--r--test/files/pos/exhaust_alternatives.scala10
-rw-r--r--test/files/pos/exhaustive_heuristics.scala16
-rw-r--r--test/files/pos/t2405.scala23
-rw-r--r--test/files/pos/t3097.scala31
-rw-r--r--test/files/pos/t3856.scala1
-rw-r--r--test/files/pos/t4975.scala12
-rw-r--r--test/files/pos/t5305.scala13
-rw-r--r--test/files/pos/t5779-numeq-warn.scala13
-rw-r--r--test/files/pos/t5809.flags1
-rw-r--r--test/files/pos/t5809.scala10
-rw-r--r--test/files/pos/virtpatmat_exhaust.scala24
-rw-r--r--test/files/pos/virtpatmat_exhaust_unchecked.flags1
-rw-r--r--test/files/pos/virtpatmat_exhaust_unchecked.scala24
-rw-r--r--test/files/pos/z1730.flags1
-rw-r--r--test/files/pos/z1730.scala13
-rw-r--r--test/files/run/inline-ex-handlers.check109
-rw-r--r--test/files/run/origins.check2
-rw-r--r--test/files/run/origins.scala2
-rw-r--r--test/files/run/t3097.check1
-rw-r--r--test/files/run/t3097.scala18
-rw-r--r--test/files/run/t3761-overload-byname.check12
-rw-r--r--test/files/run/t3761-overload-byname.scala39
-rw-r--r--test/files/run/t5428.check1
-rw-r--r--test/files/run/t5428.scala29
-rw-r--r--test/files/run/t5804.check4
-rw-r--r--test/files/run/t5804.scala32
-rw-r--r--test/pending/neg/t5008.scala165
-rw-r--r--test/pending/pos/no-widen-locals.scala (renamed from test/files/pos/no-widen-locals.scala)0
-rw-r--r--test/pending/pos/t4649.flags1
-rw-r--r--test/pending/pos/t4649.scala6
-rw-r--r--test/pending/run/partial-anyref-spec.check13
-rw-r--r--test/pending/run/partial-anyref-spec.scala31
203 files changed, 3194 insertions, 789 deletions
diff --git a/build.xml b/build.xml
index b28e6141c6..35cc4f3004 100644
--- a/build.xml
+++ b/build.xml
@@ -182,6 +182,7 @@ PROPERTIES
<!-- Sets location of build folders -->
<property name="build.dir" value="${basedir}/build"/>
+ <property name="build-asm.dir" value="${build.dir}/asm"/>
<property name="build-locker.dir" value="${build.dir}/locker"/>
<property name="build-palo.dir" value="${build.dir}/palo"/>
<property name="build-quick.dir" value="${build.dir}/quick"/>
@@ -413,10 +414,43 @@ INITIALISATION
</target>
<!-- ===========================================================================
+LOCAL Adapted ASM
+============================================================================ -->
+
+ <target name="asm.start" depends="init">
+ <condition property="asm.available">
+ <available file="${build-asm.dir}/asm.complete"/>
+ </condition>
+ </target>
+
+ <target name="asm.lib" depends="asm.start" unless="asm.available">
+ <stopwatch name="asm.lib.timer"/>
+ <mkdir dir="${build-asm.dir}/classes/"/>
+ <javac
+ srcdir="${src.dir}/asm"
+ destdir="${build-asm.dir}/classes"
+ classpath="${build-asm.dir}/classes"
+ includes="**/*.java"
+ target="1.5" source="1.5">
+ <compilerarg line="${javac.args} -XDignore.symbol.file"/>
+ </javac>
+ <touch file="${build-asm.dir}/asm.complete" verbose="no"/>
+ <stopwatch name="asm.lib.timer" action="total"/>
+ </target>
+
+
+ <target name="asm.done" depends="asm.lib">
+ <path id="asm.classpath">
+ <pathelement location="${build-asm.dir}/classes/"/>
+ </path>
+ </target>
+
+
+<!-- ===========================================================================
LOCAL REFERENCE BUILD (LOCKER)
============================================================================ -->
- <target name="locker.start" depends="init">
+ <target name="locker.start" depends="asm.done">
<condition property="locker.available">
<available file="${build-locker.dir}/all.complete"/>
</condition>
@@ -482,20 +516,6 @@ LOCAL REFERENCE BUILD (LOCKER)
<target name="locker.comp" depends="locker.pre-comp" if="locker.comp.needed">
<stopwatch name="locker.comp.timer"/>
<mkdir dir="${build-locker.dir}/classes/compiler"/>
- <!-- TODO If we have cross dependencies, move this below scalacfork... -->
- <javac
- srcdir="${src.dir}/compiler"
- destdir="${build-locker.dir}/classes/compiler"
- includes="**/*.java"
- target="1.5" source="1.5">
- <compilerarg line="${javac.args}"/>
- <classpath>
- <pathelement location="${build-locker.dir}/classes/library"/>
- <pathelement location="${build-locker.dir}/classes/compiler"/>
- <path refid="aux.libs"/>
- <pathelement location="${jline.jar}"/>
- </classpath>
- </javac>
<scalacfork
destdir="${build-locker.dir}/classes/compiler"
compilerpathref="starr.classpath"
@@ -507,6 +527,7 @@ LOCAL REFERENCE BUILD (LOCKER)
<pathelement location="${build-locker.dir}/classes/library"/>
<pathelement location="${build-locker.dir}/classes/compiler"/>
<path refid="aux.libs"/>
+ <path refid="asm.classpath"/>
<pathelement location="${jline.jar}"/>
</compilationpath>
</scalacfork>
@@ -538,11 +559,13 @@ LOCAL REFERENCE BUILD (LOCKER)
<path id="locker.classpath">
<pathelement location="${build-locker.dir}/classes/library"/>
<pathelement location="${build-locker.dir}/classes/compiler"/>
+ <path refid="asm.classpath"/>
<path refid="aux.libs"/>
</path>
<path id="quick.classpath">
<pathelement location="${build-quick.dir}/classes/library"/>
<pathelement location="${build-quick.dir}/classes/compiler"/>
+ <path refid="asm.classpath"/>
<path refid="aux.libs"/>
</path>
</target>
@@ -610,6 +633,7 @@ PACKED LOCKER BUILD (PALO)
<mkdir dir="${build-palo.dir}/lib"/>
<jar destfile="${build-palo.dir}/lib/scala-compiler.jar" manifest="${basedir}/META-INF/MANIFEST.MF">
<fileset dir="${build-locker.dir}/classes/compiler"/>
+ <fileset dir="${build-asm.dir}/classes/"/>
<!-- filemode / dirmode: see trac ticket #1294 -->
<zipfileset dirmode="755" filemode="644" src="${lib.dir}/fjbg.jar"/>
<zipfileset dirmode="755" filemode="644" src="${lib.dir}/msil.jar"/>
@@ -682,6 +706,7 @@ QUICK BUILD (QUICK)
<include name="continuations/**"/>
<include name="swing/**"/>
<include name="actors/**"/>
+ <include name="actors-migration/**"/>
</srcfiles>
</uptodate>
</target>
@@ -780,20 +805,6 @@ QUICK BUILD (QUICK)
<target name="quick.comp" depends="quick.pre-comp" unless="quick.comp.available">
<stopwatch name="quick.comp.timer"/>
<mkdir dir="${build-quick.dir}/classes/compiler"/>
- <!-- TODO If we have cross dependencies, move this below scalacfork... -->
- <javac
- srcdir="${src.dir}/compiler"
- destdir="${build-quick.dir}/classes/compiler"
- includes="**/*.java"
- target="1.5" source="1.5">
- <compilerarg line="${javac.args}"/>
- <classpath>
- <pathelement location="${build-quick.dir}/classes/library"/>
- <pathelement location="${build-quick.dir}/classes/compiler"/>
- <path refid="aux.libs"/>
- <pathelement location="${jline.jar}"/>
- </classpath>
- </javac>
<scalacfork
destdir="${build-quick.dir}/classes/compiler"
compilerpathref="locker.classpath"
@@ -805,6 +816,7 @@ QUICK BUILD (QUICK)
<pathelement location="${build-quick.dir}/classes/library"/>
<pathelement location="${build-quick.dir}/classes/compiler"/>
<path refid="aux.libs"/>
+ <path refid="asm.classpath"/>
<pathelement location="${jline.jar}"/>
</compilationpath>
</scalacfork>
@@ -879,7 +891,32 @@ QUICK BUILD (QUICK)
<stopwatch name="quick.plugins.timer" action="total"/>
</target>
- <target name="quick.pre-scalacheck" depends="quick.plugins">
+ <target name="quick.pre-actors-migration" depends="quick.plugins">
+ <uptodate property="quick.actors-migration.available" targetfile="${build-quick.dir}/actors-migration.complete">
+ <srcfiles dir="${src.dir}/actors-migration"/>
+ </uptodate>
+ </target>
+
+ <target name="quick.actors-migration" depends="quick.pre-actors-migration" unless="quick.actors-migration.available">
+ <stopwatch name="quick.actors-migration.timer"/>
+ <mkdir dir="${build-quick.dir}/classes/actors-migration"/>
+ <scalacfork
+ destdir="${build-quick.dir}/classes/actors-migration"
+ compilerpathref="quick.classpath"
+ params="${scalac.args.quick}"
+ srcdir="${src.dir}/actors-migration"
+ jvmargs="${scalacfork.jvmargs}">
+ <include name="**/*.scala"/>
+ <compilationpath>
+ <pathelement location="${build-quick.dir}/classes/library"/>
+ <pathelement location="${build-quick.dir}/classes/actors-migration"/>
+ </compilationpath>
+ </scalacfork>
+ <touch file="${build-quick.dir}/actors-migration.complete" verbose="no"/>
+ <stopwatch name="quick.actors-migration.timer" action="total"/>
+ </target>
+
+ <target name="quick.pre-scalacheck" depends="quick.actors-migration">
<uptodate property="quick.scalacheck.available" targetfile="${build-quick.dir}/scalacheck.complete">
<srcfiles dir="${src.dir}/scalacheck"/>
</uptodate>
@@ -1038,6 +1075,7 @@ QUICK BUILD (QUICK)
<path id="quick.classpath">
<pathelement location="${build-quick.dir}/classes/library"/>
<pathelement location="${build-quick.dir}/classes/compiler"/>
+ <path refid="asm.classpath"/>
<path refid="aux.libs"/>
</path>
</target>
@@ -1083,6 +1121,9 @@ PACKED QUICK BUILD (PACK)
<include name="scala/actors/**"/>
</fileset>
</jar>
+ <jar destfile="${build-pack.dir}/lib/scala-actors-migration.jar">
+ <fileset dir="${build-quick.dir}/classes/actors-migration"/>
+ </jar>
</target>
<target name="pack.pre-comp" depends="pack.lib">
@@ -1101,6 +1142,7 @@ PACKED QUICK BUILD (PACK)
<mkdir dir="${build-pack.dir}/lib"/>
<jar destfile="${build-pack.dir}/lib/scala-compiler.jar" manifest="${build-pack.dir}/META-INF/MANIFEST.MF">
<fileset dir="${build-quick.dir}/classes/compiler"/>
+ <fileset dir="${build-asm.dir}/classes"/>
<!-- filemode / dirmode: see trac ticket #1294 -->
<zipfileset dirmode="755" filemode="644" src="${fjbg.jar}"/>
<zipfileset dirmode="755" filemode="644" src="${msil.jar}"/>
@@ -1213,6 +1255,7 @@ PACKED QUICK BUILD (PACK)
<pathelement location="${build-pack.dir}/lib/scala-partest.jar"/>
<pathelement location="${build-pack.dir}/lib/scalap.jar"/>
<pathelement location="${build-pack.dir}/lib/scala-actors.jar"/>
+ <pathelement location="${build-pack.dir}/lib/scala-actors-migration.jar"/>
<pathelement location="${ant.jar}"/>
<pathelement location="${jline.jar}"/>
<path refid="lib.extra"/>
@@ -1316,19 +1359,6 @@ BOOTSTRAPPING BUILD (STRAP)
<stopwatch name="strap.comp.timer"/>
<mkdir dir="${build-strap.dir}/classes/compiler"/>
<!-- TODO If we have cross dependencies, move this below scalacfork... -->
- <javac
- srcdir="${src.dir}/compiler"
- destdir="${build-strap.dir}/classes/compiler"
- includes="**/*.java"
- target="1.5" source="1.5">
- <compilerarg line="${javac.args}"/>
- <classpath>
- <pathelement location="${build-strap.dir}/classes/library"/>
- <pathelement location="${build-strap.dir}/classes/compiler"/>
- <path refid="aux.libs"/>
- <pathelement location="${jline.jar}"/>
- </classpath>
- </javac>
<scalacfork
destdir="${build-strap.dir}/classes/compiler"
compilerpathref="pack.classpath"
@@ -1340,6 +1370,7 @@ BOOTSTRAPPING BUILD (STRAP)
<pathelement location="${build-strap.dir}/classes/library"/>
<pathelement location="${build-strap.dir}/classes/compiler"/>
<path refid="aux.libs"/>
+ <path refid="asm.classpath"/>
<pathelement location="${jline.jar}"/>
</compilationpath>
</scalacfork>
@@ -1685,6 +1716,7 @@ DOCUMENTATION
<include name="library/**"/>
<include name="swing/**"/>
<include name="actors/**"/>
+ <include name="actors-migration/**"/>
</source-includes>
</doc-uptodate-check>
</target>
@@ -1705,6 +1737,7 @@ DOCUMENTATION
docRootContent="${src.dir}/library/rootdoc.txt"
implicits="on" diagrams="on">
<src>
+ <files includes="${src.dir}/actors-migration"/>
<files includes="${src.dir}/actors"/>
<files includes="${src.dir}/library/scala"/>
<files includes="${src.dir}/swing"/>
@@ -1882,9 +1915,31 @@ DOCUMENTATION
<stopwatch name="docs.continuations-plugin.timer" action="total"/>
</target>
+ <target name="docs.pre-actors-migration" depends="docs.continuations-plugin">
+ <doc-uptodate-check name="actors-migration" srcdir="${src.dir}/actors-migration" />
+ </target>
+
+ <target name="docs.actors-migration" depends="docs.pre-actors-migration" unless="docs.actors-migration.available">
+ <stopwatch name="docs.actors-migration.timer"/>
+ <mkdir dir="${build-docs.dir}/actors-migration"/>
+ <scaladoc
+ destdir="${build-docs.dir}/actors-migration"
+ doctitle="Actors Migration Kit"
+ docversion="${version.number}"
+ sourcepath="${src.dir}"
+ classpathref="pack.classpath"
+ srcdir="${src.dir}/actors-migration"
+ addparams="${scalac.args.all}"
+ implicits="on" diagrams="on">
+ <include name="**/*.scala"/>
+ </scaladoc>
+ <touch file="${build-docs.dir}/actors-migration.complete" verbose="no"/>
+ <stopwatch name="docs.actors-migration.timer" action="total"/>
+ </target>
+
<target name="docs.done" depends="docs.man"/>
- <target name="docs.all" depends="docs.jline, docs.comp, docs.man, docs.lib, docs.scalap, docs.partest, docs.continuations-plugin"/>
+ <target name="docs.all" depends="docs.jline, docs.comp, docs.man, docs.lib, docs.scalap, docs.partest, docs.continuations-plugin, docs.actors-migration"/>
<target name="docs.clean">
<delete dir="${build-docs.dir}" includeemptydirs="yes" quiet="yes" failonerror="no"/>
@@ -1901,6 +1956,7 @@ BOOTRAPING TEST AND TEST SUITE
<exclude name="bin/**"/>
<exclude name="*.complete"/>
<exclude name="misc/scala-devel/plugins/*.jar"/>
+ <exclude name="classes/actors-migration/**"/>
</same>
</target>
@@ -2079,6 +2135,9 @@ DISTRIBUTION
<jar destfile="${dist.dir}/src/scala-actors-src.jar">
<fileset dir="${src.dir}/actors"/>
</jar>
+ <jar destfile="${dist.dir}/src/scala-actors-migration-src.jar">
+ <fileset dir="${src.dir}/actors-migration"/>
+ </jar>
<jar destfile="${dist.dir}/src/scalap-src.jar">
<fileset dir="${src.dir}/scalap"/>
</jar>
@@ -2259,6 +2318,12 @@ POSITIONS
<param name="test.srcs" value="${src.dir}/actors"/>
</antcall>
<antcall target="test.positions.sub" inheritRefs="true">
+ <param name="test.srcs" value="${src.dir}/actors-migration"/>
+ </antcall>
+ <antcall target="test.positions.sub" inheritRefs="true">
+ <param name="test.srcs" value="${src.dir}/dbc"/>
+ </antcall>
+ <antcall target="test.positions.sub" inheritRefs="true">
<param name="test.srcs" value="${src.dir}/swing"/>
</antcall>
<antcall target="test.positions.sub" inheritRefs="true">
diff --git a/project/Build.scala b/project/Build.scala
index ea0e84f0cc..610f756a34 100644
--- a/project/Build.scala
+++ b/project/Build.scala
@@ -2,25 +2,11 @@ import sbt._
import Keys._
import partest._
import SameTest._
+import ScalaBuildKeys._
+
+
object ScalaBuild extends Build with Layers {
- // New tasks/settings specific to the scala build.
- lazy val lockerLock: TaskKey[Unit] = TaskKey("locker-lock",
- "Locks the locker layer of the compiler build such that it won't rebuild on changed source files.")
- lazy val lockerUnlock: TaskKey[Unit] = TaskKey("locker-unlock",
- "Unlocks the locker layer of the compiler so that it will be recompiled on changed source files.")
- lazy val lockFile: SettingKey[File] = SettingKey("lock-file",
- "Location of the lock file compiling this project.")
- // New tasks/settings specific to the scala build.
- lazy val lock: TaskKey[Unit] = TaskKey("lock", "Locks this project so it won't be recompiled.")
- lazy val unlock: TaskKey[Unit] = TaskKey("unlock", "Unlocks this project so it will be recompiled.")
- lazy val makeDist: TaskKey[File] = TaskKey("make-dist",
- "Creates a mini-distribution (scala home directory) for this build in a zip file.")
- lazy val makeExplodedDist: TaskKey[File] = TaskKey("make-exploded-dist",
- "Creates a mini-distribution (scala home directory) for this build in a directory.")
- lazy val makeDistMappings: TaskKey[Map[File, String]] = TaskKey("make-dist-mappings",
- "Creates distribution mappings for creating zips,jars,directorys,etc.")
- lazy val buildFixed = AttributeKey[Boolean]("build-uri-fixed")
// Build wide settings:
override lazy val settings = super.settings ++ Versions.settings ++ Seq(
@@ -35,22 +21,7 @@ object ScalaBuild extends Build with Layers {
),
organization := "org.scala-lang",
version <<= Versions.mavenVersion,
- pomExtra := <xml:group>
- <inceptionYear>2002</inceptionYear>
- <licenses>
- <license>
- <name>BSD-like</name>
- <url>http://www.scala-lang.org/downloads/license.html</url>
- </license>
- </licenses>
- <scm>
- <connection>scm:git:git://github.com/scala/scala.git</connection>
- </scm>
- <issueManagement>
- <system>jira</system>
- <url>http://issues.scala-lang.org</url>
- </issueManagement>
- </xml:group>,
+ pomExtra := epflPomExtra,
commands += Command.command("fix-uri-projects") { (state: State) =>
if(state.get(buildFixed) getOrElse false) state
else {
@@ -154,24 +125,26 @@ object ScalaBuild extends Build with Layers {
// These are setting overrides for most artifacts in the Scala build file.
def settingOverrides: Seq[Setting[_]] = publishSettings ++ Seq(
- crossPaths := false,
- autoScalaLibrary := false,
- publishArtifact in packageDoc := false,
- publishArtifact in packageSrc := false,
- target <<= (baseDirectory, name) apply (_ / "target" / _),
- (classDirectory in Compile) <<= target(_ / "classes"),
- javacOptions ++= Seq("-target", "1.5", "-source", "1.5"),
- scalaSource in Compile <<= (baseDirectory, name) apply (_ / "src" / _),
- javaSource in Compile <<= (baseDirectory, name) apply (_ / "src" / _),
- autoScalaLibrary := false,
- unmanagedJars in Compile := Seq(),
- // Most libs in the compiler use this order to build.
- compileOrder in Compile := CompileOrder.JavaThenScala,
- lockFile <<= target(_ / "compile.lock"),
- skip in Compile <<= lockFile.map(_ exists),
- lock <<= lockFile map { f => IO.touch(f) },
- unlock <<= lockFile map IO.delete
- )
+ crossPaths := false,
+ autoScalaLibrary := false,
+ // Work around a bug where scala-library (and forkjoin) is put on classpath for analysis.
+ classpathOptions := ClasspathOptions.manual,
+ publishArtifact in packageDoc := false,
+ publishArtifact in packageSrc := false,
+ target <<= (baseDirectory, name) apply (_ / "target" / _),
+ (classDirectory in Compile) <<= target(_ / "classes"),
+ javacOptions ++= Seq("-target", "1.5", "-source", "1.5"),
+ scalaSource in Compile <<= (baseDirectory, name) apply (_ / "src" / _),
+ javaSource in Compile <<= (baseDirectory, name) apply (_ / "src" / _),
+ autoScalaLibrary := false,
+ unmanagedJars in Compile := Seq(),
+ // Most libs in the compiler use this order to build.
+ compileOrder in Compile := CompileOrder.JavaThenScala,
+ lockFile <<= target(_ / "compile.lock"),
+ skip in Compile <<= lockFile map (_.exists),
+ lock <<= lockFile map (f => IO.touch(f)),
+ unlock <<= lockFile map IO.delete
+ )
// --------------------------------------------------------------
// Libraries used by Scalac that change infrequently
@@ -182,6 +155,8 @@ object ScalaBuild extends Build with Layers {
lazy val jline = Project("jline", file("src/jline"))
// Fast Java Bytecode Generator (nested in every scala-compiler.jar)
lazy val fjbg = Project("fjbg", file(".")) settings(settingOverrides : _*)
+ // Our wrapped version of msil.
+ lazy val asm = Project("asm", file(".")) settings(settingOverrides : _*)
// Forkjoin backport
lazy val forkjoin = Project("forkjoin", file(".")) settings(settingOverrides : _*)
@@ -202,15 +177,15 @@ object ScalaBuild extends Build with Layers {
}
// Locker is a lockable Scala compiler that can be built of 'current' source to perform rapid development.
- lazy val (lockerLib, lockerComp) = makeLayer("locker", STARR)
+ lazy val (lockerLib, lockerComp) = makeLayer("locker", STARR, autoLock = true)
lazy val locker = Project("locker", file(".")) aggregate(lockerLib, lockerComp)
// Quick is the general purpose project layer for the Scala compiler.
- lazy val (quickLib, quickComp) = makeLayer("quick", makeScalaReference("locker", lockerLib, lockerComp, fjbg))
+ lazy val (quickLib, quickComp) = makeLayer("quick", makeScalaReference("locker", lockerLib, lockerComp))
lazy val quick = Project("quick", file(".")) aggregate(quickLib, quickComp)
// Reference to quick scala instance.
- lazy val quickScalaInstance = makeScalaReference("quick", quickLib, quickComp, fjbg)
+ lazy val quickScalaInstance = makeScalaReference("quick", quickLib, quickComp)
def quickScalaLibraryDependency = unmanagedClasspath in Compile <++= (exportedProducts in quickLib in Compile).identity
def quickScalaCompilerDependency = unmanagedClasspath in Compile <++= (exportedProducts in quickComp in Compile).identity
@@ -310,7 +285,7 @@ object ScalaBuild extends Build with Layers {
// --------------------------------------------------------------
// Real Compiler Artifact
// --------------------------------------------------------------
- lazy val packageScalaBinTask = Seq(quickComp, fjbg).map(p => products in p in Compile).join.map(_.flatten).map(productTaskToMapping)
+ lazy val packageScalaBinTask = Seq(quickComp, fjbg, asm).map(p => products in p in Compile).join.map(_.flatten).map(productTaskToMapping)
lazy val scalaBinArtifactSettings : Seq[Setting[_]] = inConfig(Compile)(Defaults.packageTasks(packageBin, packageScalaBinTask)) ++ Seq(
name := "scala-compiler",
crossPaths := false,
@@ -322,7 +297,7 @@ object ScalaBuild extends Build with Layers {
target <<= (baseDirectory, name) apply (_ / "target" / _)
)
lazy val scalaCompiler = Project("scala-compiler", file(".")) settings(publishSettings:_*) settings(scalaBinArtifactSettings:_*) dependsOn(scalaLibrary)
- lazy val fullQuickScalaReference = makeScalaReference("pack", scalaLibrary, scalaCompiler, fjbg)
+ lazy val fullQuickScalaReference = makeScalaReference("pack", scalaLibrary, scalaCompiler)
// --------------------------------------------------------------
// Testing
@@ -366,7 +341,7 @@ object ScalaBuild extends Build with Layers {
// TODO - Migrate this into the dist project.
// Scaladocs
- def distScalaInstance = makeScalaReference("dist", scalaLibrary, scalaCompiler, fjbg)
+ def distScalaInstance = makeScalaReference("dist", scalaLibrary, scalaCompiler)
lazy val documentationSettings: Seq[Setting[_]] = dependentProjectSettings ++ Seq(
// TODO - Make these work for realz.
defaultExcludes in unmanagedSources in Compile := ((".*" - ".") || HiddenFileFilter ||
diff --git a/project/Layers.scala b/project/Layers.scala
index 3d6d780f8f..6d0c68f2a4 100644
--- a/project/Layers.scala
+++ b/project/Layers.scala
@@ -1,6 +1,7 @@
import sbt._
import Keys._
import com.jsuereth.git.GitKeys.gitRunner
+import ScalaBuildKeys.lock
/** This trait stores all the helper methods to generate layers in Scala's layered build. */
trait Layers extends Build {
@@ -14,6 +15,8 @@ trait Layers extends Build {
def forkjoin: Project
/** Reference to Fast-Java-Bytecode-Generator library */
def fjbg: Project
+ /** Reference to the ASM wrapped project. */
+ def asm: Project
/** A setting that adds some external dependencies. */
def externalDeps: Setting[_]
/** The root project. */
@@ -22,14 +25,15 @@ trait Layers extends Build {
/** Creates a reference Scala version that can be used to build other projects. This takes in the raw
* library, compiler and fjbg libraries as well as a string representing the layer name (used for compiling the compile-interface).
*/
- def makeScalaReference(layer : String, library: Project, compiler: Project, fjbg: Project) =
+ def makeScalaReference(layer: String, library: Project, compiler: Project) =
scalaInstance <<= (appConfiguration in library,
version in library,
(exportedProducts in library in Compile),
(exportedProducts in compiler in Compile),
(exportedProducts in fjbg in Compile),
- (fullClasspath in jline in Runtime)) map {
- (app, version: String, lib: Classpath, comp: Classpath, fjbg: Classpath, jline: Classpath) =>
+ (fullClasspath in jline in Runtime),
+ (exportedProducts in asm in Runtime)) map {
+ (app, version: String, lib: Classpath, comp: Classpath, fjbg: Classpath, jline: Classpath, asm: Classpath) =>
val launcher = app.provider.scalaProvider.launcher
(lib,comp) match {
case (Seq(libraryJar), Seq(compilerJar)) =>
@@ -38,7 +42,7 @@ trait Layers extends Build {
libraryJar.data,
compilerJar.data,
launcher,
- ((fjbg.files++jline.files):_*))
+ ((fjbg.files++jline.files ++ asm.files):_*))
case _ => error("Cannot build a ScalaReference with more than one classpath element")
}
}
@@ -47,8 +51,18 @@ trait Layers extends Build {
* Returns the library project and compiler project from the next layer.
* Note: The library and compiler are not *complete* in the sense that they are missing things like "actors" and "fjbg".
*/
- def makeLayer(layer: String, referenceScala: Setting[Task[ScalaInstance]]) : (Project, Project) = {
- val library = Project(layer + "-library", file(".")) settings(settingOverrides: _*) settings(
+ def makeLayer(layer: String, referenceScala: Setting[Task[ScalaInstance]], autoLock: Boolean = false) : (Project, Project) = {
+ val autoLockSettings: Seq[Setting[_]] =
+ if(autoLock) Seq(compile in Compile <<= (compile in Compile, lock) apply { (c, l) =>
+ c flatMapR { cResult =>
+ val result = Result.tryValue(cResult)
+ l mapR { tx => result }
+ }
+ })
+ else Seq.empty
+
+
+ val library = Project(layer + "-library", file(".")) settings(settingOverrides: _*) settings(autoLockSettings:_*) settings(
version := layer,
// TODO - use depends on.
unmanagedClasspath in Compile <<= (exportedProducts in forkjoin in Compile).identity,
@@ -58,13 +72,12 @@ trait Layers extends Build {
defaultExcludes in unmanagedResources := ("*.scala" | "*.java" | "*.disabled"),
// TODO - Allow other scalac option settings.
scalacOptions in Compile <++= (scalaSource in Compile) map (src => Seq("-sourcepath", src.getAbsolutePath)),
- classpathOptions := ClasspathOptions.manual,
resourceGenerators in Compile <+= (resourceManaged, Versions.scalaVersions, skip in Compile, streams) map Versions.generateVersionPropertiesFile("library.properties"),
referenceScala
)
// Define the compiler
- val compiler = Project(layer + "-compiler", file(".")) settings(settingOverrides:_*) settings(
+ val compiler = Project(layer + "-compiler", file(".")) settings(settingOverrides:_*) settings(autoLockSettings:_*) settings(
version := layer,
scalaSource in Compile <<= (baseDirectory) apply (_ / "src" / "compiler"),
resourceDirectory in Compile <<= baseDirectory apply (_ / "src" / "compiler"),
@@ -80,8 +93,7 @@ trait Layers extends Build {
dirs.descendentsExcept( ("*.xml" | "*.html" | "*.gif" | "*.png" | "*.js" | "*.css" | "*.tmpl" | "*.swf" | "*.properties" | "*.txt"),"*.scala").get
},
// TODO - Use depends on *and* SBT's magic dependency mechanisms...
- unmanagedClasspath in Compile <<= Seq(forkjoin, library, fjbg, jline).map(exportedProducts in Compile in _).join.map(_.flatten),
- classpathOptions := ClasspathOptions.manual,
+ unmanagedClasspath in Compile <<= Seq(forkjoin, library, fjbg, jline, asm).map(exportedProducts in Compile in _).join.map(_.flatten),
externalDeps,
referenceScala
)
diff --git a/src/actors-migration/scala/actors/MigrationSystem.scala b/src/actors-migration/scala/actors/MigrationSystem.scala
new file mode 100644
index 0000000000..ffc93d9c6f
--- /dev/null
+++ b/src/actors-migration/scala/actors/MigrationSystem.scala
@@ -0,0 +1,36 @@
+package scala.actors
+
+import scala.collection._
+
+object MigrationSystem {
+
+ private[actors] val contextStack = new ThreadLocal[immutable.Stack[Boolean]] {
+ override def initialValue() = immutable.Stack[Boolean]()
+ }
+
+ private[this] def withCleanContext(block: => ActorRef): ActorRef = {
+ // push clean marker
+ val old = contextStack.get
+ contextStack.set(old.push(true))
+ try {
+ val instance = block
+
+ if (instance eq null)
+ throw new Exception("Actor instance passed to actorOf can't be 'null'")
+
+ instance
+ } finally {
+ val stackAfter = contextStack.get
+ if (stackAfter.nonEmpty)
+ contextStack.set(if (!stackAfter.head) stackAfter.pop.pop else stackAfter.pop)
+ }
+ }
+
+ def actorOf(props: Props): ActorRef = withCleanContext {
+ val creator = props.creator()
+ val r = new InternalActorRef(creator)
+ creator.start()
+ r
+ }
+
+}
\ No newline at end of file
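A hedged usage sketch of the factory above (illustrative only: MyStashingActor and the dispatcher string are made-up names; the StashingActor trait and Props are added later in this commit):

    import scala.actors._

    // hypothetical actor class built on the StashingActor trait added below
    class MyStashingActor extends StashingActor {
      def receive = { case msg => println("got: " + msg) }
    }

    // Props carries a by-name creator; actorOf evaluates it inside withCleanContext,
    // which is what satisfies the creation check performed by StashingActor.
    val ref: ActorRef = MigrationSystem.actorOf(Props(() => new MyStashingActor, "default-dispatcher"))
    ref ! "hello"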
diff --git a/src/actors-migration/scala/actors/Pattern.scala b/src/actors-migration/scala/actors/Pattern.scala
new file mode 100644
index 0000000000..97dbd2cccd
--- /dev/null
+++ b/src/actors-migration/scala/actors/Pattern.scala
@@ -0,0 +1,25 @@
+package scala.actors
+
+import scala.concurrent.util.Duration
+
+object pattern {
+
+ implicit def askSupport(ar: ActorRef): AskableActorRef =
+ new AskableActorRef(ar)
+}
+
+/**
+ * ActorRef with support for ask(?) operation.
+ */
+class AskableActorRef(val ar: ActorRef) extends ActorRef {
+
+ def !(message: Any)(implicit sender: ActorRef = null): Unit = ar.!(message)(sender)
+
+ def ?(message: Any)(timeout: Timeout): Future[Any] = ar.?(message, timeout.duration)
+
+ private[actors] def ?(message: Any, timeout: Duration): Future[Any] = ar.?(message, timeout)
+
+ def forward(message: Any) = ar.forward(message)
+
+ private[actors] def localActor: AbstractActor = ar.localActor
+}
\ No newline at end of file
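A hedged sketch of the ask support this enables (assumes a ref obtained from MigrationSystem.actorOf; the Timeout class is added later in this commit):

    import scala.actors.pattern._                   // brings the askSupport implicit into scope
    import scala.actors.{ActorRef, Future, Timeout}

    def ping(ref: ActorRef): Future[Any] =
      (ref ? "ping")(Timeout(1000))                 // completes with the reply, or with an AskTimeoutException value on timeout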
diff --git a/src/actors-migration/scala/actors/Props.scala b/src/actors-migration/scala/actors/Props.scala
new file mode 100644
index 0000000000..891e23213a
--- /dev/null
+++ b/src/actors-migration/scala/actors/Props.scala
@@ -0,0 +1,13 @@
+package scala.actors
+
+/**
+ * ActorRef configuration object. It represents the minimal subset of Akka Props class.
+ */
+case class Props(creator: () ⇒ InternalActor, dispatcher: String) {
+
+ /**
+ * Returns a new Props with the specified creator set
+ */
+ def withCreator(c: ⇒ InternalActor) = copy(creator = () ⇒ c)
+
+}
diff --git a/src/actors-migration/scala/actors/StashingActor.scala b/src/actors-migration/scala/actors/StashingActor.scala
new file mode 100644
index 0000000000..37300f9d63
--- /dev/null
+++ b/src/actors-migration/scala/actors/StashingActor.scala
@@ -0,0 +1,255 @@
+package scala.actors
+
+import scala.collection._
+import scala.concurrent.util.Duration
+import java.util.concurrent.TimeUnit
+
+object StashingActor extends Combinators {
+ implicit def mkBody[A](body: => A) = new InternalActor.Body[A] {
+ def andThen[B](other: => B): Unit = Actor.rawSelf.seq(body, other)
+ }
+}
+
+@deprecated("Scala Actors are being removed from the standard library. Please refer to the migration guide.", "2.10")
+trait StashingActor extends InternalActor {
+ type Receive = PartialFunction[Any, Unit]
+
+ // checks if StashingActor is created within the actorOf block
+ creationCheck;
+
+ private[actors] val ref = new InternalActorRef(this)
+
+ val self: ActorRef = ref
+
+ protected[this] val context: ActorContext = new ActorContext(this)
+
+ @volatile
+ private var myTimeout: Option[Long] = None
+
+ private val stash = new MQueue[Any]("Stash")
+
+ /**
+ * Migration notes:
+ * this method replaces receiveWithin, receive and react methods from Scala Actors.
+ */
+ def receive: Receive
+
+ /**
+ * User overridable callback.
+ * <p/>
+ * Is called when an Actor is started by invoking 'actor'.
+ */
+ def preStart() {}
+
+ /**
+ * User overridable callback.
+ * <p/>
+ * Is called when 'actor.stop()' is invoked.
+ */
+ def postStop() {}
+
+ /**
+ * User overridable callback.
+ * <p/>
+ * Is called on a crashed Actor right BEFORE it is restarted to allow clean
+ * up of resources before Actor is terminated.
+ * By default it calls postStop()
+ */
+ def preRestart(reason: Throwable, message: Option[Any]) { postStop() }
+
+ /**
+ * Changes the Actor's behavior to become the new 'Receive' (PartialFunction[Any, Unit]) handler.
+ * Puts the behavior on top of the hotswap stack.
+ * If "discardOld" is true, an unbecome will be issued prior to pushing the new behavior to the stack
+ */
+ private def become(behavior: Receive, discardOld: Boolean = true) {
+ if (discardOld) unbecome()
+ behaviorStack = behaviorStack.push(wrapWithSystemMessageHandling(behavior))
+ }
+
+ /**
+ * Reverts the Actor behavior to the previous one in the hotswap stack.
+ */
+ private def unbecome() {
+ // never unbecome the initial behavior
+ if (behaviorStack.size > 1)
+ behaviorStack = behaviorStack.pop
+ }
+
+ /**
+ * User overridable callback.
+ * <p/>
+ * Is called when a message isn't handled by the current behavior of the actor
+ * by default it does: EventHandler.warning(self, message)
+ */
+ def unhandled(message: Any) {
+ message match {
+ case Terminated(dead) ⇒ throw new DeathPactException(dead)
+ case _ ⇒ System.err.println("Unhandeled message " + message)
+ }
+ }
+
+ protected def sender: ActorRef = new OutputChannelRef(internalSender)
+
+ override def act(): Unit = internalAct()
+
+ override def start(): StashingActor = {
+ super.start()
+ this
+ }
+
+ override def receive[R](f: PartialFunction[Any, R]): R
+
+ /*
+ * Internal implementation.
+ */
+
+ private[actors] var behaviorStack = immutable.Stack[PartialFunction[Any, Unit]]()
+
+ /*
+ * Checks that StashingActor can be created only by MigrationSystem.actorOf method.
+ */
+ private[this] def creationCheck = {
+
+ // creation check (see ActorRef)
+ val context = MigrationSystem.contextStack.get
+ if (context.isEmpty)
+ throw new RuntimeException("In order to create StashingActor one must use actorOf.")
+ else {
+ if (!context.head)
+ throw new RuntimeException("Only one actor can be created per actorOf call.")
+ else
+ MigrationSystem.contextStack.set(context.push(false))
+ }
+
+ }
+
+ private[actors] override def preAct() {
+ preStart()
+ }
+
+ /**
+ * Adds message to a stash, to be processed later. Stashed messages can be fed back into the $actor's
+ * mailbox using <code>unstashAll()</code>.
+ *
+ * Temporarily stashing away messages that the $actor does not (yet) handle simplifies implementing
+ * certain messaging protocols.
+ */
+ final def stash(msg: Any): Unit = {
+ stash.append(msg, null)
+ }
+
+ final def unstashAll(): Unit = {
+ mailbox.prepend(stash)
+ stash.clear()
+ }
+
+ /**
+ * Wraps any partial function with Exit message handling.
+ */
+ private[actors] def wrapWithSystemMessageHandling(pf: PartialFunction[Any, Unit]): PartialFunction[Any, Unit] = {
+
+ def swapExitHandler(pf: PartialFunction[Any, Unit]) = new PartialFunction[Any, Unit] {
+ def swapExit(v: Any) = v match {
+ case Exit(from, reason) =>
+ Terminated(new InternalActorRef(from.asInstanceOf[InternalActor]))
+ case v => v
+ }
+
+ def isDefinedAt(v: Any) = pf.isDefinedAt(swapExit(v))
+ def apply(v: Any) = pf(swapExit(v))
+ }
+
+ swapExitHandler(pf orElse {
+ case m => unhandled(m)
+ })
+ }
+
+ /**
+ * Method that models the behavior of Akka actors.
+ */
+ private[actors] def internalAct() {
+ trapExit = true
+ behaviorStack = behaviorStack.push(wrapWithSystemMessageHandling(receive))
+ loop {
+ if (myTimeout.isDefined)
+ reactWithin(myTimeout.get)(behaviorStack.top)
+ else
+ react(behaviorStack.top)
+ }
+ }
+
+ private[actors] override def internalPostStop() = postStop()
+
+ // Used for pattern matching statement similar to Akka
+ lazy val ReceiveTimeout = TIMEOUT
+
+ /**
+ * Used to simulate Akka context behavior. Should be used only for migration purposes.
+ */
+ protected[actors] class ActorContext(val actr: StashingActor) {
+
+ /**
+ * Changes the Actor's behavior to become the new 'Receive' (PartialFunction[Any, Unit]) handler.
+ * Puts the behavior on top of the hotswap stack.
+ * If "discardOld" is true, an unbecome will be issued prior to pushing the new behavior to the stack
+ */
+ def become(behavior: Receive, discardOld: Boolean = true) = actr.become(behavior, discardOld)
+
+ /**
+ * Reverts the Actor behavior to the previous one in the hotswap stack.
+ */
+ def unbecome() = actr.unbecome()
+
+ /**
+ * Shuts down the actor its dispatcher and message queue.
+ */
+ def stop(subject: ActorRef): Nothing = if (subject != ref)
+ throw new RuntimeException("Only stoping of self is allowed during migration.")
+ else
+ actr.exit()
+
+ /**
+ * Registers this actor as a Monitor for the provided ActorRef.
+ * @return the provided ActorRef
+ */
+ def watch(subject: ActorRef): ActorRef = {
+ actr.watch(subject)
+ subject
+ }
+
+ /**
+ * Unregisters this actor as Monitor for the provided ActorRef.
+ * @return the provided ActorRef
+ */
+ def unwatch(subject: ActorRef): ActorRef = {
+ actr unwatch subject
+ subject
+ }
+
+ /**
+ * Defines the receiver timeout value.
+ */
+ final def setReceiveTimeout(timeout: Duration): Unit =
+ actr.myTimeout = Some(timeout.toMillis)
+
+ /**
+ * Gets the current receiveTimeout
+ */
+ final def receiveTimeout: Option[Duration] =
+ actr.myTimeout.map(Duration(_, TimeUnit.MILLISECONDS))
+
+ }
+}
+
+/**
+ * This message is thrown by default when an Actor does not handle termination.
+ */
+class DeathPactException(ref: ActorRef = null) extends Exception {
+ override def fillInStackTrace() = this //Don't waste cycles generating stack trace
+}
+
+/**
+ * Message that is sent to a watching actor when the watched actor terminates.
+ */
+case class Terminated(actor: ActorRef)
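Putting the pieces together, a hedged sketch of an actor written against this trait (Worker and its messages are illustrative): the initial behavior stashes work until "start" arrives, then context.become switches behaviors and unstashAll feeds the deferred messages back into the mailbox.

    class Worker extends StashingActor {
      def receive = idle

      def idle: Receive = {
        case "start" =>
          unstashAll()               // replay messages stashed while idle
          context.become(busy)
        case msg => stash(msg)       // defer everything else until started
      }

      def busy: Receive = {
        case "stop" => context.become(idle)
        case msg    => println("processing " + msg)
      }
    }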
diff --git a/src/actors-migration/scala/actors/Timeout.scala b/src/actors-migration/scala/actors/Timeout.scala
new file mode 100644
index 0000000000..0d9532a14b
--- /dev/null
+++ b/src/actors-migration/scala/actors/Timeout.scala
@@ -0,0 +1,39 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2005-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.actors
+
+import scala.concurrent.util.Duration
+import java.util.concurrent.TimeUnit
+
+case class Timeout(duration: Duration) {
+ def this(timeout: Long) = this(Duration(timeout, TimeUnit.MILLISECONDS))
+ def this(length: Long, unit: TimeUnit) = this(Duration(length, unit))
+}
+
+object Timeout {
+
+ /**
+ * A timeout with zero duration, will cause most requests to always timeout.
+ */
+ val zero = new Timeout(Duration.Zero)
+
+ /**
+ * A Timeout with infinite duration. Will never timeout. Use extreme caution with this
+ * as it may cause memory leaks, blocked threads, or may not even be supported by
+ * the receiver, which would result in an exception.
+ */
+ val never = new Timeout(Duration.Inf)
+
+ def apply(timeout: Long) = new Timeout(timeout)
+ def apply(length: Long, unit: TimeUnit) = new Timeout(length, unit)
+
+ implicit def durationToTimeout(duration: Duration) = new Timeout(duration)
+ implicit def intToTimeout(timeout: Int) = new Timeout(timeout)
+ implicit def longToTimeout(timeout: Long) = new Timeout(timeout)
+}
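The companion's implicit conversions let a bare Duration, Int, or Long be used where a Timeout is expected (for example in the ask call sketched earlier); a minimal sketch:

    import scala.actors.Timeout
    import scala.concurrent.util.Duration
    import java.util.concurrent.TimeUnit

    val t1: Timeout = Timeout(5000)                  // 5000 ms, via apply(Long)
    val t2: Timeout = Duration(5, TimeUnit.SECONDS)  // via the durationToTimeout implicit
    val t3: Timeout = 500                            // via the intToTimeout implicit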
diff --git a/src/actors/scala/actors/ActorRef.scala b/src/actors/scala/actors/ActorRef.scala
new file mode 100644
index 0000000000..8f70b13e01
--- /dev/null
+++ b/src/actors/scala/actors/ActorRef.scala
@@ -0,0 +1,119 @@
+package scala.actors
+
+import java.util.concurrent.TimeoutException
+import scala.concurrent.util.Duration
+
+/**
+ * Trait used for migration of Scala actors to Akka.
+ */
+@deprecated("ActorRef ought to be used only with the Actor Migration Kit.")
+trait ActorRef {
+
+ /**
+ * Sends a one-way asynchronous message. E.g. fire-and-forget semantics.
+ * <p/>
+ *
+ * If invoked from within an actor then the actor reference is implicitly passed on as the implicit 'sender' argument.
+ * <p/>
+ *
+ * This actor 'sender' reference is then available in the receiving actor in the 'sender' member variable,
+ * if invoked from within an Actor. If not then no sender is available.
+ * <pre>
+ * actor ! message
+ * </pre>
+ * <p/>
+ */
+ def !(message: Any)(implicit sender: ActorRef = null): Unit
+
+ /**
+ * Sends a message asynchronously, returning a future which may eventually hold the reply.
+ */
+ private[actors] def ?(message: Any, timeout: Duration): Future[Any]
+
+ /**
+ * Forwards the message and passes the original sender actor as the sender.
+ * <p/>
+ * Works with '!' and '?'.
+ */
+ def forward(message: Any)
+
+ private[actors] def localActor: AbstractActor
+
+}
+
+private[actors] class OutputChannelRef(val actor: OutputChannel[Any]) extends ActorRef {
+
+ override private[actors] def ?(message: Any, timeout: Duration): Future[Any] =
+ throw new UnsupportedOperationException("Output channel does not support ?")
+
+ /**
+ * Sends a one-way asynchronous message. E.g. fire-and-forget semantics.
+ * <p/>
+ *
+ * <p/>
+ * <pre>
+ * actor ! message
+ * </pre>
+ * <p/>
+ */
+ def !(message: Any)(implicit sender: ActorRef = null): Unit =
+ if (sender != null)
+ actor.send(message, sender.localActor)
+ else
+ actor ! message
+
+ override def equals(that: Any) =
+ that.isInstanceOf[OutputChannelRef] && that.asInstanceOf[OutputChannelRef].actor == this.actor
+
+ private[actors] override def localActor: AbstractActor =
+ throw new UnsupportedOperationException("Output channel does not have an instance of the actor")
+
+ def forward(message: Any): Unit = throw new UnsupportedOperationException("OutputChannel does not support forward.")
+
+}
+
+private[actors] class ReactorRef(override val actor: Reactor[Any]) extends OutputChannelRef(actor) {
+
+ /**
+ * Forwards the message and passes the original sender actor as the sender.
+ * <p/>
+ * Works with '!' and '?'.
+ */
+ override def forward(message: Any) = actor.forward(message)
+
+}
+
+private[actors] final class InternalActorRef(override val actor: InternalActor) extends ReactorRef(actor) {
+
+ /**
+ * Sends a message asynchronously, returning a future which may eventually hold the reply.
+ */
+ override private[actors] def ?(message: Any, timeout: Duration): Future[Any] =
+ Futures.future {
+ val dur = if (timeout.isFinite()) timeout.toMillis else (java.lang.Long.MAX_VALUE >> 2)
+ actor !? (dur, message) match {
+ case Some(x) => x
+ case None => new AskTimeoutException("? operation timed out.")
+ }
+ }
+
+ override def !(message: Any)(implicit sender: ActorRef = null): Unit =
+ if (message == PoisonPill)
+ actor.stop('normal)
+ else if (sender != null)
+ actor.send(message, sender.localActor)
+ else
+ actor ! message
+
+ private[actors] override def localActor: InternalActor = this.actor
+}
+
+/**
+ * This is what is used to complete a Future that is returned from an ask/? call,
+ * when it times out.
+ */
+class AskTimeoutException(message: String, cause: Throwable) extends TimeoutException {
+ def this(message: String) = this(message, null: Throwable)
+}
+
+object PoisonPill
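As InternalActorRef.! above shows, PoisonPill is intercepted rather than delivered; a minimal sketch (ref is assumed to come from MigrationSystem.actorOf):

    ref ! "last message"
    ref ! PoisonPill    // never reaches receive: InternalActorRef.! calls actor.stop('normal)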
diff --git a/src/actors/scala/actors/ActorTask.scala b/src/actors/scala/actors/ActorTask.scala
index bb04302238..045b00f5f2 100644
--- a/src/actors/scala/actors/ActorTask.scala
+++ b/src/actors/scala/actors/ActorTask.scala
@@ -51,7 +51,6 @@ private[actors] class ActorTask(actor: InternalActor,
super.terminateExecution(e)
() => {}
}
- actor.internalPostStop
res
}
diff --git a/src/actors/scala/actors/InternalActor.scala b/src/actors/scala/actors/InternalActor.scala
index c94da5b9fd..cb66021d1c 100644
--- a/src/actors/scala/actors/InternalActor.scala
+++ b/src/actors/scala/actors/InternalActor.scala
@@ -153,7 +153,7 @@ private[actors] trait InternalActor extends AbstractActor with InternalReplyReac
val matches = f.isDefinedAt(m)
senders = senders.tail
matches
- })
+ })
if (null eq qel) {
val todo = synchronized {
// in mean time new stuff might have arrived
@@ -317,6 +317,35 @@ private[actors] trait InternalActor extends AbstractActor with InternalReplyReac
}
/**
+ * Links <code>self</code> to actor <code>to</code>.
+ *
+ * @param to the actor to link to
+ * @return the parameter actor
+ */
+ def link(to: ActorRef): ActorRef = {
+ this.link(to.localActor)
+ to
+ }
+
+ /**
+ * Unidirectional linking. For migration purposes only
+ */
+ private[actors] def watch(subject: ActorRef): ActorRef = {
+ assert(Actor.self(scheduler) == this, "watch called on actor different from self")
+ subject.localActor linkTo this
+ subject
+ }
+
+ /**
+ * Unidirectional linking. For migration purposes only
+ */
+ private[actors] def unwatch(subject: ActorRef): ActorRef = {
+ assert(Actor.self(scheduler) == this, "unwatch called on actor different from self")
+ subject.localActor unlinkFrom this
+ subject
+ }
+
+ /**
* Links <code>self</code> to the actor defined by <code>body</code>.
*
* @param body the body of the actor to link to
@@ -346,17 +375,24 @@ private[actors] trait InternalActor extends AbstractActor with InternalReplyReac
from unlinkFrom this
}
+ /**
+ * Unlinks <code>self</code> from actor <code>from</code>.
+ */
+ def unlink(from: ActorRef) {
+ unlink(from.localActor)
+ }
+
private[actors] def unlinkFrom(from: AbstractActor) = synchronized {
links = links.filterNot(from.==)
}
- @volatile
+ @volatile
private[actors] var _trapExit = false
-
+
def trapExit = _trapExit
-
+
def trapExit_=(value: Boolean) = _trapExit = value
-
+
// guarded by this
private var exitReason: AnyRef = 'normal
// guarded by this
@@ -445,12 +481,11 @@ private[actors] trait InternalActor extends AbstractActor with InternalReplyReac
scheduler.onTerminate(this) { f }
}
- private[actors] def internalPostStop() = {}
- private[actors] def stop(reason: AnyRef): Unit = {
+ private[actors] def stop(reason: AnyRef): Unit = {
synchronized {
shouldExit = true
- exitReason = reason
+ exitReason = reason
// resume this Actor in a way that
// causes it to exit
// (because shouldExit == true)
@@ -464,7 +499,7 @@ private[actors] trait InternalActor extends AbstractActor with InternalReplyReac
/* Here we should not throw a SuspendActorControl,
since the current method is called from an actor that
is in the process of exiting.
-
+
Therefore, the contract for scheduleActor is that
it never throws a SuspendActorControl.
*/
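The InternalActor hunk above lets migrated code keep using links while holding only ActorRef handles: link(to: ActorRef) and unlink(from: ActorRef) delegate to the existing AbstractActor overloads via localActor, while the private watch/unwatch give the kit one-directional monitoring. A small sketch under those assumptions; the Supervisor actor and the peer handle are hypothetical:

    import scala.actors.{Actor, ActorRef, Exit}

    // Hypothetical supervising actor; only the link/unlink overloads come from this patch.
    class Supervisor(peer: ActorRef) extends Actor {
      trapExit = true                  // receive Exit messages instead of terminating with the peer
      def act() = {
        link(peer)                     // new overload: resolves peer.localActor and links to it
        react {
          case Exit(_, reason) =>
            println("peer terminated: " + reason)
            unlink(peer)               // new overload, symmetric to link(ActorRef)
        }
      }
    }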
diff --git a/src/actors/scala/actors/MQueue.scala b/src/actors/scala/actors/MQueue.scala
index 65427d68c5..4a148d2cb3 100644
--- a/src/actors/scala/actors/MQueue.scala
+++ b/src/actors/scala/actors/MQueue.scala
@@ -25,6 +25,20 @@ private[actors] class MQueue[Msg >: Null](protected val label: String) {
_size += diff
}
+ def prepend(other: MQueue[Msg]) {
+ if (!other.isEmpty) {
+ other.last.next = first
+ first = other.first
+ }
+ }
+
+ def clear() {
+ first = null
+ last = null
+ _size = 0
+ }
+
+
def append(msg: Msg, session: OutputChannel[Any]) {
changeSize(1) // size always increases by 1
val el = new MQueueElement(msg, session)
diff --git a/src/actors/scala/actors/Reactor.scala b/src/actors/scala/actors/Reactor.scala
index 206a97d97c..7a8d738758 100644
--- a/src/actors/scala/actors/Reactor.scala
+++ b/src/actors/scala/actors/Reactor.scala
@@ -214,11 +214,16 @@ trait Reactor[Msg >: Null] extends OutputChannel[Msg] with Combinators {
scheduler executeFromActor makeReaction(null, handler, msg)
}
+ private[actors] def preAct() = {}
+
// guarded by this
private[actors] def dostart() {
_state = Actor.State.Runnable
scheduler newActor this
- scheduler execute makeReaction(() => act(), null, null)
+ scheduler execute makeReaction(() => {
+ preAct()
+ act()
+ }, null, null)
}
/**
@@ -285,12 +290,15 @@ trait Reactor[Msg >: Null] extends OutputChannel[Msg] with Combinators {
throw Actor.suspendException
}
+ private[actors] def internalPostStop() = {}
+
private[actors] def terminated() {
synchronized {
_state = Actor.State.Terminated
// reset waitingFor, otherwise getState returns Suspended
waitingFor = Reactor.waitingForNone
}
+ internalPostStop()
scheduler.terminated(this)
}
diff --git a/src/actors/scala/actors/ReplyReactor.scala b/src/actors/scala/actors/ReplyReactor.scala
index 0ffbbd3cce..83d7ba0f7f 100644
--- a/src/actors/scala/actors/ReplyReactor.scala
+++ b/src/actors/scala/actors/ReplyReactor.scala
@@ -7,10 +7,7 @@
\* */
package scala.actors
-@deprecated("Scala Actors are beeing removed from the standard library. Please refer to the migration guide.", "2.10")
+@deprecated("Scala Actors are being removed from the standard library. Please refer to the migration guide.", "2.10")
trait ReplyReactor extends InternalReplyReactor {
-
- protected[actors] def sender: OutputChannel[Any] = super.internalSender
-
+ protected[actors] def sender: OutputChannel[Any] = super.internalSender
}
-
diff --git a/src/compiler/scala/tools/asm/AnnotationVisitor.java b/src/asm/scala/tools/asm/AnnotationVisitor.java
index b96e730a73..b96e730a73 100644
--- a/src/compiler/scala/tools/asm/AnnotationVisitor.java
+++ b/src/asm/scala/tools/asm/AnnotationVisitor.java
diff --git a/src/compiler/scala/tools/asm/AnnotationWriter.java b/src/asm/scala/tools/asm/AnnotationWriter.java
index e530780249..e530780249 100644
--- a/src/compiler/scala/tools/asm/AnnotationWriter.java
+++ b/src/asm/scala/tools/asm/AnnotationWriter.java
diff --git a/src/compiler/scala/tools/asm/Attribute.java b/src/asm/scala/tools/asm/Attribute.java
index 408f21ce1e..408f21ce1e 100644
--- a/src/compiler/scala/tools/asm/Attribute.java
+++ b/src/asm/scala/tools/asm/Attribute.java
diff --git a/src/compiler/scala/tools/asm/ByteVector.java b/src/asm/scala/tools/asm/ByteVector.java
index 5081f0184b..5081f0184b 100644
--- a/src/compiler/scala/tools/asm/ByteVector.java
+++ b/src/asm/scala/tools/asm/ByteVector.java
diff --git a/src/compiler/scala/tools/asm/ClassReader.java b/src/asm/scala/tools/asm/ClassReader.java
index f3287d41ae..f3287d41ae 100644
--- a/src/compiler/scala/tools/asm/ClassReader.java
+++ b/src/asm/scala/tools/asm/ClassReader.java
diff --git a/src/compiler/scala/tools/asm/ClassVisitor.java b/src/asm/scala/tools/asm/ClassVisitor.java
index ae38ae0ab9..ae38ae0ab9 100644
--- a/src/compiler/scala/tools/asm/ClassVisitor.java
+++ b/src/asm/scala/tools/asm/ClassVisitor.java
diff --git a/src/compiler/scala/tools/asm/ClassWriter.java b/src/asm/scala/tools/asm/ClassWriter.java
index c7a0736b51..c7a0736b51 100644
--- a/src/compiler/scala/tools/asm/ClassWriter.java
+++ b/src/asm/scala/tools/asm/ClassWriter.java
diff --git a/src/compiler/scala/tools/asm/CustomAttr.java b/src/asm/scala/tools/asm/CustomAttr.java
index 5ecfd283d0..5ecfd283d0 100644
--- a/src/compiler/scala/tools/asm/CustomAttr.java
+++ b/src/asm/scala/tools/asm/CustomAttr.java
diff --git a/src/compiler/scala/tools/asm/Edge.java b/src/asm/scala/tools/asm/Edge.java
index daac1f7bb0..daac1f7bb0 100644
--- a/src/compiler/scala/tools/asm/Edge.java
+++ b/src/asm/scala/tools/asm/Edge.java
diff --git a/src/compiler/scala/tools/asm/FieldVisitor.java b/src/asm/scala/tools/asm/FieldVisitor.java
index 9ac0f6236f..9ac0f6236f 100644
--- a/src/compiler/scala/tools/asm/FieldVisitor.java
+++ b/src/asm/scala/tools/asm/FieldVisitor.java
diff --git a/src/compiler/scala/tools/asm/FieldWriter.java b/src/asm/scala/tools/asm/FieldWriter.java
index 45ef6d0df3..45ef6d0df3 100644
--- a/src/compiler/scala/tools/asm/FieldWriter.java
+++ b/src/asm/scala/tools/asm/FieldWriter.java
diff --git a/src/compiler/scala/tools/asm/Frame.java b/src/asm/scala/tools/asm/Frame.java
index 387b56796d..387b56796d 100644
--- a/src/compiler/scala/tools/asm/Frame.java
+++ b/src/asm/scala/tools/asm/Frame.java
diff --git a/src/compiler/scala/tools/asm/Handle.java b/src/asm/scala/tools/asm/Handle.java
index be8f334192..be8f334192 100644
--- a/src/compiler/scala/tools/asm/Handle.java
+++ b/src/asm/scala/tools/asm/Handle.java
diff --git a/src/compiler/scala/tools/asm/Handler.java b/src/asm/scala/tools/asm/Handler.java
index 9e92bb98be..9e92bb98be 100644
--- a/src/compiler/scala/tools/asm/Handler.java
+++ b/src/asm/scala/tools/asm/Handler.java
diff --git a/src/compiler/scala/tools/asm/Item.java b/src/asm/scala/tools/asm/Item.java
index 021a0b11d3..021a0b11d3 100644
--- a/src/compiler/scala/tools/asm/Item.java
+++ b/src/asm/scala/tools/asm/Item.java
diff --git a/src/compiler/scala/tools/asm/Label.java b/src/asm/scala/tools/asm/Label.java
index 712c7f251f..712c7f251f 100644
--- a/src/compiler/scala/tools/asm/Label.java
+++ b/src/asm/scala/tools/asm/Label.java
diff --git a/src/compiler/scala/tools/asm/MethodVisitor.java b/src/asm/scala/tools/asm/MethodVisitor.java
index a8a859a6a9..a8a859a6a9 100644
--- a/src/compiler/scala/tools/asm/MethodVisitor.java
+++ b/src/asm/scala/tools/asm/MethodVisitor.java
diff --git a/src/compiler/scala/tools/asm/MethodWriter.java b/src/asm/scala/tools/asm/MethodWriter.java
index 321bacb6fc..321bacb6fc 100644
--- a/src/compiler/scala/tools/asm/MethodWriter.java
+++ b/src/asm/scala/tools/asm/MethodWriter.java
diff --git a/src/compiler/scala/tools/asm/Opcodes.java b/src/asm/scala/tools/asm/Opcodes.java
index 809e5ae590..809e5ae590 100644
--- a/src/compiler/scala/tools/asm/Opcodes.java
+++ b/src/asm/scala/tools/asm/Opcodes.java
diff --git a/src/compiler/scala/tools/asm/Type.java b/src/asm/scala/tools/asm/Type.java
index bf1107182a..bf1107182a 100644
--- a/src/compiler/scala/tools/asm/Type.java
+++ b/src/asm/scala/tools/asm/Type.java
diff --git a/src/compiler/scala/tools/asm/signature/SignatureReader.java b/src/asm/scala/tools/asm/signature/SignatureReader.java
index 22e6427e63..22e6427e63 100644
--- a/src/compiler/scala/tools/asm/signature/SignatureReader.java
+++ b/src/asm/scala/tools/asm/signature/SignatureReader.java
diff --git a/src/compiler/scala/tools/asm/signature/SignatureVisitor.java b/src/asm/scala/tools/asm/signature/SignatureVisitor.java
index 2fc364e374..2fc364e374 100644
--- a/src/compiler/scala/tools/asm/signature/SignatureVisitor.java
+++ b/src/asm/scala/tools/asm/signature/SignatureVisitor.java
diff --git a/src/compiler/scala/tools/asm/signature/SignatureWriter.java b/src/asm/scala/tools/asm/signature/SignatureWriter.java
index a59fdfde2b..a59fdfde2b 100644
--- a/src/compiler/scala/tools/asm/signature/SignatureWriter.java
+++ b/src/asm/scala/tools/asm/signature/SignatureWriter.java
diff --git a/src/compiler/scala/tools/asm/tree/AbstractInsnNode.java b/src/asm/scala/tools/asm/tree/AbstractInsnNode.java
index 471f842ffc..471f842ffc 100644
--- a/src/compiler/scala/tools/asm/tree/AbstractInsnNode.java
+++ b/src/asm/scala/tools/asm/tree/AbstractInsnNode.java
diff --git a/src/compiler/scala/tools/asm/tree/AnnotationNode.java b/src/asm/scala/tools/asm/tree/AnnotationNode.java
index 9f132550e6..9f132550e6 100644
--- a/src/compiler/scala/tools/asm/tree/AnnotationNode.java
+++ b/src/asm/scala/tools/asm/tree/AnnotationNode.java
diff --git a/src/compiler/scala/tools/asm/tree/ClassNode.java b/src/asm/scala/tools/asm/tree/ClassNode.java
index 64effae698..64effae698 100644
--- a/src/compiler/scala/tools/asm/tree/ClassNode.java
+++ b/src/asm/scala/tools/asm/tree/ClassNode.java
diff --git a/src/compiler/scala/tools/asm/tree/FieldInsnNode.java b/src/asm/scala/tools/asm/tree/FieldInsnNode.java
index 6b7a6a142a..6b7a6a142a 100644
--- a/src/compiler/scala/tools/asm/tree/FieldInsnNode.java
+++ b/src/asm/scala/tools/asm/tree/FieldInsnNode.java
diff --git a/src/compiler/scala/tools/asm/tree/FieldNode.java b/src/asm/scala/tools/asm/tree/FieldNode.java
index 9a1e17033c..9a1e17033c 100644
--- a/src/compiler/scala/tools/asm/tree/FieldNode.java
+++ b/src/asm/scala/tools/asm/tree/FieldNode.java
diff --git a/src/compiler/scala/tools/asm/tree/FrameNode.java b/src/asm/scala/tools/asm/tree/FrameNode.java
index 66825de0ac..66825de0ac 100644
--- a/src/compiler/scala/tools/asm/tree/FrameNode.java
+++ b/src/asm/scala/tools/asm/tree/FrameNode.java
diff --git a/src/compiler/scala/tools/asm/tree/IincInsnNode.java b/src/asm/scala/tools/asm/tree/IincInsnNode.java
index 75ac40884d..75ac40884d 100644
--- a/src/compiler/scala/tools/asm/tree/IincInsnNode.java
+++ b/src/asm/scala/tools/asm/tree/IincInsnNode.java
diff --git a/src/compiler/scala/tools/asm/tree/InnerClassNode.java b/src/asm/scala/tools/asm/tree/InnerClassNode.java
index 4579488921..4579488921 100644
--- a/src/compiler/scala/tools/asm/tree/InnerClassNode.java
+++ b/src/asm/scala/tools/asm/tree/InnerClassNode.java
diff --git a/src/compiler/scala/tools/asm/tree/InsnList.java b/src/asm/scala/tools/asm/tree/InsnList.java
index dedd3bba73..dedd3bba73 100644
--- a/src/compiler/scala/tools/asm/tree/InsnList.java
+++ b/src/asm/scala/tools/asm/tree/InsnList.java
diff --git a/src/compiler/scala/tools/asm/tree/InsnNode.java b/src/asm/scala/tools/asm/tree/InsnNode.java
index d4664d23c2..d4664d23c2 100644
--- a/src/compiler/scala/tools/asm/tree/InsnNode.java
+++ b/src/asm/scala/tools/asm/tree/InsnNode.java
diff --git a/src/compiler/scala/tools/asm/tree/IntInsnNode.java b/src/asm/scala/tools/asm/tree/IntInsnNode.java
index b61270c786..b61270c786 100644
--- a/src/compiler/scala/tools/asm/tree/IntInsnNode.java
+++ b/src/asm/scala/tools/asm/tree/IntInsnNode.java
diff --git a/src/compiler/scala/tools/asm/tree/InvokeDynamicInsnNode.java b/src/asm/scala/tools/asm/tree/InvokeDynamicInsnNode.java
index d993b5a054..d993b5a054 100644
--- a/src/compiler/scala/tools/asm/tree/InvokeDynamicInsnNode.java
+++ b/src/asm/scala/tools/asm/tree/InvokeDynamicInsnNode.java
diff --git a/src/compiler/scala/tools/asm/tree/JumpInsnNode.java b/src/asm/scala/tools/asm/tree/JumpInsnNode.java
index 339ebbd2d0..339ebbd2d0 100644
--- a/src/compiler/scala/tools/asm/tree/JumpInsnNode.java
+++ b/src/asm/scala/tools/asm/tree/JumpInsnNode.java
diff --git a/src/compiler/scala/tools/asm/tree/LabelNode.java b/src/asm/scala/tools/asm/tree/LabelNode.java
index 523a8d6442..523a8d6442 100644
--- a/src/compiler/scala/tools/asm/tree/LabelNode.java
+++ b/src/asm/scala/tools/asm/tree/LabelNode.java
diff --git a/src/compiler/scala/tools/asm/tree/LdcInsnNode.java b/src/asm/scala/tools/asm/tree/LdcInsnNode.java
index f8d115acd5..f8d115acd5 100644
--- a/src/compiler/scala/tools/asm/tree/LdcInsnNode.java
+++ b/src/asm/scala/tools/asm/tree/LdcInsnNode.java
diff --git a/src/compiler/scala/tools/asm/tree/LineNumberNode.java b/src/asm/scala/tools/asm/tree/LineNumberNode.java
index acc83c8d30..acc83c8d30 100644
--- a/src/compiler/scala/tools/asm/tree/LineNumberNode.java
+++ b/src/asm/scala/tools/asm/tree/LineNumberNode.java
diff --git a/src/compiler/scala/tools/asm/tree/LocalVariableNode.java b/src/asm/scala/tools/asm/tree/LocalVariableNode.java
index 51cbd3ca00..51cbd3ca00 100644
--- a/src/compiler/scala/tools/asm/tree/LocalVariableNode.java
+++ b/src/asm/scala/tools/asm/tree/LocalVariableNode.java
diff --git a/src/compiler/scala/tools/asm/tree/LookupSwitchInsnNode.java b/src/asm/scala/tools/asm/tree/LookupSwitchInsnNode.java
index 6d0f971c29..6d0f971c29 100644
--- a/src/compiler/scala/tools/asm/tree/LookupSwitchInsnNode.java
+++ b/src/asm/scala/tools/asm/tree/LookupSwitchInsnNode.java
diff --git a/src/compiler/scala/tools/asm/tree/MethodInsnNode.java b/src/asm/scala/tools/asm/tree/MethodInsnNode.java
index c3036bc6b4..c3036bc6b4 100644
--- a/src/compiler/scala/tools/asm/tree/MethodInsnNode.java
+++ b/src/asm/scala/tools/asm/tree/MethodInsnNode.java
diff --git a/src/compiler/scala/tools/asm/tree/MethodNode.java b/src/asm/scala/tools/asm/tree/MethodNode.java
index 70ec39e058..70ec39e058 100644
--- a/src/compiler/scala/tools/asm/tree/MethodNode.java
+++ b/src/asm/scala/tools/asm/tree/MethodNode.java
diff --git a/src/compiler/scala/tools/asm/tree/MultiANewArrayInsnNode.java b/src/asm/scala/tools/asm/tree/MultiANewArrayInsnNode.java
index 9dfba77335..9dfba77335 100644
--- a/src/compiler/scala/tools/asm/tree/MultiANewArrayInsnNode.java
+++ b/src/asm/scala/tools/asm/tree/MultiANewArrayInsnNode.java
diff --git a/src/compiler/scala/tools/asm/tree/TableSwitchInsnNode.java b/src/asm/scala/tools/asm/tree/TableSwitchInsnNode.java
index 929ad9b32b..929ad9b32b 100644
--- a/src/compiler/scala/tools/asm/tree/TableSwitchInsnNode.java
+++ b/src/asm/scala/tools/asm/tree/TableSwitchInsnNode.java
diff --git a/src/compiler/scala/tools/asm/tree/TryCatchBlockNode.java b/src/asm/scala/tools/asm/tree/TryCatchBlockNode.java
index 375b4cfcb9..375b4cfcb9 100644
--- a/src/compiler/scala/tools/asm/tree/TryCatchBlockNode.java
+++ b/src/asm/scala/tools/asm/tree/TryCatchBlockNode.java
diff --git a/src/compiler/scala/tools/asm/tree/TypeInsnNode.java b/src/asm/scala/tools/asm/tree/TypeInsnNode.java
index 0b2666c498..0b2666c498 100644
--- a/src/compiler/scala/tools/asm/tree/TypeInsnNode.java
+++ b/src/asm/scala/tools/asm/tree/TypeInsnNode.java
diff --git a/src/compiler/scala/tools/asm/tree/VarInsnNode.java b/src/asm/scala/tools/asm/tree/VarInsnNode.java
index 89f572db59..89f572db59 100644
--- a/src/compiler/scala/tools/asm/tree/VarInsnNode.java
+++ b/src/asm/scala/tools/asm/tree/VarInsnNode.java
diff --git a/src/compiler/scala/tools/asm/tree/analysis/Analyzer.java b/src/asm/scala/tools/asm/tree/analysis/Analyzer.java
index df387b0b8e..df387b0b8e 100644
--- a/src/compiler/scala/tools/asm/tree/analysis/Analyzer.java
+++ b/src/asm/scala/tools/asm/tree/analysis/Analyzer.java
diff --git a/src/compiler/scala/tools/asm/tree/analysis/AnalyzerException.java b/src/asm/scala/tools/asm/tree/analysis/AnalyzerException.java
index a89bb3513f..a89bb3513f 100644
--- a/src/compiler/scala/tools/asm/tree/analysis/AnalyzerException.java
+++ b/src/asm/scala/tools/asm/tree/analysis/AnalyzerException.java
diff --git a/src/compiler/scala/tools/asm/tree/analysis/BasicInterpreter.java b/src/asm/scala/tools/asm/tree/analysis/BasicInterpreter.java
index 64ddcc11e6..64ddcc11e6 100644
--- a/src/compiler/scala/tools/asm/tree/analysis/BasicInterpreter.java
+++ b/src/asm/scala/tools/asm/tree/analysis/BasicInterpreter.java
diff --git a/src/compiler/scala/tools/asm/tree/analysis/BasicValue.java b/src/asm/scala/tools/asm/tree/analysis/BasicValue.java
index 6c449db9b0..6c449db9b0 100644
--- a/src/compiler/scala/tools/asm/tree/analysis/BasicValue.java
+++ b/src/asm/scala/tools/asm/tree/analysis/BasicValue.java
diff --git a/src/compiler/scala/tools/asm/tree/analysis/BasicVerifier.java b/src/asm/scala/tools/asm/tree/analysis/BasicVerifier.java
index 9297dd9294..9297dd9294 100644
--- a/src/compiler/scala/tools/asm/tree/analysis/BasicVerifier.java
+++ b/src/asm/scala/tools/asm/tree/analysis/BasicVerifier.java
diff --git a/src/compiler/scala/tools/asm/tree/analysis/Frame.java b/src/asm/scala/tools/asm/tree/analysis/Frame.java
index fe19c2c9ae..fe19c2c9ae 100644
--- a/src/compiler/scala/tools/asm/tree/analysis/Frame.java
+++ b/src/asm/scala/tools/asm/tree/analysis/Frame.java
diff --git a/src/compiler/scala/tools/asm/tree/analysis/Interpreter.java b/src/asm/scala/tools/asm/tree/analysis/Interpreter.java
index 930c8f4af8..930c8f4af8 100644
--- a/src/compiler/scala/tools/asm/tree/analysis/Interpreter.java
+++ b/src/asm/scala/tools/asm/tree/analysis/Interpreter.java
diff --git a/src/compiler/scala/tools/asm/tree/analysis/SimpleVerifier.java b/src/asm/scala/tools/asm/tree/analysis/SimpleVerifier.java
index c4f515d328..c4f515d328 100644
--- a/src/compiler/scala/tools/asm/tree/analysis/SimpleVerifier.java
+++ b/src/asm/scala/tools/asm/tree/analysis/SimpleVerifier.java
diff --git a/src/compiler/scala/tools/asm/tree/analysis/SmallSet.java b/src/asm/scala/tools/asm/tree/analysis/SmallSet.java
index 205878d18c..205878d18c 100644
--- a/src/compiler/scala/tools/asm/tree/analysis/SmallSet.java
+++ b/src/asm/scala/tools/asm/tree/analysis/SmallSet.java
diff --git a/src/compiler/scala/tools/asm/tree/analysis/SourceInterpreter.java b/src/asm/scala/tools/asm/tree/analysis/SourceInterpreter.java
index 067200b51e..067200b51e 100644
--- a/src/compiler/scala/tools/asm/tree/analysis/SourceInterpreter.java
+++ b/src/asm/scala/tools/asm/tree/analysis/SourceInterpreter.java
diff --git a/src/compiler/scala/tools/asm/tree/analysis/SourceValue.java b/src/asm/scala/tools/asm/tree/analysis/SourceValue.java
index 57ff212fb2..57ff212fb2 100644
--- a/src/compiler/scala/tools/asm/tree/analysis/SourceValue.java
+++ b/src/asm/scala/tools/asm/tree/analysis/SourceValue.java
diff --git a/src/compiler/scala/tools/asm/tree/analysis/Subroutine.java b/src/asm/scala/tools/asm/tree/analysis/Subroutine.java
index 038880ddcd..038880ddcd 100644
--- a/src/compiler/scala/tools/asm/tree/analysis/Subroutine.java
+++ b/src/asm/scala/tools/asm/tree/analysis/Subroutine.java
diff --git a/src/compiler/scala/tools/asm/tree/analysis/Value.java b/src/asm/scala/tools/asm/tree/analysis/Value.java
index 1edf475ce7..1edf475ce7 100644
--- a/src/compiler/scala/tools/asm/tree/analysis/Value.java
+++ b/src/asm/scala/tools/asm/tree/analysis/Value.java
diff --git a/src/compiler/scala/tools/asm/util/ASMifiable.java b/src/asm/scala/tools/asm/util/ASMifiable.java
index 6a31dd508f..6a31dd508f 100644
--- a/src/compiler/scala/tools/asm/util/ASMifiable.java
+++ b/src/asm/scala/tools/asm/util/ASMifiable.java
diff --git a/src/compiler/scala/tools/asm/util/ASMifier.java b/src/asm/scala/tools/asm/util/ASMifier.java
index 5967c877d1..5967c877d1 100644
--- a/src/compiler/scala/tools/asm/util/ASMifier.java
+++ b/src/asm/scala/tools/asm/util/ASMifier.java
diff --git a/src/compiler/scala/tools/asm/util/CheckAnnotationAdapter.java b/src/asm/scala/tools/asm/util/CheckAnnotationAdapter.java
index 8030c14f2e..8030c14f2e 100644
--- a/src/compiler/scala/tools/asm/util/CheckAnnotationAdapter.java
+++ b/src/asm/scala/tools/asm/util/CheckAnnotationAdapter.java
diff --git a/src/compiler/scala/tools/asm/util/CheckClassAdapter.java b/src/asm/scala/tools/asm/util/CheckClassAdapter.java
index a455322531..a455322531 100644
--- a/src/compiler/scala/tools/asm/util/CheckClassAdapter.java
+++ b/src/asm/scala/tools/asm/util/CheckClassAdapter.java
diff --git a/src/compiler/scala/tools/asm/util/CheckFieldAdapter.java b/src/asm/scala/tools/asm/util/CheckFieldAdapter.java
index bdcbe14b16..bdcbe14b16 100644
--- a/src/compiler/scala/tools/asm/util/CheckFieldAdapter.java
+++ b/src/asm/scala/tools/asm/util/CheckFieldAdapter.java
diff --git a/src/compiler/scala/tools/asm/util/CheckMethodAdapter.java b/src/asm/scala/tools/asm/util/CheckMethodAdapter.java
index 7549765421..7549765421 100644
--- a/src/compiler/scala/tools/asm/util/CheckMethodAdapter.java
+++ b/src/asm/scala/tools/asm/util/CheckMethodAdapter.java
diff --git a/src/compiler/scala/tools/asm/util/CheckSignatureAdapter.java b/src/asm/scala/tools/asm/util/CheckSignatureAdapter.java
index 3a6c3e780f..3a6c3e780f 100644
--- a/src/compiler/scala/tools/asm/util/CheckSignatureAdapter.java
+++ b/src/asm/scala/tools/asm/util/CheckSignatureAdapter.java
diff --git a/src/compiler/scala/tools/asm/util/Printer.java b/src/asm/scala/tools/asm/util/Printer.java
index c39fd548ce..c39fd548ce 100644
--- a/src/compiler/scala/tools/asm/util/Printer.java
+++ b/src/asm/scala/tools/asm/util/Printer.java
diff --git a/src/compiler/scala/tools/asm/util/SignatureChecker.java b/src/asm/scala/tools/asm/util/SignatureChecker.java
index 7b7eea4383..7b7eea4383 100644
--- a/src/compiler/scala/tools/asm/util/SignatureChecker.java
+++ b/src/asm/scala/tools/asm/util/SignatureChecker.java
diff --git a/src/compiler/scala/tools/asm/util/Textifiable.java b/src/asm/scala/tools/asm/util/Textifiable.java
index b80d0139db..b80d0139db 100644
--- a/src/compiler/scala/tools/asm/util/Textifiable.java
+++ b/src/asm/scala/tools/asm/util/Textifiable.java
diff --git a/src/compiler/scala/tools/asm/util/Textifier.java b/src/asm/scala/tools/asm/util/Textifier.java
index 8d40ebd026..8d40ebd026 100644
--- a/src/compiler/scala/tools/asm/util/Textifier.java
+++ b/src/asm/scala/tools/asm/util/Textifier.java
diff --git a/src/compiler/scala/tools/asm/util/TraceAnnotationVisitor.java b/src/asm/scala/tools/asm/util/TraceAnnotationVisitor.java
index f112609031..f112609031 100644
--- a/src/compiler/scala/tools/asm/util/TraceAnnotationVisitor.java
+++ b/src/asm/scala/tools/asm/util/TraceAnnotationVisitor.java
diff --git a/src/compiler/scala/tools/asm/util/TraceClassVisitor.java b/src/asm/scala/tools/asm/util/TraceClassVisitor.java
index bb830b71ce..bb830b71ce 100644
--- a/src/compiler/scala/tools/asm/util/TraceClassVisitor.java
+++ b/src/asm/scala/tools/asm/util/TraceClassVisitor.java
diff --git a/src/compiler/scala/tools/asm/util/TraceFieldVisitor.java b/src/asm/scala/tools/asm/util/TraceFieldVisitor.java
index f537e83be1..f537e83be1 100644
--- a/src/compiler/scala/tools/asm/util/TraceFieldVisitor.java
+++ b/src/asm/scala/tools/asm/util/TraceFieldVisitor.java
diff --git a/src/compiler/scala/tools/asm/util/TraceMethodVisitor.java b/src/asm/scala/tools/asm/util/TraceMethodVisitor.java
index 9aabf2079e..9aabf2079e 100644
--- a/src/compiler/scala/tools/asm/util/TraceMethodVisitor.java
+++ b/src/asm/scala/tools/asm/util/TraceMethodVisitor.java
diff --git a/src/compiler/scala/tools/asm/util/TraceSignatureVisitor.java b/src/asm/scala/tools/asm/util/TraceSignatureVisitor.java
index a37b759811..a37b759811 100644
--- a/src/compiler/scala/tools/asm/util/TraceSignatureVisitor.java
+++ b/src/asm/scala/tools/asm/util/TraceSignatureVisitor.java
diff --git a/src/build/maven/maven-deploy.xml b/src/build/maven/maven-deploy.xml
index ac0f8f745b..7ab54f81c3 100644
--- a/src/build/maven/maven-deploy.xml
+++ b/src/build/maven/maven-deploy.xml
@@ -112,6 +112,7 @@
<deploy-local name="scala-compiler" version="@{version}" repository="@{repository}" />
<deploy-local-plugin name="continuations" version="@{version}" repository="@{repository}"/>
<deploy-local name="scala-actors" version="@{version}" repository="@{repository}" />
+ <deploy-local name="scala-actors-migration" version="@{version}" repository="@{repository}" />
<deploy-local name="scala-swing" version="@{version}" repository="@{repository}"/>
<deploy-local name="scalap" version="@{version}" repository="@{repository}"/>
<deploy-local name="scala-partest" version="@{version}" repository="@{repository}"/>
@@ -172,6 +173,7 @@
<deploy-remote name="scala-compiler" version="@{version}" repository="@{repository}" />
<deploy-remote name="scala-swing" version="@{version}" repository="@{repository}"/>
<deploy-remote name="scala-actors" version="@{version}" repository="@{repository}"/>
+ <deploy-remote name="scala-actors-migration" version="@{version}" repository="@{repository}"/>
<deploy-remote name="scalap" version="@{version}" repository="@{repository}"/>
<deploy-remote name="scala-partest" version="@{version}" repository="@{repository}"/>
<deploy-remote-plugin name="continuations" version="@{version}" repository="@{repository}"/>
@@ -239,6 +241,7 @@
<deploy-remote-signed name="scala-compiler" version="@{version}" repository="@{repository}" />
<deploy-remote-signed name="scala-swing" version="@{version}" repository="@{repository}"/>
<deploy-remote-signed name="scala-actors" version="@{version}" repository="@{repository}"/>
+ <deploy-remote-signed name="scala-actors-migration" version="@{version}" repository="@{repository}"/>
<deploy-remote-signed name="scalap" version="@{version}" repository="@{repository}"/>
<deploy-remote-signed name="scala-partest" version="@{version}" repository="@{repository}"/>
</sequential>
diff --git a/src/build/maven/scala-actors-migration-pom.xml b/src/build/maven/scala-actors-migration-pom.xml
new file mode 100644
index 0000000000..93fc34ece9
--- /dev/null
+++ b/src/build/maven/scala-actors-migration-pom.xml
@@ -0,0 +1,66 @@
+<project
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>org.scala-lang</groupId>
+ <artifactId>scala-actors-migration</artifactId>
+ <packaging>jar</packaging>
+ <version>@VERSION@</version>
+ <name>Scala Migration Kit</name>
+ <description>Migration kit that enables an easy transition from Scala Actors to Akka.</description>
+ <url>http://www.scala-lang.org/</url>
+ <inceptionYear>2012</inceptionYear>
+ <organization>
+ <name>LAMP/EPFL</name>
+ <url>http://lamp.epfl.ch/</url>
+ </organization>
+ <licenses>
+ <license>
+ <name>BSD-like</name>
+ <url>http://www.scala-lang.org/downloads/license.html
+ </url>
+ <distribution>repo</distribution>
+ </license>
+ </licenses>
+ <scm>
+ <connection>scm:git:git://github.com/scala/scala.git</connection>
+ <url>https://github.com/scala/scala.git</url>
+ </scm>
+ <issueManagement>
+ <system>JIRA</system>
+ <url>https://issues.scala-lang.org/</url>
+ </issueManagement>
+ <dependencies>
+ <dependency>
+ <groupId>org.scala-lang</groupId>
+ <artifactId>scala-library</artifactId>
+ <version>@VERSION@</version>
+ </dependency>
+ <dependency>
+ <groupId>org.scala-lang</groupId>
+ <artifactId>scala-actors</artifactId>
+ <version>@VERSION@</version>
+ </dependency>
+ </dependencies>
+ <distributionManagement>
+ <repository>
+ <id>scala-tools.org</id>
+ <url>@RELEASE_REPOSITORY@</url>
+ </repository>
+ <snapshotRepository>
+ <id>scala-tools.org</id>
+ <url>@SNAPSHOT_REPOSITORY@</url>
+ <uniqueVersion>false</uniqueVersion>
+ </snapshotRepository>
+ </distributionManagement>
+ <developers>
+ <developer>
+ <id>lamp</id>
+ <name>EPFL LAMP</name>
+ </developer>
+ <developer>
+ <id>Typesafe</id>
+ <name>Typesafe, Inc.</name>
+ </developer>
+ </developers>
+</project>
diff --git a/src/build/pack.xml b/src/build/pack.xml
index f96c6b9799..956beaef88 100644
--- a/src/build/pack.xml
+++ b/src/build/pack.xml
@@ -139,6 +139,7 @@ MAIN DISTRIBUTION PACKAGING
<mvn-copy-lib mvn.artifact.name="scala-compiler"/>
<mvn-copy-lib mvn.artifact.name="scala-swing"/>
<mvn-copy-lib mvn.artifact.name="scala-actors"/>
+ <mvn-copy-lib mvn.artifact.name="scala-actors-migration"/>
<mvn-copy-lib mvn.artifact.name="scala-partest"/>
<mvn-copy-lib mvn.artifact.name="scalap"/>
</target>
@@ -201,6 +202,11 @@ MAIN DISTRIBUTION PACKAGING
basedir="${build-docs.dir}/continuations-plugin">
<include name="**/*"/>
</jar>
+ <jar destfile="${dists.dir}/maven/${version.number}/scala-actors-migration/scala-actors-migration-docs.jar"
+ basedir="${build-docs.dir}/actors-migration">
+ <include name="**/*"/>
+ </jar>
+
<!-- TODO - Scala swing and actors should maybe have thier own jar, but creating it is SLOW. -->
<copy tofile="${dists.dir}/maven/${version.number}/scala-swing/scala-swing-docs.jar"
file="${dists.dir}/maven/${version.number}/scala-library/scala-library-docs.jar"/>
diff --git a/src/compiler/scala/reflect/internal/Constants.scala b/src/compiler/scala/reflect/internal/Constants.scala
index 135d18d5ad..861bc870a7 100644
--- a/src/compiler/scala/reflect/internal/Constants.scala
+++ b/src/compiler/scala/reflect/internal/Constants.scala
@@ -224,6 +224,7 @@ trait Constants extends api.Constants {
case ClazzTag => "classOf[" + signature(typeValue) + "]"
case CharTag => "'" + escapedChar(charValue) + "'"
case LongTag => longValue.toString() + "L"
+ case EnumTag => symbolValue.name.toString()
case _ => String.valueOf(value)
}
}
diff --git a/src/compiler/scala/reflect/internal/Definitions.scala b/src/compiler/scala/reflect/internal/Definitions.scala
index 0612dcdfd4..0cdef9e79a 100644
--- a/src/compiler/scala/reflect/internal/Definitions.scala
+++ b/src/compiler/scala/reflect/internal/Definitions.scala
@@ -404,6 +404,7 @@ trait Definitions extends reflect.api.StandardDefinitions {
lazy val JavaSerializableClass = requiredClass[java.io.Serializable] modifyInfo fixupAsAnyTrait
lazy val ComparableClass = requiredClass[java.lang.Comparable[_]] modifyInfo fixupAsAnyTrait
lazy val JavaCloneableClass = requiredClass[java.lang.Cloneable]
+ lazy val JavaNumberClass = requiredClass[java.lang.Number]
lazy val RemoteInterfaceClass = requiredClass[java.rmi.Remote]
lazy val RemoteExceptionClass = requiredClass[java.rmi.RemoteException]
@@ -609,6 +610,7 @@ trait Definitions extends reflect.api.StandardDefinitions {
def isTupleTypeOrSubtype(tp: Type) = isTupleType(tp)
def tupleField(n: Int, j: Int) = getMember(TupleClass(n), nme.productAccessorName(j))
+ // NOTE: returns true for NoSymbol since it's included in the TupleClass array -- is this intentional?
def isTupleSymbol(sym: Symbol) = TupleClass contains unspecializedSymbol(sym)
def isProductNClass(sym: Symbol) = ProductClass contains sym
diff --git a/src/compiler/scala/reflect/internal/InfoTransformers.scala b/src/compiler/scala/reflect/internal/InfoTransformers.scala
index 96d9d8f076..e53f714c0c 100644
--- a/src/compiler/scala/reflect/internal/InfoTransformers.scala
+++ b/src/compiler/scala/reflect/internal/InfoTransformers.scala
@@ -20,12 +20,14 @@ trait InfoTransformers {
def transform(sym: Symbol, tpe: Type): Type
def insert(that: InfoTransformer) {
- assert(this.pid != that.pid)
+ assert(this.pid != that.pid, this.pid)
+
if (that.pid < this.pid) {
prev insert that
} else if (next.pid <= that.pid && next.pid != NoPhase.id) {
next insert that
} else {
+ log("Inserting info transformer %s following %s".format(phaseOf(that.pid), phaseOf(this.pid)))
that.next = next
that.prev = this
next.prev = that
diff --git a/src/compiler/scala/reflect/internal/Phase.scala b/src/compiler/scala/reflect/internal/Phase.scala
index 89d643aacf..68dc5ce783 100644
--- a/src/compiler/scala/reflect/internal/Phase.scala
+++ b/src/compiler/scala/reflect/internal/Phase.scala
@@ -7,9 +7,10 @@ package scala.reflect
package internal
abstract class Phase(val prev: Phase) {
+ if ((prev ne null) && (prev ne NoPhase))
+ prev.nx = this
type Id = Int
-
val id: Id = if (prev eq null) 0 else prev.id + 1
/** New flags visible after this phase has completed */
@@ -18,12 +19,13 @@ abstract class Phase(val prev: Phase) {
/** New flags visible once this phase has started */
def newFlags: Long = 0l
- private var fmask: Long =
- if (prev eq null) Flags.InitialFlags else prev.flagMask | prev.nextFlags | newFlags
+ val fmask = (
+ if (prev eq null) Flags.InitialFlags
+ else prev.flagMask | prev.nextFlags | newFlags
+ )
def flagMask: Long = fmask
private var nx: Phase = this
- if ((prev ne null) && (prev ne NoPhase)) prev.nx = this
def next: Phase = nx
def hasNext = next != this
diff --git a/src/compiler/scala/reflect/internal/util/Origins.scala b/src/compiler/scala/reflect/internal/util/Origins.scala
index 19b3adda9d..0bd5ad55ca 100644
--- a/src/compiler/scala/reflect/internal/util/Origins.scala
+++ b/src/compiler/scala/reflect/internal/util/Origins.scala
@@ -15,7 +15,7 @@ import Origins._
* You could do this:
*
* {{{
- * private lazy val origins = Origins[SymbolTable]("phase_=")
+ * private lazy val origins = Origins("arbitraryTag")
* // Commented out original enclosed for contrast
* // final def phase_=(p: Phase): Unit = {
* final def phase_=(p: Phase): Unit = origins {
@@ -23,7 +23,7 @@ import Origins._
*
* And that's it. When the JVM exits it would issue a report something like this:
{{{
- >> Origins scala.tools.nsc.symtab.SymbolTable.phase_= logged 145585 calls from 51 distinguished sources.
+ >> Origins tag 'arbitraryTag' logged 145585 calls from 51 distinguished sources.
71114 scala.tools.nsc.symtab.Symbols$Symbol.unsafeTypeParams(Symbols.scala:862)
16584 scala.tools.nsc.symtab.Symbols$Symbol.rawInfo(Symbols.scala:757)
@@ -37,29 +37,21 @@ import Origins._
*/
abstract class Origins {
type Rep
+ type StackSlice = Array[StackTraceElement]
+
+ def tag: String
+ def isCutoff(el: StackTraceElement): Boolean
def newRep(xs: StackSlice): Rep
def repString(rep: Rep): String
- def originClass: String
-
- private var _tag: String = null
- def tag: String = _tag
- def setTag(tag: String): this.type = {
- _tag = tag
- this
- }
private val origins = new mutable.HashMap[Rep, Int] withDefaultValue 0
private def add(xs: Rep) = origins(xs) += 1
private def total = origins.values.foldLeft(0L)(_ + _)
- // We find the right line by dropping any from around here and any
- // from the method's origin class.
- private def dropStackElement(cn: String) =
- (cn startsWith OriginsName) || (cn startsWith originClass)
-
// Create a stack and whittle it down to the interesting part.
- private def readStack(): Array[StackTraceElement] =
- (new Throwable).getStackTrace dropWhile (el => dropStackElement(el.getClassName))
+ def readStack(): Array[StackTraceElement] = (
+ Thread.currentThread.getStackTrace dropWhile (x => !isCutoff(x)) dropWhile isCutoff drop 1
+ )
def apply[T](body: => T): T = {
add(newRep(readStack()))
@@ -67,7 +59,7 @@ abstract class Origins {
}
def clear() = origins.clear()
def show() = {
- println("\n>> Origins %s.%s logged %s calls from %s distinguished sources.\n".format(originClass, tag, total, origins.keys.size))
+ println("\n>> Origins tag '%s' logged %s calls from %s distinguished sources.\n".format(tag, total, origins.keys.size))
origins.toList sortBy (-_._2) foreach {
case (k, v) => println("%7s %s".format(v, repString(k)))
}
@@ -79,29 +71,49 @@ abstract class Origins {
}
object Origins {
- private type StackSlice = Array[StackTraceElement]
- private val OriginsName = classOf[Origins].getName
- private val counters = new mutable.HashSet[Origins]
+ private val counters = mutable.HashMap[String, Origins]()
+ private val thisClass = this.getClass.getName
- {
- // Console.println("\nOrigins loaded: registering shutdown hook to display results.")
- sys.addShutdownHook(counters foreach (_.purge()))
+ locally {
+ sys.addShutdownHook(counters.values foreach (_.purge()))
}
- def apply[T: ClassTag](tag: String): Origins = apply(tag, classTag[T].erasure)
- def apply(tag: String, clazz: Class[_]): Origins = apply(tag, new OneLine(clazz))
- def apply(tag: String, orElse: => Origins): Origins = {
- counters find (_.tag == tag) getOrElse {
- val res = orElse setTag tag
- counters += res
- res
- }
+ case class OriginId(className: String, methodName: String) {
+ def matches(el: StackTraceElement) = (
+ (methodName == el.getMethodName) && (className startsWith el.getClassName)
+ )
}
- class OneLine(clazz: Class[_]) extends Origins {
- type Rep = StackTraceElement
- val originClass = clazz.getName stripSuffix MODULE_SUFFIX_STRING
- def newRep(xs: StackSlice): Rep = xs(0)
- def repString(rep: Rep) = " " + rep
+ def lookup(tag: String, orElse: String => Origins): Origins =
+ counters.getOrElseUpdate(tag, orElse(tag))
+ def register(x: Origins): Origins = {
+ counters(x.tag) = x
+ x
+ }
+
+ private def preCutoff(el: StackTraceElement) = (
+ (el.getClassName == thisClass)
+ || (el.getClassName startsWith "java.lang.")
+ )
+ private def findCutoff() = {
+ val cutoff = Thread.currentThread.getStackTrace dropWhile preCutoff head;
+ OriginId(cutoff.getClassName, cutoff.getMethodName)
+ }
+
+ def apply(tag: String): Origins = counters.getOrElseUpdate(tag, new OneLine(tag, findCutoff()))
+ def apply(tag: String, frames: Int): Origins = counters.getOrElseUpdate(tag, new MultiLine(tag, findCutoff(), frames))
+
+ class OneLine(val tag: String, id: OriginId) extends Origins {
+ type Rep = StackTraceElement
+ def isCutoff(el: StackTraceElement) = id matches el
+ def newRep(xs: StackSlice): Rep = if ((xs eq null) || (xs.length == 0)) null else xs(0)
+ def repString(rep: Rep) = " " + rep
+ }
+ class MultiLine(val tag: String, id: OriginId, numLines: Int) extends Origins {
+ type Rep = List[StackTraceElement]
+ def isCutoff(el: StackTraceElement) = id matches el
+ def newRep(xs: StackSlice): Rep = (xs take numLines).toList
+ def repString(rep: Rep) = rep.map("\n " + _).mkString
+ override def readStack() = super.readStack() drop 1
}
}
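After the rewrite above, an Origins counter is keyed only by its tag and derives its own stack cutoff, so instrumenting a method no longer requires naming the enclosing class. A usage sketch, assuming the compiler jar is on the classpath; the instrumented setter is hypothetical:

    import scala.reflect.internal.util.Origins

    object OriginsSketch {
      // One stack frame per call site (OneLine), keyed by the tag alone.
      private lazy val phaseSets = Origins("phase_=")
      // Keep the top three frames per call site instead (MultiLine).
      private lazy val deepSets  = Origins("phase_=(deep)", 3)

      private var current = 0
      def phase: Int = current
      def phase_=(p: Int): Unit = phaseSets {   // counts every caller of the setter
        current = p
      }
    }
    // The shutdown hook registered in the companion prints, per tag, how many calls
    // came from each distinguished source, as in the report shown in the scaladoc above.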
diff --git a/src/compiler/scala/tools/nsc/ast/TreeGen.scala b/src/compiler/scala/tools/nsc/ast/TreeGen.scala
index 23fcffd657..3a527676b4 100644
--- a/src/compiler/scala/tools/nsc/ast/TreeGen.scala
+++ b/src/compiler/scala/tools/nsc/ast/TreeGen.scala
@@ -208,6 +208,7 @@ abstract class TreeGen extends reflect.internal.TreeGen with TreeDSL {
else AppliedTypeTree(Ident(clazz), targs map TypeTree)
))
}
+ def mkSuperSelect = Select(Super(This(tpnme.EMPTY), tpnme.EMPTY), nme.CONSTRUCTOR)
def wildcardStar(tree: Tree) =
atPos(tree.pos) { Typed(tree, Ident(tpnme.WILDCARD_STAR)) }
diff --git a/src/compiler/scala/tools/nsc/ast/Trees.scala b/src/compiler/scala/tools/nsc/ast/Trees.scala
index adc490c8e1..6f1a8f488f 100644
--- a/src/compiler/scala/tools/nsc/ast/Trees.scala
+++ b/src/compiler/scala/tools/nsc/ast/Trees.scala
@@ -115,9 +115,7 @@ trait Trees extends reflect.internal.Trees { self: Global =>
// convert (implicit ... ) to ()(implicit ... ) if its the only parameter section
if (vparamss1.isEmpty || !vparamss1.head.isEmpty && vparamss1.head.head.mods.isImplicit)
vparamss1 = List() :: vparamss1;
- val superRef: Tree = atPos(superPos) {
- Select(Super(This(tpnme.EMPTY), tpnme.EMPTY), nme.CONSTRUCTOR)
- }
+ val superRef: Tree = atPos(superPos)(gen.mkSuperSelect)
val superCall = (superRef /: argss) (Apply)
List(
atPos(wrappingPos(superPos, lvdefs ::: argss.flatten)) (
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
index c02a7e493e..a0524d6932 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
@@ -385,7 +385,7 @@ self =>
Nil,
List(Nil),
TypeTree(),
- Block(List(Apply(Select(Super(This(tpnme.EMPTY), tpnme.EMPTY), nme.CONSTRUCTOR), Nil)), Literal(Constant(())))
+ Block(List(Apply(gen.mkSuperSelect, Nil)), Literal(Constant(())))
)
// def main
diff --git a/src/compiler/scala/tools/nsc/backend/icode/Members.scala b/src/compiler/scala/tools/nsc/backend/icode/Members.scala
index 71795a02aa..019e887c4e 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/Members.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/Members.scala
@@ -257,11 +257,23 @@ trait Members {
var succ = bb
do {
succ = nextBlock(succ);
- bb.removeLastInstruction
- succ.toList foreach { i => bb.emit(i, i.pos) }
- code.removeBlock(succ)
+ val lastInstr = bb.lastInstruction
+ /* Ticket SI-5672
+ * Besides removing the control-flow instruction at the end of `bb` (usually a JUMP), we have to pop any values it would have consumed.
+ * Examples:
+ * `SWITCH` consisting of just the default case, or
+ * `CJUMP(targetBlock, targetBlock, _, _)`, i.e. where success and failure targets coincide (this one consumes two stack values).
+ */
+ val oldTKs = lastInstr.consumedTypes
+ assert(lastInstr.consumed == oldTKs.size, "Someone forgot to override consumedTypes() in " + lastInstr)
+
+ bb.removeLastInstruction
+ for(tk <- oldTKs.reverse) { bb.emit(DROP(tk), lastInstr.pos) }
+ succ.toList foreach { i => bb.emit(i, i.pos) }
+ code.removeBlock(succ)
+ exh foreach { e => e.covered = e.covered - succ }
+
nextBlock -= bb
- exh foreach { e => e.covered = e.covered - succ }
} while (nextBlock.isDefinedAt(succ))
bb.close
} else
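The SI-5672 change above matters when dead-block merging removes a control-flow instruction that still consumes operands: the merged block must now DROP those operands instead of leaving them on the stack. A source-level sketch of the shapes the comment describes; this is illustrative code, not the ticket's actual test case:

    object SwitchShapes {
      // A match that reduces to a single default case can compile to a SWITCH
      // with only the default target; after merging, the INT scrutinee the
      // SWITCH would have consumed is now dropped explicitly.
      def describe(n: Int): String = n match {
        case _ => "anything"
      }

      // A conditional whose branches end up in the same block can correspond to
      // CJUMP(target, target, _, _); both compared values must then be dropped.
      def touch(a: Int, b: Int): Int = {
        if (a < b) () else ()
        a + b
      }
    }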
diff --git a/src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala b/src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala
index 576cc72f82..3179fc5c56 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala
@@ -436,6 +436,8 @@ trait Opcodes { self: ICodes =>
override def consumed = 1
override def produced = 0
+ override val consumedTypes = List(INT)
+
def flatTagsCount: Int = { var acc = 0; var rest = tags; while(rest.nonEmpty) { acc += rest.head.length; rest = rest.tail }; acc } // a one-liner
}
@@ -470,6 +472,8 @@ trait Opcodes { self: ICodes =>
override def consumed = 2
override def produced = 0
+
+ override val consumedTypes = List(kind, kind)
}
/** This class represents a CZJUMP instruction
@@ -489,6 +493,8 @@ trait Opcodes { self: ICodes =>
override def consumed = 1
override def produced = 0
+
+ override val consumedTypes = List(kind)
}
@@ -499,6 +505,8 @@ trait Opcodes { self: ICodes =>
case class RETURN(kind: TypeKind) extends Instruction {
override def consumed = if (kind == UNIT) 0 else 1
override def produced = 0
+
+ // TODO override val consumedTypes = List(kind)
}
/** This class represents a THROW instruction
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala
index 81fe907a0c..99832d1327 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala
@@ -223,8 +223,8 @@ abstract class GenASM extends SubComponent with BytecodeWriters {
// unlike javaNameCache, reverseJavaName contains entries only for class symbols and their internal names.
val reverseJavaName = mutable.Map.empty[String, Symbol] ++= List(
- binarynme.RuntimeNothing.toString() -> NothingClass, // neither RuntimeNothingClass nor RuntimeNullClass belong to the co-domain of this map.
- binarynme.RuntimeNull.toString() -> NullClass
+ binarynme.RuntimeNothing.toString() -> RuntimeNothingClass, // RuntimeNothingClass is the bytecode-level return type of Scala methods with Nothing return-type.
+ binarynme.RuntimeNull.toString() -> RuntimeNullClass
)
private def mkFlags(args: Int*) = args.foldLeft(0)(_ | _)
@@ -587,7 +587,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters {
case None =>
reverseJavaName.put(internalName, trackedSym)
case Some(oldsym) =>
- assert(List(NothingClass, NullClass).contains(oldsym) || oldsym == trackedSym,
+ assert((oldsym == trackedSym) || List(RuntimeNothingClass, RuntimeNullClass).contains(oldsym), // NothingClass, NullClass,
"how can getCommonSuperclass() do its job if different class symbols get the same bytecode-level internal name.")
}
}
@@ -2542,7 +2542,8 @@ abstract class GenASM extends SubComponent with BytecodeWriters {
} else if(kind.isRefOrArrayType) { // REFERENCE(_) | ARRAY(_)
val Success = success
val Failure = failure
- (cond, nextBlock) match {
+ // @unchecked because references aren't compared with GT, GE, LT, LE.
+ ((cond, nextBlock) : @unchecked) match {
case (EQ, Success) => jcode emitIFNONNULL labels(failure)
case (NE, Failure) => jcode emitIFNONNULL labels(success)
case (EQ, Failure) => jcode emitIFNULL labels(success)
diff --git a/src/compiler/scala/tools/nsc/doc/html/page/Index.scala b/src/compiler/scala/tools/nsc/doc/html/page/Index.scala
index f8fffdc726..8ed13e0da2 100644
--- a/src/compiler/scala/tools/nsc/doc/html/page/Index.scala
+++ b/src/compiler/scala/tools/nsc/doc/html/page/Index.scala
@@ -44,7 +44,7 @@ class Index(universe: doc.Universe, index: doc.Index) extends HtmlPage {
</div>
{ browser }
<div id="content" class="ui-layout-center">
- <iframe id="template" src={ relativeLinkTo{List("package.html")} }/>
+ <iframe id="template" name="template" src={ relativeLinkTo{List("package.html")} }/>
</div>
</body>
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
index 7c61ec032e..7373a610d7 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
@@ -615,12 +615,11 @@ abstract class ClassfileParser {
// sealed java enums (experimental)
if (isEnum && opt.experimental) {
- // need to give singleton type
- sym setInfo info.narrow
- if (!sym.superClass.isSealed)
- sym.superClass setFlag SEALED
+ val enumClass = sym.owner.linkedClassOfClass
+ if (!enumClass.isSealed)
+ enumClass setFlag (SEALED | ABSTRACT)
- sym.superClass addChild sym
+ enumClass addChild sym
}
}
}
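The ClassfileParser change above attaches each enum constant to the Java enum class itself and marks that class SEALED and ABSTRACT, so that under -Xexperimental a match on the enum can participate in exhaustiveness checking. A hedged illustration; the Day enum is a hypothetical Java type assumed to be on the classpath:

    // Assume a separately compiled Java enum:
    //   public enum Day { MONDAY, SATURDAY, SUNDAY }
    object EnumMatchSketch {
      // With the enum class sealed, omitting a constant here should draw a
      // "match may not be exhaustive" warning under -Xexperimental.
      def isWeekend(d: Day): Boolean = d match {
        case Day.SATURDAY | Day.SUNDAY => true
        case Day.MONDAY                => false
      }
    }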
diff --git a/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala b/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala
index cb7d64b9fc..e5fc98f23c 100644
--- a/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala
+++ b/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala
@@ -308,8 +308,7 @@ abstract class AddInterfaces extends InfoTransform { self: Erasure =>
// body until now, because the typer knows that Any has no
// constructor and won't accept a call to super.init.
assert((clazz isSubClass AnyValClass) || clazz.info.parents.isEmpty, clazz)
- val superCall = Apply(Select(Super(This(tpnme.EMPTY), tpnme.EMPTY), nme.CONSTRUCTOR), Nil)
- Block(List(superCall), expr)
+ Block(List(Apply(gen.mkSuperSelect, Nil)), expr)
case Block(stats, expr) =>
// needs `hasSymbol` check because `supercall` could be a block (named / default args)
diff --git a/src/compiler/scala/tools/nsc/transform/InfoTransform.scala b/src/compiler/scala/tools/nsc/transform/InfoTransform.scala
index 0905fa86c6..880f0f0157 100644
--- a/src/compiler/scala/tools/nsc/transform/InfoTransform.scala
+++ b/src/compiler/scala/tools/nsc/transform/InfoTransform.scala
@@ -37,7 +37,7 @@ trait InfoTransform extends Transform {
val changesBaseClasses = InfoTransform.this.changesBaseClasses
def transform(sym: Symbol, tpe: Type): Type = transformInfo(sym, tpe)
}
- infoTransformers.insert(infoTransformer)
+ infoTransformers insert infoTransformer
}
}
}
diff --git a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
index 07a10ecb1f..f2e109a5ad 100644
--- a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
+++ b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
@@ -1388,26 +1388,23 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
tree
case Select(qual, name) =>
- debuglog("[%s] looking at Select: %s sym: %s: %s [tree.tpe: %s]".format(
- tree.pos.safeLine, tree, symbol, symbol.info, tree.tpe))
+ debuglog("specializing Select %s [tree.tpe: %s]".format(symbol.defString, tree.tpe))
//log("!!! select " + tree + " -> " + symbol.info + " specTypeVars: " + specializedTypeVars(symbol.info))
if (specializedTypeVars(symbol.info).nonEmpty && name != nme.CONSTRUCTOR) {
// log("!!! unifying " + (symbol, symbol.tpe) + " and " + (tree, tree.tpe))
val env = unify(symbol.tpe, tree.tpe, emptyEnv, false)
// log("!!! found env: " + env + "; overloads: " + overloads(symbol))
- debuglog("checking for rerouting: " + tree + " with sym.tpe: " + symbol.tpe + " tree.tpe: " + tree.tpe + " env: " + env)
if (!env.isEmpty) {
+ // debuglog("checking for rerouting: " + tree + " with sym.tpe: " + symbol.tpe + " tree.tpe: " + tree.tpe + " env: " + env)
val specMember = overload(symbol, env)
- //log("!!! found member: " + specMember)
if (specMember.isDefined) {
- // log("** routing " + tree + " to " + specMember.get.sym.fullName)
localTyper.typedOperator(atPos(tree.pos)(Select(transform(qual), specMember.get.sym.name)))
- } else {
+ }
+ else {
val qual1 = transform(qual)
val specMember = qual1.tpe.member(specializedName(symbol, env)).suchThat(_.tpe matches subst(env, symbol.tpe))
if (specMember ne NoSymbol) {
- // log("** using spec member " + specMember + ": " + specMember.tpe)
val tree1 = atPos(tree.pos)(Select(qual1, specMember))
if (specMember.isMethod)
localTyper.typedOperator(tree1)
@@ -1450,10 +1447,8 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
// log("--> method: " + ddef + " in " + ddef.symbol.owner + ", " + info(symbol))
if (symbol.isConstructor) {
- val t = atOwner(symbol) {
- val superRef: Tree = Select(Super(This(tpnme.EMPTY), tpnme.EMPTY), nme.CONSTRUCTOR)
- forwardCtorCall(tree.pos, superRef, vparamss, symbol.owner)
- }
+ val t = atOwner(symbol)(forwardCtorCall(tree.pos, gen.mkSuperSelect, vparamss, symbol.owner))
+
if (symbol.isPrimaryConstructor)
localTyper.typedPos(symbol.pos)(deriveDefDef(tree)(_ => Block(List(t), Literal(Constant()))))
else // duplicate the original constructor
diff --git a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
index eb3a1ffb5b..affa9cd63b 100644
--- a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
@@ -690,34 +690,44 @@ trait ContextErrors {
setError(tree)
}
- def NoBestMethodAlternativeError(tree: Tree, argtpes: List[Type], pt: Type) = {
+ // side-effect on the tree, break the overloaded type cycle in infer
+ @inline
+ private def setErrorOnLastTry(lastTry: Boolean, tree: Tree) = if (lastTry) setError(tree)
+
+ def NoBestMethodAlternativeError(tree: Tree, argtpes: List[Type], pt: Type, lastTry: Boolean) = {
issueNormalTypeError(tree,
applyErrorMsg(tree, " cannot be applied to ", argtpes, pt))
// since inferMethodAlternative modifies the state of the tree
// we have to set the type of tree to ErrorType only in the very last
- // fallback action that is done in the inference (tracking it manually is error prone).
+ // fallback action that is done in the inference.
// This avoids entering infinite loop in doTypeApply.
- if (implicitly[Context].reportErrors) setError(tree)
+ setErrorOnLastTry(lastTry, tree)
}
def AmbiguousMethodAlternativeError(tree: Tree, pre: Type, best: Symbol,
- firstCompeting: Symbol, argtpes: List[Type], pt: Type) = {
- val msg0 =
- "argument types " + argtpes.mkString("(", ",", ")") +
- (if (pt == WildcardType) "" else " and expected result type " + pt)
- val (pos, msg) = ambiguousErrorMsgPos(tree.pos, pre, best, firstCompeting, msg0)
- // discover last attempt in a similar way as for NoBestMethodAlternativeError
- if (implicitly[Context].ambiguousErrors) setError(tree)
- issueAmbiguousTypeError(pre, best, firstCompeting, AmbiguousTypeError(tree, pos, msg))
+ firstCompeting: Symbol, argtpes: List[Type], pt: Type, lastTry: Boolean) = {
+
+ if (!(argtpes exists (_.isErroneous)) && !pt.isErroneous) {
+ val msg0 =
+ "argument types " + argtpes.mkString("(", ",", ")") +
+ (if (pt == WildcardType) "" else " and expected result type " + pt)
+ val (pos, msg) = ambiguousErrorMsgPos(tree.pos, pre, best, firstCompeting, msg0)
+ issueAmbiguousTypeError(pre, best, firstCompeting, AmbiguousTypeError(tree, pos, msg))
+ setErrorOnLastTry(lastTry, tree)
+ } else setError(tree) // do not even try further attempts because they should all fail
+ // even if this is not the last attempt (otherwise a stack overflow becomes possible)
+
}
- def NoBestExprAlternativeError(tree: Tree, pt: Type) =
+ def NoBestExprAlternativeError(tree: Tree, pt: Type, lastTry: Boolean) = {
issueNormalTypeError(tree, withAddendum(tree.pos)(typeErrorMsg(tree.symbol.tpe, pt, isPossiblyMissingArgs(tree.symbol.tpe, pt))))
+ setErrorOnLastTry(lastTry, tree)
+ }
- def AmbiguousExprAlternativeError(tree: Tree, pre: Type, best: Symbol, firstCompeting: Symbol, pt: Type) = {
+ def AmbiguousExprAlternativeError(tree: Tree, pre: Type, best: Symbol, firstCompeting: Symbol, pt: Type, lastTry: Boolean) = {
val (pos, msg) = ambiguousErrorMsgPos(tree.pos, pre, best, firstCompeting, "expected type " + pt)
- setError(tree)
issueAmbiguousTypeError(pre, best, firstCompeting, AmbiguousTypeError(tree, pos, msg))
+ setErrorOnLastTry(lastTry, tree)
}
// checkBounds
diff --git a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
index 0924789948..f4f081252f 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
@@ -438,8 +438,8 @@ trait Contexts { self: Analyzer =>
def enclosingContextChain: List[Context] = this :: outer.enclosingContextChain
- override def toString = "Context(%s@%s unit=%s scope=%s errors=%b)".format(
- owner.fullName, tree.shortClass, unit, scope.##, hasErrors
+ override def toString = "Context(%s@%s unit=%s scope=%s errors=%b, reportErrors=%b, throwErrors=%b)".format(
+ owner.fullName, tree.shortClass, unit, scope.##, hasErrors, reportErrors, throwErrors
)
/** Is `sub` a subclass of `base` or a companion object of such a subclass?
*/
@@ -598,16 +598,16 @@ trait Contexts { self: Analyzer =>
* it is accessible, and if it is imported there is not already a local symbol
* with the same names. Local symbols override imported ones. This fixes #2866.
*/
- private def isQualifyingImplicit(sym: Symbol, pre: Type, imported: Boolean) =
+ private def isQualifyingImplicit(name: Name, sym: Symbol, pre: Type, imported: Boolean) =
sym.isImplicit &&
isAccessible(sym, pre) &&
!(imported && {
- val e = scope.lookupEntry(sym.name)
+ val e = scope.lookupEntry(name)
(e ne null) && (e.owner == scope)
})
private def collectImplicits(syms: List[Symbol], pre: Type, imported: Boolean = false): List[ImplicitInfo] =
- for (sym <- syms if isQualifyingImplicit(sym, pre, imported)) yield
+ for (sym <- syms if isQualifyingImplicit(sym.name, sym, pre, imported)) yield
new ImplicitInfo(sym.name, pre, sym)
private def collectImplicitImports(imp: ImportInfo): List[ImplicitInfo] = {
@@ -621,7 +621,7 @@ trait Contexts { self: Analyzer =>
var impls = collect(sels1) filter (info => info.name != from)
if (to != nme.WILDCARD) {
for (sym <- imp.importedSymbol(to).alternatives)
- if (isQualifyingImplicit(sym, pre, imported = true))
+ if (isQualifyingImplicit(to, sym, pre, imported = true))
impls = new ImplicitInfo(to, pre, sym) :: impls
}
impls
diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
index 8f025336bb..a671b8d6b5 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
@@ -554,7 +554,11 @@ trait Implicits {
val itree = atPos(pos.focus) {
if (info.pre == NoPrefix) Ident(info.name)
- else Select(gen.mkAttributedQualifier(info.pre), info.name)
+ else {
+ // SI-2405 Not info.name, which might be an aliased import
+ val implicitMemberName = info.sym.name
+ Select(gen.mkAttributedQualifier(info.pre), implicitMemberName)
+ }
}
printTyping("typedImplicit1 %s, pt=%s, from implicit %s:%s".format(
typeDebug.ptTree(itree), wildPt, info.name, info.tpe)
@@ -929,7 +933,7 @@ trait Implicits {
}
case None =>
if (pre.isStable) {
- val companion = sym.companionModule
+ val companion = companionSymbolOf(sym, context)
companion.moduleClass match {
case mc: ModuleClassSymbol =>
val infos =
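Illustrative sketch of the SI-2405 scenario addressed above (example code, not part of the patch): when an implicit conversion is imported under an alias, visibility is governed by the alias while the synthesized Select must use the member's real name.

    object Impls {
      class Rich(val s: String) { def twice: String = s + s }
      implicit def enrich(s: String): Rich = new Rich(s)
    }

    object Use {
      // the conversion is visible under the alias `enrichString`, but the tree the
      // typer synthesizes must select the member by its original name `enrich`
      import Impls.{enrich => enrichString}
      def demo: String = "ab".twice
    }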
diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala
index 1c1adee343..abe77ead9a 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala
@@ -305,9 +305,21 @@ trait Infer {
}
- def isCompatible(tp: Type, pt: Type): Boolean = {
+ /** "Compatible" means conforming after conversions.
+ * "Raising to a thunk" is not implicit; therefore, for purposes of applicability and
+ * specificity, an arg type `A` is considered compatible with cbn formal parameter type `=>A`.
+ * For this behavior, the type `pt` must have cbn params preserved; for instance, `formalTypes(removeByName = false)`.
+ *
+ * `isAsSpecific` no longer prefers A by testing applicability to A for both m(A) and m(=>A)
+ * since that induces a tie between m(=>A) and m(=>A,B*) [SI-3761]
+ */
+ private def isCompatible(tp: Type, pt: Type): Boolean = {
+ def isCompatibleByName(tp: Type, pt: Type): Boolean = pt match {
+ case TypeRef(_, ByNameParamClass, List(res)) if !isByNameParamType(tp) => isCompatible(tp, res)
+ case _ => false
+ }
val tp1 = normalize(tp)
- (tp1 weak_<:< pt) || isCoercible(tp1, pt)
+ (tp1 weak_<:< pt) || isCoercible(tp1, pt) || isCompatibleByName(tp, pt)
}
def isCompatibleArgs(tps: List[Type], pts: List[Type]) =
(tps corresponds pts)(isCompatible)
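A sketch of the overloading scenario behind SI-3761 that this isCompatible change (together with formalTypes(removeByName = false) below) is meant to handle; example code, not part of the patch:

    class OverByName {
      def info(msg: => String): String = msg + "!"
      def info(msg: => String, args: Any*): String = msg + args.mkString(", ")
    }
    object OverByNameUse {
      // previously the two by-name alternatives could end up in a tie;
      // with the change the single-parameter alternative is selected
      def demo = (new OverByName).info("goodnight")
    }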
@@ -662,7 +674,7 @@ trait Infer {
case ExistentialType(tparams, qtpe) =>
isApplicable(undetparams, qtpe, argtpes0, pt)
case MethodType(params, _) =>
- val formals = formalTypes(params map { _.tpe }, argtpes0.length)
+ val formals = formalTypes(params map { _.tpe }, argtpes0.length, removeByName = false)
def tryTupleApply: Boolean = {
// if 1 formal, 1 argtpe (a tuple), otherwise unmodified argtpes0
@@ -1449,10 +1461,10 @@ trait Infer {
* If no alternative matches `pt`, take the parameterless one anyway.
*/
def inferExprAlternative(tree: Tree, pt: Type) = tree.tpe match {
- case OverloadedType(pre, alts) => tryTwice {
- val alts0 = alts filter (alt => isWeaklyCompatible(pre.memberType(alt), pt))
- val secondTry = alts0.isEmpty
- val alts1 = if (secondTry) alts else alts0
+ case OverloadedType(pre, alts) => tryTwice { isSecondTry =>
+ val alts0 = alts filter (alt => isWeaklyCompatible(pre.memberType(alt), pt))
+ val noAlternatives = alts0.isEmpty
+ val alts1 = if (noAlternatives) alts else alts0
//println("trying "+alts1+(alts1 map (_.tpe))+(alts1 map (_.locationString))+" for "+pt)
def improves(sym1: Symbol, sym2: Symbol): Boolean =
@@ -1480,10 +1492,10 @@ trait Infer {
}
}
// todo: missing test case
- NoBestExprAlternativeError(tree, pt)
+ NoBestExprAlternativeError(tree, pt, isSecondTry)
} else if (!competing.isEmpty) {
- if (secondTry) { NoBestExprAlternativeError(tree, pt); setError(tree) }
- else if (!pt.isErroneous) AmbiguousExprAlternativeError(tree, pre, best, competing.head, pt)
+ if (noAlternatives) NoBestExprAlternativeError(tree, pt, isSecondTry)
+ else if (!pt.isErroneous) AmbiguousExprAlternativeError(tree, pre, best, competing.head, pt, isSecondTry)
} else {
// val applicable = alts1 filter (alt =>
// global.typer.infer.isWeaklyCompatible(pre.memberType(alt), pt))
@@ -1562,10 +1574,10 @@ trait Infer {
* assignment expression.
*/
def inferMethodAlternative(tree: Tree, undetparams: List[Symbol],
- argtpes: List[Type], pt0: Type, varArgsOnly: Boolean = false): Unit = tree.tpe match {
+ argtpes: List[Type], pt0: Type, varArgsOnly: Boolean = false, lastInferAttempt: Boolean = true): Unit = tree.tpe match {
case OverloadedType(pre, alts) =>
val pt = if (pt0.typeSymbol == UnitClass) WildcardType else pt0
- tryTwice {
+ tryTwice { isSecondTry =>
debuglog("infer method alt "+ tree.symbol +" with alternatives "+
(alts map pre.memberType) +", argtpes = "+ argtpes +", pt = "+ pt)
@@ -1587,13 +1599,10 @@ trait Infer {
if (improves(alt, best)) alt else best)
val competing = applicable.dropWhile(alt => best == alt || improves(best, alt))
if (best == NoSymbol) {
- if (pt == WildcardType) NoBestMethodAlternativeError(tree, argtpes, pt)
- else inferMethodAlternative(tree, undetparams, argtpes, WildcardType)
+ if (pt == WildcardType) NoBestMethodAlternativeError(tree, argtpes, pt, isSecondTry && lastInferAttempt)
+ else inferMethodAlternative(tree, undetparams, argtpes, WildcardType, lastInferAttempt = isSecondTry)
} else if (!competing.isEmpty) {
- if (!(argtpes exists (_.isErroneous)) && !pt.isErroneous)
- AmbiguousMethodAlternativeError(tree, pre, best, competing.head, argtpes, pt)
- else setError(tree)
- ()
+ AmbiguousMethodAlternativeError(tree, pre, best, competing.head, argtpes, pt, isSecondTry && lastInferAttempt)
} else {
// checkNotShadowed(tree.pos, pre, best, applicable)
tree.setSymbol(best).setType(pre.memberType(best))
@@ -1607,29 +1616,28 @@ trait Infer {
*
* @param infer ...
*/
- def tryTwice(infer: => Unit): Unit = {
+ def tryTwice(infer: Boolean => Unit): Unit = {
if (context.implicitsEnabled) {
val saved = context.state
var fallback = false
context.setBufferErrors()
- val res = try {
- context.withImplicitsDisabled(infer)
+ try {
+ context.withImplicitsDisabled(infer(false))
if (context.hasErrors) {
fallback = true
context.restoreState(saved)
context.flushBuffer()
- infer
+ infer(true)
}
} catch {
case ex: CyclicReference => throw ex
case ex: TypeError => // recoverable cyclic references
context.restoreState(saved)
- if (!fallback) infer else ()
+ if (!fallback) infer(true) else ()
}
context.restoreState(saved)
- res
}
- else infer
+ else infer(true)
}
/** Assign <code>tree</code> the type of all polymorphic alternatives
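For illustration (example code, not part of the patch): tryTwice first runs overload resolution with implicit conversions disabled and only retries with implicits enabled when that fails; with this change the error methods are told whether they are on that final attempt.

    object TryTwiceDemo {
      object Lens {
        def len(s: String): Int = s.length
        def len(xs: Seq[Int]): Int = xs.sum
      }
      implicit def intToString(i: Int): String = i.toString
      // the first attempt (implicits disabled) finds no applicable alternative for the
      // Int argument; the retry with implicits enabled applies the view and succeeds,
      // so errors buffered during the first attempt must not be reported
      val n = Lens.len(42)
    }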
diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala
index d75d9d5705..4eba665b93 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala
@@ -632,7 +632,7 @@ trait Namers extends MethodSynthesis {
classAndNamerOfModule(m) = (tree, null)
}
val owner = tree.symbol.owner
- if (settings.lint.value && owner.isPackageObjectClass) {
+ if (settings.lint.value && owner.isPackageObjectClass && !mods.isImplicit) {
context.unit.warning(tree.pos,
"it is not recommended to define classes/objects inside of package objects.\n" +
"If possible, define " + tree.symbol + " in " + owner.skipPackageObject + " instead."
@@ -1302,14 +1302,18 @@ trait Namers extends MethodSynthesis {
if (expr1.symbol != null && expr1.symbol.isRootPackage)
RootImportError(tree)
- val newImport = treeCopy.Import(tree, expr1, selectors).asInstanceOf[Import]
- checkSelectors(newImport)
- transformed(tree) = newImport
- // copy symbol and type attributes back into old expression
- // so that the structure builder will find it.
- expr.symbol = expr1.symbol
- expr.tpe = expr1.tpe
- ImportType(expr1)
+ if (expr1.isErrorTyped)
+ ErrorType
+ else {
+ val newImport = treeCopy.Import(tree, expr1, selectors).asInstanceOf[Import]
+ checkSelectors(newImport)
+ transformed(tree) = newImport
+ // copy symbol and type attributes back into old expression
+ // so that the structure builder will find it.
+ expr.symbol = expr1.symbol
+ expr.tpe = expr1.tpe
+ ImportType(expr1)
+ }
}
val result =
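A hypothetical example of the situation the new guard addresses (deliberately erroneous code, not part of the patch): when the import qualifier itself fails to type check, typedImport now answers ErrorType instead of building an Import node around an error tree.

    // `doesnot.exist` is a made-up, nonexistent package: expr1 is error-typed, so the
    // namer now returns ErrorType directly instead of wrapping the broken qualifier in
    // an Import and running the selector check on it
    import doesnot.exist.Thing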
diff --git a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala
index 88e464a1f4..932e4548ef 100644
--- a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala
@@ -260,19 +260,18 @@ trait NamesDefaults { self: Analyzer =>
def argValDefs(args: List[Tree], paramTypes: List[Type], blockTyper: Typer): List[ValDef] = {
val context = blockTyper.context
val symPs = map2(args, paramTypes)((arg, tpe) => {
- val byName = isByNameParamType(tpe)
- val (argTpe, repeated) =
- if (isScalaRepeatedParamType(tpe)) arg match {
- case Typed(expr, Ident(tpnme.WILDCARD_STAR)) =>
- (expr.tpe, true)
- case _ =>
- (seqType(arg.tpe), true)
- } else (arg.tpe, false)
- val s = context.owner.newValue(unit.freshTermName("x$"), arg.pos)
- val valType = if (byName) functionType(List(), argTpe)
- else if (repeated) argTpe
- else argTpe
- s.setInfo(valType)
+ val byName = isByNameParamType(tpe)
+ val repeated = isScalaRepeatedParamType(tpe)
+ val argTpe = (
+ if (repeated) arg match {
+ case Typed(expr, Ident(tpnme.WILDCARD_STAR)) => expr.tpe
+ case _ => seqType(arg.tpe)
+ }
+ else arg.tpe
+ ).widen // have to widen or types inferred from literal defaults will be singletons
+ val s = context.owner.newValue(unit.freshTermName("x$"), arg.pos) setInfo (
+ if (byName) functionType(Nil, argTpe) else argTpe
+ )
(context.scope.enter(s), byName, repeated)
})
map2(symPs, args) {
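A small example of why the .widen above matters (illustrative, not part of the patch): the temporaries generated for out-of-order named arguments should get the widened type of the argument expression, so a literal argument does not pin its val to a constant type.

    object NamedArgsDemo {
      def resize(width: Int = 100, height: Int): Int = width * height
      // the call is rewritten to roughly:
      //   { val x$1 = 80                // typed Int thanks to .widen, not the constant type Int(80)
      //     val x$2 = resize$default$1  // the default for `width`
      //     resize(x$2, x$1) }
      val area = resize(height = 80)
    }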
@@ -482,6 +481,10 @@ trait NamesDefaults { self: Analyzer =>
try typer.silent { tpr =>
val res = tpr.typed(arg, subst(paramtpe))
// better warning for SI-5044: if `silent` was not actually silent give a hint to the user
+ // [H]: the reason why `silent` is not actually silent is that the cyclic reference exception is
+ // thrown in a context completely different from `context` here. The exception happens while
+ // completing the type, and the TypeCompleter is created/run with a non-silent Namer `context`,
+ // and there is at the moment no way to connect the two unless we go through some global state.
if (errsBefore < reporter.ERROR.count)
WarnAfterNonSilentRecursiveInference(param, arg)(context)
res
diff --git a/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala b/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala
index 61e02edaff..4d66fb5617 100644
--- a/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala
@@ -12,6 +12,8 @@ import Flags.{MUTABLE, METHOD, LABEL, SYNTHETIC}
import language.postfixOps
import scala.tools.nsc.transform.TypingTransformers
import scala.tools.nsc.transform.Transform
+import scala.collection.mutable.HashSet
+import scala.collection.mutable.HashMap
/** Translate pattern matching.
*
@@ -306,7 +308,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
// it tests the type, checks the outer pointer and casts to the expected type
// TODO: the outer check is mandated by the spec for case classes, but we do it for user-defined unapplies as well [SPEC]
// (the prefix of the argument passed to the unapply must equal the prefix of the type of the binder)
- val treeMaker = TypeTestTreeMaker(patBinder, extractor.paramType, pos)
+ val treeMaker = TypeTestTreeMaker(patBinder, patBinder, extractor.paramType, extractor.paramType)(pos, extractorArgTypeTest = true)
(List(treeMaker), treeMaker.nextBinder)
} else (Nil, patBinder)
@@ -364,7 +366,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
// must treat Typed and Bind together -- we need to know the patBinder of the Bind pattern to get at the actual type
case MaybeBoundTyped(subPatBinder, pt) =>
// a typed pattern never has any subtrees
- noFurtherSubPats(TypeAndEqualityTestTreeMaker(subPatBinder, patBinder, pt, pos))
+ noFurtherSubPats(TypeTestTreeMaker(subPatBinder, patBinder, pt, glb(List(patBinder.info.widen, pt)).normalize)(pos))
/** A pattern binder x@p consists of a pattern variable x and a pattern p.
The type of the variable x is the static type T of the pattern p.
@@ -562,8 +564,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
protected def lengthGuard(binder: Symbol): Option[Tree] =
// no need to check unless it's an unapplySeq and the minimal length is non-trivially satisfied
- if (!isSeq || (expectedLength < minLenToCheck)) None
- else { import CODE._
+ checkedLength map { expectedLength => import CODE._
// `binder.lengthCompare(expectedLength)`
def checkExpectedLength = (seqTree(binder) DOT seqLenCmp)(LIT(expectedLength))
@@ -575,8 +576,14 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
else _ INT_== _
// `if (binder != null && $checkExpectedLength [== | >=] 0) then else zero`
- Some((seqTree(binder) ANY_!= NULL) AND compareOp(checkExpectedLength, ZERO))
+ (seqTree(binder) ANY_!= NULL) AND compareOp(checkExpectedLength, ZERO)
}
+
+ def checkedLength: Option[Int] =
+ // no need to check unless it's an unapplySeq and the minimal length is non-trivially satisfied
+ if (!isSeq || (expectedLength < minLenToCheck)) None
+ else Some(expectedLength)
+
}
// TODO: to be called when there's a def unapplyProd(x: T): U
@@ -650,7 +657,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
// the extractor call (applied to the binder bound by the flatMap corresponding to the previous (i.e., enclosing/outer) pattern)
val extractorApply = atPos(pos)(spliceApply(patBinderOrCasted))
val binder = freshSym(pos, pureType(resultInMonad)) // can't simplify this when subPatBinders.isEmpty, since UnitClass.tpe is definitely wrong when isSeq, and resultInMonad should always be correct since it comes directly from the extractor's result type
- ExtractorTreeMaker(extractorApply, lengthGuard(binder), binder, Substitution(subPatBinders, subPatRefs(binder)))(resultType.typeSymbol == BooleanClass)
+ ExtractorTreeMaker(extractorApply, lengthGuard(binder), binder, Substitution(subPatBinders, subPatRefs(binder)))(resultType.typeSymbol == BooleanClass, checkedLength, patBinderOrCasted)
}
override protected def seqTree(binder: Symbol): Tree =
@@ -762,7 +769,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
val (fromFiltered, toFiltered) = (from, to).zipped filter { (f, t) => !other.from.contains(f) }
new Substitution(other.from ++ fromFiltered, other.to.map(apply) ++ toFiltered) // a quick benchmarking run indicates the `.map(apply)` is not too costly
}
- override def toString = (from zip to) mkString("Substitution(", ", ", ")")
+ override def toString = (from.map(_.name) zip to) mkString("Substitution(", ", ", ")")
}
object EmptySubstitution extends Substitution(Nil, Nil) {
@@ -775,7 +782,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
// the making of the trees
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
trait TreeMakers extends TypedSubstitution { self: CodegenCore =>
- def optimizeCases(prevBinder: Symbol, cases: List[List[TreeMaker]], pt: Type): (List[List[TreeMaker]], List[Tree]) =
+ def optimizeCases(prevBinder: Symbol, cases: List[List[TreeMaker]], pt: Type, unchecked: Boolean): (List[List[TreeMaker]], List[Tree]) =
(cases, Nil)
def emitSwitch(scrut: Tree, scrutSym: Symbol, cases: List[List[TreeMaker]], pt: Type, matchFailGenOverride: Option[Tree => Tree]): Option[Tree] =
@@ -819,11 +826,13 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
case class BodyTreeMaker(body: Tree, matchPt: Type) extends TreeMaker with NoNewBinders {
def chainBefore(next: Tree)(casegen: Casegen): Tree = // assert(next eq EmptyTree)
atPos(body.pos)(casegen.one(substitution(body))) // since SubstOnly treemakers are dropped, need to do it here
+ override def toString = "B"+(body, matchPt)
}
case class SubstOnlyTreeMaker(prevBinder: Symbol, nextBinder: Symbol) extends TreeMaker {
val localSubstitution = Substitution(prevBinder, CODE.REF(nextBinder))
def chainBefore(next: Tree)(casegen: Casegen): Tree = substitution(next)
+ override def toString = "S"+ localSubstitution
}
abstract class FunTreeMaker extends TreeMaker {
@@ -851,7 +860,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
* the function's body is determined by the next TreeMaker
* in this function's body, and all the subsequent ones, references to the symbols in `from` will be replaced by the corresponding tree in `to`
*/
- case class ExtractorTreeMaker(extractor: Tree, extraCond: Option[Tree], nextBinder: Symbol, localSubstitution: Substitution)(extractorReturnsBoolean: Boolean) extends FunTreeMaker {
+ case class ExtractorTreeMaker(extractor: Tree, extraCond: Option[Tree], nextBinder: Symbol, localSubstitution: Substitution)(extractorReturnsBoolean: Boolean, val checkedLength: Option[Int], val prevBinder: Symbol) extends FunTreeMaker {
def chainBefore(next: Tree)(casegen: Casegen): Tree = {
val condAndNext = extraCond map (casegen.ifThenElseZero(_, next)) getOrElse next
atPos(extractor.pos)(
@@ -860,136 +869,157 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
)
}
- override def toString = "X"+(extractor, nextBinder)
+ override def toString = "X"+(extractor, nextBinder.name)
}
// TODO: allow user-defined unapplyProduct
- case class ProductExtractorTreeMaker(prevBinder: Symbol, extraCond: Option[Tree], localSubstitution: Substitution) extends TreeMaker { import CODE._
+ case class ProductExtractorTreeMaker(prevBinder: Symbol, extraCond: Option[Tree], localSubstitution: Substitution) extends FunTreeMaker { import CODE._
+ val nextBinder = prevBinder // just passing through
def chainBefore(next: Tree)(casegen: Casegen): Tree = {
val nullCheck = REF(prevBinder) OBJ_NE NULL
val cond = extraCond map (nullCheck AND _) getOrElse nullCheck
casegen.ifThenElseZero(cond, substitution(next))
}
- override def toString = "P"+(prevBinder, extraCond getOrElse "", localSubstitution)
+ override def toString = "P"+(prevBinder.name, extraCond getOrElse "", localSubstitution)
}
- // tack an outer test onto `cond` if binder.info and expectedType warrant it
- def maybeWithOuterCheck(binder: Symbol, expectedTp: Type)(cond: Tree): Tree = { import CODE._
- if ( !((expectedTp.prefix eq NoPrefix) || expectedTp.prefix.typeSymbol.isPackageClass)
- && needsOuterTest(expectedTp, binder.info, matchOwner)) {
- val expectedPrefix = expectedTp.prefix match {
- case ThisType(clazz) => THIS(clazz)
- case pre => REF(pre.prefix, pre.termSymbol)
- }
+ // typetag-based tests are inserted by the type checker
+ def needsTypeTest(tp: Type, pt: Type): Boolean = !(tp <:< pt)
+
+ object TypeTestTreeMaker {
+ // factored out so that we can consistently generate other representations of the tree that implements the test
+ // (e.g. propositions for exhaustivity and friends, boolean for isPureTypeTest)
+ trait TypeTestCondStrategy {
+ type Result
+
+ def outerTest(testedBinder: Symbol, expectedTp: Type): Result
+ // TODO: can probably always widen
+ def typeTest(testedBinder: Symbol, expectedTp: Type): Result
+ def nonNullTest(testedBinder: Symbol): Result
+ def equalsTest(pat: Tree, testedBinder: Symbol): Result
+ def eqTest(pat: Tree, testedBinder: Symbol): Result
+ def and(a: Result, b: Result): Result
+ }
+
+ object treeCondStrategy extends TypeTestCondStrategy { import CODE._
+ type Result = Tree
- // ExplicitOuter replaces `Select(q, outerSym) OBJ_EQ expectedPrefix` by `Select(q, outerAccessor(outerSym.owner)) OBJ_EQ expectedPrefix`
- // if there's an outer accessor, otherwise the condition becomes `true` -- TODO: can we improve needsOuterTest so there's always an outerAccessor?
- val outer = expectedTp.typeSymbol.newMethod(vpmName.outer) setInfo expectedTp.prefix setFlag SYNTHETIC
- val outerCheck = (Select(codegen._asInstanceOf(binder, expectedTp), outer)) OBJ_EQ expectedPrefix
-
- // first check cond, since that should ensure we're not selecting outer on null
- codegen.and(cond, outerCheck)
- }
- else
- cond
- }
-
- // containsUnchecked: also need to test when erasing pt loses crucial information (maybe we can recover it using a TypeTag)
- def needsTypeTest(tp: Type, pt: Type): Boolean = !(tp <:< pt) // || containsUnchecked(pt)
- // TODO: try to find the TypeTag for the binder's type and the expected type, and if they exists,
- // check that the TypeTag of the binder's type conforms to the TypeTag of the expected type
- private def typeTest(binderToTest: Symbol, expectedTp: Type, disableOuterCheck: Boolean = false, dynamic: Boolean = false): Tree = { import CODE._
- // def coreTest =
- if (disableOuterCheck) codegen._isInstanceOf(binderToTest, expectedTp) else maybeWithOuterCheck(binderToTest, expectedTp)(codegen._isInstanceOf(binderToTest, expectedTp))
- // [Eugene to Adriaan] use `resolveErasureTag` instead of `findManifest`. please, provide a meaningful position
- // if (opt.experimental && containsUnchecked(expectedTp)) {
- // if (dynamic) {
- // val expectedTpTagTree = findManifest(expectedTp, true)
- // if (!expectedTpTagTree.isEmpty)
- // ((expectedTpTagTree DOT "erasure".toTermName) DOT "isAssignableFrom".toTermName)(REF(binderToTest) DOT nme.getClass_)
- // else
- // coreTest
- // } else {
- // val expectedTpTagTree = findManifest(expectedTp, true)
- // val binderTpTagTree = findManifest(binderToTest.info, true)
- // if(!(expectedTpTagTree.isEmpty || binderTpTagTree.isEmpty))
- // coreTest AND (binderTpTagTree DOT nme.CONFORMS)(expectedTpTagTree)
- // else
- // coreTest
- // }
- // } else coreTest
- }
-
- // need to substitute since binder may be used outside of the next extractor call (say, in the body of the case)
- case class TypeTestTreeMaker(prevBinder: Symbol, nextBinderTp: Type, pos: Position) extends CondTreeMaker {
- val cond = typeTest(prevBinder, nextBinderTp, dynamic = true)
- val res = codegen._asInstanceOf(prevBinder, nextBinderTp)
- override def toString = "TT"+(prevBinder, nextBinderTp)
- }
-
- // implements the run-time aspects of (§8.2) (typedPattern has already done the necessary type transformations)
- // TODO: normalize construction, which yields a combination of a EqualityTestTreeMaker (when necessary) and a TypeTestTreeMaker
- case class TypeAndEqualityTestTreeMaker(prevBinder: Symbol, patBinder: Symbol, pt: Type, pos: Position) extends CondTreeMaker {
- val nextBinderTp = glb(List(patBinder.info.widen, pt)).normalize
-
- /** Type patterns consist of types, type variables, and wildcards. A type pattern T is of one of the following forms:
- - A reference to a class C, p.C, or T#C.
- This type pattern matches any non-null instance of the given class.
- Note that the prefix of the class, if it is given, is relevant for determining class instances.
- For instance, the pattern p.C matches only instances of classes C which were created with the path p as prefix.
- The bottom types scala.Nothing and scala.Null cannot be used as type patterns, because they would match nothing in any case.
-
- - A singleton type p.type.
- This type pattern matches only the value denoted by the path p
- (that is, a pattern match involved a comparison of the matched value with p using method eq in class AnyRef). // TODO: the actual pattern matcher uses ==, so that's what I'm using for now
- // https://issues.scala-lang.org/browse/SI-4577 "pattern matcher, still disappointing us at equality time"
-
- - A compound type pattern T1 with ... with Tn where each Ti is a type pat- tern.
- This type pattern matches all values that are matched by each of the type patterns Ti.
-
- - A parameterized type pattern T[a1,...,an], where the ai are type variable patterns or wildcards _.
- This type pattern matches all values which match T for some arbitrary instantiation of the type variables and wildcards.
- The bounds or alias type of these type variable are determined as described in (§8.3).
-
- - A parameterized type pattern scala.Array[T1], where T1 is a type pattern. // TODO
- This type pattern matches any non-null instance of type scala.Array[U1], where U1 is a type matched by T1.
- **/
-
- // generate the tree for the run-time test that follows from the fact that
- // a `scrut` of known type `scrutTp` is expected to have type `expectedTp`
- // uses maybeWithOuterCheck to check the type's prefix
- private def typeAndEqualityTest(patBinder: Symbol, pt: Type): Tree = { import CODE._
- // TODO: `null match { x : T }` will yield a check that (indirectly) tests whether `null ne null`
- // don't bother (so that we don't end up with the warning "comparing values of types Null and Null using `ne' will always yield false")
- def genEqualsAndInstanceOf(sym: Symbol): Tree
- = codegen._equals(REF(sym), patBinder) AND typeTest(patBinder, pt.widen, disableOuterCheck = true)
-
- def isRefTp(tp: Type) = tp <:< AnyRefClass.tpe
-
- val patBinderTp = patBinder.info.widen
- def isMatchUnlessNull = isRefTp(pt) && !needsTypeTest(patBinderTp, pt)
-
- // TODO: [SPEC] type test for Array
- // TODO: use TypeTags to improve tests (for erased types we can do better when we have a TypeTag)
- pt match {
- case SingleType(_, sym) /*this implies sym.isStable*/ => genEqualsAndInstanceOf(sym) // TODO: [SPEC] the spec requires `eq` instead of `==` here
- case ThisType(sym) if sym.isModule => genEqualsAndInstanceOf(sym) // must use == to support e.g. List() == Nil
- case ThisType(sym) => REF(patBinder) OBJ_EQ This(sym)
- case ConstantType(Constant(null)) if isRefTp(patBinderTp) => REF(patBinder) OBJ_EQ NULL
- case ConstantType(const) => codegen._equals(Literal(const), patBinder)
- case _ if isMatchUnlessNull => maybeWithOuterCheck(patBinder, pt)(REF(patBinder) OBJ_NE NULL)
- case _ => typeTest(patBinder, pt)
+ def and(a: Result, b: Result): Result = a AND b
+ def typeTest(testedBinder: Symbol, expectedTp: Type) = codegen._isInstanceOf(testedBinder, expectedTp)
+ def nonNullTest(testedBinder: Symbol) = REF(testedBinder) OBJ_NE NULL
+ def equalsTest(pat: Tree, testedBinder: Symbol) = codegen._equals(pat, testedBinder)
+ def eqTest(pat: Tree, testedBinder: Symbol) = REF(testedBinder) OBJ_EQ pat
+
+ def outerTest(testedBinder: Symbol, expectedTp: Type): Tree = {
+ val expectedOuter = expectedTp.prefix match {
+ case ThisType(clazz) => THIS(clazz)
+ case pre => REF(pre.prefix, pre.termSymbol)
}
+
+ // ExplicitOuter replaces `Select(q, outerSym) OBJ_EQ expectedPrefix` by `Select(q, outerAccessor(outerSym.owner)) OBJ_EQ expectedPrefix`
+ // if there's an outer accessor, otherwise the condition becomes `true` -- TODO: can we improve needsOuterTest so there's always an outerAccessor?
+ val outer = expectedTp.typeSymbol.newMethod(vpmName.outer) setInfo expectedTp.prefix setFlag SYNTHETIC
+
+ (Select(codegen._asInstanceOf(testedBinder, expectedTp), outer)) OBJ_EQ expectedOuter
+ }
}
- val cond = typeAndEqualityTest(patBinder, pt)
- val res = codegen._asInstanceOf(patBinder, nextBinderTp)
+ object pureTypeTestChecker extends TypeTestCondStrategy {
+ type Result = Boolean
- // TODO: remove this
- def isStraightTypeTest = cond match { case TypeApply(_, _) => cond.symbol == Any_isInstanceOf case _ => false }
+ def typeTest(testedBinder: Symbol, expectedTp: Type): Result = true
- override def toString = "TET"+(patBinder, pt)
+ def outerTest(testedBinder: Symbol, expectedTp: Type): Result = false
+ def nonNullTest(testedBinder: Symbol): Result = false
+ def equalsTest(pat: Tree, testedBinder: Symbol): Result = false
+ def eqTest(pat: Tree, testedBinder: Symbol): Result = false
+ def and(a: Result, b: Result): Result = false // we don't and type tests, so the conjunction must include at least one false
+ }
+ }
+
+ /** implements the run-time aspects of (§8.2) (typedPattern has already done the necessary type transformations)
+ *
+ * Type patterns consist of types, type variables, and wildcards. A type pattern T is of one of the following forms:
+ - A reference to a class C, p.C, or T#C.
+ This type pattern matches any non-null instance of the given class.
+ Note that the prefix of the class, if it is given, is relevant for determining class instances.
+ For instance, the pattern p.C matches only instances of classes C which were created with the path p as prefix.
+ The bottom types scala.Nothing and scala.Null cannot be used as type patterns, because they would match nothing in any case.
+
+ - A singleton type p.type.
+ This type pattern matches only the value denoted by the path p
+ (that is, a pattern match involves a comparison of the matched value with p using method eq in class AnyRef). // TODO: the actual pattern matcher uses ==, so that's what I'm using for now
+ // https://issues.scala-lang.org/browse/SI-4577 "pattern matcher, still disappointing us at equality time"
+
+ - A compound type pattern T1 with ... with Tn where each Ti is a type pattern.
+ This type pattern matches all values that are matched by each of the type patterns Ti.
+
+ - A parameterized type pattern T[a1,...,an], where the ai are type variable patterns or wildcards _.
+ This type pattern matches all values which match T for some arbitrary instantiation of the type variables and wildcards.
+ The bounds or alias type of these type variables are determined as described in (§8.3).
+
+ - A parameterized type pattern scala.Array[T1], where T1 is a type pattern. // TODO
+ This type pattern matches any non-null instance of type scala.Array[U1], where U1 is a type matched by T1.
+ **/
+ case class TypeTestTreeMaker(prevBinder: Symbol, testedBinder: Symbol, expectedTp: Type, nextBinderTp: Type)(_pos: Position, extractorArgTypeTest: Boolean = false) extends CondTreeMaker {
+ val pos = _pos
+
+ import TypeTestTreeMaker._
+ // println("TTTM"+(prevBinder, extractorArgTypeTest, testedBinder, expectedTp, nextBinderTp))
+
+ lazy val outerTestNeeded = (
+ !((expectedTp.prefix eq NoPrefix) || expectedTp.prefix.typeSymbol.isPackageClass)
+ && needsOuterTest(expectedTp, testedBinder.info, matchOwner))
+
+ // the logic to generate the run-time test that follows from the fact that
+ // a `prevBinder` is expected to have type `expectedTp`
+ // the actual tree-generation logic is factored out, since the analyses generate Cond(ition)s rather than Trees
+ // TODO: `null match { x : T }` will yield a check that (indirectly) tests whether `null ne null`
+ // don't bother (so that we don't end up with the warning "comparing values of types Null and Null using `ne' will always yield false")
+ def renderCondition(cs: TypeTestCondStrategy): cs.Result = {
+ import cs._
+
+ def default =
+ // do type test first to ensure we won't select outer on null
+ if (outerTestNeeded) and(typeTest(testedBinder, expectedTp), outerTest(testedBinder, expectedTp))
+ else typeTest(testedBinder, expectedTp)
+
+ // true when called to type-test the argument to an extractor
+ // don't do any fancy equality checking, just test the type
+ if (extractorArgTypeTest) default
+ else expectedTp match {
+ // TODO: [SPEC] the spec requires `eq` instead of `==` for singleton types
+ // this implies sym.isStable
+ case SingleType(_, sym) => and(equalsTest(CODE.REF(sym), testedBinder), typeTest(testedBinder, expectedTp.widen))
+ // must use == to support e.g. List() == Nil
+ case ThisType(sym) if sym.isModule => and(equalsTest(CODE.REF(sym), testedBinder), typeTest(testedBinder, expectedTp.widen))
+ case ConstantType(const) => equalsTest(Literal(const), testedBinder)
+
+ case ThisType(sym) => eqTest(This(sym), testedBinder)
+ case ConstantType(Constant(null)) if testedBinder.info.widen <:< AnyRefClass.tpe
+ => eqTest(CODE.NULL, testedBinder)
+
+ // TODO: verify that we don't need to special-case Array
+ // I think it's okay:
+ // - the isInstanceOf test includes a test for the element type
+ // - Scala's arrays are invariant (so we don't drop type tests unsoundly)
+ case _ if (expectedTp <:< AnyRefClass.tpe) && !needsTypeTest(testedBinder.info.widen, expectedTp) =>
+ // do non-null check first to ensure we won't select outer on null
+ if (outerTestNeeded) and(nonNullTest(testedBinder), outerTest(testedBinder, expectedTp))
+ else nonNullTest(testedBinder)
+
+ case _ => default
+ }
+ }
+
+ val cond = renderCondition(treeCondStrategy)
+ val res = codegen._asInstanceOf(testedBinder, nextBinderTp)
+
+ // is this purely a type test, e.g. no outer check, no equality tests (used in switch emission)
+ def isPureTypeTest = renderCondition(pureTypeTestChecker)
+
+ override def toString = "TT"+(expectedTp, testedBinder.name, nextBinderTp)
}
// need to substitute to deal with existential types -- TODO: deal with existentials better, don't substitute (see RichClass during quick.comp)
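To connect renderCondition's cases to source-level patterns (illustrative example, not part of the patch):

    def classify(x: Any, s: String): Int =
      (x match {
        case str: String => str.length // not statically a String: full typeTest (isInstanceOf) plus a cast
        case _           => 0
      }) + (s match {
        case ser: java.io.Serializable => 1 // statically already a subtype: only a nonNullTest is emitted
        case _                         => 0
      })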
@@ -1000,7 +1030,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
// equals need not be well-behaved, so don't intersect with pattern's (stabilized) type (unlike MaybeBoundTyped's accumType, where it's required)
val cond = codegen._equals(patTree, prevBinder)
val res = CODE.REF(prevBinder)
- override def toString = "ET"+(prevBinder, patTree)
+ override def toString = "ET"+(prevBinder.name, patTree)
}
case class AlternativesTreeMaker(prevBinder: Symbol, var altss: List[List[TreeMaker]], pos: Position) extends TreeMaker with NoNewBinders {
@@ -1062,7 +1092,21 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
def matchFailGen = (matchFailGenOverride orElse Some(CODE.MATCHERROR(_: Tree)))
// println("combining cases: "+ (casesNoSubstOnly.map(_.mkString(" >> ")).mkString("{", "\n", "}")))
+ def isSwitchAnnotation(tpe: Type) = tpe hasAnnotation SwitchClass
+ def isUncheckedAnnotation(tpe: Type) = tpe hasAnnotation UncheckedClass
+
+ val (unchecked, requireSwitch) = scrut match {
+ case Typed(_, tpt) =>
+ (isUncheckedAnnotation(tpt.tpe),
+ // matches with two or fewer cases need not apply for switchiness (if-then-else will do)
+ isSwitchAnnotation(tpt.tpe) && casesNoSubstOnly.lengthCompare(2) > 0)
+ case _ =>
+ (false, false)
+ }
+
emitSwitch(scrut, scrutSym, casesNoSubstOnly, pt, matchFailGenOverride).getOrElse{
+ if (requireSwitch) typer.context.unit.warning(scrut.pos, "could not emit switch for @switch annotated match")
+
if (casesNoSubstOnly nonEmpty) {
// before optimizing, check casesNoSubstOnly for presence of a default case,
// since DCE will eliminate trivial cases like `case _ =>`, even if they're the last one
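Example of a match that now triggers the new warning (illustrative, not part of the patch): @switch requests a JVM switch, which cannot be emitted for String patterns, and with more than two cases the requireSwitch check fires.

    def tag(s: String): Int = (s: @annotation.switch) match {
      case "a" => 1
      case "b" => 2
      case _   => 3
      // warning: could not emit switch for @switch annotated match
    }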
@@ -1077,7 +1121,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
}) None
else matchFailGen
- val (cases, toHoist) = optimizeCases(scrutSym, casesNoSubstOnly, pt)
+ val (cases, toHoist) = optimizeCases(scrutSym, casesNoSubstOnly, pt, unchecked)
val matchRes = codegen.matcher(scrut, scrutSym, pt)(cases map combineExtractors, synthCatchAll)
@@ -1262,7 +1306,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
// decisions, decisions
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
- trait TreeMakerApproximation extends TreeMakers { self: CodegenCore =>
+ trait TreeMakerApproximation extends TreeMakers with Prettification{ self: CodegenCore =>
object Test {
var currId = 0
}
@@ -1281,9 +1325,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
val id = { Test.currId += 1; Test.currId}
override def toString =
- if (cond eq Top) "T"
- else if(cond eq Havoc) "!?"
- else "T"+ id + (if(reusedBy nonEmpty) "!["+ treeMaker +"]" else (if(reuses.isEmpty) "["+ treeMaker +"]" else " cf. T"+reuses.get.id))
+ "T"+ id + "C("+ cond +")" //+ (reuses map ("== T"+_.id) getOrElse (if(reusedBy.isEmpty) treeMaker else reusedBy mkString (treeMaker+ " -->(", ", ",")")))
}
object Cond {
@@ -1304,10 +1346,11 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
}
// does not contribute any knowledge
- case object Top extends Cond
+ case object Top extends Cond {override def toString = "T"}
+
// takes away knowledge. e.g., a user-defined guard
- case object Havoc extends Cond
+ case object Havoc extends Cond {override def toString = "_|_"}
// we know everything! everything!
// this either means the case is unreachable,
@@ -1315,11 +1358,15 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
// case object Bottom extends Cond
+ case class AndCond(a: Cond, b: Cond) extends Cond {override def toString = a +"/\\"+ b}
+ case class OrCond(a: Cond, b: Cond) extends Cond {override def toString = "("+a+") \\/ ("+ b +")"}
+
object EqualityCond {
private val uniques = new collection.mutable.HashMap[(Tree, Tree), EqualityCond]
def apply(testedPath: Tree, rhs: Tree): EqualityCond = uniques getOrElseUpdate((testedPath, rhs), new EqualityCond(testedPath, rhs))
+ def unapply(c: EqualityCond) = Some(c.testedPath, c.rhs)
}
- class EqualityCond(testedPath: Tree, rhs: Tree) extends Cond {
+ class EqualityCond(val testedPath: Tree, val rhs: Tree) extends Cond {
// def negation = TopCond // inequality doesn't teach us anything
// do simplification when we know enough about the tree statically:
// - collapse equal trees
@@ -1329,103 +1376,873 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
override def toString = testedPath +" == "+ rhs +"#"+ id
}
- object TypeCond {
- private val uniques = new collection.mutable.HashMap[(Tree, Type), TypeCond]
- def apply(testedPath: Tree, pt: Type): TypeCond = uniques getOrElseUpdate((testedPath, pt), new TypeCond(testedPath, pt))
+ object NonNullCond {
+ private val uniques = new collection.mutable.HashMap[Tree, NonNullCond]
+ def apply(testedPath: Tree): NonNullCond = uniques getOrElseUpdate(testedPath, new NonNullCond(testedPath))
+ def unapply(c: NonNullCond) = Some(c.testedPath)
}
- class TypeCond(testedPath: Tree, pt: Type) extends Cond {
- // def negation = TopCond // inequality doesn't teach us anything
- // do simplification when we know enough about the tree statically:
- // - collapse equal trees
- // - accumulate tests when (in)equality not known statically
- // - become bottom when we statically know this can never match
- override def toString = testedPath +" <: "+ pt +"#"+ id
+ class NonNullCond(val testedPath: Tree) extends Cond {
+ override def toString = testedPath +" ne null " +"#"+ id
}
- object TypeAndEqualityCond {
- private val uniques = new collection.mutable.HashMap[(Tree, Type), TypeAndEqualityCond]
- def apply(testedPath: Tree, pt: Type): TypeAndEqualityCond = uniques getOrElseUpdate((testedPath, pt), new TypeAndEqualityCond(testedPath, pt))
+ object TypeCond {
+ private val uniques = new collection.mutable.HashMap[(Tree, Type), TypeCond]
+ def apply(testedPath: Tree, pt: Type): TypeCond = uniques getOrElseUpdate((testedPath, pt), new TypeCond(testedPath, pt))
+ def unapply(c: TypeCond) = Some(c.testedPath, c.pt)
}
- class TypeAndEqualityCond(testedPath: Tree, pt: Type) extends Cond {
+ class TypeCond(val testedPath: Tree, val pt: Type) extends Cond {
// def negation = TopCond // inequality doesn't teach us anything
// do simplification when we know enough about the tree statically:
// - collapse equal trees
// - accumulate tests when (in)equality not known statically
// - become bottom when we statically know this can never match
- override def toString = testedPath +" (<: && ==) "+ pt +"#"+ id
+ override def toString = testedPath +" : "+ pt +"#"+ id
}
- def approximateMatch(root: Symbol, cases: List[List[TreeMaker]]): List[List[Test]] = {
+// class OuterEqCond(val testedPath: Tree, val expectedType: Type) extends Cond {
+// val expectedOuter = expectedTp.prefix match {
+// case ThisType(clazz) => THIS(clazz)
+// case pre => REF(pre.prefix, pre.termSymbol)
+// }
+//
+// // ExplicitOuter replaces `Select(q, outerSym) OBJ_EQ expectedPrefix` by `Select(q, outerAccessor(outerSym.owner)) OBJ_EQ expectedPrefix`
+// // if there's an outer accessor, otherwise the condition becomes `true` -- TODO: can we improve needsOuterTest so there's always an outerAccessor?
+// val outer = expectedTp.typeSymbol.newMethod(vpmName.outer) setInfo expectedTp.prefix setFlag SYNTHETIC
+//
+// (Select(codegen._asInstanceOf(testedBinder, expectedTp), outer)) OBJ_EQ expectedOuter
+// }
+
+
+ // returns (tree, tests), where `tree` will be used to refer to `root` in `tests`
+ abstract class TreeMakersToConds(val root: Symbol, val cases: List[List[TreeMaker]]) {
// a variable in this set should never be replaced by a tree that "does not consist of a selection on a variable in this set" (intuitively)
- val pointsToBound = collection.mutable.HashSet(root)
+ private val pointsToBound = collection.mutable.HashSet(root)
// the substitution that renames variables to variables in pointsToBound
- var normalize: Substitution = EmptySubstitution
+ private var normalize: Substitution = EmptySubstitution
// replaces a variable (in pointsToBound) by a selection on another variable in pointsToBound
// TODO check:
// pointsToBound -- accumSubst.from == Set(root) && (accumSubst.from.toSet -- pointsToBound) isEmpty
- var accumSubst: Substitution = EmptySubstitution
+ private var accumSubst: Substitution = EmptySubstitution
+
+ private val trees = new collection.mutable.HashSet[Tree]
- val trees = new collection.mutable.HashSet[Tree]
+ // TODO: improve, e.g., for constants
+ def sameValue(a: Tree, b: Tree): Boolean = (a eq b) || ((a, b) match {
+ case (_ : Ident, _ : Ident) => a.symbol eq b.symbol
+ case _ => false
+ })
+
+ // hashconsing trees (modulo value-equality)
+ def unique(t: Tree, tpOverride: Type = NoType): Tree =
+ trees find (a => a.equalsStructure0(t)(sameValue)) match {
+ case Some(orig) => orig // println("unique: "+ (t eq orig, orig));
+ case _ =>
+ trees += t
+ if (tpOverride != NoType) t setType tpOverride
+ else t
+ }
- def approximateTreeMaker(tm: TreeMaker): Test = {
- val subst = tm.substitution
+ def uniqueTp(tp: Type): Type = tp match {
+ // typerefs etc are already hashconsed
+ case _ : UniqueType => tp
+ case tp@RefinedType(parents, EmptyScope) => tp.memo(tp: Type)(identity) // TODO: does this help?
+ case _ => tp
+ }
+ // produce the unique tree used to refer to this binder
+ // the type of the binder passed to the first invocation
+ // determines the type of the tree that'll be returned for that binder as of then
+ def binderToUniqueTree(b: Symbol) =
+ unique(accumSubst(normalize(CODE.REF(b))), b.tpe)
+
+ // note that the sequencing of operations is important: must visit in same order as match execution
+ // binderToUniqueTree uses the type of the first symbol that was encountered as the type for all future binders
+ def treeMakerToCond(tm: TreeMaker): Cond = {
+ updateSubstitution(tm.substitution)
+
+ tm match {
+ case ttm@TypeTestTreeMaker(prevBinder, testedBinder, pt, _) =>
+ object condStrategy extends TypeTestTreeMaker.TypeTestCondStrategy {
+ type Result = Cond
+ def and(a: Result, b: Result) = AndCond(a, b)
+ def outerTest(testedBinder: Symbol, expectedTp: Type) = Top // TODO OuterEqCond(testedBinder, expectedType)
+ def typeTest(b: Symbol, pt: Type) = TypeCond(binderToUniqueTree(b), uniqueTp(pt))
+ def nonNullTest(testedBinder: Symbol) = NonNullCond(binderToUniqueTree(testedBinder))
+ def equalsTest(pat: Tree, testedBinder: Symbol) = EqualityCond(binderToUniqueTree(testedBinder), unique(pat))
+ def eqTest(pat: Tree, testedBinder: Symbol) = EqualityCond(binderToUniqueTree(testedBinder), unique(pat)) // TODO: eq, not ==
+ }
+ ttm.renderCondition(condStrategy)
+ case EqualityTestTreeMaker(prevBinder, patTree, _) => EqualityCond(binderToUniqueTree(prevBinder), unique(patTree))
+ case AlternativesTreeMaker(_, altss, _) => altss map (_ map treeMakerToCond reduceLeft AndCond) reduceLeft OrCond
+ case ProductExtractorTreeMaker(testedBinder, None, subst) => NonNullCond(binderToUniqueTree(testedBinder))
+ case ExtractorTreeMaker(_, _, _, _)
+ | GuardTreeMaker(_)
+ | ProductExtractorTreeMaker(_, Some(_), _)
+ | BodyTreeMaker(_, _) => Havoc
+ case SubstOnlyTreeMaker(_, _) => Top
+ }
+ }
+
+ private def updateSubstitution(subst: Substitution) = {
// find part of substitution that replaces bound symbols by new symbols, and reverse that part
// so that we don't introduce new aliases for existing symbols, thus keeping the set of bound symbols minimal
val (boundSubst, unboundSubst) = (subst.from zip subst.to) partition {case (f, t) =>
t.isInstanceOf[Ident] && (t.symbol ne NoSymbol) && pointsToBound(f)
}
val (boundFrom, boundTo) = boundSubst.unzip
+ val (unboundFrom, unboundTo) = unboundSubst.unzip
+
+ // reverse substitution that would otherwise replace a variable we already encountered by a new variable
+ // NOTE: this forgets the more precise types we have for these later variables, but that's probably okay
normalize >>= Substitution(boundTo map (_.symbol), boundFrom map (CODE.REF(_)))
// println("normalize: "+ normalize)
- val (unboundFrom, unboundTo) = unboundSubst unzip
val okSubst = Substitution(unboundFrom, unboundTo map (normalize(_))) // it's important substitution does not duplicate trees here -- it helps to keep hash consing simple, anyway
pointsToBound ++= ((okSubst.from, okSubst.to).zipped filter { (f, t) => pointsToBound exists (sym => t.exists(_.symbol == sym)) })._1
// println("pointsToBound: "+ pointsToBound)
accumSubst >>= okSubst
// println("accumSubst: "+ accumSubst)
+ }
+
+ def approximateTreeMaker(tm: TreeMaker): Test =
+ Test(treeMakerToCond(tm), tm)
+
+ def approximateMatch: List[List[Test]] = cases.map { _ map approximateTreeMaker }
+ }
+
+ def approximateMatch(root: Symbol, cases: List[List[TreeMaker]]): List[List[Test]] = {
+ object approximator extends TreeMakersToConds(root, cases)
+ approximator.approximateMatch
+ }
+
+ def showTreeMakers(cases: List[List[TreeMaker]]) = {
+ println("treeMakers:")
+ println(alignAcrossRows(cases, ">>"))
+ }
- // TODO: improve, e.g., for constants
- def sameValue(a: Tree, b: Tree): Boolean = (a eq b) || ((a, b) match {
- case (_ : Ident, _ : Ident) => a.symbol eq b.symbol
- case _ => false
- })
-
- // hashconsing trees (modulo value-equality)
- def unique(t: Tree): Tree =
- trees find (a => a.equalsStructure0(t)(sameValue)) match {
- case Some(orig) => orig // println("unique: "+ (t eq orig, orig));
- case _ => trees += t; t
+ def showTests(testss: List[List[Test]]) = {
+ println("tests: ")
+ println(alignAcrossRows(testss, "&"))
+ }
+ }
+
+ trait Prettification {
+ private def max(xs: Seq[Int]) = if (xs isEmpty) 0 else xs max
+
+ def alignedColumns(cols: Seq[AnyRef]): Seq[String] = {
+ def toString(x: AnyRef) = if (x eq null) "" else x.toString
+ if (cols.isEmpty || cols.tails.isEmpty) cols map toString
+ else {
+ val (colStrs, colLens) = cols map {c => val s = toString(c); (s, s.length)} unzip
+ val maxLen = max(colLens)
+ val avgLen = colLens.sum/colLens.length
+ val goalLen = maxLen min avgLen*2
+ def pad(s: String) = {
+ val toAdd = ((goalLen - s.length) max 0) + 2
+ (" " * (toAdd/2)) + s + (" " * (toAdd/2 + (toAdd%2)))
+ }
+ cols map (x => pad(toString(x)))
+ }
+ }
+ def alignAcrossRows(xss: List[List[AnyRef]], sep: String, lineSep: String = "\n"): String = {
+ val maxLen = max(xss map (_.length))
+ val padded = xss map (xs => xs ++ List.fill(maxLen - xs.length)(null))
+ padded.transpose.map(alignedColumns).transpose map (_.mkString(sep)) mkString(lineSep)
+ }
+ }
+
+ // http://www.cis.upenn.edu/~cis510/tcl/chap3.pdf
+ // http://users.encs.concordia.ca/~ta_ahmed/ms_thesis.pdf
+ trait Logic extends Prettification {
+ class Prop
+ case class Eq(p: Var, q: Const) extends Prop
+
+ type Const
+ type Var <: AbsVar
+
+ trait AbsVar {
+ // if the domain is enumerable, at least one assignment must be true
+ def domainEnumerable: Boolean
+ def domain: Option[Set[Const]]
+
+ // for this var, call it V, turn V = C into the equivalent proposition in boolean logic
+ def propForEqualsTo(c: Const): Prop
+
+ def equalitySyms: Set[Sym]
+ }
+
+ // would be nice to statically check whether a prop is equational or pure,
+ // but that requires typing relations like And(x: Tx, y: Ty) : (if(Tx == PureProp && Ty == PureProp) PureProp else Prop)
+ case class And(a: Prop, b: Prop) extends Prop
+ case class Or(a: Prop, b: Prop) extends Prop
+ case class Not(a: Prop) extends Prop
+
+ case object True extends Prop
+ case object False extends Prop
+
+ private def nextSymId = {_symId += 1; _symId}; private var _symId = 0
+
+ // symbols are propositions
+ case class Sym(val variable: Var, val const: Const) extends Prop {
+ override val toString = variable +"="+ const +"#"+ nextSymId
+ }
+
+ trait PropTraverser {
+ def apply(x: Prop): Unit = x match {
+ case And(a, b) => apply(a); apply(b)
+ case Or(a, b) => apply(a); apply(b)
+ case Not(a) => apply(a)
+ case Eq(a, b) => applyVar(a); applyConst(b)
+ case _ =>
+ }
+ def applyVar(x: Var): Unit = {}
+ def applyConst(x: Const): Unit = {}
+ }
+
+ trait PropMap {
+ def apply(x: Prop): Prop = x match { // TODO: mapConserve
+ case And(a, b) => And(apply(a), apply(b))
+ case Or(a, b) => Or(apply(a), apply(b))
+ case Not(a) => Not(apply(a))
+ case p => p
+ }
+ }
+
+ // plan: (aka TODO)
+
+ // convert finite domain propositional logic to boolean propositional logic
+ // for all c in C, there is a corresponding (meta-indexed) proposition Qv(c) that represents V = c,
+ // the only property of equality that is encoded is that a variable can at most be equal to one of the c in C:
+ // thus, for each distinct c, c', c'',... in C, a clause `not (Qv(c) /\ (Qv(c') \/ ... \/ Qv(c'')))` is added
+ def removeVarEq(prop: Prop): Prop = {
+ val vars = new collection.mutable.HashSet[Var]
+
+ object dropEquational extends PropMap {
+ override def apply(p: Prop) = p match {
+ case Eq(v, c) => vars += v; v.propForEqualsTo(c)
+ case _ => super.apply(p)
+ }
+ }
+
+ // dropEquational populates vars and, for each var in vars, its equalitySyms
+ val pure = dropEquational(prop)
+
+ // X = C is translated to P_X=C
+ // X = C' is translated to P_X=C'
+ // need to enforce X cannot simultaneously equal C and C'
+ // thus, all equality syms are mutually exclusive
+ // X = A, B, C, D --> Not(And(A, B)) /\ Not(And(A, C)) /\ Not(And(A, D))
+ // /\ Not(And(B, C)) /\ Not(And(B, D))
+ // /\ Not(And(C, D))
+ // equivalently Or(Not(A), Not(B)) /\ Or(...)
+
+ var eqAxioms: Prop = True
+ def mutex(a: Sym)(b: Sym) =
+ eqAxioms = And(eqAxioms, Or(Not(a), Not(b)))
+
+ // at least one assignment from the domain must be true
+ def assigned(dom: Set[Sym]) =
+ eqAxioms = And(eqAxioms, dom.reduceLeft(Or))
+
+ // println("vars: "+ vars)
+ vars.foreach { v =>
+ // is the domain enumerable? then create equality syms for all elements in the domain and
+ // assert at least one of them must be selected
+ // if v.domain.isEmpty, we must consider the domain to be infinite
+ v.domain foreach { dom =>
+ // get the Syms for the constants in the domain (making fresh ones for those not encountered in the formula)
+ val domProps = dom map {c => v.propForEqualsTo(c)}
+ val domSyms = new collection.mutable.HashSet[Sym]()
+ object collectSyms extends PropTraverser {
+ override def apply(p: Prop) = p match {
+ case domSym: Sym => domSyms += domSym
+ case _ => super.apply(p)
+ }
}
+ domProps foreach collectSyms.apply
- def uniqueTp(tp: Type): Type = tp match {
- // typerefs etc are already hashconsed
- case _ : UniqueType => tp
- case tp@RefinedType(parents, EmptyScope) => tp.memo(tp: Type)(identity) // TODO: does this help?
- case _ => tp
+ // TODO: an empty domain means involved type tests can never be true --> always non-exhaustive?
+ if (domSyms.nonEmpty) assigned(domSyms.toSet)
}
- def binderToUniqueTree(b: Symbol) = unique(accumSubst(normalize(CODE.REF(b))))
+ // recover mutual-exclusivity (a variable can never be assigned two different constants)
+ var syms = v.equalitySyms.toList
+ while (syms.nonEmpty) {
+ syms.tail.foreach(mutex(syms.head))
+ syms = syms.tail
+ }
+ }
- Test(tm match {
- case ProductExtractorTreeMaker(pb, None, subst) => Top // TODO: NotNullTest(prevBinder)
- case tm@TypeTestTreeMaker(prevBinder, nextBinderTp, _) => TypeCond(binderToUniqueTree(prevBinder), uniqueTp(nextBinderTp))
- case tm@TypeAndEqualityTestTreeMaker(_, patBinder, pt, _) => TypeAndEqualityCond(binderToUniqueTree(patBinder), uniqueTp(pt))
- case tm@EqualityTestTreeMaker(prevBinder, patTree, _) => EqualityCond(binderToUniqueTree(prevBinder), unique(patTree))
- case ExtractorTreeMaker(_, _, _, _)
- | GuardTreeMaker(_)
- | ProductExtractorTreeMaker(_, Some(_), _) => Havoc
- case AlternativesTreeMaker(_, _, _) => Havoc // TODO: can do better here
- case SubstOnlyTreeMaker(_, _) => Top
- case BodyTreeMaker(_, _) => Havoc
- }, tm)
+ // println("eqAxioms:\n"+ cnfString(conjunctiveNormalForm(negationNormalForm(eqAxioms))))
+ // println("pure:\n"+ cnfString(conjunctiveNormalForm(negationNormalForm(pure))))
+
+ And(eqAxioms, pure)
+ }
+
+ // convert propositional logic formula to CNF
+ // http://www.dcs.warwick.ac.uk/people/academic/Ranko.Lazic/fsv/fsv6.pdf
+ def negationNormalForm(p: Prop): Prop = p match {
+ case And(a, b) => And(negationNormalForm(a), negationNormalForm(b))
+ case Or(a, b) => Or(negationNormalForm(a), negationNormalForm(b))
+ case Not(And(a, b)) => negationNormalForm(Or(Not(a), Not(b)))
+ case Not(Or(a, b)) => negationNormalForm(And(Not(a), Not(b)))
+ case Not(Not(p)) => negationNormalForm(p)
+ case Not(True) => False
+ case Not(False) => True
+ case True
+ | False
+ | (_ : Sym)
+ | Not(_ : Sym) => p
+ }
+
+ // CNF: a formula is a conjunction of clauses
+ type Formula = List[Clause] ; def formula(c: Clause*): Formula = c.toList
+
+ // a clause is a disjunction of distinct literals
+ type Clause = Set[Lit] ; def clause(l: Lit*): Clause = l.toSet
+
+ // a literal is a (possibly negated) variable
+ case class Lit(sym: Sym, pos: Boolean = true) {
+ override def toString = if (!pos) "-"+ sym.toString else sym.toString
+ def unary_- = Lit(sym, !pos)
+ }
+
+ val TrueF = formula()
+ val FalseF = formula(clause())
+ def lit(s: Sym) = formula(clause(Lit(s)))
+ def negLit(s: Sym) = formula(clause(Lit(s, false)))
+
+ def conjunctiveNormalForm(p: Prop): Formula = {
+ def distribute(a: Formula, b: Formula): Formula = (a, b) match {
+ // true \/ _ = true
+ case (TrueF, _) => TrueF
+ // _ \/ true = true
+ case (_, TrueF) => TrueF
+ // lit \/ lit
+ case (List(a), List(b)) => formula(a ++ b)
+ // (c1 /\ ... /\ cn) \/ d = ((c1 \/ d) /\ ... /\ (cn \/ d))
+ case (cs, d) if cs.tail nonEmpty => cs flatMap (c => distribute(formula(c), d))
+ // d \/ (c1 /\ ... /\ cn) = ((d \/ c1) /\ ... /\ (d \/ cn))
+ case (d, cs) if cs.tail nonEmpty => cs flatMap (c => distribute(d, formula(c)))
}
- cases.map { _ map approximateTreeMaker }
+ p match {
+ case True => TrueF
+ case False => FalseF
+ case s: Sym => lit(s)
+ case Not(s: Sym) => negLit(s)
+ case And(a, b) => conjunctiveNormalForm(a) ++ conjunctiveNormalForm(b)
+ case Or(a, b) => distribute(conjunctiveNormalForm(a), conjunctiveNormalForm(b))
+ }
+ }
+
+ def normalize(p: Prop) = conjunctiveNormalForm(negationNormalForm(removeVarEq(p)))
+ def cnfString(f: Formula) = alignAcrossRows(f map (_.toList) toList, "\\/", " /\\\n")
+
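A small worked example of the conversion above (illustrative; A, B, C stand for Syms): negationNormalForm pushes Not inward, and conjunctiveNormalForm then distributes \/ over /\.

    Not(And(A, B))      -->  Or(Not(A), Not(B))                      (negationNormalForm)
    Or(And(A, B), C)    -->  formula(clause(A, C), clause(B, C))     (distribute \/ over /\)
    And(A, Or(B, C))    -->  formula(clause(A), clause(B, C))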
+ // adapted from http://lara.epfl.ch/w/sav10:simple_sat_solver (original by Hossein Hojjat)
+ type Model = Map[Sym, Boolean]
+ val EmptyModel = Map.empty[Sym, Boolean]
+
+ // returns all solutions, if any (TODO: better infinite recursion backstop -- detect fixpoint??)
+ def fullDPLL(f: Formula): List[Model] = {
+ // the negation of a model -(S1=True/False /\ ... /\ SN=True/False) = clause(S1=False/True, ...., SN=False/True)
+ def negateModel(m: Model) = clause(m.toSeq.map{ case (sym, pos) => Lit(sym, !pos) } : _*)
+
+ def findAllModels(f: Formula, models: List[Model], recursionDepthAllowed: Int = 20): List[Model]=
+ if (recursionDepthAllowed == 0) models
+ else {
+ val (ok, model) = DPLL(f)
+ // if we found a solution, conjunct the formula with the model's negation and recurse
+ if (ok) findAllModels(f :+ negateModel(model), model :: models, recursionDepthAllowed - 1)
+ else models
+ }
+
+ findAllModels(f, Nil)
+ }
+
+ def DPLL(f: Formula): (Boolean, Model) = {
+ @inline def withLit(res: (Boolean, Model), l: Lit) = (res._1, res._2 + (l.sym -> l.pos))
+ @inline def orElse(a: (Boolean, Model), b: => (Boolean, Model)) = if (a._1) a else b
+
+// println("dpll\n"+ cnfString(f))
+
+ if (f isEmpty) (true, EmptyModel)
+ else if(f exists (_.isEmpty)) (false, EmptyModel)
+ else f.find(_.size == 1) map { unitClause =>
+ val unitLit = unitClause.head
+// println("unit: "+ unitLit)
+ val negated = -unitLit
+ // drop entire clauses that are trivially true
+ // (i.e., disjunctions that contain the literal we're making true in the returned model),
+ // and simplify clauses by dropping the negation of the literal we're making true
+ // (since False \/ X == X)
+ val simplified = f.filterNot(_.contains(unitLit)).map(_ - negated)
+ withLit(DPLL(simplified), unitLit)
+ } getOrElse {
+ // partition symbols according to whether they appear in positive and/or negative literals
+ val pos = new HashSet[Sym]()
+ val neg = new HashSet[Sym]()
+ f.foreach{_.foreach{ lit =>
+ if (lit.pos) pos += lit.sym else neg += lit.sym
+ }}
+ // appearing in both positive and negative
+ val impures = pos intersect neg
+ // appearing only in either positive/negative positions
+ val pures = (pos ++ neg) -- impures
+
+ if (pures nonEmpty) {
+ val pureSym = pures.head
+ // turn it back into a literal
+ // (since equality on literals is in terms of equality
+ // of the underlying symbol and its positivity, simply construct a new Lit)
+ val pureLit = Lit(pureSym, pos(pureSym))
+// println("pure: "+ pureLit +" pures: "+ pures +" impures: "+ impures)
+ val simplified = f.filterNot(_.contains(pureLit))
+ withLit(DPLL(simplified), pureLit)
+ } else {
+ val split = f.head.head
+// println("split: "+ split)
+ orElse(DPLL(f :+ clause(split)), DPLL(f :+ clause(-split)))
+ }
+ }
+ }
+ }
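
A rough standalone sketch of the same DPLL loop over plain strings, useful for seeing what the
models returned by fullDPLL look like (DpllDemo and dpll are illustrative names; pure-literal
elimination is omitted here for brevity):

    object DpllDemo {
      type Lit     = (String, Boolean)   // symbol name and polarity
      type Clause  = Set[Lit]            // disjunction of literals
      type Formula = List[Clause]        // conjunction of clauses

      def dpll(f: Formula): Option[Map[String, Boolean]] =
        if (f.isEmpty) Some(Map.empty)                 // no clauses left: satisfied
        else if (f.exists(_.isEmpty)) None             // empty clause: unsatisfiable
        else f.find(_.size == 1).map(_.head) match {
          case Some((sym, pos)) =>                     // unit propagation
            val simplified = f.filterNot(_((sym, pos))).map(_ - ((sym, !pos)))
            dpll(simplified).map(_ + (sym -> pos))
          case None =>                                 // split on an arbitrary literal
            val (sym, pos) = f.head.head
            dpll(Set[Lit]((sym, pos)) :: f) orElse dpll(Set[Lit]((sym, !pos)) :: f)
        }

      def main(args: Array[String]): Unit = {
        // (a \/ b) /\ -a  is satisfied by  a -> false, b -> true
        val f: Formula = List(Set(("a", true), ("b", true)), Set(("a", false)))
        println(dpll(f))
      }
    }
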
+
+  /**
+   * Represent a match as a formula in propositional logic that encodes whether the match matches
+   * (abstractly: we only consider types).
+   */
+ trait SymbolicMatchAnalysis extends TreeMakerApproximation with Logic { self: CodegenCore =>
+ object Var {
+ private var _nextId = 0
+ def nextId = {_nextId += 1; _nextId}
+
+ private val uniques = new collection.mutable.HashMap[Tree, Var]
+ def apply(x: Tree): Var = uniques getOrElseUpdate(x, new Var(x, x.tpe))
+ }
+ class Var(val path: Tree, fullTp: Type, checked: Boolean = true) extends AbsVar {
+ // when looking at the domain, we only care about types we can check at run time
+ val domainTp: Type = checkableType(fullTp)
+
+ // case None => domain is unknown,
+ // case Some(List(tps: _*)) => domain is exactly tps
+ // we enumerate the subtypes of the full type, as that allows us to filter out more types statically,
+ // once we go to run-time checks (on Const's), convert them to checkable types
+      // TODO: there seems to be a bug for singleton domains (variable does not show up in model)
+ val domain = if (checked) enumerateSubtypes(fullTp).map(_.map(Const).toSet) else None
+
+ def describe = toString + ": "+ fullTp + domain.map(_.map(_.tp).mkString(" ::= ", " | ", "")).getOrElse(" ::= ??") +" // = "+ path
+ def domainEnumerable = domain.nonEmpty
+
+ private val domMap = new collection.mutable.HashMap[Const, Sym]
+ private def symForEqualsTo(c: Const) = {
+ domMap getOrElseUpdate(c, {
+ // println("creating symbol for equality "+ this +" = "+ c)
+ Sym(this, c)
+ })
+ }
+
+      // for this var, call it V, turn V = C into the equivalent proposition in boolean logic;
+      // over all executions of this method on the same Var object, the same constant C must map to
+      // the same symbol (symForEqualsTo memoizes the Sym per Const to guarantee this)
+ def propForEqualsTo(c: Const): Prop = {
+ domain match {
+ case None => symForEqualsTo(c)
+ case Some(domainConsts) =>
+ val domainTps = domainConsts map (_.tp)
+ val checkedTp = c.tp
+ // find all the domain types that could make the type test true
+ // if the checked type is a supertype of the lub of the domain,
+ // we'll end up \/'ing the whole domain together,
+ // but must not simplify to True, as the type test may also fail...
+ val matches = domainTps.filter(_ <:< checkedTp).map{ tp => symForEqualsTo(Const(tp)) }
+ // println("type-equals-prop for "+ this +" = "+ c +": "+ (checkedTp, domainTp, domainTps) +" matches: "+ matches)
+
+ if (matches isEmpty) False else matches.reduceLeft(Or)
+ }
+ }
+
+ def equalitySyms: Set[Sym] = domMap.values.toSet
+
+ private[this] val id: Int = Var.nextId
+ override def toString = "V"+ id
+ }
+
+ // all our variables range over types
+ // a literal constant becomes ConstantType(Constant(v)) when the type allows it (roughly, anyval + string + null)
+ // equality between variables: SingleType(x) (note that pattern variables cannot relate to each other -- it's always patternVar == nonPatternVar)
+ case class Const(tp: Type) {
+ override def toString = tp.toString
+
+ def toValueString = tp match {
+ case ConstantType(c) => c.escapedStringValue
+ case _ => tp.toString
+ }
+ }
+
+ // make sure it's not a primitive, else (5: Byte) match { case 5 => ... } sees no Byte
+ // TODO: domain of feasibly enumerable built-in types (enums, char?)
+ def enumerateSubtypes(tp: Type): Option[List[Type]] =
+ tp.typeSymbol match {
+ case BooleanClass =>
+ // println("enum bool "+ tp)
+ Some(List(ConstantType(Constant(true)), ConstantType(Constant(false))))
+ // TODO case _ if tp.isTupleType => // recurse into component types
+ case sym if !sym.isSealed || isPrimitiveValueClass(sym) =>
+ // println("enum unsealed "+ (tp, sym, sym.isSealed, isPrimitiveValueClass(sym)))
+ None
+ case sym =>
+ val subclasses = (
+ sym.sealedDescendants.toList sortBy (_.sealedSortName)
+ // symbols which are both sealed and abstract need not be covered themselves, because
+ // all of their children must be and they cannot otherwise be created.
+ filterNot (x => x.isSealed && x.isAbstractClass && !isPrimitiveValueClass(x)))
+ // println("subclasses "+ (sym, subclasses))
+
+ val tpApprox = typer.infer.approximateAbstracts(tp)
+ val pre = tpApprox.prefix
+ // valid subtypes are turned into checkable types, as we are entering the realm of the dynamic
+ val validSubTypes = (subclasses flatMap {sym =>
+ // have to filter out children which cannot match: see ticket #3683 for an example
+ // compare to the fully known type `tp` (modulo abstract types),
+ // so that we can rule out stuff like: sealed trait X[T]; class XInt extends X[Int] --> XInt not valid when enumerating X[String]
+          // however, must also approximate abstract types in the subtype before comparing (hence subTpApprox below)
+ val subTp = appliedType(pre.memberType(sym), sym.typeParams.map(_ => WildcardType))
+ val subTpApprox = typer.infer.approximateAbstracts(subTp) // TODO: needed?
+ // println("subtp"+(subTpApprox <:< tpApprox, subTpApprox, tpApprox))
+ if (subTpApprox <:< tpApprox) Some(checkableType(subTp))
+ else None
+ })
+ // println("enum sealed "+ (tp, tpApprox) + " as "+ validSubTypes)
+ Some(validSubTypes)
+ }
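
For intuition, the kind of hierarchy this enumeration is aimed at (Shape and friends are made-up
names for this sketch):

    sealed trait Shape
    final case class Circle(r: Double)     extends Shape
    final case class Square(side: Double)  extends Shape
    sealed trait Poly                      extends Shape   // sealed *and* abstract: filtered out above,
    final case class Triangle(base: Int)   extends Poly    //   only its concrete children need covering

Roughly, enumerating Shape yields the checkable types for Circle, Square and Triangle, while an
unsealed root yields None and the corresponding match is treated as uncheckable further down.
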
+
+ def narrowTypeOf(p: Tree) = p match {
+ case Literal(c) => ConstantType(c)
+ case Ident(_) if p.symbol.isStable => singleType(p.tpe.prefix, p.symbol)
+ case x => x.tpe.narrow
+ }
+
+ // approximate a type to the static type that is fully checkable at run time,
+ // hiding statically known but dynamically uncheckable information using existential quantification
+ // TODO: this is subject to the availability of TypeTags (since an abstract type with a type tag is checkable at run time)
+ def checkableType(tp: Type): Type = {
+ // TODO: this is extremely rough...
+ object toCheckable extends TypeMap {
+ def apply(tp: Type) = tp match {
+ case TypeRef(pre, sym, a :: as) if sym ne ArrayClass =>
+ // replace type args by existentials, since they can't be checked
+ // TODO: when type tags are available, we will check -- when this is implemented, can we take that into account here?
+ // TODO: don't reuse sym.typeParams, they have bounds (and those must not be considered)
+ newExistentialType(sym.typeParams, sym.tpe).asSeenFrom(pre, sym.owner)
+ case _ => mapOver(tp)
+ }
+ }
+ val res = toCheckable(tp)
+ // println("checkable "+(tp, res))
+ res
+ }
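
Roughly, the effect of this approximation on a few familiar types (illustrative, not actual
compiler output):

    // checkableType(List[Int])     ~>  List[_]      // type argument replaced by an existential
    // checkableType(Option[Shape]) ~>  Option[_]
    // checkableType(Array[Int])    ~>  Array[Int]   // arrays are excluded above (sym ne ArrayClass)
    // checkableType(Int)           ~>  Int          // no type arguments, mapOver leaves it alone
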
+
+ // a type is "uncheckable" (for exhaustivity) if we don't statically know its subtypes (i.e., it's unsealed)
+ // we consider tuple types with at least one component of a checkable type as a checkable type
+ def uncheckableType(tp: Type): Boolean = {
+ @inline def tupleComponents(tp: Type) = tp.normalize.typeArgs
+ val checkable = (
+ (isTupleType(tp) && tupleComponents(tp).exists(tp => !uncheckableType(tp)))
+ || enumerateSubtypes(tp).nonEmpty)
+ // if (!checkable) println("deemed uncheckable: "+ tp)
+ !checkable
+ }
+
+ def exhaustive(prevBinder: Symbol, cases: List[List[TreeMaker]], pt: Type): List[String] = if (uncheckableType(prevBinder.info)) Nil else {
+ // customize TreeMakersToConds (which turns a tree of tree makers into a more abstract DAG of tests)
+ // - approximate the pattern `List()` (unapplySeq on List with empty length) as `Nil`,
+ // otherwise the common (xs: List[Any]) match { case List() => case x :: xs => } is deemed unexhaustive
+    //  - back off (to avoid crying non-exhaustive too often) when:
+    //    - there are guards we cannot evaluate statically
+    //    - there are extractor calls (that we can't secretly/soundly rewrite)
+ var backoff = false
+ object exhaustivityApproximation extends TreeMakersToConds(prevBinder, cases) {
+ override def treeMakerToCond(tm: TreeMaker): Cond = tm match {
+ case p@ExtractorTreeMaker(extractor, Some(lenCheck), testedBinder, _) =>
+ p.checkedLength match {
+ // pattern: `List()` (interpret as `Nil`)
+ // TODO: make it more general List(1, 2) => 1 :: 2 :: Nil
+ case Some(0) if testedBinder.tpe.typeSymbol == ListClass => // extractor.symbol.owner == SeqFactory
+ EqualityCond(binderToUniqueTree(p.prevBinder), unique(Ident(NilModule) setType NilModule.tpe))
+ case _ =>
+ super.treeMakerToCond(tm)
+ }
+ case ExtractorTreeMaker(_, _, _, _) =>
+// println("backing off due to "+ tm)
+ backoff = true
+ super.treeMakerToCond(tm)
+ case GuardTreeMaker(guard) =>
+ guard.tpe match {
+ case ConstantType(Constant(true)) => Top
+ case ConstantType(Constant(false)) => Havoc
+ case _ =>
+// println("can't statically interpret guard: "+(guard, guard.tpe))
+ backoff = true
+ Havoc
+ }
+ case _ =>
+ super.treeMakerToCond(tm)
+ }
+ }
+
+ def symbolic(t: Cond): Prop = t match {
+ case AndCond(a, b) => And(symbolic(a), symbolic(b))
+ case OrCond(a, b) => Or(symbolic(a), symbolic(b))
+ case Top => True
+ case Havoc => False
+ case TypeCond(p, pt) => Eq(Var(p), Const(checkableType(pt)))
+ case EqualityCond(p, q) => Eq(Var(p), Const(narrowTypeOf(q)))
+ case NonNullCond(p) => True // ignoring nullness because it generates too much noise Not(Eq(Var(p), Const(NullClass.tpe)))
+ }
+
+ def symbolicCase(tests: List[Test]) = {
+ val testsBeforeBody = tests.takeWhile(t => !t.treeMaker.isInstanceOf[BodyTreeMaker])
+ testsBeforeBody.map(t => symbolic(t.cond)).foldLeft(True: Prop)(And)
+ }
+
+ val tests = exhaustivityApproximation.approximateMatch
+
+ if (backoff) Nil else {
+ val symbolicCases = tests map symbolicCase
+
+ val prevBinderTree = exhaustivityApproximation.binderToUniqueTree(prevBinder)
+
+ // TODO: null tests generate too much noise, so disabled them -- is there any way to bring them back?
+ // assuming we're matching on a non-null scrutinee (prevBinder), when does the match fail?
+ // val nonNullScrutineeCond =
+ // assume non-null for all the components of the tuple we're matching on (if we're matching on a tuple)
+ // if (isTupleType(prevBinder.tpe))
+ // prevBinder.tpe.typeArgs.mapWithIndex{case (_, i) => NonNullCond(codegen.tupleSel(prevBinderTree)(i))}.reduceLeft(AndCond)
+ // else
+ // NonNullCond(prevBinderTree)
+ // val matchFails = And(symbolic(nonNullScrutineeCond), Not(symbolicCases reduceLeft (Or(_, _))))
+
+ // when does the match fail?
+ val matchFails = Not(symbolicCases reduceLeft (Or(_, _)))
+
+
+ // debug output:
+ // println("analysing:")
+ // showTreeMakers(cases)
+ // showTests(tests)
+ //
+ // def gatherVariables(p: Prop): Set[Var] = {
+ // val vars = new HashSet[Var]()
+ // (new PropTraverser {
+ // override def applyVar(v: Var) = vars += v
+ // })(p)
+ // vars.toSet
+ // }
+ // val vars = gatherVariables(matchFails)
+ // println("\nvars:\n"+ (vars map (_.describe) mkString ("\n")))
+ //
+ // println("\nmatchFails as CNF:\n"+ cnfString(normalize(matchFails)))
+
+ // find the models (under which the match fails)
+ val matchFailModels = fullDPLL(normalize(matchFails))
+
+ val scrutVar = Var(prevBinderTree)
+ val counterExamples = matchFailModels.map(modelToCounterExample(scrutVar))
+
+ CounterExample.prune(counterExamples).map(_.toString).sorted
+ }
+ }
+
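
For intuition, a minimal match this analysis is designed to flag (the message is assembled in
optimizeCases further down, so the wording shown here is approximate):

    object ExhaustivityDemo {
      sealed trait Answer
      case object Yes   extends Answer
      case object No    extends Answer
      case object Maybe extends Answer

      def reply(a: Answer): String = a match { // warning: match may not be exhaustive.
        case Yes => "yes"                      // It would fail on the following input: Maybe
        case No  => "no"
      }
    }
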
+ object CounterExample {
+ def prune(examples: List[CounterExample]): List[CounterExample] = {
+ val distinct = examples.filterNot(_ == NoExample).toSet
+ distinct.filterNot(ce => distinct.exists(other => (ce ne other) && ce.coveredBy(other))).toList
+ }
+ }
+
+    // a way to construct a value that will make the match fail (a constructor invocation, a constant, an object of some type)
+ class CounterExample {
+ protected[SymbolicMatchAnalysis] def flattenConsArgs: List[CounterExample] = Nil
+ def coveredBy(other: CounterExample): Boolean = this == other || other == WildcardExample
+ }
+ case class ValueExample(c: Const) extends CounterExample { override def toString = c.toValueString }
+ case class TypeExample(c: Const) extends CounterExample { override def toString = "(_ : "+ c +")" }
+ case class NegativeExample(nonTrivialNonEqualTo: List[Const]) extends CounterExample {
+ override def toString = {
+ val negation =
+ if (nonTrivialNonEqualTo.tail.isEmpty) nonTrivialNonEqualTo.head.toValueString
+ else nonTrivialNonEqualTo.map(_.toValueString).sorted.mkString("in (", ", ", ")")
+ "<not "+ negation +">"
+ }
+ }
+ case class ListExample(ctorArgs: List[CounterExample]) extends CounterExample {
+ protected[SymbolicMatchAnalysis] override def flattenConsArgs: List[CounterExample] = ctorArgs match {
+ case hd :: tl :: Nil => hd :: tl.flattenConsArgs
+ case _ => Nil
+ }
+ protected[SymbolicMatchAnalysis] lazy val elems = flattenConsArgs
+
+ override def coveredBy(other: CounterExample): Boolean =
+ other match {
+ case other@ListExample(_) =>
+ this == other || ((elems.length == other.elems.length) && (elems zip other.elems).forall{case (a, b) => a coveredBy b})
+ case _ => super.coveredBy(other)
+ }
+
+ override def toString = elems.mkString("List(", ", ", ")")
+ }
+ case class TupleExample(ctorArgs: List[CounterExample]) extends CounterExample {
+ override def toString = ctorArgs.mkString("(", ", ", ")")
+
+ override def coveredBy(other: CounterExample): Boolean =
+ other match {
+ case TupleExample(otherArgs) =>
+ this == other || ((ctorArgs.length == otherArgs.length) && (ctorArgs zip otherArgs).forall{case (a, b) => a coveredBy b})
+ case _ => super.coveredBy(other)
+ }
+ }
+ case class ConstructorExample(cls: Symbol, ctorArgs: List[CounterExample]) extends CounterExample {
+ override def toString = cls.decodedName + (if (cls.isModuleClass) "" else ctorArgs.mkString("(", ", ", ")"))
+ }
+
+ case object WildcardExample extends CounterExample { override def toString = "_" }
+ case object NoExample extends CounterExample { override def toString = "??" }
+
+ // return constructor call when the model is a true counter example
+ // (the variables don't take into account type information derived from other variables,
+ // so, naively, you might try to construct a counter example like _ :: Nil(_ :: _, _ :: _),
+ // since we didn't realize the tail of the outer cons was a Nil)
+ def modelToCounterExample(scrutVar: Var)(model: Model): CounterExample = {
+ // x1 = ...
+ // x1.hd = ...
+ // x1.tl = ...
+ // x1.hd.hd = ...
+ // ...
+ val varAssignment = model.toSeq.groupBy{f => f match {case (sym, value) => sym.variable} }.mapValues{ xs =>
+ val (trues, falses) = xs.partition(_._2)
+ (trues map (_._1.const), falses map (_._1.const))
+ // should never be more than one value in trues...
+ }
+
+ // println("var assignment:\n"+
+ // varAssignment.toSeq.sortBy(_._1.toString).map { case (v, (trues, falses)) =>
+ // val assignment = "== "+ (trues mkString("(", ", ", ")")) +" != ("+ (falses mkString(", ")) +")"
+ // v +"(="+ v.path +": "+ v.domainTp +") "+ assignment
+ // }.mkString("\n"))
+
+
+ // chop a path into a list of symbols
+ def chop(path: Tree): List[Symbol] = path match {
+ case Ident(_) => List(path.symbol)
+ case Select(pre, name) => chop(pre) :+ path.symbol
+ case _ => println("don't know how to chop "+ path); Nil
+ }
+
+ // turn the variable assignments into a tree
+ // the root is the scrutinee (x1), edges are labelled by the fields that are assigned
+ // a node is a variable example (which is later turned into a counter example)
+ object VariableAssignment {
+ private def findVar(path: List[Symbol]) = path match {
+ case List(root) if root == scrutVar.path.symbol => Some(scrutVar)
+ case _ => varAssignment.find{case (v, a) => chop(v.path) == path}.map(_._1)
+ }
+
+ private val uniques = new collection.mutable.HashMap[Var, VariableAssignment]
+ private def unique(variable: Var): VariableAssignment =
+ uniques.getOrElseUpdate(variable, {
+ val (eqTo, neqTo) = varAssignment.getOrElse(variable, (Nil, Nil)) // TODO
+ VariableAssignment(variable, eqTo.toList, neqTo.toList, HashMap.empty)
+ })
+
+ def apply(variable: Var): VariableAssignment = {
+ val path = chop(variable.path)
+ val pre = path.init
+ val field = path.last
+
+ val newCtor = unique(variable)
+
+ if (pre.isEmpty) newCtor
+ else {
+ findVar(pre) foreach { preVar =>
+ val outerCtor = this(preVar)
+ outerCtor.fields(field) = newCtor
+ }
+ newCtor
+ }
+ }
+ }
+
+ // node in the tree that describes how to construct a counter-example
+ case class VariableAssignment(variable: Var, equalTo: List[Const], notEqualTo: List[Const], fields: collection.mutable.Map[Symbol, VariableAssignment]) {
+ private lazy val ctor = (equalTo match { case List(Const(tp)) => tp case _ => variable.domainTp }).typeSymbol.primaryConstructor
+ private lazy val ctorParams = if (ctor == NoSymbol || ctor.paramss.isEmpty) Nil else ctor.paramss.head
+ private lazy val cls = if (ctor == NoSymbol) NoSymbol else ctor.owner
+ private lazy val caseFieldAccs = if (cls == NoSymbol) Nil else cls.caseFieldAccessors
+
+
+ def allFieldAssignmentsLegal: Boolean =
+ (fields.keySet subsetOf caseFieldAccs.toSet) && fields.values.forall(_.allFieldAssignmentsLegal)
+
+ private lazy val nonTrivialNonEqualTo = notEqualTo.filterNot{c => val sym = c.tp.typeSymbol; sym == AnyClass } // sym == NullClass ||
+
+ // NoExample if the constructor call is ill-typed
+ // (thus statically impossible -- can we incorporate this into the formula?)
+ // beBrief is used to suppress negative information nested in tuples -- it tends to get too noisy
+ def toCounterExample(beBrief: Boolean = false): CounterExample =
+ if (!allFieldAssignmentsLegal) NoExample
+ else {
+// println("describing "+ (variable, equalTo, notEqualTo, fields, cls, allFieldAssignmentsLegal))
+ val res = equalTo match {
+ // a definite assignment to a value
+ case List(eq@Const(_: ConstantType)) if fields.isEmpty => ValueExample(eq)
+
+ // constructor call
+ // or we did not gather any information about equality but we have information about the fields
+ // --> typical example is when the scrutinee is a tuple and all the cases first unwrap that tuple and only then test something interesting
+ case _ if cls != NoSymbol &&
+ ( equalTo.nonEmpty
+ || (fields.nonEmpty && !isPrimitiveValueClass(cls) && equalTo.isEmpty && notEqualTo.isEmpty)) =>
+
+ @inline def args(brevity: Boolean = beBrief) = {
+ // figure out the constructor arguments from the field assignment
+ val argLen = (caseFieldAccs.length min ctorParams.length)
+
+ (0 until argLen).map(i => fields.get(caseFieldAccs(i)).map(_.toCounterExample(brevity)) getOrElse WildcardExample).toList
+ }
+
+ cls match {
+ case ConsClass => ListExample(args())
+ case _ if isTupleSymbol(cls) => TupleExample(args(true))
+ case _ => ConstructorExample(cls, args())
+ }
+
+ // a definite assignment to a type
+ case List(eq) if fields.isEmpty => TypeExample(eq)
+
+ // negative information
+ case Nil if nonTrivialNonEqualTo.nonEmpty =>
+ // negation tends to get pretty verbose
+ if (beBrief) WildcardExample else NegativeExample(nonTrivialNonEqualTo)
+
+ // not a valid counter-example, possibly since we have a definite type but there was a field mismatch
+ // TODO: improve reasoning -- in the mean time, a false negative is better than an annoying false positive
+ case _ => NoExample
+ }
+// println("described as: "+ res)
+ res
+ }
+
+ override def toString = toCounterExample().toString
+ }
+
+ // slurp in information from other variables
+ varAssignment.keys.foreach{ v => if (v != scrutVar) VariableAssignment(v) }
+
+ // this is the variable we want a counter example for
+ VariableAssignment(scrutVar).toCounterExample()
}
}
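
As a rough end-to-end illustration (the exact counter example depends on the model the solver
happens to find): for

    // xs match {
    //   case Nil      => 0
    //   case x :: Nil => 1
    // }

the failing models correspond to a cons whose tail is itself a cons, which modelToCounterExample
renders as a ListExample and prints along the lines of List(_, _).
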
@@ -1446,28 +2263,49 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
// interpret:
val dependencies = new collection.mutable.LinkedHashMap[Test, Set[Cond]]
val tested = new collection.mutable.HashSet[Cond]
- testss foreach { tests =>
- tested.clear()
- tests dropWhile { test =>
- val cond = test.cond
- if ((cond eq Havoc) || (cond eq Top)) (cond eq Top) // stop when we encounter a havoc, skip top
- else {
- tested += cond
+
+ def storeDependencies(test: Test) = {
+ val cond = test.cond
+
+ def simplify(c: Cond): Set[Cond] = c match {
+ case AndCond(a, b) => simplify(a) ++ simplify(b)
+ case OrCond(_, _) => Set(Havoc) // TODO: supremum?
+ case NonNullCond(_) => Set(Top) // not worth remembering
+ case _ => Set(c)
+ }
+ val conds = simplify(cond)
+
+ if (conds(Havoc)) false // stop when we encounter a havoc
+ else {
+ val nonTrivial = conds filterNot (_ == Top)
+ if (nonTrivial nonEmpty) {
+ tested ++= nonTrivial
// is there an earlier test that checks our condition and whose dependencies are implied by ours?
- dependencies find { case (priorTest, deps) =>
- ((priorTest.cond eq cond) || (deps contains cond)) && (deps subsetOf tested)
- } foreach { case (priorTest, deps) =>
- // if so, note the dependency in both tests
- priorTest registerReuseBy test
+ dependencies find {
+ case (priorTest, deps) =>
+ ((simplify(priorTest.cond) == nonTrivial) || // our conditions are implied by priorTest if it checks the same thing directly
+ (nonTrivial subsetOf deps) // or if it depends on a superset of our conditions
+ ) && (deps subsetOf tested) // the conditions we've tested when we are here in the match satisfy the prior test, and hence what it tested
+ } foreach {
+ case (priorTest, _) =>
+ // if so, note the dependency in both tests
+ priorTest registerReuseBy test
}
dependencies(test) = tested.toSet // copies
- true
}
+ true
}
}
+
+ testss foreach { tests =>
+ tested.clear()
+ tests dropWhile storeDependencies
+ }
+ // println("dependencies: "+ dependencies)
+
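
A source-level sketch of what this dependency bookkeeping enables (illustrative; the real work
happens on tree makers, not on source):

    // x match {
    //   case Foo(a, _) if a > 0 => "pos"
    //   case Foo(a, _)          => "other"
    // }
    //
    // both cases start with the same type test / extraction on `x`; once the first case has
    // registered that condition, the second case's test is found in `dependencies` and marked
    // for reuse (registerReuseBy), so the collapsing step below can avoid re-testing
    // `x.isInstanceOf[Foo]`.
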
// find longest prefix of tests that reuse a prior test, and whose dependent conditions monotonically increase
// then, collapse these contiguous sequences of reusing tests
// store the result of the final test and the intermediate results in hoisted mutable variables (TODO: optimize: don't store intermediate results that aren't used)
@@ -1476,7 +2314,12 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
var okToCall = false
val reusedOrOrig = (tm: TreeMaker) => {assert(okToCall); reused.getOrElse(tm, tm)}
- val res = testss map { tests =>
+ // maybe collapse: replace shared prefix of tree makers by a ReusingCondTreeMaker
+ // once this has been computed, we'll know which tree makers are reused,
+ // and we'll replace those by the ReusedCondTreeMakers we've constructed (and stored in the reused map)
+ val collapsed = testss map { tests =>
+ // map tests to the equivalent list of treemakers, replacing shared prefixes by a reusing treemaker
+ // if there's no sharing, simply map to the tree makers corresponding to the tests
var currDeps = Set[Cond]()
val (sharedPrefix, suffix) = tests span { test =>
(test.cond eq Top) || (for(
@@ -1487,23 +2330,33 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
yield diff).nonEmpty
}
- val collapsedTreeMakers = if (sharedPrefix.nonEmpty) { // even sharing prefixes of length 1 brings some benefit (overhead-percentage for compiler: 26->24%, lib: 19->16%)
- for (test <- sharedPrefix; reusedTest <- test.reuses) reusedTest.treeMaker match {
- case reusedCTM: CondTreeMaker => reused(reusedCTM) = ReusedCondTreeMaker(reusedCTM)
- case _ =>
- }
+ val collapsedTreeMakers =
+ if (sharedPrefix.isEmpty) None
+ else { // even sharing prefixes of length 1 brings some benefit (overhead-percentage for compiler: 26->24%, lib: 19->16%)
+ for (test <- sharedPrefix; reusedTest <- test.reuses) reusedTest.treeMaker match {
+ case reusedCTM: CondTreeMaker => reused(reusedCTM) = ReusedCondTreeMaker(reusedCTM)
+ case _ =>
+ }
- // println("sharedPrefix: "+ sharedPrefix)
- for (lastShared <- sharedPrefix.reverse.dropWhile(_.cond eq Top).headOption;
- lastReused <- lastShared.reuses)
- yield ReusingCondTreeMaker(sharedPrefix, reusedOrOrig) :: suffix.map(_.treeMaker)
- } else None
+ // println("sharedPrefix: "+ sharedPrefix)
+ // if the shared prefix contains interesting conditions (!= Top)
+ // and the last of such interesting shared conditions reuses another treemaker's test
+ // replace the whole sharedPrefix by a ReusingCondTreeMaker
+ for (lastShared <- sharedPrefix.reverse.dropWhile(_.cond eq Top).headOption;
+ lastReused <- lastShared.reuses)
+ yield ReusingCondTreeMaker(sharedPrefix, reusedOrOrig) :: suffix.map(_.treeMaker)
+ }
collapsedTreeMakers getOrElse tests.map(_.treeMaker) // sharedPrefix need not be empty (but it only contains Top-tests, which are dropped above)
}
okToCall = true // TODO: remove (debugging)
- res mapConserve (_ mapConserve reusedOrOrig)
+ // replace original treemakers that are reused (as determined when computing collapsed),
+ // by ReusedCondTreeMakers
+ val reusedMakers = collapsed mapConserve (_ mapConserve reusedOrOrig)
+// println("after CSE:")
+// showTreeMakers(reusedMakers)
+ reusedMakers
}
object ReusedCondTreeMaker {
@@ -1520,28 +2373,45 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
// TODO: finer-grained duplication
def chainBefore(next: Tree)(casegen: Casegen): Tree = // assert(codegen eq optimizedCodegen)
atPos(pos)(casegen.asInstanceOf[optimizedCodegen.OptimizedCasegen].flatMapCondStored(cond, storedCond, res, nextBinder, substitution(next).duplicate))
+
+ override def toString = "Memo"+(nextBinder.name, storedCond.name, cond, res, substitution)
}
case class ReusingCondTreeMaker(sharedPrefix: List[Test], toReused: TreeMaker => TreeMaker) extends TreeMaker { import CODE._
- lazy val dropped_priors = sharedPrefix map (t => (toReused(t.treeMaker), t.reuses map (test => toReused(test.treeMaker))))
lazy val localSubstitution = {
- val (from, to) = dropped_priors.collect {
- case (dropped: CondTreeMaker, Some(prior: ReusedCondTreeMaker)) =>
- (dropped.nextBinder, REF(prior.nextBinder))
- }.unzip
- val oldSubs = dropped_priors.collect {
- case (dropped: TreeMaker, _) =>
- dropped.substitution
+ // replace binder of each dropped treemaker by corresponding binder bound by the most recent reused treemaker
+ var mostRecentReusedMaker: ReusedCondTreeMaker = null
+ def mapToStored(droppedBinder: Symbol) = if (mostRecentReusedMaker eq null) Nil else List((droppedBinder, REF(mostRecentReusedMaker.nextBinder)))
+ val (from, to) = sharedPrefix.flatMap { dropped =>
+ dropped.reuses.map(test => toReused(test.treeMaker)).foreach {
+ case reusedMaker: ReusedCondTreeMaker =>
+ mostRecentReusedMaker = reusedMaker
+ case _ =>
}
- oldSubs.foldLeft(Substitution(from, to))(_ >> _)
+
+ // TODO: have super-trait for retrieving the variable that's operated on by a tree maker
+ // and thus assumed in scope, either because it binds it or because it refers to it
+ dropped.treeMaker match {
+ case dropped: FunTreeMaker =>
+ mapToStored(dropped.nextBinder)
+ case _ => Nil
+ }
+ }.unzip
+ val rerouteToReusedBinders = Substitution(from, to)
+
+ val collapsedDroppedSubst = sharedPrefix map (t => (toReused(t.treeMaker).substitution))
+
+ collapsedDroppedSubst.foldLeft(rerouteToReusedBinders)(_ >> _)
}
- def chainBefore(next: Tree)(casegen: Casegen): Tree = {
- val cond = REF(dropped_priors.reverse.collectFirst{case (_, Some(ctm: ReusedCondTreeMaker)) => ctm}.get.storedCond)
+ lazy val lastReusedTreeMaker = sharedPrefix.reverse.flatMap(tm => tm.reuses map (test => toReused(test.treeMaker))).collectFirst{case x: ReusedCondTreeMaker => x}.head
- // TODO: finer-grained duplication -- MUST duplicate though, or we'll get VerifyErrors since sharing trees confuses lambdalift, and its confusion it emits illegal casts (diagnosed by Grzegorz: checkcast T ; invokevirtual S.m, where T not a subtype of S)
- casegen.ifThenElseZero(cond, substitution(next).duplicate)
+ def chainBefore(next: Tree)(casegen: Casegen): Tree = {
+ // TODO: finer-grained duplication -- MUST duplicate though, or we'll get VerifyErrors since sharing trees confuses lambdalift,
+ // and in its confusion it emits illegal casts (diagnosed by Grzegorz: checkcast T ; invokevirtual S.m, where T not a subtype of S)
+ casegen.ifThenElseZero(REF(lastReusedTreeMaker.storedCond), substitution(next).duplicate)
}
+ override def toString = "R"+(lastReusedTreeMaker.storedCond.name, substitution)
}
}
@@ -1646,7 +2516,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
// TODO: if patterns allow switch but the type of the scrutinee doesn't, cast (type-test) the scrutinee to the corresponding switchable type and switch on the result
if (regularSwitchMaker.switchableTpe(scrutSym.tpe)) {
val caseDefsWithDefault = regularSwitchMaker(cases map {c => (scrutSym, c)}, pt)
- if (caseDefsWithDefault.length <= 2) None // not worth emitting a switch... also, the optimizer has trouble digesting tiny switches, apparently, so let's be nice and not generate them
+ if (caseDefsWithDefault isEmpty) None // not worth emitting a switch.
else {
// match on scrutSym -- converted to an int if necessary -- not on scrut directly (to avoid duplicating scrut)
val scrutToInt: Tree =
@@ -1669,10 +2539,8 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
// analyze the result of approximateTreeMaker rather than the TreeMaker itself
object SwitchableTreeMaker extends SwitchableTreeMakerExtractor {
def unapply(x: TreeMaker): Option[Tree] = x match {
- case tm@TypeTestTreeMaker(_, _, _) =>
- Some(Bind(tm.nextBinder, Typed(Ident(nme.WILDCARD), TypeTree(tm.nextBinderTp)) /* not used by back-end */)) // -- TODO: use this if binder does not occur in the body
- case tm@TypeAndEqualityTestTreeMaker(_, patBinder, pt, _) if tm.isStraightTypeTest =>
- Some(Bind(tm.nextBinder, Typed(Ident(nme.WILDCARD), TypeTree(tm.nextBinderTp)) /* not used by back-end */))
+ case tm@TypeTestTreeMaker(_, _, pt, _) if tm.isPureTypeTest => // -- TODO: use this if binder does not occur in the body
+ Some(Bind(tm.nextBinder, Typed(Ident(nme.WILDCARD), TypeTree(pt)) /* not used by back-end */))
case _ =>
None
}
@@ -1823,8 +2691,18 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
trait MatchOptimizations extends CommonSubconditionElimination
with DeadCodeElimination
with SwitchEmission
- with OptimizedCodegen { self: TreeMakers =>
- override def optimizeCases(prevBinder: Symbol, cases: List[List[TreeMaker]], pt: Type): (List[List[TreeMaker]], List[Tree]) = {
+ with OptimizedCodegen
+ with SymbolicMatchAnalysis { self: TreeMakers =>
+ override def optimizeCases(prevBinder: Symbol, cases: List[List[TreeMaker]], pt: Type, unchecked: Boolean): (List[List[TreeMaker]], List[Tree]) = {
+ val counterExamples = if (unchecked) Nil else exhaustive(prevBinder, cases, pt)
+ if (counterExamples.nonEmpty) {
+ val ceString =
+ if (counterExamples.tail.isEmpty) "input: " + counterExamples.head
+ else "inputs: " + counterExamples.mkString(", ")
+
+ typer.context.unit.warning(prevBinder.pos, "match may not be exhaustive.\nIt would fail on the following "+ ceString)
+ }
+
val optCases = doCSE(prevBinder, doDCE(prevBinder, cases, pt), pt)
val toHoist = (
for (treeMakers <- optCases)
diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
index 4e578e3f0d..3373878beb 100644
--- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
@@ -1084,8 +1084,16 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R
def isEitherNullable = (NullClass.tpe <:< receiver.info) || (NullClass.tpe <:< actual.info)
def isBoolean(s: Symbol) = unboxedValueClass(s) == BooleanClass
def isUnit(s: Symbol) = unboxedValueClass(s) == UnitClass
- def isNumeric(s: Symbol) = isNumericValueClass(unboxedValueClass(s)) || (s isSubClass ScalaNumberClass)
- def isSpecial(s: Symbol) = isPrimitiveValueClass(unboxedValueClass(s)) || (s isSubClass ScalaNumberClass) || isMaybeValue(s)
+ def isNumeric(s: Symbol) = isNumericValueClass(unboxedValueClass(s)) || isAnyNumber(s)
+ def isScalaNumber(s: Symbol) = s isSubClass ScalaNumberClass
+ // test is behind a platform guard
+ def isJavaNumber(s: Symbol) = !forMSIL && (s isSubClass JavaNumberClass)
+ // includes java.lang.Number if appropriate [SI-5779]
+ def isAnyNumber(s: Symbol) = isScalaNumber(s) || isJavaNumber(s)
+ def isMaybeAnyValue(s: Symbol) = isPrimitiveValueClass(unboxedValueClass(s)) || isMaybeValue(s)
+ // used to short-circuit unrelatedTypes check if both sides are special
+ def isSpecial(s: Symbol) = isMaybeAnyValue(s) || isAnyNumber(s)
+ // unused
def possibleNumericCount = onSyms(_ filter (x => isNumeric(x) || isMaybeValue(x)) size)
val nullCount = onSyms(_ filter (_ == NullClass) size)
@@ -1155,7 +1163,7 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R
if (isCaseEquals) {
def thisCase = receiver.info.member(nme.equals_).owner
actual.info.baseClasses.find(_.isCase) match {
- case Some(p) if (p != thisCase) => nonSensible("case class ", false)
+ case Some(p) if p != thisCase => nonSensible("case class ", false)
case None =>
// stronger message on (Some(1) == None)
//if (receiver.isCase && receiver.isEffectivelyFinal && !(receiver isSubClass actual)) nonSensiblyNeq()
diff --git a/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala b/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala
index a6a8d6009f..fde760c752 100644
--- a/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala
@@ -14,22 +14,7 @@ import util.returning
abstract class TreeCheckers extends Analyzer {
import global._
- private val everything = ListBuffer[(Phase, Map[Tree, (Symbol, Type)])]()
- private val currentTrees = mutable.Map[Tree, (Symbol, Type)]()
- private val tpeOfTree = mutable.HashMap[Tree, Type]()
-
- if (settings.debug.value) {
- sys addShutdownHook {
- for ((ph, map) <- everything.toList) {
- println("\n>>>> " + ph + "\n")
- for ((tree, (sym, tpe)) <- map.toList.sortBy(_._1.summaryString)) {
- println("%20s %20s %s".format(sym, tpe, ("" + tree) take 50))
- }
- }
- }
- }
-
- private def classstr(x: AnyRef) = (x.getClass.getName split """\\.|\\$""").last
+ private def classstr(x: AnyRef) = x.getClass.getName split """\\.|\\$""" last;
private def typestr(x: Type) = " (tpe = " + x + ")"
private def treestr(t: Tree) = t + " [" + classstr(t) + "]" + typestr(t.tpe)
private def ownerstr(s: Symbol) = "'" + s + "'" + s.locationString
@@ -50,14 +35,13 @@ abstract class TreeCheckers extends Analyzer {
object SymbolTracker extends Traverser {
type PhaseMap = mutable.HashMap[Symbol, List[Tree]]
- val defSyms = mutable.HashMap[Symbol, List[DefTree]]() withDefaultValue Nil
- val newSyms = mutable.HashSet[Symbol]()
val maps = ListBuffer[(Phase, PhaseMap)]()
- val movedMsgs = ListBuffer[String]()
-
def prev = maps.init.last._2
def latest = maps.last._2
- def sortedNewSyms = newSyms.toList.distinct sortBy (_.name)
+ val defSyms = mutable.HashMap[Symbol, List[DefTree]]()
+ val newSyms = mutable.HashSet[Symbol]()
+ val movedMsgs = new ListBuffer[String]
+ def sortedNewSyms = newSyms.toList.distinct sortBy (_.name.toString)
def inPrev(sym: Symbol) = {
(maps.size >= 2) && (prev contains sym)
@@ -108,21 +92,18 @@ abstract class TreeCheckers extends Analyzer {
if (maps.isEmpty || maps.last._1 != ph)
maps += ((ph, new PhaseMap))
- currentTrees.clear()
traverse(unit.body)
- everything += ((ph, currentTrees.toMap))
-
reportChanges()
}
override def traverse(tree: Tree): Unit = {
val sym = tree.symbol
- currentTrees(tree) = ((sym, tree.tpe))
-
if (sym != null && sym != NoSymbol) {
record(sym, tree)
tree match {
- case x: DefTree => defSyms(sym) :+= x
- case _ => ()
+ case x: DefTree =>
+ if (defSyms contains sym) defSyms(sym) = defSyms(sym) :+ x
+ else defSyms(sym) = List(x)
+ case _ => ()
}
}
@@ -130,6 +111,8 @@ abstract class TreeCheckers extends Analyzer {
}
}
+ lazy val tpeOfTree = mutable.HashMap[Tree, Type]()
+
def posstr(p: Position) =
try p.source.path + ":" + p.line
catch { case _: UnsupportedOperationException => p.toString }
@@ -144,9 +127,20 @@ abstract class TreeCheckers extends Analyzer {
def assertFn(cond: Boolean, msg: => Any) =
if (!cond) errorFn(msg)
+ private def wrap[T](msg: => Any)(body: => Unit) {
+ try body
+ catch { case x =>
+ Console.println("Caught " + x)
+ Console.println(msg)
+ x.printStackTrace
+ }
+ }
+
def checkTrees() {
- informFn("[consistency check at the beginning of phase " + phase + "]")
- currentRun.units foreach check
+ if (settings.verbose.value)
+ Console.println("[consistency check at the beginning of phase " + phase + "]")
+
+ currentRun.units foreach (x => wrap(x)(check(x)))
}
def printingTypings[T](body: => T): T = {
@@ -168,7 +162,7 @@ abstract class TreeCheckers extends Analyzer {
informProgress("checking "+unit)
val context = rootContext(unit)
context.checking = true
- tpeOfTree.clear()
+ tpeOfTree.clear
SymbolTracker.check(phase, unit)
val checker = new TreeChecker(context)
runWithUnit(unit) {
@@ -215,11 +209,11 @@ abstract class TreeCheckers extends Analyzer {
tree.tpe = null
saved
})
- super.typed(tree, mode, pt) match {
+ wrap(tree)(super.typed(tree, mode, pt) match {
case _: Literal => ()
case x if x ne tree => treesDiffer(tree, x)
case _ => ()
- }
+ })
case _ => ()
}
@@ -286,7 +280,12 @@ abstract class TreeCheckers extends Analyzer {
if (sym.owner != currentOwner) {
val expected = currentOwner.ownerChain find (x => cond(x)) getOrElse fail("DefTree can't find owner: ")
if (sym.owner != expected)
- fail("Expected owner %s (out of %s), found %s: ".format(expected, currentOwner.ownerChain, sym.owner))
+ fail("""|
+ | currentOwner chain: %s
+ | symbol chain: %s""".stripMargin.format(
+ currentOwner.ownerChain take 3 mkString " -> ",
+ sym.ownerChain mkString " -> ")
+ )
}
}
}
diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
index 7bb9d5fef3..75174ca494 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
@@ -2327,7 +2327,7 @@ trait Typers extends Modes with Adaptations with Taggings {
val methodBodyTyper = newTyper(context.makeNewScope(context.tree, methodSym)) // should use the DefDef for the context's tree, but it doesn't exist yet (we need the typer we're creating to create it)
paramSyms foreach (methodBodyTyper.context.scope enter _)
- val match_ = methodBodyTyper.typedMatch(selector, cases, mode, ptRes)
+ val match_ = methodBodyTyper.typedMatch(gen.mkUnchecked(selector), cases, mode, ptRes)
val resTp = match_.tpe
val methFormals = paramSyms map (_.tpe)
@@ -2367,7 +2367,7 @@ trait Typers extends Modes with Adaptations with Taggings {
val methodBodyTyper = newTyper(context.makeNewScope(context.tree, methodSym)) // should use the DefDef for the context's tree, but it doesn't exist yet (we need the typer we're creating to create it)
paramSyms foreach (methodBodyTyper.context.scope enter _)
- val match_ = methodBodyTyper.typedMatch(selector, cases, mode, ptRes)
+ val match_ = methodBodyTyper.typedMatch(gen.mkUnchecked(selector), cases, mode, ptRes)
val resTp = match_.tpe
anonClass setInfo ClassInfoType(parentsPartial(List(argTp, resTp)), newScope, anonClass)
@@ -2394,7 +2394,7 @@ trait Typers extends Modes with Adaptations with Taggings {
paramSyms foreach (methodBodyTyper.context.scope enter _)
methodSym setInfoAndEnter MethodType(paramSyms, BooleanClass.tpe)
- val match_ = methodBodyTyper.typedMatch(selector, casesTrue, mode, BooleanClass.tpe)
+ val match_ = methodBodyTyper.typedMatch(gen.mkUnchecked(selector), casesTrue, mode, BooleanClass.tpe)
val body = methodBodyTyper.virtualizedMatch(match_ withAttachment DefaultOverrideMatchAttachment(FALSE_typed), mode, BooleanClass.tpe)
DefDef(methodSym, body)
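
The practical effect of wrapping the selector in gen.mkUnchecked (hedged sketch): matches
synthesized for partial-function and anonymous-function literals are not subjected to the new
exhaustivity analysis, since being partial is usually the point:

    object PartialFunctionDemo {
      sealed trait Answer
      case object Yes extends Answer
      case object No  extends Answer

      // no "match may not be exhaustive" warning for the synthesized match
      val f: PartialFunction[Answer, String] = { case Yes => "yes" }
    }
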
@@ -2510,10 +2510,7 @@ trait Typers extends Modes with Adaptations with Taggings {
namer.enterSyms(stats)
// need to delay rest of typedRefinement to avoid cyclic reference errors
unit.toCheck += { () =>
- // go to next outer context which is not silent, see #3614
- var c = context
- while (c.bufferErrors) c = c.outer
- val stats1 = newTyper(c).typedStats(stats, NoSymbol)
+ val stats1 = typedStats(stats, NoSymbol)
for (stat <- stats1 if stat.isDef) {
val member = stat.symbol
if (!(context.owner.ancestors forall
@@ -2552,9 +2549,11 @@ trait Typers extends Modes with Adaptations with Taggings {
else
stat match {
case imp @ Import(_, _) =>
- context = context.makeNewImport(imp)
imp.symbol.initialize
- typedImport(imp)
+ if (!imp.symbol.isError) {
+ context = context.makeNewImport(imp)
+ typedImport(imp)
+ } else EmptyTree
case _ =>
if (localTarget && !includesTargetPos(stat)) {
// skip typechecking of statements in a sequence where some other statement includes
diff --git a/src/library/scala/collection/mutable/FlatHashTable.scala b/src/library/scala/collection/mutable/FlatHashTable.scala
index ee6d4d1d22..4070174902 100644
--- a/src/library/scala/collection/mutable/FlatHashTable.scala
+++ b/src/library/scala/collection/mutable/FlatHashTable.scala
@@ -50,6 +50,10 @@ trait FlatHashTable[A] extends FlatHashTable.HashUtils[A] {
protected def capacity(expectedSize: Int) = if (expectedSize == 0) 1 else powerOfTwo(expectedSize)
+ /** The initial size of the hash table.
+ */
+ def initialSize: Int = 32
+
private def initialCapacity = capacity(initialSize)
protected def randomSeed = seedGenerator.get.nextInt()
@@ -361,10 +365,6 @@ private[collection] object FlatHashTable {
def defaultLoadFactor: Int = 450
final def loadFactorDenum = 1000
- /** The initial size of the hash table.
- */
- def initialSize: Int = 32
-
def sizeForThreshold(size: Int, _loadFactor: Int) = math.max(32, (size.toLong * loadFactorDenum / _loadFactor).toInt)
def newThreshold(_loadFactor: Int, size: Int) = {
diff --git a/src/library/scala/collection/mutable/HashTable.scala b/src/library/scala/collection/mutable/HashTable.scala
index a2f78dbde9..c307e6dcab 100644
--- a/src/library/scala/collection/mutable/HashTable.scala
+++ b/src/library/scala/collection/mutable/HashTable.scala
@@ -56,7 +56,15 @@ trait HashTable[A, Entry >: Null <: HashEntry[A, Entry]] extends HashTable.HashU
protected def tableSizeSeed = Integer.bitCount(table.length - 1)
- protected def initialSize: Int = HashTable.initialSize
+ /** The initial size of the hash table.
+ */
+ protected def initialSize: Int = 16
+
+ /** The initial threshold.
+ */
+ private def initialThreshold(_loadFactor: Int): Int = newThreshold(_loadFactor, initialCapacity)
+
+ private def initialCapacity = capacity(initialSize)
private def lastPopulatedIndex = {
var idx = table.length - 1
@@ -354,16 +362,6 @@ private[collection] object HashTable {
private[collection] final def defaultLoadFactor: Int = 750 // corresponds to 75%
private[collection] final def loadFactorDenum = 1000;
- /** The initial size of the hash table.
- */
- private[collection] final def initialSize: Int = 16
-
- /** The initial threshold.
- */
- private[collection] final def initialThreshold(_loadFactor: Int): Int = newThreshold(_loadFactor, initialCapacity)
-
- private[collection] final def initialCapacity = capacity(initialSize)
-
private[collection] final def newThreshold(_loadFactor: Int, size: Int) = ((size.toLong * _loadFactor) / loadFactorDenum).toInt
private[collection] final def sizeForThreshold(_loadFactor: Int, thr: Int) = ((thr.toLong * loadFactorDenum) / _loadFactor).toInt
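
Since initialSize now lives on the trait (as an overridable member) instead of being a final
member of the companion, a collection can pre-size its table; a hedged sketch (BigHashMap is an
illustrative name, assuming the 2.10-era mutable.HashMap):

    import scala.collection.mutable

    // start with a larger table to avoid early resizes when many entries are expected
    class BigHashMap[K, V] extends mutable.HashMap[K, V] {
      override protected def initialSize: Int = 1024
    }
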
diff --git a/src/library/scala/collection/mutable/PriorityQueue.scala b/src/library/scala/collection/mutable/PriorityQueue.scala
index 12dee45bae..af55a01ed6 100644
--- a/src/library/scala/collection/mutable/PriorityQueue.scala
+++ b/src/library/scala/collection/mutable/PriorityQueue.scala
@@ -161,10 +161,13 @@ class PriorityQueue[A](implicit val ord: Ordering[A])
*/
def clear(): Unit = { resarr.p_size0 = 1 }
- /** Returns an iterator which yields all the elements of the priority
- * queue in descending priority order.
+ /** Returns an iterator which yields all the elements.
*
- * @return an iterator over all elements sorted in descending order.
+ * Note: The order of elements returned is undefined.
+ * If you want to traverse the elements in priority queue
+ * order, use `clone().dequeueAll.iterator`.
+ *
+ * @return an iterator over all the elements.
*/
override def iterator: Iterator[A] = new AbstractIterator[A] {
private var i = 1
@@ -176,7 +179,6 @@ class PriorityQueue[A](implicit val ord: Ordering[A])
}
}
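
A quick usage sketch of the documented workaround (PqDemo is an illustrative name; draining a
clone by hand has the same effect as the suggested clone().dequeueAll.iterator):

    import scala.collection.mutable.PriorityQueue

    object PqDemo extends App {
      val pq = PriorityQueue(3, 1, 2)
      println(pq.iterator.toList)                     // some order; unspecified
      val copy = pq.clone()                           // drain a clone, leaving pq untouched
      println(List.fill(copy.size)(copy.dequeue()))   // List(3, 2, 1): highest priority first
    }
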
-
/** Returns the reverse of this queue. The priority queue that gets
* returned will have an inversed ordering - if for some elements
* `x` and `y` the original queue's ordering
@@ -198,6 +200,13 @@ class PriorityQueue[A](implicit val ord: Ordering[A])
revq
}
+  /** Returns an iterator which yields all the elements in the reverse of
+   *  the order returned by the method `iterator`.
+ *
+ * Note: The order of elements returned is undefined.
+ *
+   *  @return an iterator over all the elements, in the reverse of the order given by `iterator`.
+ */
def reverseIterator: Iterator[A] = new AbstractIterator[A] {
private var i = resarr.p_size0 - 1
def hasNext: Boolean = i >= 1
@@ -217,6 +226,8 @@ class PriorityQueue[A](implicit val ord: Ordering[A])
throw new UnsupportedOperationException("unsuitable as hash key")
/** Returns a regular queue containing the same elements.
+ *
+ * Note: the order of elements is undefined.
*/
def toQueue: Queue[A] = new Queue[A] ++= this.iterator
@@ -225,6 +236,13 @@ class PriorityQueue[A](implicit val ord: Ordering[A])
* @return the string representation of this queue.
*/
override def toString() = toList.mkString("PriorityQueue(", ", ", ")")
+
+ /** Converts this $coll to a list.
+ *
+ * Note: the order of elements is undefined.
+ *
+ * @return a list containing all elements of this $coll.
+ */
override def toList = this.iterator.toList
/** This method clones the priority queue.
diff --git a/src/library/scala/collection/mutable/StackProxy.scala b/src/library/scala/collection/mutable/StackProxy.scala
index 8884f03bbd..9eadfe4045 100644
--- a/src/library/scala/collection/mutable/StackProxy.scala
+++ b/src/library/scala/collection/mutable/StackProxy.scala
@@ -54,6 +54,11 @@ trait StackProxy[A] extends Stack[A] with Proxy {
this
}
+ override def push(elem: A): this.type = {
+ self.push(elem)
+ this
+ }
+
/** Returns the top element of the stack. This method will not remove
* the element from the stack. An error is signaled if there is no
* element on the stack.
diff --git a/src/library/scala/collection/package.scala b/src/library/scala/collection/package.scala
index 237ca28018..ac5cb66942 100644
--- a/src/library/scala/collection/package.scala
+++ b/src/library/scala/collection/package.scala
@@ -45,7 +45,7 @@ package scala
*
* The most common way to create a collection is to use the companion objects as factories.
* Of these, the three most common
- * are [[scala.collection.immutable.Seq]], [[scala.collection.immutable.Set]], and [[scala.collection.immutable.Map]]. Their
+ * are [[scala.collection.Seq]], [[scala.collection.immutable.Set]], and [[scala.collection.immutable.Map]]. Their
* companion objects are all available
 * as type aliases in either the [[scala]] package or in `scala.Predef`, and can be used
* like so:
@@ -61,7 +61,7 @@ package scala
* }}}
*
* It is also typical to use the [[scala.collection.immutable]] collections over those
- * in [[scala.collection.mutable]]; The types aliased in the [[scala]] package and
+ * in [[scala.collection.mutable]]; the types aliased in
* the `scala.Predef` object are the immutable versions.
*
* Also note that the collections library was carefully designed to include several implementations of
diff --git a/src/library/scala/concurrent/Future.scala b/src/library/scala/concurrent/Future.scala
index 4df2bb63af..c42393eee2 100644
--- a/src/library/scala/concurrent/Future.scala
+++ b/src/library/scala/concurrent/Future.scala
@@ -82,9 +82,29 @@ import language.higherKinds
* {{{
* f flatMap { (x: Int) => g map { (y: Int) => x + y } }
* }}}
+ *
+ * @define callbackInContext
+ * The provided callback always runs in the provided implicit
+ *  `ExecutionContext`, though there is no guarantee that the
+ * `execute()` method on the `ExecutionContext` will be called once
+ * per callback or that `execute()` will be called in the current
+ * thread. That is, the implementation may run multiple callbacks
+ * in a batch within a single `execute()` and it may run
+ * `execute()` either immediately or asynchronously.
*/
trait Future[+T] extends Awaitable[T] {
+ // The executor within the lexical scope
+ // of the Future trait. Note that this will
+ // (modulo bugs) _never_ execute a callback
+ // other than those below in this same file.
+ // As a nice side benefit, having this implicit
+ // here forces an ambiguity in those methods
+ // that also have an executor parameter, which
+ // keeps us from accidentally forgetting to use
+ // the executor parameter.
+ private implicit def internalExecutor: ExecutionContext = Future.InternalCallbackExecutor
+
/* Callbacks */
/** When this future is completed successfully (i.e. with a value),
@@ -95,11 +115,12 @@ trait Future[+T] extends Awaitable[T] {
* this will either be applied immediately or be scheduled asynchronously.
*
* $multipleCallbacks
+ * $callbackInContext
*/
- def onSuccess[U](pf: PartialFunction[T, U]): Unit = onComplete {
+ def onSuccess[U](pf: PartialFunction[T, U])(implicit executor: ExecutionContext): Unit = onComplete {
case Right(v) if pf isDefinedAt v => pf(v)
case _ =>
- }
+ }(executor)
/** When this future is completed with a failure (i.e. with a throwable),
* apply the provided callback to the throwable.
@@ -112,11 +133,12 @@ trait Future[+T] extends Awaitable[T] {
* Will not be called in case that the future is completed with a value.
*
* $multipleCallbacks
+ * $callbackInContext
*/
- def onFailure[U](callback: PartialFunction[Throwable, U]): Unit = onComplete {
+ def onFailure[U](callback: PartialFunction[Throwable, U])(implicit executor: ExecutionContext): Unit = onComplete {
case Left(t) if (isFutureThrowable(t) && callback.isDefinedAt(t)) => callback(t)
case _ =>
- }
+ }(executor)
/** When this future is completed, either through an exception, or a value,
* apply the provided function.
@@ -125,8 +147,9 @@ trait Future[+T] extends Awaitable[T] {
* this will either be applied immediately or be scheduled asynchronously.
*
* $multipleCallbacks
+ * $callbackInContext
*/
- def onComplete[U](func: Either[Throwable, T] => U): Unit
+ def onComplete[U](func: Either[Throwable, T] => U)(implicit executor: ExecutionContext): Unit
/* Miscellaneous */
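
A small usage sketch of the changed signatures (CallbackDemo and report are illustrative names;
note that in this version of the API onComplete still receives an Either):

    import scala.concurrent.{ ExecutionContext, Future }

    object CallbackDemo {
      // the caller now decides which executor runs the callbacks, via the implicit parameter
      def report(f: Future[Int])(implicit executor: ExecutionContext): Unit = {
        f.onSuccess { case n => println("got " + n) }
        f.onFailure { case t => println("failed: " + t) }
        f.onComplete {
          case Right(n) => println("done: " + n)
          case Left(t)  => println("error: " + t)
        }
      }
    }
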
@@ -182,10 +205,10 @@ trait Future[+T] extends Awaitable[T] {
*
* Will not be called if the future fails.
*/
- def foreach[U](f: T => U): Unit = onComplete {
+ def foreach[U](f: T => U)(implicit executor: ExecutionContext): Unit = onComplete {
case Right(r) => f(r)
case _ => // do nothing
- }
+ }(executor)
/** Creates a new future by applying the 's' function to the successful result of
* this future, or the 'f' function to the failed result. If there is any non-fatal
@@ -198,7 +221,7 @@ trait Future[+T] extends Awaitable[T] {
* the returned future
* @return a future that will be completed with the transformed value
*/
- def transform[S](s: T => S, f: Throwable => Throwable): Future[S] = {
+ def transform[S](s: T => S, f: Throwable => Throwable)(implicit executor: ExecutionContext): Future[S] = {
val p = Promise[S]()
onComplete {
@@ -211,7 +234,7 @@ trait Future[+T] extends Awaitable[T] {
} catch {
case NonFatal(t) => p failure t
}
- }
+ }(executor)
p.future
}
@@ -222,7 +245,7 @@ trait Future[+T] extends Awaitable[T] {
*
* $forComprehensionExamples
*/
- def map[S](f: T => S): Future[S] = { // transform(f, identity)
+ def map[S](f: T => S)(implicit executor: ExecutionContext): Future[S] = { // transform(f, identity)
val p = Promise[S]()
onComplete {
@@ -235,7 +258,7 @@ trait Future[+T] extends Awaitable[T] {
} catch {
case NonFatal(t) => p failure t
}
- }
+ }(executor)
p.future
}
@@ -247,21 +270,21 @@ trait Future[+T] extends Awaitable[T] {
*
* $forComprehensionExamples
*/
- def flatMap[S](f: T => Future[S]): Future[S] = {
+ def flatMap[S](f: T => Future[S])(implicit executor: ExecutionContext): Future[S] = {
val p = Promise[S]()
onComplete {
case l: Left[_, _] => p complete l.asInstanceOf[Left[Throwable, S]]
case Right(v) =>
try {
- f(v) onComplete {
+ f(v).onComplete({
case l: Left[_, _] => p complete l.asInstanceOf[Left[Throwable, S]]
case Right(v) => p success v
- }
+ })(internalExecutor)
} catch {
case NonFatal(t) => p failure t
}
- }
+ }(executor)
p.future
}
@@ -282,7 +305,7 @@ trait Future[+T] extends Awaitable[T] {
* await(h, 0) // throw a NoSuchElementException
* }}}
*/
- def filter(pred: T => Boolean): Future[T] = {
+ def filter(pred: T => Boolean)(implicit executor: ExecutionContext): Future[T] = {
val p = Promise[T]()
onComplete {
@@ -294,14 +317,14 @@ trait Future[+T] extends Awaitable[T] {
} catch {
case NonFatal(t) => p failure t
}
- }
+ }(executor)
p.future
}
/** Used by for-comprehensions.
*/
- final def withFilter(p: T => Boolean): Future[T] = filter(p)
+ final def withFilter(p: T => Boolean)(implicit executor: ExecutionContext): Future[T] = filter(p)(executor)
// final def withFilter(p: T => Boolean) = new FutureWithFilter[T](this, p)
// final class FutureWithFilter[+S](self: Future[S], p: S => Boolean) {
@@ -331,7 +354,7 @@ trait Future[+T] extends Awaitable[T] {
* await(h, 0) // throw a NoSuchElementException
* }}}
*/
- def collect[S](pf: PartialFunction[T, S]): Future[S] = {
+ def collect[S](pf: PartialFunction[T, S])(implicit executor: ExecutionContext): Future[S] = {
val p = Promise[S]()
onComplete {
@@ -343,7 +366,7 @@ trait Future[+T] extends Awaitable[T] {
} catch {
case NonFatal(t) => p failure t
}
- }
+ }(executor)
p.future
}
@@ -360,7 +383,7 @@ trait Future[+T] extends Awaitable[T] {
* future (6 / 2) recover { case e: ArithmeticException ⇒ 0 } // result: 3
* }}}
*/
- def recover[U >: T](pf: PartialFunction[Throwable, U]): Future[U] = {
+ def recover[U >: T](pf: PartialFunction[Throwable, U])(implicit executor: ExecutionContext): Future[U] = {
val p = Promise[U]()
onComplete {
@@ -370,7 +393,7 @@ trait Future[+T] extends Awaitable[T] {
case NonFatal(t) => p failure t
}
case otherwise => p complete otherwise
- }
+ }(executor)
p.future
}
@@ -388,7 +411,7 @@ trait Future[+T] extends Awaitable[T] {
* future (6 / 0) recoverWith { case e: ArithmeticException => f } // result: Int.MaxValue
* }}}
*/
- def recoverWith[U >: T](pf: PartialFunction[Throwable, Future[U]]): Future[U] = {
+ def recoverWith[U >: T](pf: PartialFunction[Throwable, Future[U]])(implicit executor: ExecutionContext): Future[U] = {
val p = Promise[U]()
onComplete {
@@ -399,7 +422,7 @@ trait Future[+T] extends Awaitable[T] {
case NonFatal(t) => p failure t
}
case otherwise => p complete otherwise
- }
+ }(executor)
p.future
}
@@ -498,12 +521,12 @@ trait Future[+T] extends Awaitable[T] {
* }
* }}}
*/
- def andThen[U](pf: PartialFunction[Either[Throwable, T], U]): Future[T] = {
+ def andThen[U](pf: PartialFunction[Either[Throwable, T], U])(implicit executor: ExecutionContext): Future[T] = {
val p = Promise[T]()
onComplete {
case r => try if (pf isDefinedAt r) pf(r) finally p complete r
- }
+ }(executor)
p.future
}
@@ -656,6 +679,33 @@ object Future {
for (r <- fr; b <- fb) yield (r += b)
}.map(_.result)
+ // This is used to run callbacks which are internal
+ // to scala.concurrent; our own callbacks are only
+ // ever used to eventually run another callback,
+ // and that other callback will have its own
+ // executor because all callbacks come with
+ // an executor. Our own callbacks never block
+ // and have no "expected" exceptions.
+ // As a result, this executor can do nothing;
+ // some other executor will always come after
+ // it (and sometimes one will be before it),
+ // and those will be performing the "real"
+ // dispatch to code outside scala.concurrent.
+ // Because this exists, ExecutionContext.defaultExecutionContext
+ // isn't instantiated by Future internals, so
+ // if some code for some reason wants to avoid
+ // ever starting up the default context, it can do so
+ // by just not ever using it itself. scala.concurrent
+ // doesn't need to create defaultExecutionContext as
+ // a side effect.
+ private[concurrent] object InternalCallbackExecutor extends ExecutionContext {
+ def execute(runnable: Runnable): Unit =
+ runnable.run()
+ def internalBlockingCall[T](awaitable: Awaitable[T], atMost: Duration): T =
+ throw new IllegalStateException("bug in scala.concurrent, called blocking() from internal callback")
+ def reportFailure(t: Throwable): Unit =
+ throw new IllegalStateException("problem in scala.concurrent internal callback", t)
+ }
}
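
The comment block above motivates InternalCallbackExecutor: scala.concurrent's own relay callbacks never block and only ever hand off to a callback that carries its own executor, so a do-nothing, run-inline executor suffices, and the default context never has to be instantiated as a side effect. Below is a minimal, purely illustrative sketch of such a same-thread executor (written against the two-method ExecutionContext of later releases; the trait at this point in the tree additionally declares internalBlockingCall, as shown above). It is not the library's actual implementation.

import scala.concurrent.ExecutionContext

object SameThreadExecutor extends ExecutionContext {
  // Run the task inline on the calling thread; internal callbacks never block.
  def execute(runnable: Runnable): Unit = runnable.run()
  // Internal callbacks have no "expected" exceptions, so any failure is a bug.
  def reportFailure(t: Throwable): Unit =
    throw new IllegalStateException("internal callback failed", t)
}

object SameThreadDemo extends App {
  // Executing "on" this context is just a method call on the current thread,
  // so no thread pool (and no default context) is started as a side effect.
  SameThreadExecutor.execute(new Runnable { def run(): Unit = println("inline") })
}
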
diff --git a/src/library/scala/concurrent/Promise.scala b/src/library/scala/concurrent/Promise.scala
index 2e9de4a0d0..578642966f 100644
--- a/src/library/scala/concurrent/Promise.scala
+++ b/src/library/scala/concurrent/Promise.scala
@@ -25,6 +25,11 @@ package scala.concurrent
*/
trait Promise[T] {
+ // used for internal callbacks defined in
+ // the lexical scope of this trait;
+ // _never_ for application callbacks.
+ private implicit def internalExecutor: ExecutionContext = Future.InternalCallbackExecutor
+
/** Future containing the value of this promise.
*/
def future: Future[T]
@@ -106,26 +111,23 @@ object Promise {
/** Creates a promise object which can be completed with a value.
*
* @tparam T the type of the value in the promise
- * @param executor the execution context on which the promise is created on
* @return the newly created `Promise` object
*/
- def apply[T]()(implicit executor: ExecutionContext): Promise[T] = new impl.Promise.DefaultPromise[T]()
+ def apply[T](): Promise[T] = new impl.Promise.DefaultPromise[T]()
/** Creates an already completed Promise with the specified exception.
*
* @tparam T the type of the value in the promise
- * @param executor the execution context on which the promise is created on
* @return the newly created `Promise` object
*/
- def failed[T](exception: Throwable)(implicit executor: ExecutionContext): Promise[T] = new impl.Promise.KeptPromise[T](Left(exception))
+ def failed[T](exception: Throwable): Promise[T] = new impl.Promise.KeptPromise[T](Left(exception))
/** Creates an already completed Promise with the specified result.
*
* @tparam T the type of the value in the promise
- * @param executor the execution context on which the promise is created on
* @return the newly created `Promise` object
*/
- def successful[T](result: T)(implicit executor: ExecutionContext): Promise[T] = new impl.Promise.KeptPromise[T](Right(result))
+ def successful[T](result: T): Promise[T] = new impl.Promise.KeptPromise[T](Right(result))
}
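
With the executor parameter dropped from the Promise factories above, creating and completing a promise needs no ExecutionContext at all; one is only required when a callback is registered on the resulting future. A hedged usage sketch against the API as of this change (onSuccess and defaultExecutionContext were later superseded by foreach/onComplete and ExecutionContext.global):

import scala.concurrent.{ ExecutionContext, Promise }

object PromiseDemo extends App {
  // No ExecutionContext is needed to create or complete the promise any more.
  val p = Promise[Int]()
  p.success(42)

  // An executor is only needed once a callback is registered on the future.
  implicit val ec: ExecutionContext = ExecutionContext.defaultExecutionContext
  p.future onSuccess { case n => println("got " + n) }
}
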
diff --git a/src/library/scala/concurrent/impl/Future.scala b/src/library/scala/concurrent/impl/Future.scala
index a54e81bd05..47534e398b 100644
--- a/src/library/scala/concurrent/impl/Future.scala
+++ b/src/library/scala/concurrent/impl/Future.scala
@@ -17,8 +17,6 @@ import scala.collection.mutable.Stack
private[concurrent] trait Future[+T] extends scala.concurrent.Future[T] with Awaitable[T] {
- implicit def executor: ExecutionContext
-
}
private[concurrent] object Future {
diff --git a/src/library/scala/concurrent/impl/Promise.scala b/src/library/scala/concurrent/impl/Promise.scala
index 78053f5117..1d573ef818 100644
--- a/src/library/scala/concurrent/impl/Promise.scala
+++ b/src/library/scala/concurrent/impl/Promise.scala
@@ -42,7 +42,7 @@ object Promise {
/** Default promise implementation.
*/
- class DefaultPromise[T](implicit val executor: ExecutionContext) extends AbstractPromise with Promise[T] { self =>
+ class DefaultPromise[T] extends AbstractPromise with Promise[T] { self =>
updateState(null, Nil) // Start at "No callbacks" //FIXME switch to Unsafe instead of ARFU
protected final def tryAwait(atMost: Duration): Boolean = {
@@ -108,21 +108,26 @@ object Promise {
}) match {
case null => false
case cs if cs.isEmpty => true
- case cs => Future.dispatchFuture(executor, () => cs.foreach(f => notifyCompleted(f, resolved))); true
+ // this assumes that f(resolved) will go via dispatchFuture
+ // and notifyCompleted (see onComplete below)
+ case cs => cs.foreach(f => f(resolved)); true
}
}
- def onComplete[U](func: Either[Throwable, T] => U): Unit = {
+ def onComplete[U](func: Either[Throwable, T] => U)(implicit executor: ExecutionContext): Unit = {
+ val bound: Either[Throwable, T] => Unit = (either: Either[Throwable, T]) =>
+ Future.dispatchFuture(executor, () => notifyCompleted(func, either))
+
@tailrec //Tries to add the callback, if already completed, it dispatches the callback to be executed
def dispatchOrAddCallback(): Unit =
getState match {
- case r: Either[_, _] => Future.dispatchFuture(executor, () => notifyCompleted(func, r.asInstanceOf[Either[Throwable, T]]))
- case listeners: List[_] => if (updateState(listeners, func :: listeners)) () else dispatchOrAddCallback()
+ case r: Either[_, _] => bound(r.asInstanceOf[Either[Throwable, T]])
+ case listeners: List[_] => if (updateState(listeners, bound :: listeners)) () else dispatchOrAddCallback()
}
dispatchOrAddCallback()
}
- private final def notifyCompleted(func: Either[Throwable, T] => Any, result: Either[Throwable, T]) {
+ private final def notifyCompleted(func: Either[Throwable, T] => Any, result: Either[Throwable, T])(implicit executor: ExecutionContext) {
try {
func(result)
} catch {
@@ -135,7 +140,7 @@ object Promise {
*
* Useful in Future-composition when a value to contribute is already available.
*/
- final class KeptPromise[T](suppliedValue: Either[Throwable, T])(implicit val executor: ExecutionContext) extends Promise[T] {
+ final class KeptPromise[T](suppliedValue: Either[Throwable, T]) extends Promise[T] {
val value = Some(resolveEither(suppliedValue))
@@ -143,7 +148,7 @@ object Promise {
def tryComplete(value: Either[Throwable, T]): Boolean = false
- def onComplete[U](func: Either[Throwable, T] => U): Unit = {
+ def onComplete[U](func: Either[Throwable, T] => U)(implicit executor: ExecutionContext): Unit = {
val completedAs = value.get // Avoid closing over "this"
Future.dispatchFuture(executor, () => func(completedAs))
}
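
The onComplete rewrite above captures the caller's executor at registration time by wrapping the callback in a "bound" closure, so tryComplete can simply invoke the stored closures without knowing about any executor. The following is a stripped-down sketch of that binding pattern; the names (TinyCell, bind, onDone) are hypothetical, and unlike DefaultPromise it ignores callbacks registered after completion.

import scala.concurrent.ExecutionContext

class TinyCell[T] {
  // Each registered callback is stored already bound to its own executor.
  private var listeners: List[T => Unit] = Nil

  private def bind(callback: T => Unit)(implicit executor: ExecutionContext): T => Unit =
    value => executor.execute(new Runnable { def run(): Unit = callback(value) })

  def onDone(callback: T => Unit)(implicit executor: ExecutionContext): Unit =
    synchronized { listeners ::= bind(callback) }

  // Completion needs no executor of its own: every stored closure already
  // knows how to dispatch itself on the executor it was registered with.
  def complete(value: T): Unit =
    synchronized(listeners).foreach(_(value))
}
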
diff --git a/src/partest/scala/tools/partest/PartestTask.scala b/src/partest/scala/tools/partest/PartestTask.scala
index ad2e155182..67b38d2e24 100644
--- a/src/partest/scala/tools/partest/PartestTask.scala
+++ b/src/partest/scala/tools/partest/PartestTask.scala
@@ -309,6 +309,16 @@ class PartestTask extends Task with CompilationPathProperty {
}
} getOrElse sys.error("Provided classpath does not contain a Scala actors.")
+ val scalaActorsMigration = {
+ (classpath.list map { fs => new File(fs) }) find { f =>
+ f.getName match {
+ case "scala-actors-migration.jar" => true
+ case "actors-migration" if (f.getParentFile.getName == "classes") => true
+ case _ => false
+ }
+ }
+ } getOrElse sys.error("Provided classpath does not contain a Scala actors.")
+
def scalacArgsFlat: Option[Seq[String]] = scalacArgs map (_ flatMap { a =>
val parts = a.getParts
if(parts eq null) Seq[String]() else parts.toSeq
@@ -335,6 +345,7 @@ class PartestTask extends Task with CompilationPathProperty {
antFileManager.LATEST_COMP = scalaCompiler.getAbsolutePath
antFileManager.LATEST_PARTEST = scalaPartest.getAbsolutePath
antFileManager.LATEST_ACTORS = scalaActors.getAbsolutePath
+ antFileManager.LATEST_ACTORS_MIGRATION = scalaActorsMigration.getAbsolutePath
javacmd foreach (x => antFileManager.JAVACMD = x.getAbsolutePath)
javaccmd foreach (x => antFileManager.JAVAC_CMD = x.getAbsolutePath)
diff --git a/src/partest/scala/tools/partest/nest/AntRunner.scala b/src/partest/scala/tools/partest/nest/AntRunner.scala
index e77385d6e9..dc83e4ea66 100644
--- a/src/partest/scala/tools/partest/nest/AntRunner.scala
+++ b/src/partest/scala/tools/partest/nest/AntRunner.scala
@@ -23,6 +23,7 @@ class AntRunner extends DirectRunner {
var LATEST_COMP: String = _
var LATEST_PARTEST: String = _
var LATEST_ACTORS: String = _
+ var LATEST_ACTORS_MIGRATION: String = _
val testRootPath: String = "test"
val testRootDir: Directory = Directory(testRootPath)
}
diff --git a/src/partest/scala/tools/partest/nest/ConsoleFileManager.scala b/src/partest/scala/tools/partest/nest/ConsoleFileManager.scala
index 8d239a84bd..b270a6b65a 100644
--- a/src/partest/scala/tools/partest/nest/ConsoleFileManager.scala
+++ b/src/partest/scala/tools/partest/nest/ConsoleFileManager.scala
@@ -84,6 +84,7 @@ class ConsoleFileManager extends FileManager {
latestFile = testClassesDir.parent / "bin"
latestLibFile = testClassesDir / "library"
latestActorsFile = testClassesDir / "library" / "actors"
+ latestActMigFile = testClassesDir / "actors-migration"
latestCompFile = testClassesDir / "compiler"
latestPartestFile = testClassesDir / "partest"
latestFjbgFile = testParent / "lib" / "fjbg.jar"
@@ -94,6 +95,7 @@ class ConsoleFileManager extends FileManager {
latestFile = dir / "bin"
latestLibFile = dir / "lib/scala-library.jar"
latestActorsFile = dir / "lib/scala-actors.jar"
+ latestActMigFile = dir / "lib/scala-actors-migration.jar"
latestCompFile = dir / "lib/scala-compiler.jar"
latestPartestFile = dir / "lib/scala-partest.jar"
latestFjbgFile = testParent / "lib" / "fjbg.jar"
@@ -104,6 +106,7 @@ class ConsoleFileManager extends FileManager {
latestFile = prefixFile("build/quick/bin")
latestLibFile = prefixFile("build/quick/classes/library")
latestActorsFile = prefixFile("build/quick/classes/library/actors")
+ latestActMigFile = prefixFile("build/quick/classes/actors-migration")
latestCompFile = prefixFile("build/quick/classes/compiler")
latestPartestFile = prefixFile("build/quick/classes/partest")
}
@@ -114,6 +117,7 @@ class ConsoleFileManager extends FileManager {
latestFile = prefixFileWith(p, "bin")
latestLibFile = prefixFileWith(p, "lib/scala-library.jar")
latestActorsFile = prefixFileWith(p, "lib/scala-actors.jar")
+ latestActMigFile = prefixFileWith(p, "lib/scala-actors-migration.jar")
latestCompFile = prefixFileWith(p, "lib/scala-compiler.jar")
latestPartestFile = prefixFileWith(p, "lib/scala-partest.jar")
}
@@ -123,6 +127,7 @@ class ConsoleFileManager extends FileManager {
latestFile = prefixFile("dists/latest/bin")
latestLibFile = prefixFile("dists/latest/lib/scala-library.jar")
latestActorsFile = prefixFile("dists/latest/lib/scala-actors.jar")
+ latestActMigFile = prefixFile("dists/latest/lib/scala-actors-migration.jar")
latestCompFile = prefixFile("dists/latest/lib/scala-compiler.jar")
latestPartestFile = prefixFile("dists/latest/lib/scala-partest.jar")
}
@@ -132,6 +137,7 @@ class ConsoleFileManager extends FileManager {
latestFile = prefixFile("build/pack/bin")
latestLibFile = prefixFile("build/pack/lib/scala-library.jar")
latestActorsFile = prefixFile("build/pack/lib/scala-actors.jar")
+ latestActMigFile = prefixFile("build/pack/lib/scala-actors-migration.jar")
latestCompFile = prefixFile("build/pack/lib/scala-compiler.jar")
latestPartestFile = prefixFile("build/pack/lib/scala-partest.jar")
}
@@ -167,16 +173,19 @@ class ConsoleFileManager extends FileManager {
LATEST_COMP = latestCompFile.getAbsolutePath
LATEST_PARTEST = latestPartestFile.getAbsolutePath
LATEST_ACTORS = latestActorsFile.getAbsolutePath
+ LATEST_ACTORS_MIGRATION = latestActMigFile.getAbsolutePath
}
var LATEST_LIB: String = ""
var LATEST_COMP: String = ""
var LATEST_PARTEST: String = ""
var LATEST_ACTORS: String = ""
+ var LATEST_ACTORS_MIGRATION: String = ""
var latestFile: File = _
var latestLibFile: File = _
var latestActorsFile: File = _
+ var latestActMigFile: File = _
var latestCompFile: File = _
var latestPartestFile: File = _
var latestFjbgFile: File = _
diff --git a/src/partest/scala/tools/partest/nest/DirectRunner.scala b/src/partest/scala/tools/partest/nest/DirectRunner.scala
index 20f435cfbb..815c27f567 100644
--- a/src/partest/scala/tools/partest/nest/DirectRunner.scala
+++ b/src/partest/scala/tools/partest/nest/DirectRunner.scala
@@ -61,10 +61,10 @@ trait DirectRunner {
val latestLibFile = new File(fileManager.LATEST_LIB)
val latestPartestFile = new File(fileManager.LATEST_PARTEST)
val latestActorsFile = new File(fileManager.LATEST_ACTORS)
-
+ val latestActMigFile = new File(fileManager.LATEST_ACTORS_MIGRATION)
val scalacheckURL = PathSettings.scalaCheck.toURL
val scalaCheckParentClassLoader = ScalaClassLoader.fromURLs(
- scalacheckURL :: (List(latestCompFile, latestLibFile, latestActorsFile, latestPartestFile).map(_.toURI.toURL))
+ scalacheckURL :: (List(latestCompFile, latestLibFile, latestActorsFile, latestActMigFile, latestPartestFile).map(_.toURI.toURL))
)
Output.init()
diff --git a/src/partest/scala/tools/partest/nest/FileManager.scala b/src/partest/scala/tools/partest/nest/FileManager.scala
index 6d9e64730f..cf7160f521 100644
--- a/src/partest/scala/tools/partest/nest/FileManager.scala
+++ b/src/partest/scala/tools/partest/nest/FileManager.scala
@@ -63,6 +63,7 @@ trait FileManager extends FileUtil {
var LATEST_COMP: String
var LATEST_PARTEST: String
var LATEST_ACTORS: String
+ var LATEST_ACTORS_MIGRATION: String
var showDiff = false
var updateCheck = false
diff --git a/src/partest/scala/tools/partest/nest/ReflectiveRunner.scala b/src/partest/scala/tools/partest/nest/ReflectiveRunner.scala
index a0511774a9..a5d5952ff7 100644
--- a/src/partest/scala/tools/partest/nest/ReflectiveRunner.scala
+++ b/src/partest/scala/tools/partest/nest/ReflectiveRunner.scala
@@ -48,9 +48,9 @@ class ReflectiveRunner {
new ConsoleFileManager
import fileManager.
- { latestCompFile, latestLibFile, latestPartestFile, latestFjbgFile, latestScalapFile, latestActorsFile }
+ { latestCompFile, latestLibFile, latestPartestFile, latestFjbgFile, latestScalapFile, latestActorsFile, latestActMigFile }
val files =
- Array(latestCompFile, latestLibFile, latestPartestFile, latestFjbgFile, latestScalapFile, latestActorsFile) map (x => io.File(x))
+ Array(latestCompFile, latestLibFile, latestPartestFile, latestFjbgFile, latestScalapFile, latestActorsFile, latestActMigFile) map (x => io.File(x))
val sepUrls = files map (_.toURL)
var sepLoader = new URLClassLoader(sepUrls, null)
diff --git a/src/partest/scala/tools/partest/nest/SBTRunner.scala b/src/partest/scala/tools/partest/nest/SBTRunner.scala
index 5d994eeb37..14e2dc3df9 100644
--- a/src/partest/scala/tools/partest/nest/SBTRunner.scala
+++ b/src/partest/scala/tools/partest/nest/SBTRunner.scala
@@ -16,6 +16,7 @@ object SBTRunner extends DirectRunner {
var LATEST_COMP: String = _
var LATEST_PARTEST: String = _
var LATEST_ACTORS: String = _
+ var LATEST_ACTORS_MIGRATION: String = _
val testRootPath: String = "test"
val testRootDir: Directory = Directory(testRootPath)
}
@@ -65,6 +66,7 @@ object SBTRunner extends DirectRunner {
fileManager.LATEST_COMP = findClasspath("scala-compiler", "scala-compiler") getOrElse sys.error("No scala-compiler found! Classpath = " + fileManager.CLASSPATH)
fileManager.LATEST_PARTEST = findClasspath("scala-partest", "partest") getOrElse sys.error("No scala-partest found! Classpath = " + fileManager.CLASSPATH)
fileManager.LATEST_ACTORS = findClasspath("scala-actors", "actors") getOrElse sys.error("No scala-actors found! Classpath = " + fileManager.CLASSPATH)
+ fileManager.LATEST_ACTORS_MIGRATION = findClasspath("scala-actors-migration", "actors-migration") getOrElse sys.error("No scala-actors-migration found! Classpath = " + fileManager.CLASSPATH)
// TODO - Do something useful here!!!
fileManager.JAVAC_CMD = "javac"
diff --git a/src/partest/scala/tools/partest/nest/Worker.scala b/src/partest/scala/tools/partest/nest/Worker.scala
index 9326a8b232..40325c6375 100644
--- a/src/partest/scala/tools/partest/nest/Worker.scala
+++ b/src/partest/scala/tools/partest/nest/Worker.scala
@@ -56,6 +56,7 @@ class ScalaCheckFileManager(val origmanager: FileManager) extends FileManager {
var LATEST_COMP: String = origmanager.LATEST_COMP
var LATEST_PARTEST: String = origmanager.LATEST_PARTEST
var LATEST_ACTORS: String = origmanager.LATEST_ACTORS
+ var LATEST_ACTORS_MIGRATION: String = origmanager.LATEST_ACTORS_MIGRATION
}
object Output {
diff --git a/test/files/buildmanager/t2792/t2792.check b/test/files/buildmanager/t2792/t2792.check
index 68e14c6386..00a2b83469 100644
--- a/test/files/buildmanager/t2792/t2792.check
+++ b/test/files/buildmanager/t2792/t2792.check
@@ -9,3 +9,6 @@ compiling Set(A2.scala)
A2.scala:2: error: stable identifier required, but A.x found.
import A.x.y
^
+A2.scala:3: error: not found: value y
+ val z = y
+ ^
diff --git a/test/files/jvm/scala-concurrent-tck.scala b/test/files/jvm/scala-concurrent-tck.scala
index 86655ad89c..012460147a 100644
--- a/test/files/jvm/scala-concurrent-tck.scala
+++ b/test/files/jvm/scala-concurrent-tck.scala
@@ -808,6 +808,126 @@ trait TryEitherExtractor extends TestBase {
testLeftMatch()
}
+trait CustomExecutionContext extends TestBase {
+ import scala.concurrent.{ ExecutionContext, Awaitable }
+
+ def defaultEC = ExecutionContext.defaultExecutionContext
+
+ val inEC = new java.lang.ThreadLocal[Int]() {
+ override def initialValue = 0
+ }
+
+ def enterEC() = inEC.set(inEC.get + 1)
+ def leaveEC() = inEC.set(inEC.get - 1)
+ def assertEC() = assert(inEC.get > 0)
+ def assertNoEC() = assert(inEC.get == 0)
+
+ class CountingExecutionContext extends ExecutionContext {
+ val _count = new java.util.concurrent.atomic.AtomicInteger(0)
+ def count = _count.get
+
+ def delegate = ExecutionContext.defaultExecutionContext
+
+ override def execute(runnable: Runnable) = {
+ _count.incrementAndGet()
+ val wrapper = new Runnable() {
+ override def run() = {
+ enterEC()
+ try {
+ runnable.run()
+ } finally {
+ leaveEC()
+ }
+ }
+ }
+ delegate.execute(wrapper)
+ }
+
+ override def internalBlockingCall[T](awaitable: Awaitable[T], atMost: Duration): T =
+ delegate.internalBlockingCall(awaitable, atMost)
+
+ override def reportFailure(t: Throwable): Unit = {
+ System.err.println("Failure: " + t.getClass.getSimpleName + ": " + t.getMessage)
+ delegate.reportFailure(t)
+ }
+ }
+
+ def countExecs(block: (ExecutionContext) => Unit): Int = {
+ val context = new CountingExecutionContext()
+ block(context)
+ context.count
+ }
+
+ def testOnSuccessCustomEC(): Unit = {
+ val count = countExecs { implicit ec =>
+ once { done =>
+ val f = future({ assertNoEC() })(defaultEC)
+ f onSuccess {
+ case _ =>
+ assertEC()
+ done()
+ }
+ assertNoEC()
+ }
+ }
+
+ // should be onSuccess, but not future body
+ assert(count == 1)
+ }
+
+ def testKeptPromiseCustomEC(): Unit = {
+ val count = countExecs { implicit ec =>
+ once { done =>
+ val f = Promise.successful(10).future
+ f onSuccess {
+ case _ =>
+ assertEC()
+ done()
+ }
+ }
+ }
+
+ // should be onSuccess called once in proper EC
+ assert(count == 1)
+ }
+
+ def testCallbackChainCustomEC(): Unit = {
+ val count = countExecs { implicit ec =>
+ once { done =>
+ assertNoEC()
+ val addOne = { x: Int => assertEC(); x + 1 }
+ val f = Promise.successful(10).future
+ f.map(addOne).filter { x =>
+ assertEC()
+ x == 11
+ } flatMap { x =>
+ Promise.successful(x + 1).future.map(addOne).map(addOne)
+ } onComplete {
+ case Left(t) =>
+ try {
+ throw new AssertionError("error in test: " + t.getMessage, t)
+ } finally {
+ done()
+ }
+ case Right(x) =>
+ assertEC()
+ assert(x == 14)
+ done()
+ }
+ assertNoEC()
+ }
+ }
+
+ // the count is not defined (other than >=1)
+ // due to the batching optimizations.
+ assert(count >= 1)
+ }
+
+ testOnSuccessCustomEC()
+ testKeptPromiseCustomEC()
+ testCallbackChainCustomEC()
+}
+
object Test
extends App
with FutureCallbacks
@@ -816,6 +936,7 @@ with FutureProjections
with Promises
with Exceptions
with TryEitherExtractor
+with CustomExecutionContext
{
System.exit(0)
}
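
The new CustomExecutionContext trait above exercises exactly the behaviour this change introduces: the executor used for a callback is the one in implicit scope (or passed explicitly) at the call site, not one baked into the promise. A small hedged sketch of that call-site usage, using the defaultExecutionContext name from this tree:

import scala.concurrent.{ ExecutionContext, Promise }

object CallSiteDemo extends App {
  implicit val ec: ExecutionContext = ExecutionContext.defaultExecutionContext

  val f = Promise.successful(10).future

  // Combinators pick up the ExecutionContext implicitly ...
  val g = f.map(_ + 1).filter(_ > 10)

  // ... or can be given one explicitly, per call.
  val h = f.map(_ * 2)(ec)
}
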
diff --git a/test/files/neg/exhausting.check b/test/files/neg/exhausting.check
index 0bef21e077..7140b99428 100644
--- a/test/files/neg/exhausting.check
+++ b/test/files/neg/exhausting.check
@@ -1,29 +1,25 @@
-exhausting.scala:20: error: match is not exhaustive!
-missing combination * Nil
-
+exhausting.scala:21: error: match may not be exhaustive.
+It would fail on the following input: List(_, _, _)
def fail1[T](xs: List[T]) = xs match {
^
-exhausting.scala:24: error: match is not exhaustive!
-missing combination Nil
-
+exhausting.scala:27: error: match may not be exhaustive.
+It would fail on the following input: Nil
def fail2[T](xs: List[T]) = xs match {
^
-exhausting.scala:27: error: match is not exhaustive!
-missing combination Bar3
-
+exhausting.scala:32: error: match may not be exhaustive.
+It would fail on the following input: List(<not in (1, 2)>)
+ def fail3a(xs: List[Int]) = xs match {
+ ^
+exhausting.scala:39: error: match may not be exhaustive.
+It would fail on the following input: Bar3
def fail3[T](x: Foo[T]) = x match {
^
-exhausting.scala:31: error: match is not exhaustive!
-missing combination Bar2 Bar2
-
+exhausting.scala:47: error: match may not be exhaustive.
+It would fail on the following inputs: (Bar1, Bar2), (Bar1, Bar3), (Bar2, Bar1), (Bar2, Bar2)
def fail4[T <: AnyRef](xx: (Foo[T], Foo[T])) = xx match {
^
-exhausting.scala:36: error: match is not exhaustive!
-missing combination Bar1 Bar2
-missing combination Bar1 Bar3
-missing combination Bar2 Bar1
-missing combination Bar2 Bar2
-
+exhausting.scala:56: error: match may not be exhaustive.
+It would fail on the following inputs: (Bar1, Bar2), (Bar1, Bar3), (Bar2, Bar1), (Bar2, Bar2)
def fail5[T](xx: (Foo[T], Foo[T])) = xx match {
^
-5 errors found
+6 errors found
diff --git a/test/files/neg/exhausting.flags b/test/files/neg/exhausting.flags
index b7eb21d5f5..85d8eb2ba2 100644
--- a/test/files/neg/exhausting.flags
+++ b/test/files/neg/exhausting.flags
@@ -1 +1 @@
--Xfatal-warnings -Xoldpatmat
+-Xfatal-warnings
diff --git a/test/files/neg/exhausting.scala b/test/files/neg/exhausting.scala
index 14b05695aa..5554ee2671 100644
--- a/test/files/neg/exhausting.scala
+++ b/test/files/neg/exhausting.scala
@@ -16,30 +16,46 @@ object Test {
def ex3[T](xx: (Foo[T], Foo[T])) = xx match {
case (_: Foo[_], _: Foo[_]) => ()
}
-
+
+ // fails for: ::(_, ::(_, ::(_, _)))
def fail1[T](xs: List[T]) = xs match {
case Nil => "ok"
case x :: y :: Nil => "ok"
}
+
+ // fails for: Nil
def fail2[T](xs: List[T]) = xs match {
case _ :: _ => "ok"
}
+
+ // fails for: ::(<not in (2, 1)>, _)
+ def fail3a(xs: List[Int]) = xs match {
+ case 1 :: _ =>
+ case 2 :: _ =>
+ case Nil =>
+ }
+
+ // fails for: Bar3
def fail3[T](x: Foo[T]) = x match {
case Bar1 => "ok"
case Bar2 => "ok"
}
+ // fails for: (Bar1, Bar2)
+ // fails for: (Bar1, Bar3)
+ // fails for: (Bar2, Bar2)
+ // fails for: (Bar2, Bar1)
def fail4[T <: AnyRef](xx: (Foo[T], Foo[T])) = xx match {
case (Bar1, Bar1) => ()
case (Bar2, Bar3) => ()
case (Bar3, _) => ()
}
+ // fails for: (Bar1, Bar2)
+ // fails for: (Bar1, Bar3)
+ // fails for: (Bar2, Bar1)
+ // fails for: (Bar2, Bar2)
def fail5[T](xx: (Foo[T], Foo[T])) = xx match {
case (Bar1, Bar1) => ()
case (Bar2, Bar3) => ()
case (Bar3, _) => ()
}
-
- def main(args: Array[String]): Unit = {
-
- }
}
diff --git a/test/files/neg/pat_unreachable.flags b/test/files/neg/pat_unreachable.flags
index ba80cad69b..cb8324a345 100644
--- a/test/files/neg/pat_unreachable.flags
+++ b/test/files/neg/pat_unreachable.flags
@@ -1 +1 @@
--Xoldpatmat
+-Xoldpatmat \ No newline at end of file
diff --git a/test/files/neg/patmatexhaust.check b/test/files/neg/patmatexhaust.check
index 5426d61d31..6172811e13 100644
--- a/test/files/neg/patmatexhaust.check
+++ b/test/files/neg/patmatexhaust.check
@@ -1,54 +1,41 @@
-patmatexhaust.scala:7: error: match is not exhaustive!
-missing combination Baz
-
+patmatexhaust.scala:7: error: match may not be exhaustive.
+It would fail on the following input: Baz
def ma1(x:Foo) = x match {
^
-patmatexhaust.scala:11: error: match is not exhaustive!
-missing combination Bar
-
+patmatexhaust.scala:11: error: match may not be exhaustive.
+It would fail on the following input: Bar(_)
def ma2(x:Foo) = x match {
^
-patmatexhaust.scala:23: error: match is not exhaustive!
-missing combination Kult Kult
-missing combination Qult Qult
-
+patmatexhaust.scala:23: error: match may not be exhaustive.
+It would fail on the following inputs: (Kult(_), Kult(_)), (Qult(), Qult())
def ma3(x:Mult) = (x,x) match { // not exhaustive
^
-patmatexhaust.scala:49: error: match is not exhaustive!
-missing combination Gp
-missing combination Gu
-
+patmatexhaust.scala:49: error: match may not be exhaustive.
+It would fail on the following inputs: Gp(), Gu
def ma4(x:Deep) = x match { // missing cases: Gu, Gp
^
-patmatexhaust.scala:53: error: match is not exhaustive!
-missing combination Gp
-
- def ma5(x:Deep) = x match { // Gp
+patmatexhaust.scala:53: error: match may not be exhaustive.
+It would fail on the following input: Gp()
+ def ma5(x:Deep) = x match {
^
-patmatexhaust.scala:59: error: match is not exhaustive!
-missing combination Nil
-
+patmatexhaust.scala:59: error: match may not be exhaustive.
+It would fail on the following input: Nil
def ma6() = List(1,2) match { // give up
^
-patmatexhaust.scala:75: error: match is not exhaustive!
-missing combination B
-
+patmatexhaust.scala:75: error: match may not be exhaustive.
+It would fail on the following input: B()
def ma9(x: B) = x match {
^
-patmatexhaust.scala:100: error: match is not exhaustive!
-missing combination C1
-
+patmatexhaust.scala:100: error: match may not be exhaustive.
+It would fail on the following input: C1()
def ma10(x: C) = x match { // not exhaustive: C1 is not sealed.
^
-patmatexhaust.scala:114: error: match is not exhaustive!
-missing combination D1
-missing combination D2
-
+patmatexhaust.scala:114: error: match may not be exhaustive.
+It would fail on the following inputs: D1, D2()
def ma10(x: C) = x match { // not exhaustive: C1 has subclasses.
^
-patmatexhaust.scala:126: error: match is not exhaustive!
-missing combination C1
-
+patmatexhaust.scala:126: error: match may not be exhaustive.
+It would fail on the following input: C1()
def ma10(x: C) = x match { // not exhaustive: C1 is not abstract.
^
10 errors found
diff --git a/test/files/neg/patmatexhaust.flags b/test/files/neg/patmatexhaust.flags
index b7eb21d5f5..85d8eb2ba2 100644
--- a/test/files/neg/patmatexhaust.flags
+++ b/test/files/neg/patmatexhaust.flags
@@ -1 +1 @@
--Xfatal-warnings -Xoldpatmat
+-Xfatal-warnings
diff --git a/test/files/neg/patmatexhaust.scala b/test/files/neg/patmatexhaust.scala
index 9297e09d0d..ceb960ee97 100644
--- a/test/files/neg/patmatexhaust.scala
+++ b/test/files/neg/patmatexhaust.scala
@@ -50,7 +50,7 @@ class TestSealedExhaustive { // compile only
case Ga =>
}
- def ma5(x:Deep) = x match { // Gp
+ def ma5(x:Deep) = x match {
case Gu =>
case _ if 1 == 0 =>
case Ga =>
diff --git a/test/files/neg/sealed-java-enums.check b/test/files/neg/sealed-java-enums.check
index 9303c2df9c..20d00c8e91 100644
--- a/test/files/neg/sealed-java-enums.check
+++ b/test/files/neg/sealed-java-enums.check
@@ -1,9 +1,5 @@
-sealed-java-enums.scala:5: error: match is not exhaustive!
-missing combination BLOCKED
-missing combination State
-missing combination TERMINATED
-missing combination TIMED_WAITING
-
+sealed-java-enums.scala:5: error: match may not be exhaustive.
+It would fail on the following inputs: BLOCKED, TERMINATED, TIMED_WAITING
def f(state: State) = state match {
^
one error found
diff --git a/test/files/neg/sealed-java-enums.flags b/test/files/neg/sealed-java-enums.flags
index 312f3a87ec..e709c65918 100644
--- a/test/files/neg/sealed-java-enums.flags
+++ b/test/files/neg/sealed-java-enums.flags
@@ -1 +1 @@
--Xexperimental -Xfatal-warnings -Xoldpatmat
+-Xexperimental -Xfatal-warnings
diff --git a/test/files/neg/switch.check b/test/files/neg/switch.check
index 7212c1a22b..8955c94b32 100644
--- a/test/files/neg/switch.check
+++ b/test/files/neg/switch.check
@@ -1,10 +1,10 @@
switch.scala:28: error: could not emit switch for @switch annotated match
def fail1(c: Char) = (c: @switch) match {
- ^
+ ^
switch.scala:38: error: could not emit switch for @switch annotated match
def fail2(c: Char) = (c: @switch @unchecked) match {
- ^
+ ^
switch.scala:45: error: could not emit switch for @switch annotated match
def fail3(c: Char) = (c: @unchecked @switch) match {
- ^
+ ^
three errors found
diff --git a/test/files/neg/switch.flags b/test/files/neg/switch.flags
index 809e9ff2f2..e8fb65d50c 100644
--- a/test/files/neg/switch.flags
+++ b/test/files/neg/switch.flags
@@ -1 +1 @@
- -Xoldpatmat
+-Xfatal-warnings \ No newline at end of file
diff --git a/test/files/neg/t2405.check b/test/files/neg/t2405.check
new file mode 100644
index 0000000000..78360bcc21
--- /dev/null
+++ b/test/files/neg/t2405.check
@@ -0,0 +1,8 @@
+t2405.scala:6: warning: imported `y' is permanently hidden by definition of method y
+ import A.{x => y}
+ ^
+t2405.scala:8: error: could not find implicit value for parameter e: Int
+ implicitly[Int]
+ ^
+one warning found
+one error found
diff --git a/test/files/neg/t2405.scala b/test/files/neg/t2405.scala
new file mode 100644
index 0000000000..6982285b98
--- /dev/null
+++ b/test/files/neg/t2405.scala
@@ -0,0 +1,10 @@
+object A { implicit val x: Int = 1 }
+
+// Expecting shadowing #1
+object Test2 {
+ {
+ import A.{x => y}
+ def y: Int = 0
+ implicitly[Int]
+ }
+}
diff --git a/test/files/neg/t3098.check b/test/files/neg/t3098.check
index 403da281c8..85829747b9 100644
--- a/test/files/neg/t3098.check
+++ b/test/files/neg/t3098.check
@@ -1,6 +1,5 @@
-b.scala:3: error: match is not exhaustive!
-missing combination C
-
+b.scala:3: error: match may not be exhaustive.
+It would fail on the following input: (_ : C)
def f = (null: T) match {
^
one error found
diff --git a/test/files/neg/t3098.flags b/test/files/neg/t3098.flags
index b7eb21d5f5..85d8eb2ba2 100644
--- a/test/files/neg/t3098.flags
+++ b/test/files/neg/t3098.flags
@@ -1 +1 @@
--Xfatal-warnings -Xoldpatmat
+-Xfatal-warnings
diff --git a/test/files/neg/t3614.check b/test/files/neg/t3614.check
new file mode 100644
index 0000000000..5fdb5cbf1f
--- /dev/null
+++ b/test/files/neg/t3614.check
@@ -0,0 +1,4 @@
+t3614.scala:2: error: class type required but AnyRef{def a: <?>} found
+ def v = new ({ def a=0 })
+ ^
+one error found \ No newline at end of file
diff --git a/test/files/neg/t3614.scala b/test/files/neg/t3614.scala
new file mode 100644
index 0000000000..5b02cdf2b2
--- /dev/null
+++ b/test/files/neg/t3614.scala
@@ -0,0 +1,3 @@
+object t3614 {
+ def v = new ({ def a=0 })
+} \ No newline at end of file
diff --git a/test/files/neg/t3683a.check b/test/files/neg/t3683a.check
index 18e80dd5e8..3de3ad784e 100644
--- a/test/files/neg/t3683a.check
+++ b/test/files/neg/t3683a.check
@@ -1,6 +1,5 @@
-t3683a.scala:14: error: match is not exhaustive!
-missing combination XX
-
+t3683a.scala:14: error: match may not be exhaustive.
+It would fail on the following input: XX()
w match {
^
one error found
diff --git a/test/files/neg/t3683a.flags b/test/files/neg/t3683a.flags
index b7eb21d5f5..85d8eb2ba2 100644
--- a/test/files/neg/t3683a.flags
+++ b/test/files/neg/t3683a.flags
@@ -1 +1 @@
--Xfatal-warnings -Xoldpatmat
+-Xfatal-warnings
diff --git a/test/files/neg/t3692-new.flags b/test/files/neg/t3692-new.flags
index 82becdfbfd..cb8324a345 100644
--- a/test/files/neg/t3692-new.flags
+++ b/test/files/neg/t3692-new.flags
@@ -1 +1 @@
- -Xoldpatmat \ No newline at end of file
+-Xoldpatmat \ No newline at end of file
diff --git a/test/files/neg/t3692-old.flags b/test/files/neg/t3692-old.flags
index 82becdfbfd..cb8324a345 100644
--- a/test/files/neg/t3692-old.flags
+++ b/test/files/neg/t3692-old.flags
@@ -1 +1 @@
- -Xoldpatmat \ No newline at end of file
+-Xoldpatmat \ No newline at end of file
diff --git a/test/files/neg/t3761-overload-byname.check b/test/files/neg/t3761-overload-byname.check
new file mode 100644
index 0000000000..ae7d21dfa6
--- /dev/null
+++ b/test/files/neg/t3761-overload-byname.check
@@ -0,0 +1,13 @@
+t3761-overload-byname.scala:9: error: ambiguous reference to overloaded definition,
+both method m1 in object t of type (x: => Int, s: Object)Int
+and method m1 in object t of type (x: => AnyVal, s: String)Int
+match argument types (Int,String)
+ m1(1, "")
+ ^
+t3761-overload-byname.scala:11: error: ambiguous reference to overloaded definition,
+both method m2 in object t of type (x: => Int, s: Object)Int
+and method m2 in object t of type (x: => Any, s: String)Int
+match argument types (Int,String)
+ m2(1, "")
+ ^
+two errors found
diff --git a/test/files/neg/t3761-overload-byname.scala b/test/files/neg/t3761-overload-byname.scala
new file mode 100644
index 0000000000..5b9a381b93
--- /dev/null
+++ b/test/files/neg/t3761-overload-byname.scala
@@ -0,0 +1,13 @@
+object t {
+ def m1(x: => AnyVal, s: String) = 0
+ def m1(x: => Int, s: Object) = 1
+
+ def m2(x: => Any, s: String) = 0
+ def m2(x: => Int, s: Object) = 1
+
+
+ m1(1, "")
+ m1(1d, "")
+ m2(1, "")
+ m2("", "")
+}
diff --git a/test/files/neg/t5735.check b/test/files/neg/t5735.check
new file mode 100644
index 0000000000..f6e0028044
--- /dev/null
+++ b/test/files/neg/t5735.check
@@ -0,0 +1,6 @@
+t5735.scala:6: error: type mismatch;
+ found : (x: Int)Int <and> => String
+ required: Int
+ val z: Int = a
+ ^
+one error found
diff --git a/test/files/neg/t5735.scala b/test/files/neg/t5735.scala
new file mode 100644
index 0000000000..fde71ff962
--- /dev/null
+++ b/test/files/neg/t5735.scala
@@ -0,0 +1,7 @@
+abstract class Base {
+ def a: String = "one"
+}
+class Clazz extends Base {
+ def a(x: Int): Int = 2
+ val z: Int = a
+}
diff --git a/test/files/neg/t5821.check b/test/files/neg/t5821.check
new file mode 100644
index 0000000000..f9c00604bc
--- /dev/null
+++ b/test/files/neg/t5821.check
@@ -0,0 +1,4 @@
+t5821.scala:1: error: not found: object SthImportant
+import SthImportant._
+ ^
+one error found
diff --git a/test/files/neg/t5821.scala b/test/files/neg/t5821.scala
new file mode 100644
index 0000000000..4af0a2bf7f
--- /dev/null
+++ b/test/files/neg/t5821.scala
@@ -0,0 +1,8 @@
+import SthImportant._
+
+class Bar
+
+class Foo2 {
+ type Sth = Array[Bar]
+ def foo(xs: Sth): Bar = if ((xs eq null) || (xs.length == 0)) null else xs(0)
+}
diff --git a/test/files/neg/t639.check b/test/files/neg/t639.check
index 3b53da0515..6d41d872de 100644
--- a/test/files/neg/t639.check
+++ b/test/files/neg/t639.check
@@ -1,4 +1,7 @@
t639.scala:3: error: not found: object a
import a._
^
-one error found
+t639.scala:5: error: not found: type B
+@B
+ ^
+two errors found
diff --git a/test/files/neg/t963b.check b/test/files/neg/t963b.check
new file mode 100644
index 0000000000..9918a98c46
--- /dev/null
+++ b/test/files/neg/t963b.check
@@ -0,0 +1,6 @@
+t963b.scala:25: error: type mismatch;
+ found : B.type
+ required: AnyRef{val y: A}
+ B.f(B)
+ ^
+one error found
diff --git a/test/pending/neg/t963.scala b/test/files/neg/t963b.scala
index 3be0be1b84..b34aae8095 100644
--- a/test/pending/neg/t963.scala
+++ b/test/files/neg/t963b.scala
@@ -5,8 +5,8 @@ trait A {
}
object B {
- def f(x : { val y : A }) { x.y.v = x.y.v }
-
+ def f(x : { val y : A }) { x.y.v = x.y.v }
+
var a : A = _
var b : Boolean = false
def y : A = {
@@ -21,6 +21,6 @@ object B {
}
}
-object Test extends Application {
+object Test extends App {
B.f(B)
}
diff --git a/test/files/pos/arrays3.scala b/test/files/pos/arrays3.scala
new file mode 100644
index 0000000000..f96be0b427
--- /dev/null
+++ b/test/files/pos/arrays3.scala
@@ -0,0 +1,11 @@
+trait Foo {
+ type Repr <: String
+ def f2(x: Repr) = x.length
+}
+trait Fooz[Repr <: Array[_]] {
+ def f0(x: Repr) = x.length
+}
+
+trait Bar[Repr <: List[_]] extends Foo with Fooz[Array[Int]] {
+ def f1(x: Repr) = x.length
+}
diff --git a/test/files/pos/exhaust_alternatives.flags b/test/files/pos/exhaust_alternatives.flags
new file mode 100644
index 0000000000..e8fb65d50c
--- /dev/null
+++ b/test/files/pos/exhaust_alternatives.flags
@@ -0,0 +1 @@
+-Xfatal-warnings \ No newline at end of file
diff --git a/test/files/pos/exhaust_alternatives.scala b/test/files/pos/exhaust_alternatives.scala
new file mode 100644
index 0000000000..cc81d0be7d
--- /dev/null
+++ b/test/files/pos/exhaust_alternatives.scala
@@ -0,0 +1,10 @@
+sealed abstract class X
+sealed case class A(x: Boolean) extends X
+case object B extends X
+
+object Test {
+ def test(x: X) = x match {
+ case A(true) =>
+ case A(false) | B =>
+ }
+} \ No newline at end of file
diff --git a/test/files/pos/exhaustive_heuristics.scala b/test/files/pos/exhaustive_heuristics.scala
new file mode 100644
index 0000000000..f6bea455a5
--- /dev/null
+++ b/test/files/pos/exhaustive_heuristics.scala
@@ -0,0 +1,16 @@
+// tests exhaustivity doesn't give warnings (due to its heuristic rewrites kicking in or it backing off)
+object Test {
+ // List() => Nil
+ List(1) match {
+ case List() =>
+ case x :: xs =>
+ }
+
+ // we don't look into guards
+ val turnOffChecks = true
+ List(1) match {
+ case _ if turnOffChecks =>
+ }
+
+ // TODO: we back off when there are any user-defined extractors
+} \ No newline at end of file
diff --git a/test/files/pos/t2405.scala b/test/files/pos/t2405.scala
new file mode 100644
index 0000000000..224b2ce83b
--- /dev/null
+++ b/test/files/pos/t2405.scala
@@ -0,0 +1,23 @@
+object A { implicit val x: Int = 1 }
+
+// Problem as stated in the ticket.
+object Test1 {
+ import A.{x => y}
+ implicitly[Int]
+}
+
+// Testing for the absence of shadowing #1.
+object Test2 {
+ import A.{x => y}
+ val x = 2
+ implicitly[Int]
+}
+
+// Testing for the absence of shadowing #2.
+object Test3 {
+ {
+ import A.{x => y}
+ def x: Int = 0
+ implicitly[Int]
+ }
+}
diff --git a/test/files/pos/t3097.scala b/test/files/pos/t3097.scala
deleted file mode 100644
index a034b960f7..0000000000
--- a/test/files/pos/t3097.scala
+++ /dev/null
@@ -1,31 +0,0 @@
-package seal
-
-sealed trait ISimpleValue
-
-sealed trait IListValue extends ISimpleValue {
- def items: List[IAtomicValue[_]]
-}
-sealed trait IAtomicValue[O] extends ISimpleValue {
- def data: O
-}
-
-sealed trait IAbstractDoubleValue[O] extends IAtomicValue[O] { }
-sealed trait IDoubleValue extends IAbstractDoubleValue[Double]
-
-case class ListValue(val items: List[IAtomicValue[_]]) extends IListValue
-class DoubleValue(val data: Double) extends IDoubleValue {
- def asDouble = data
-}
-
-object Test {
- /**
- * @param args the command line arguments
- */
- def main(args: Array[String]): Unit = {
- val v: ISimpleValue = new DoubleValue(1)
- v match {
- case m: IListValue => println("list")
- case a: IAtomicValue[_] => println("atomic")
- }
- }
-}
diff --git a/test/files/pos/t3856.scala b/test/files/pos/t3856.scala
index fd253a56a8..5ea4b84e2c 100644
--- a/test/files/pos/t3856.scala
+++ b/test/files/pos/t3856.scala
@@ -2,6 +2,7 @@ case class C[T](x: T)
case class CS(xs: C[_]*)
+// t3856
object Test {
val x = CS(C(5), C("abc")) match { case CS(C(5), xs @ _*) => xs }
println(x)
diff --git a/test/files/pos/t4975.scala b/test/files/pos/t4975.scala
new file mode 100644
index 0000000000..12d889c0d5
--- /dev/null
+++ b/test/files/pos/t4975.scala
@@ -0,0 +1,12 @@
+object ImplicitScope {
+ class A[T]
+
+ def foo {
+ trait B
+ object B {
+ implicit def ab = new A[B]
+ }
+
+ implicitly[A[B]] // Error
+ }
+}
diff --git a/test/files/pos/t5305.scala b/test/files/pos/t5305.scala
new file mode 100644
index 0000000000..4c32cd7c6d
--- /dev/null
+++ b/test/files/pos/t5305.scala
@@ -0,0 +1,13 @@
+object t5305 {
+ def in(a: Any) = {}
+
+ object O {
+ type F = Int
+ val v = ""
+ }
+
+ in {
+ import O.{F, v}
+ type x = {type l = (F, v.type)} // not found: type F
+ }
+}
diff --git a/test/files/pos/t5779-numeq-warn.scala b/test/files/pos/t5779-numeq-warn.scala
new file mode 100644
index 0000000000..76ef2970fd
--- /dev/null
+++ b/test/files/pos/t5779-numeq-warn.scala
@@ -0,0 +1,13 @@
+
+object Test {
+ def main(args: Array[String]) {
+ val d: Double = (BigInt(1) << 64).toDouble
+ val f: Float = d.toFloat
+ val n: java.lang.Number = d.toFloat
+ assert (d == f) // ok
+ assert (d == n) // was: comparing values of types Double and Number using `==' will always yield false
+ assert (n == d) // was: Number and Double are unrelated: they will most likely never compare equal
+ assert (f == n)
+ assert (n == f)
+ }
+}
diff --git a/test/files/pos/t5809.flags b/test/files/pos/t5809.flags
new file mode 100644
index 0000000000..e93641e931
--- /dev/null
+++ b/test/files/pos/t5809.flags
@@ -0,0 +1 @@
+-Xlint -Xfatal-warnings \ No newline at end of file
diff --git a/test/files/pos/t5809.scala b/test/files/pos/t5809.scala
new file mode 100644
index 0000000000..8f6ce708d2
--- /dev/null
+++ b/test/files/pos/t5809.scala
@@ -0,0 +1,10 @@
+package scala.reflect
+
+package object api {
+ implicit class PimpedExpr[T](expr: Universe # Expr[T]) {
+ def runtimeEval: T = {
+ println("hello, dear")
+ expr.eval
+ }
+ }
+} \ No newline at end of file
diff --git a/test/files/pos/virtpatmat_exhaust.scala b/test/files/pos/virtpatmat_exhaust.scala
new file mode 100644
index 0000000000..a2f47c88c8
--- /dev/null
+++ b/test/files/pos/virtpatmat_exhaust.scala
@@ -0,0 +1,24 @@
+sealed trait Option {}
+case class Choice(a: Option, b: Option) extends Option;
+case class Some(x: Boolean) extends Option;
+case object None extends Option;
+
+object test {
+
+// drop any case and it will report an error
+// note that booleans are taken into account
+ def f(opt: Option) = opt match {
+ case Choice(None, None) => 1;
+ case Choice(None, Some(_)) => 1;
+ case Choice(None, Choice(_, _)) => 1;
+ case Choice(Some(true), None) => 1;
+ case Choice(Some(false), None) => 1;
+ case Choice(Some(_), Some(_)) => 1;
+ case Choice(Some(_), Choice(_, _)) => 1;
+ case Choice(Choice(_, _), None) => 1;
+ case Choice(Choice(_, _), Some(_)) => 1;
+ case Choice(Choice(_, _), Choice(_, _)) => 1;
+ case Some(b) => 4;
+ case None => 5;
+ }
+}
diff --git a/test/files/pos/virtpatmat_exhaust_unchecked.flags b/test/files/pos/virtpatmat_exhaust_unchecked.flags
new file mode 100644
index 0000000000..85d8eb2ba2
--- /dev/null
+++ b/test/files/pos/virtpatmat_exhaust_unchecked.flags
@@ -0,0 +1 @@
+-Xfatal-warnings
diff --git a/test/files/pos/virtpatmat_exhaust_unchecked.scala b/test/files/pos/virtpatmat_exhaust_unchecked.scala
new file mode 100644
index 0000000000..641f2b4f9a
--- /dev/null
+++ b/test/files/pos/virtpatmat_exhaust_unchecked.scala
@@ -0,0 +1,24 @@
+sealed trait Option {}
+case class Choice(a: Option, b: Option) extends Option;
+case class Some(x: Boolean) extends Option;
+case object None extends Option;
+
+object test {
+
+// drop any case and it will report an error
+// note that booleans are taken into account
+ def f(opt: Option) = (opt: @unchecked) match {
+ case Choice(None, None) => 1;
+ case Choice(None, Some(_)) => 1;
+ case Choice(None, Choice(_, _)) => 1;
+ case Choice(Some(true), None) => 1;
+ // case Choice(Some(false), None) => 1;
+ case Choice(Some(_), Some(_)) => 1;
+ case Choice(Some(_), Choice(_, _)) => 1;
+ case Choice(Choice(_, _), None) => 1;
+ case Choice(Choice(_, _), Some(_)) => 1;
+ case Choice(Choice(_, _), Choice(_, _)) => 1;
+ case Some(b) => 4;
+ case None => 5;
+ }
+}
diff --git a/test/files/pos/z1730.flags b/test/files/pos/z1730.flags
new file mode 100644
index 0000000000..5319681590
--- /dev/null
+++ b/test/files/pos/z1730.flags
@@ -0,0 +1 @@
+-Ycheck:all \ No newline at end of file
diff --git a/test/files/pos/z1730.scala b/test/files/pos/z1730.scala
new file mode 100644
index 0000000000..0c5875a818
--- /dev/null
+++ b/test/files/pos/z1730.scala
@@ -0,0 +1,13 @@
+// /scala/trac/z1730/a.scala
+// Wed May 23 07:41:25 PDT 2012
+
+class X[R] {
+ def xx(value: => R, addTweak: Boolean = true) = 0
+}
+
+class Boo {
+ implicit def toX[R](v: R) : X[R] = null
+ def goo2 {
+ 3.xx(34)
+ }
+}
diff --git a/test/files/run/inline-ex-handlers.check b/test/files/run/inline-ex-handlers.check
index a5d7e93334..708fcc6985 100644
--- a/test/files/run/inline-ex-handlers.check
+++ b/test/files/run/inline-ex-handlers.check
@@ -1,27 +1,23 @@
172c172
-< locals: value x$1, value x1, value x2, value x
+< locals: value x$1, value x1
---
-> locals: value x$1, value x1, value x2, value x, variable boxed1
+> locals: value x$1, value x1, variable boxed1
174c174
-< blocks: [1,2,3,5,6,7]
+< blocks: [1,2,3,4]
---
-> blocks: [1,3,5,6]
-180,182d179
+> blocks: [1,3,4]
+186a187,188
+> 92 STORE_LOCAL(variable boxed1)
+> 92 LOAD_LOCAL(variable boxed1)
+195,197d196
< 92 JUMP 2
<
< 2:
-194,196d190
-< 92 JUMP 7
-<
-< 7:
-204a199,200
-> 92 STORE_LOCAL(variable boxed1)
-> 92 LOAD_LOCAL(variable boxed1)
-395c391
+385c384
< blocks: [1,2,3,4,5,8,11,13,14,16]
---
> blocks: [1,2,3,5,8,11,13,14,16,17]
-419c415,424
+409c408,417
< 103 THROW(MyException)
---
> ? STORE_LOCAL(value ex5)
@@ -34,15 +30,15 @@
> 106 LOAD_LOCAL(value x3)
> 106 IS_INSTANCE REF(class MyException)
> 106 CZJUMP (BOOL)NE ? 5 : 11
-432,434d436
+422,424d429
< 101 JUMP 4
<
< 4:
-522c524
+512c517
< blocks: [1,2,3,4,6,7,8,9,10]
---
> blocks: [1,2,3,4,6,7,8,9,10,11,12,13]
-551c553,558
+541c546,551
< 306 THROW(MyException)
---
> ? JUMP 11
@@ -51,7 +47,7 @@
> ? LOAD_LOCAL(variable monitor4)
> 305 MONITOR_EXIT
> ? JUMP 12
-557c564,570
+547c557,563
< ? THROW(Throwable)
---
> ? JUMP 12
@@ -61,7 +57,7 @@
> 304 MONITOR_EXIT
> ? STORE_LOCAL(value t)
> ? JUMP 13
-563c576,589
+553c569,582
< ? THROW(Throwable)
---
> ? STORE_LOCAL(value t)
@@ -78,30 +74,29 @@
> 310 CALL_PRIMITIVE(EndConcat)
> 310 CALL_METHOD scala.Predef.println (dynamic)
> 310 JUMP 2
-587c613
+577c606
< catch (Throwable) in ArrayBuffer(7, 8, 9, 10) starting at: 6
---
> catch (Throwable) in ArrayBuffer(7, 8, 9, 10, 11) starting at: 6
-590c616
+580c609
< catch (Throwable) in ArrayBuffer(4, 6, 7, 8, 9, 10) starting at: 3
---
> catch (Throwable) in ArrayBuffer(4, 6, 7, 8, 9, 10, 11, 12) starting at: 3
-622c648
+612c641
< blocks: [1,2,3,4,5,6,7,9,10]
---
> blocks: [1,2,3,4,5,6,7,9,10,11,12]
-646c672,673
+636c665,671
< 78 THROW(IllegalArgumentException)
---
> ? STORE_LOCAL(value e)
> ? JUMP 11
-647a675,679
+>
> 11:
> 81 LOAD_LOCAL(value e)
> ? STORE_LOCAL(variable exc1)
> ? JUMP 12
->
-675c707,721
+665c700,714
< 81 THROW(Exception)
---
> ? STORE_LOCAL(variable exc1)
@@ -119,15 +114,15 @@
> 84 STORE_LOCAL(variable result)
> 84 LOAD_LOCAL(variable exc1)
> 84 THROW(Throwable)
-697c743
+687c736
< catch (<none>) in ArrayBuffer(4, 6, 7, 9) starting at: 3
---
> catch (<none>) in ArrayBuffer(4, 6, 7, 9, 11) starting at: 3
-723c769
+713c762
< blocks: [1,2,3,4,5,6,9,12,14,17,18,19,22,25,27,28,30,31]
---
> blocks: [1,2,3,4,5,6,9,12,14,17,18,19,22,25,27,28,30,31,32,33,34]
-747c793,800
+737c786,793
< 172 THROW(MyException)
---
> ? STORE_LOCAL(value ex5)
@@ -138,12 +133,12 @@
> 170 STORE_LOCAL(value x3)
> 170 SCOPE_ENTER value x3
> 170 JUMP 18
-803c856,857
+793c849,850
< 177 THROW(MyException)
---
> ? STORE_LOCAL(value ex5)
> ? JUMP 33
-807c861,868
+797c854,861
< 170 THROW(Throwable)
---
> ? STORE_LOCAL(value ex5)
@@ -154,17 +149,17 @@
> 169 STORE_LOCAL(value x3)
> 169 SCOPE_ENTER value x3
> 169 JUMP 5
-840c901,902
+830c894,895
< 182 THROW(MyException)
---
> ? STORE_LOCAL(variable exc2)
> ? JUMP 34
-844c906,907
+834c899,900
< 169 THROW(Throwable)
---
> ? STORE_LOCAL(variable exc2)
> ? JUMP 34
-845a909,921
+835a902,914
> 34:
> 184 LOAD_MODULE object Predef
> 184 CONSTANT("finally")
@@ -178,19 +173,19 @@
> 185 LOAD_LOCAL(variable exc2)
> 185 THROW(Throwable)
>
-866c942
+856c935
< catch (Throwable) in ArrayBuffer(17, 18, 19, 22, 25, 27, 28, 30) starting at: 4
---
> catch (Throwable) in ArrayBuffer(17, 18, 19, 22, 25, 27, 28, 30, 32) starting at: 4
-869c945
+859c938
< catch (<none>) in ArrayBuffer(4, 5, 6, 9, 12, 17, 18, 19, 22, 25, 27, 28, 30) starting at: 3
---
> catch (<none>) in ArrayBuffer(4, 5, 6, 9, 12, 17, 18, 19, 22, 25, 27, 28, 30, 32, 33) starting at: 3
-895c971
+885c964
< blocks: [1,2,3,6,7,8,11,14,16,17,19]
---
> blocks: [1,2,3,6,7,8,11,14,16,17,19,20]
-919c995,1002
+909c988,995
< 124 THROW(MyException)
---
> ? STORE_LOCAL(value ex5)
@@ -201,15 +196,15 @@
> 122 STORE_LOCAL(value x3)
> 122 SCOPE_ENTER value x3
> 122 JUMP 7
-979c1062
+969c1055
< catch (IllegalArgumentException) in ArrayBuffer(6, 7, 8, 11, 14, 16, 17, 19) starting at: 3
---
> catch (IllegalArgumentException) in ArrayBuffer(6, 7, 8, 11, 14, 16, 17, 19, 20) starting at: 3
-1005c1088
+995c1081
< blocks: [1,2,3,4,5,8,11,15,16,17,19]
---
> blocks: [1,2,3,5,8,11,15,16,17,19,20]
-1029c1112,1121
+1019c1105,1114
< 148 THROW(MyException)
---
> ? STORE_LOCAL(value ex5)
@@ -222,15 +217,15 @@
> 154 LOAD_LOCAL(value x3)
> 154 IS_INSTANCE REF(class MyException)
> 154 CZJUMP (BOOL)NE ? 5 : 11
-1050,1052d1141
+1040,1042d1134
< 145 JUMP 4
<
< 4:
-1285c1374
+1275c1367
< blocks: [1,2,3,4,5,7]
---
> blocks: [1,2,3,4,5,7,8]
-1309c1398,1405
+1299c1391,1398
< 38 THROW(IllegalArgumentException)
---
> ? STORE_LOCAL(value e)
@@ -241,16 +236,16 @@
> 42 CONSTANT("IllegalArgumentException")
> 42 CALL_METHOD scala.Predef.println (dynamic)
> 42 JUMP 2
-1358c1454
+1348c1447
< blocks: [1,2,3,4,5,8,11,13,14,16,17,19]
---
> blocks: [1,2,3,5,8,11,13,14,16,17,19,20]
-1382c1478,1479
+1372c1471,1472
< 203 THROW(MyException)
---
> ? STORE_LOCAL(value ex5)
> ? JUMP 20
-1402c1499,1508
+1392c1492,1501
< 209 THROW(MyException)
---
> ? STORE_LOCAL(value ex5)
@@ -263,15 +258,15 @@
> 212 LOAD_LOCAL(value x3)
> 212 IS_INSTANCE REF(class MyException)
> 212 CZJUMP (BOOL)NE ? 5 : 11
-1415,1417d1520
+1405,1407d1513
< 200 JUMP 4
<
< 4:
-1477c1580
+1467c1573
< blocks: [1,2,3,4,5,7]
---
> blocks: [1,2,3,4,5,7,8]
-1501c1604,1611
+1491c1597,1604
< 58 THROW(IllegalArgumentException)
---
> ? STORE_LOCAL(value e)
@@ -282,11 +277,11 @@
> 62 CONSTANT("RuntimeException")
> 62 CALL_METHOD scala.Predef.println (dynamic)
> 62 JUMP 2
-1550c1660
+1540c1653
< blocks: [1,2,3,4]
---
> blocks: [1,2,3,4,5]
-1570c1680,1685
+1560c1673,1678
< 229 THROW(MyException)
---
> ? JUMP 5
@@ -295,19 +290,19 @@
> ? LOAD_LOCAL(variable monitor1)
> 228 MONITOR_EXIT
> 228 THROW(Throwable)
-1576c1691
+1566c1684
< ? THROW(Throwable)
---
> 228 THROW(Throwable)
-1604c1719
+1594c1712
< locals: value args, variable result, variable monitor2, variable monitorResult1
---
> locals: value exception$1, value args, variable result, variable monitor2, variable monitorResult1
-1606c1721
+1596c1714
< blocks: [1,2,3,4]
---
> blocks: [1,2,3,4,5]
-1629c1744,1752
+1619c1737,1745
< 245 THROW(MyException)
---
> ? STORE_LOCAL(value exception$1)
@@ -319,7 +314,7 @@
> ? LOAD_LOCAL(variable monitor2)
> 244 MONITOR_EXIT
> 244 THROW(Throwable)
-1635c1758
+1625c1751
< ? THROW(Throwable)
---
> 244 THROW(Throwable)
diff --git a/test/files/run/origins.check b/test/files/run/origins.check
index ffbf1c1f44..b12cb6e38f 100644
--- a/test/files/run/origins.check
+++ b/test/files/run/origins.check
@@ -1,5 +1,5 @@
->> Origins goxbox.Socks.boop logged 65 calls from 3 distinguished sources.
+>> Origins tag 'boop' logged 65 calls from 3 distinguished sources.
50 Test$$anonfun$f3$1.apply(origins.scala:16)
10 Test$$anonfun$f2$1.apply(origins.scala:15)
diff --git a/test/files/run/origins.scala b/test/files/run/origins.scala
index 9dc6071c7b..0ad92297f5 100644
--- a/test/files/run/origins.scala
+++ b/test/files/run/origins.scala
@@ -2,7 +2,7 @@ import scala.reflect.internal.util.Origins
package goxbox {
object Socks {
- val origins = Origins[Socks.type]("boop")
+ val origins = Origins("boop")
def boop(x: Int): Int = origins { 5 }
}
diff --git a/test/files/run/t3097.check b/test/files/run/t3097.check
new file mode 100644
index 0000000000..63695f771b
--- /dev/null
+++ b/test/files/run/t3097.check
@@ -0,0 +1 @@
+atomic
diff --git a/test/files/run/t3097.scala b/test/files/run/t3097.scala
new file mode 100644
index 0000000000..4aaf8056ca
--- /dev/null
+++ b/test/files/run/t3097.scala
@@ -0,0 +1,18 @@
+sealed trait ISimpleValue
+
+sealed trait IListValue extends ISimpleValue
+sealed trait IAtomicValue[O] extends ISimpleValue
+
+sealed trait IAbstractDoubleValue[O] extends IAtomicValue[O]
+sealed trait IDoubleValue extends IAbstractDoubleValue[Double]
+
+case class ListValue(val items: List[IAtomicValue[_]]) extends IListValue
+class DoubleValue(val data: Double) extends IDoubleValue
+
+object Test extends App {
+ // match is exhaustive
+ (new DoubleValue(1): ISimpleValue) match {
+ case m: IListValue => println("list")
+ case a: IAtomicValue[_] => println("atomic")
+ }
+}
diff --git a/test/files/run/t3761-overload-byname.check b/test/files/run/t3761-overload-byname.check
new file mode 100644
index 0000000000..ab7eff0d8a
--- /dev/null
+++ b/test/files/run/t3761-overload-byname.check
@@ -0,0 +1,12 @@
+hello!
+hello working world
+goodnight!
+goodnight moon, nobody, noises everywhere
+0
+1
+0
+1
+0
+1
+0
+1
diff --git a/test/files/run/t3761-overload-byname.scala b/test/files/run/t3761-overload-byname.scala
new file mode 100644
index 0000000000..a52d866097
--- /dev/null
+++ b/test/files/run/t3761-overload-byname.scala
@@ -0,0 +1,39 @@
+
+class OverTheTop {
+ def info0(m: String) = m + "!"
+ def info0(m: String, args: Any*) = m +" "+ args.mkString(" ")
+
+ // as reported
+ def info1(m: =>String) = m + "!"
+ def info1(m: =>String, args: Any*) = m +" "+ args.mkString(", ")
+
+ // @lrytz
+ def m[A](x: => Int) = 0; def m[A](x: => Int, xs: Int*) = 1
+
+ def m1(x: => Int, s: String) = 0
+ def m1(x: => Int, s: Object) = 1
+
+ def m2(x: => Int, s: String) = 0
+ def m2(x: => AnyVal, s: Object) = 1
+
+ def m3(x: => Int, s: String) = 0
+ def m3(x: => Any, s: Object) = 1
+}
+
+object Test {
+ def main(args: Array[String]) {
+ val top = new OverTheTop
+ println(top.info0("hello"))
+ println(top.info0("hello","working","world"))
+ println(top.info1("goodnight"))
+ println(top.info1("goodnight", "moon", "nobody", "noises everywhere"))
+ println(top.m(17))
+ println(top.m(17,19))
+ println(top.m1(1, "two"))
+ println(top.m1(1, new Object()))
+ println(top.m2(1, ""))
+ println(top.m2(1d, ""))
+ println(top.m3(1, ""))
+ println(top.m3("", ""))
+ }
+}
diff --git a/test/files/run/t5428.check b/test/files/run/t5428.check
new file mode 100644
index 0000000000..7b4b1d6558
--- /dev/null
+++ b/test/files/run/t5428.check
@@ -0,0 +1 @@
+Stack(8, 7, 6, 5, 4, 3) \ No newline at end of file
diff --git a/test/files/run/t5428.scala b/test/files/run/t5428.scala
new file mode 100644
index 0000000000..106bb7fc31
--- /dev/null
+++ b/test/files/run/t5428.scala
@@ -0,0 +1,29 @@
+
+
+
+import collection.mutable.{Stack, StackProxy}
+
+
+
+class A extends StackProxy[Int] {
+ val self = Stack[Int]()
+}
+
+
+object Test {
+
+ def main(args: Array[String]) {
+ val a = new A
+
+ a push 3
+ a push 4
+ a push 5
+
+ a.push(6, 7, 8)
+
+ println(a)
+
+ a pop
+ }
+
+}
diff --git a/test/files/run/t5804.check b/test/files/run/t5804.check
new file mode 100644
index 0000000000..3ccc1c24d3
--- /dev/null
+++ b/test/files/run/t5804.check
@@ -0,0 +1,4 @@
+128
+16
+128
+32 \ No newline at end of file
diff --git a/test/files/run/t5804.scala b/test/files/run/t5804.scala
new file mode 100644
index 0000000000..b96a736035
--- /dev/null
+++ b/test/files/run/t5804.scala
@@ -0,0 +1,32 @@
+
+
+import collection.mutable._
+
+
+object Test {
+
+ def main(args: Array[String]) {
+ class CustomHashMap extends HashMap[Int, Int] {
+ override def initialSize = 65
+
+ println(table.length)
+ }
+
+ new CustomHashMap
+ new HashMap {
+ println(table.length)
+ }
+
+ class CustomHashSet extends HashSet[Int] {
+ override def initialSize = 96
+
+ println(table.length)
+ }
+
+ new CustomHashSet
+ new HashSet {
+ println(table.length)
+ }
+ }
+
+}
diff --git a/test/pending/neg/t5008.scala b/test/pending/neg/t5008.scala
new file mode 100644
index 0000000000..2b20bcfe12
--- /dev/null
+++ b/test/pending/neg/t5008.scala
@@ -0,0 +1,165 @@
+// These are members of class bar.C, completely unrelated to class foo.A.
+// The types shown below include types defined within foo.A which are:
+//
+// - qualified private
+// - qualified protected
+// - object protected
+//
+// val a : foo.A = { /* compiled code */ }
+// val xprot1 : java.lang.Object with foo.A.FooProt1 = { /* compiled code */ }
+// val xprot2 : java.lang.Object with foo.A.FooProt2 = { /* compiled code */ }
+// val xprot3 : java.lang.Object with foo.A.FooProt3 = { /* compiled code */ }
+// val xprot4 : java.lang.Object with foo.A.FooProt4 = { /* compiled code */ }
+// val xpriv3 : java.lang.Object with foo.A.FooPriv3 = { /* compiled code */ }
+// val xpriv4 : java.lang.Object with foo.A.FooPriv4 = { /* compiled code */ }
+//
+// Indeed it will tell me a type which I cannot access:
+//
+// scala> new bar.C
+// res0: bar.C = bar.C@1339a0dc
+//
+// scala> res0.xpriv3
+// res1: java.lang.Object with res0.a.FooPriv3 = bar.C$$anon$29@39556aec
+//
+// scala> new res0.a.FooPriv3
+// <console>:9: error: trait FooPriv3 in class A cannot be accessed in foo.A
+// new res0.a.FooPriv3
+// ^
+// Looking at how the compiler prints the types of those vals, one
+// develops a suspicion as to how some of it is being allowed:
+//
+// val xpriv4: C.this.a.FooPriv4
+// val xpriv3: C.this.a.FooPriv3
+// val xprot4: C.this.a.FooProt4
+// val xprot3: C.this.a.FooProt3
+// val xprot2: C.this.a.FooProt2
+// val xprot1: C.this.a.FooProt1
+//
+// That is, "this" is in the prefix somewhere; it's just not a "this"
+// that has any bearing.
+
+package foo {
+ class A {
+ trait Foo
+
+ protected trait FooProt1
+ protected[this] trait FooProt2
+ protected[foo] trait FooProt3
+ protected[A] trait FooProt4
+
+ private trait FooPriv1
+ private[this] trait FooPriv2
+ private[foo] trait FooPriv3
+ private[A] trait FooPriv4
+
+ type BarProt1 = FooProt1
+ type BarProt2 = FooProt2
+ type BarProt3 = FooProt3
+ type BarProt4 = FooProt4
+
+ // type BarPriv1 = FooPriv1
+ // type BarPriv2 = FooPriv2
+ type BarPriv3 = FooPriv3
+ type BarPriv4 = FooPriv4
+
+ def fprot1(x: FooProt1) = x
+ def fprot2(x: FooProt2) = x
+ def fprot3(x: FooProt3) = x
+ def fprot4(x: FooProt4) = x
+
+ // def fpriv1(x: FooPriv1) = x
+ // def fpriv2(x: FooPriv2) = x
+ def fpriv3(x: FooPriv3) = x
+ def fpriv4(x: FooPriv4) = x
+
+ val yprot1 = new FooProt1 { }
+ val yprot2 = new FooProt2 { }
+ val yprot3 = new FooProt3 { }
+ val yprot4 = new FooProt4 { }
+
+ // val ypriv1 = new FooPriv1 { }
+ // val ypriv2 = new FooPriv2 { }
+ val ypriv3 = new FooPriv3 { }
+ val ypriv4 = new FooPriv4 { }
+
+ def fpriv_alt1(x: FooPriv1) = 0 // !!! isn't the private type now in the signature of the (public) method?
+ def fpriv_alt2(x: FooPriv2) = 0 // !!! isn't the private[this] type now in the signature of the (public) method?
+ }
+ // Same package, subclass
+ class B extends A {
+ val xprot1 = new BarProt1 { }
+ val xprot2 = new BarProt2 { }
+ val xprot3 = new BarProt3 { }
+ val xprot4 = new BarProt4 { }
+
+ // val xpriv1 = new BarPriv1 { }
+ // val xpriv2 = new BarPriv2 { }
+ val xpriv3 = new BarPriv3 { }
+ val xpriv4 = new BarPriv4 { }
+
+ override def fprot1(x: BarProt1) = x
+ override def fprot2(x: BarProt2) = x
+ override def fprot3(x: BarProt3) = x
+ override def fprot4(x: BarProt4) = x
+
+ // override def fpriv1(x: BarPriv1) = x
+ // override def fpriv2(x: BarPriv2) = x
+ override def fpriv3(x: BarPriv3) = x
+ override def fpriv4(x: BarPriv4) = x
+ }
+ // Same package, unrelated class
+ class C {
+ val a = new A
+ import a._
+
+ val xprot1 = new BarProt1 { }
+ val xprot2 = new BarProt2 { }
+ val xprot3 = new BarProt3 { }
+ val xprot4 = new BarProt4 { }
+
+ // val xpriv1 = new BarPriv1 { }
+ // val xpriv2 = new BarPriv2 { }
+ val xpriv3 = new BarPriv3 { }
+ val xpriv4 = new BarPriv4 { }
+ }
+}
+
+package bar {
+ // Different package, subclass
+ class B extends foo.A {
+ val xprot1 = new BarProt1 { }
+ val xprot2 = new BarProt2 { }
+ val xprot3 = new BarProt3 { }
+ val xprot4 = new BarProt4 { }
+
+ // val xpriv1 = new BarPriv1 { }
+ // val xpriv2 = new BarPriv2 { }
+ val xpriv3 = new BarPriv3 { }
+ val xpriv4 = new BarPriv4 { }
+
+ override def fprot1(x: BarProt1) = x
+ override def fprot2(x: BarProt2) = x
+ override def fprot3(x: BarProt3) = x
+ override def fprot4(x: BarProt4) = x
+
+ // override def fpriv1(x: BarPriv1) = x
+ // override def fpriv2(x: BarPriv2) = x
+ override def fpriv3(x: BarPriv3) = x
+ override def fpriv4(x: BarPriv4) = x
+ }
+ // Different package, unrelated class
+ class C {
+ val a = new foo.A
+ import a._
+
+ val xprot1 = new BarProt1 { }
+ val xprot2 = new BarProt2 { }
+ val xprot3 = new BarProt3 { }
+ val xprot4 = new BarProt4 { }
+
+ // val xpriv1 = new BarPriv1 { }
+ // val xpriv2 = new BarPriv2 { }
+ val xpriv3 = new BarPriv3 { }
+ val xpriv4 = new BarPriv4 { }
+ }
+}
diff --git a/test/files/pos/no-widen-locals.scala b/test/pending/pos/no-widen-locals.scala
index 013e63f0a2..013e63f0a2 100644
--- a/test/files/pos/no-widen-locals.scala
+++ b/test/pending/pos/no-widen-locals.scala
diff --git a/test/pending/pos/t4649.flags b/test/pending/pos/t4649.flags
new file mode 100644
index 0000000000..e8fb65d50c
--- /dev/null
+++ b/test/pending/pos/t4649.flags
@@ -0,0 +1 @@
+-Xfatal-warnings \ No newline at end of file
diff --git a/test/pending/pos/t4649.scala b/test/pending/pos/t4649.scala
new file mode 100644
index 0000000000..0d6caa8d7a
--- /dev/null
+++ b/test/pending/pos/t4649.scala
@@ -0,0 +1,6 @@
+object Test {
+ // @annotation.tailrec
+ def lazyFilter[E](s: Stream[E], p: E => Boolean): Stream[E] = s match {
+ case h #:: t => if (p(h)) h #:: lazyFilter(t, p) else lazyFilter(t, p)
+ }
+}
diff --git a/test/pending/run/partial-anyref-spec.check b/test/pending/run/partial-anyref-spec.check
new file mode 100644
index 0000000000..26e41933e7
--- /dev/null
+++ b/test/pending/run/partial-anyref-spec.check
@@ -0,0 +1,13 @@
+Fn$mcII$sp
+Fn$mcLI$sp
+Fn$mcLI$sp
+Fn$mcIL$sp
+Fn
+Fn
+Fn$mcIL$sp
+Fn
+Fn
+Fn3
+Fn3$mcLIDF$sp
+Fn3$mcBIDF$sp
+Fn3
diff --git a/test/pending/run/partial-anyref-spec.scala b/test/pending/run/partial-anyref-spec.scala
new file mode 100644
index 0000000000..49ed514f03
--- /dev/null
+++ b/test/pending/run/partial-anyref-spec.scala
@@ -0,0 +1,31 @@
+class Fn[@specialized(Int, AnyRef) -T, @specialized(Int, AnyRef) +R] {
+ override def toString = getClass.getName
+}
+
+class Fn3[
+ @specialized(Int, AnyRef) -T1,
+ @specialized(Double, AnyRef) -T2,
+ @specialized(Float) -T3,
+ @specialized(Byte, AnyRef) +R
+] {
+ override def toString = getClass.getName
+}
+
+object Test {
+ def main(args: Array[String]): Unit = {
+ println(new Fn[Int, Int])
+ println(new Fn[Int, Byte])
+ println(new Fn[Int, AnyRef])
+ println(new Fn[Byte, Int])
+ println(new Fn[Byte, Byte])
+ println(new Fn[Byte, AnyRef])
+ println(new Fn[AnyRef, Int])
+ println(new Fn[AnyRef, Byte])
+ println(new Fn[AnyRef, AnyRef])
+
+ println(new Fn3[Int, Int, Int, Int])
+ println(new Fn3[Int, Double, Float, Int])
+ println(new Fn3[Int, Double, Float, Byte])
+ println(new Fn3[AnyRef, Double, AnyRef, Int])
+ }
+}