-rw-r--r--  build.xml | 220
-rw-r--r--  project/Build.scala | 290
-rw-r--r--  project/Layers.scala | 27
-rw-r--r--  project/Packaging.scala | 129
-rw-r--r--  project/Release.scala | 27
-rw-r--r--  project/RemoteDependencies.scala | 53
-rw-r--r--  project/Sametest.scala | 3
-rw-r--r--  project/ScalaBuildKeys.scala | 23
-rw-r--r--  project/ScalaToolRunner.scala | 21
-rw-r--r--  project/ShaResolve.scala | 70
-rw-r--r--  project/Testing.scala | 41
-rw-r--r--  src/compiler/scala/reflect/reify/Errors.scala | 5
-rw-r--r--  src/compiler/scala/reflect/reify/Reifier.scala | 14
-rw-r--r--  src/compiler/scala/reflect/reify/utils/NodePrinters.scala | 39
-rw-r--r--  src/compiler/scala/tools/nsc/Global.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/ast/NodePrinters.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/ast/Printers.scala (renamed from src/compiler/scala/tools/nsc/ast/TreePrinters.scala) | 6
-rw-r--r--  src/compiler/scala/tools/nsc/javac/JavaParsers.scala | 3
-rw-r--r--  src/compiler/scala/tools/nsc/matching/MatchSupport.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/settings/ScalaSettings.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala | 12
-rw-r--r--  src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala | 19
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Analyzer.scala | 7
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala | 16
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Contexts.scala | 17
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Duplicators.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Implicits.scala | 234
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Infer.scala | 3
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Macros.scala | 15
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Modes.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala | 39
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/RefChecks.scala | 11
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Typers.scala | 156
-rw-r--r--  src/compiler/scala/tools/nsc/util/Statistics.scala | 200
-rw-r--r--  src/compiler/scala/tools/nsc/util/StatisticsInfo.scala | 38
-rw-r--r--  src/compiler/scala/tools/reflect/ToolBoxFactory.scala | 5
-rw-r--r--  src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala | 19
-rw-r--r--  src/continuations/plugin/scala/tools/selectivecps/CPSUtils.scala | 40
-rw-r--r--  src/continuations/plugin/scala/tools/selectivecps/SelectiveANFTransform.scala | 15
-rw-r--r--  src/library/scala/Predef.scala | 52
-rw-r--r--  src/library/scala/collection/GenTraversableOnce.scala | 19
-rw-r--r--  src/library/scala/collection/Iterator.scala | 3
-rw-r--r--  src/library/scala/collection/MapLike.scala | 28
-rw-r--r--  src/library/scala/collection/TraversableLike.scala | 7
-rw-r--r--  src/library/scala/collection/TraversableOnce.scala | 17
-rw-r--r--  src/library/scala/collection/concurrent/TrieMap.scala | 6
-rw-r--r--  src/library/scala/collection/immutable/Vector.scala | 2
-rw-r--r--  src/library/scala/collection/mutable/ArrayOps.scala | 2
-rw-r--r--  src/library/scala/collection/mutable/LinkedHashMap.scala | 21
-rw-r--r--  src/library/scala/collection/parallel/ParIterableLike.scala | 6
-rw-r--r--  src/library/scala/collection/parallel/immutable/ParVector.scala | 2
-rw-r--r--  src/library/scala/concurrent/impl/Future.scala | 106
-rw-r--r--  src/library/scala/concurrent/impl/Promise.scala | 25
-rw-r--r--  src/library/scala/reflect/base/Base.scala | 2
-rw-r--r--  src/library/scala/reflect/base/Trees.scala | 4
-rw-r--r--  src/library/scala/util/control/Breaks.scala | 2
-rw-r--r--  src/reflect/scala/reflect/api/Printers.scala | 94
-rw-r--r--  src/reflect/scala/reflect/api/Symbols.scala | 36
-rw-r--r--  src/reflect/scala/reflect/api/TreePrinters.scala | 87
-rw-r--r--  src/reflect/scala/reflect/api/Universe.scala | 2
-rw-r--r--  src/reflect/scala/reflect/internal/BaseTypeSeqs.scala | 12
-rw-r--r--  src/reflect/scala/reflect/internal/Definitions.scala | 2
-rw-r--r--  src/reflect/scala/reflect/internal/Printers.scala (renamed from src/reflect/scala/reflect/internal/TreePrinters.scala) | 176
-rw-r--r--  src/reflect/scala/reflect/internal/SymbolTable.scala | 4
-rw-r--r--  src/reflect/scala/reflect/internal/Symbols.scala | 18
-rw-r--r--  src/reflect/scala/reflect/internal/Trees.scala | 12
-rw-r--r--  src/reflect/scala/reflect/internal/Types.scala | 86
-rw-r--r--  src/reflect/scala/reflect/internal/util/StatBase.scala | 97
-rw-r--r--  src/reflect/scala/reflect/internal/util/Statistics.scala | 261
-rw-r--r--  src/reflect/scala/reflect/makro/Universe.scala | 19
-rw-r--r--  src/reflect/scala/reflect/runtime/JavaUniverse.scala | 6
-rw-r--r--  src/scalacheck/org/scalacheck/Arbitrary.scala | 36
-rw-r--r--  src/scalacheck/org/scalacheck/Arg.scala | 2
-rw-r--r--  src/scalacheck/org/scalacheck/Commands.scala | 23
-rw-r--r--  src/scalacheck/org/scalacheck/ConsoleReporter.scala | 27
-rw-r--r--  src/scalacheck/org/scalacheck/Gen.scala | 50
-rw-r--r--  src/scalacheck/org/scalacheck/Pretty.scala | 4
-rw-r--r--  src/scalacheck/org/scalacheck/Prop.scala | 36
-rw-r--r--  src/scalacheck/org/scalacheck/Properties.scala | 2
-rw-r--r--  src/scalacheck/org/scalacheck/Shrink.scala | 2
-rw-r--r--  src/scalacheck/org/scalacheck/Test.scala | 191
-rw-r--r--  src/scalacheck/org/scalacheck/util/Buildable.scala | 4
-rw-r--r--  src/scalacheck/org/scalacheck/util/CmdLineParser.scala | 9
-rw-r--r--  src/scalacheck/org/scalacheck/util/FreqMap.scala | 2
-rw-r--r--  src/scalacheck/org/scalacheck/util/StdRand.scala | 2
-rw-r--r--  test/benchmarking/ParCtrie-size.scala | 11
-rw-r--r--  test/files/continuations-neg/ts-1681-nontail-return.check | 4
-rw-r--r--  test/files/continuations-neg/ts-1681-nontail-return.scala | 18
-rw-r--r--  test/files/continuations-run/ts-1681-2.check | 5
-rw-r--r--  test/files/continuations-run/ts-1681-2.scala | 44
-rw-r--r--  test/files/continuations-run/ts-1681-3.check | 4
-rw-r--r--  test/files/continuations-run/ts-1681-3.scala | 27
-rw-r--r--  test/files/continuations-run/ts-1681.check | 3
-rw-r--r--  test/files/continuations-run/ts-1681.scala | 29
-rw-r--r--  test/files/jvm/actmig-PinS.check | 19
-rw-r--r--  test/files/jvm/actmig-PinS.scala | 112
-rw-r--r--  test/files/jvm/actmig-PinS_1.check | 19
-rw-r--r--  test/files/jvm/actmig-PinS_1.scala | 135
-rw-r--r--  test/files/jvm/actmig-PinS_2.check | 19
-rw-r--r--  test/files/jvm/actmig-PinS_2.scala | 155
-rw-r--r--  test/files/jvm/actmig-PinS_3.check | 19
-rw-r--r--  test/files/jvm/actmig-PinS_3.scala | 161
-rw-r--r--  test/files/jvm/actmig-hierarchy.check | 2
-rw-r--r--  test/files/jvm/actmig-hierarchy_1.check | 2
-rw-r--r--  test/files/jvm/actmig-instantiation.check | 8
-rw-r--r--  test/files/jvm/actmig-loop-react.check | 15
-rw-r--r--  test/files/jvm/actmig-loop-react.scala | 188
-rw-r--r--  test/files/jvm/actmig-public-methods.check | 6
-rw-r--r--  test/files/jvm/actmig-public-methods_1.check | 6
-rw-r--r--  test/files/jvm/actmig-public-methods_1.scala | 88
-rw-r--r--  test/files/jvm/actmig-react-receive.check | 16
-rw-r--r--  test/files/jvm/actmig-react-receive.scala | 111
-rw-r--r--  test/files/lib/scalacheck.jar.desired.sha1 | 2
-rw-r--r--  test/files/neg/t4270.check | 4
-rw-r--r--  test/files/neg/t4270.scala | 6
-rw-r--r--  test/files/neg/t4541.check | 7
-rw-r--r--  test/files/neg/t4541.scala | 16
-rw-r--r--  test/files/neg/t4541b.check | 7
-rw-r--r--  test/files/neg/t4541b.scala | 16
-rw-r--r--  test/files/neg/t4842a.check | 4
-rw-r--r--  test/files/neg/t4842a.scala | 3
-rw-r--r--  test/files/neg/t4842b.check | 4
-rw-r--r--  test/files/neg/t4842b.scala | 3
-rw-r--r--  test/files/neg/t5334_1.check | 4
-rw-r--r--  test/files/neg/t5334_2.check | 4
-rw-r--r--  test/files/neg/t5376.check | 11
-rw-r--r--  test/files/neg/t5376.scala | 24
-rw-r--r--  test/files/neg/t5429.check | 20
-rw-r--r--  test/files/neg/t5617.check | 8
-rw-r--r--  test/files/neg/t5617.scala | 14
-rw-r--r--  test/files/pos/nonlocal-unchecked.flags | 1
-rw-r--r--  test/files/pos/nonlocal-unchecked.scala | 6
-rw-r--r--  test/files/pos/t4842.scala | 26
-rw-r--r--  test/files/pos/t5910.java | 2
-rw-r--r--  test/files/pos/t5953.scala | 16
-rw-r--r--  test/files/presentation/ide-bug-1000531.check | 4
-rw-r--r--  test/files/run/collection-conversions.check | 104
-rw-r--r--  test/files/run/collection-conversions.scala | 60
-rw-r--r--  test/files/run/showraw_mods.check | 1
-rw-r--r--  test/files/run/showraw_mods.scala | 6
-rw-r--r--  test/files/run/showraw_tree.check | 2
-rw-r--r--  test/files/run/showraw_tree.scala | 8
-rw-r--r--  test/files/run/showraw_tree_ids.check | 2
-rw-r--r--  test/files/run/showraw_tree_ids.scala | 8
-rw-r--r--  test/files/run/showraw_tree_kinds.check | 2
-rw-r--r--  test/files/run/showraw_tree_kinds.scala | 8
-rw-r--r--  test/files/run/showraw_tree_types_ids.check | 10
-rw-r--r--  test/files/run/showraw_tree_types_ids.scala | 10
-rw-r--r--  test/files/run/showraw_tree_types_typed.check | 10
-rw-r--r--  test/files/run/showraw_tree_types_typed.scala | 10
-rw-r--r--  test/files/run/showraw_tree_types_untyped.check | 2
-rw-r--r--  test/files/run/showraw_tree_types_untyped.scala | 8
-rw-r--r--  test/files/run/showraw_tree_ultimate.check | 10
-rw-r--r--  test/files/run/showraw_tree_ultimate.scala | 10
-rw-r--r--  test/files/run/t4954.scala | 45
-rw-r--r--  test/files/run/t5912.scala | 6
-rw-r--r--  test/files/scalacheck/CheckEither.scala | 1
-rw-r--r--  test/pending/run/t5334_1.scala (renamed from test/files/neg/t5334_1.scala) | 0
-rw-r--r--  test/pending/run/t5334_2.scala (renamed from test/files/neg/t5334_2.scala) | 0
160 files changed, 3736 insertions, 1468 deletions
diff --git a/build.xml b/build.xml
index b5f5d07e54..33ebbfe377 100644
--- a/build.xml
+++ b/build.xml
@@ -55,6 +55,18 @@ END-USER TARGETS
</antcall>
</target>
+
+ <target name="partialdist" depends="dist.partial"
+ description="Makes a new distribution without documentation, so just for testing."/>
+
+ <target name="partialdist-opt"
+ description="Makes a new optimised distribution without testing it or removing partially build elements.">
+ <antcall target="partialdist">
+ <param name="scalac.args.optimise" value="-optimise"/>
+ </antcall>
+ </target>
+
+
<target name="fastdist" depends="dist.done"
description="Makes a new distribution without testing it or removing partially build elements."/>
@@ -123,10 +135,30 @@ END-USER TARGETS
<antcall target="palo.done"/>
</target>
- <target name="fastlocker"
+ <target name="fastlocker.lib"
+ description="Buildlocker without extra fuss">
+ <property name="fastlocker" value="true"/>
+ <antcall target="locker.unlock"/>
+ <antcall target="locker.lib"/>
+ </target>
+
+ <target name="fastlocker.reflect"
description="Buildlocker without extra fuss">
+ <property name="fastlocker" value="true"/>
<antcall target="locker.unlock"/>
- <antcall target="locker.done"/>
+ <antcall target="locker.reflect"/>
+ </target>
+
+ <target name="fastlocker.comp"
+ description="Buildlocker without extra fuss">
+ <property name="fastlocker" value="true"/>
+ <antcall target="locker.unlock"/>
+ <antcall target="locker.comp"/>
+ </target>
+
+ <target name="fastlocker"
+ description="Buildlocker without extra fuss">
+ <antcall target="fastlocker.comp"/>
</target>
<target name="buildlocker"
@@ -334,16 +366,31 @@ INITIALISATION
<target name="init.version.done" depends="init.version.release, init.version.snapshot"/>
<target name="init.testjava6">
- <fail message="This build requires JDK 1.6">
- <condition>
- <not>
+ <condition property="has.java6">
<equals arg1="${ant.java.version}" arg2="1.6"/>
- </not>
</condition>
- </fail>
+ <condition property="has.java7">
+ <equals arg1="${ant.java.version}" arg2="1.7"/>
+ </condition>
+ <condition property="has.unsupported.jdk">
+ <not><or>
+ <isset property="has.java7" />
+ <isset property="has.java6" />
+ </or></not>
+ </condition>
+ </target>
+
+ <target name="init.fail.bad.jdk" depends="init.testjava6">
+ <fail if="has.unsupported.jdk"
+ message="JDK ${ant.java.version} is not supported by this build!"/>
+ </target>
+ <target name="init.warn.jdk7" depends="init.testjava6" if="has.java7">
+ <echo level="warning"> You are using JDK7 for this build. While this will be able to build most of Scala, it will
+ not build the Swing project. You will be unable to create a distribution.
+ </echo>
</target>
- <target name="init" depends="init.jars, init.maven.jars, init.version.done">
+ <target name="init" depends="init.jars, init.maven.jars, init.version.done, init.fail.bad.jdk, init.warn.jdk7">
<property name="scalac.args.always" value="" />
<!-- scalac.args.optimise is selectively overridden in certain antcall tasks. -->
<property name="scalac.args.optimise" value=""/>
@@ -467,7 +514,7 @@ LOCAL DEPENDENCY (Adapted ASM)
<!-- ===========================================================================
LOCAL DEPENDENCY (FORKJOIN)
============================================================================ -->
- <target name="forkjoin.start" depends="init, init.testjava6">
+ <target name="forkjoin.start" depends="init">
<uptodate property="forkjoin.available" targetfile="${build-libs.dir}/forkjoin.complete">
<srcfiles dir="${src.dir}/forkjoin">
<include name="**/*.java"/>
@@ -477,6 +524,7 @@ LOCAL DEPENDENCY (FORKJOIN)
</target>
<target name="forkjoin.lib" depends="forkjoin.start" unless="forkjoin.available">
+ <stopwatch name="forkjoin.lib.timer"/>
<mkdir dir="${build-libs.dir}/classes/forkjoin"/>
<javac
fork="yes"
@@ -487,9 +535,10 @@ LOCAL DEPENDENCY (FORKJOIN)
includes="**/*.java"
debug="true"
target="1.5" source="1.5">
- <compilerarg line="${javac.args}"/>
+ <compilerarg line="${javac.args} -XDignore.symbol.file"/>
</javac>
<touch file="${build-libs.dir}/forkjoin.complete" verbose="no"/>
+ <stopwatch name="forkjoin.lib.timer" action="total"/>
</target>
<target name="forkjoin.pack" depends="forkjoin.lib">
@@ -524,6 +573,7 @@ LOCAL DEPENDENCY (FJBG)
</target>
<target name="fjbg.lib" depends="fjbg.init" unless="fjbg.available">
+ <stopwatch name="fjbg.lib.timer" />
<mkdir dir="${build-libs.dir}/classes/fjbg"/>
<javac
srcdir="${src.dir}/fjbg"
@@ -532,9 +582,10 @@ LOCAL DEPENDENCY (FJBG)
includes="**/*.java"
debug="true"
target="1.5" source="1.4">
- <compilerarg line="${javac.args}"/>
+ <compilerarg line="${javac.args} -XDignore.symbol.file"/>
</javac>
<touch file="${build-libs.dir}/fjbg.complete" verbose="no"/>
+ <stopwatch name="fjbg.lib.timer" action="total"/>
</target>
<target name="fjbg.pack" depends="fjbg.lib">
@@ -677,33 +728,43 @@ LOCAL REFERENCE BUILD (LOCKER)
<target name="locker.comp" depends="locker.pre-comp" if="locker.comp.needed">
<stopwatch name="locker.comp.timer"/>
<mkdir dir="${build-locker.dir}/classes/compiler"/>
- <!-- Compile MSIL inside of locker.... -->
- <javac
- srcdir="${src.dir}/msil"
- destdir="${build-locker.dir}/classes/compiler"
- classpath="${build-locker.dir}/classes/compiler"
- includes="**/*.java"
- excludes="**/tests/**"
- debug="true"
- target="1.5" source="1.4">
- <compilerarg line="${javac.args}"/>
- </javac>
- <scalacfork
- destdir="${build-locker.dir}/classes/compiler"
- compilerpathref="starr.classpath"
- params="${scalac.args.all}"
- srcdir="${src.dir}/msil"
- jvmargs="${scalacfork.jvmargs}">
- <include name="**/*.scala"/>
- <compilationpath>
- <pathelement location="${build-locker.dir}/classes/library"/>
- <pathelement location="${build-locker.dir}/classes/reflect"/>
- <pathelement location="${build-locker.dir}/classes/compiler"/>
- <path refid="fjbg.classpath"/>
- <path refid="aux.libs"/>
- <pathelement location="${jline.jar}"/>
- </compilationpath>
- </scalacfork>
+ <if>
+ <equals arg1="${fastlocker}" arg2="true" />
+ <then>
+ <!-- Fastlocker build: don't compile MSIL, use its starr version.... -->
+ <property name="locker.comp.msil" value="${msil.starr.jar}"/>
+ </then>
+ <else>
+ <!-- Regular build: Compile MSIL inside of locker.... -->
+ <javac
+ srcdir="${src.dir}/msil"
+ destdir="${build-locker.dir}/classes/compiler"
+ classpath="${build-locker.dir}/classes/compiler"
+ includes="**/*.java"
+ excludes="**/tests/**"
+ debug="true"
+ target="1.5" source="1.4">
+ <compilerarg line="${javac.args}"/>
+ </javac>
+ <scalacfork
+ destdir="${build-locker.dir}/classes/compiler"
+ compilerpathref="starr.classpath"
+ params="${scalac.args.all}"
+ srcdir="${src.dir}/msil"
+ jvmargs="${scalacfork.jvmargs}">
+ <include name="**/*.scala"/>
+ <compilationpath>
+ <pathelement location="${build-locker.dir}/classes/library"/>
+ <pathelement location="${build-locker.dir}/classes/reflect"/>
+ <pathelement location="${build-locker.dir}/classes/compiler"/>
+ <path refid="fjbg.classpath"/>
+ <path refid="aux.libs"/>
+ <pathelement location="${jline.jar}"/>
+ </compilationpath>
+ </scalacfork>
+ <property name="locker.comp.msil" value="${build-locker.dir}/classes/compiler"/>
+ </else>
+ </if>
<scalacfork
destdir="${build-locker.dir}/classes/compiler"
compilerpathref="starr.classpath"
@@ -718,6 +779,7 @@ LOCAL REFERENCE BUILD (LOCKER)
<path refid="fjbg.classpath"/>
<path refid="aux.libs"/>
<path refid="asm.classpath"/>
+ <pathelement location="${locker.comp.msil}" />
<pathelement location="${jline.jar}"/>
</compilationpath>
</scalacfork>
@@ -978,15 +1040,6 @@ QUICK BUILD (QUICK)
<include name="**/*.scala"/>
<compilationpath refid="quick.compilation.path"/>
</scalacfork>
- <scalacfork
- destdir="${build-quick.dir}/classes/library"
- compilerpathref="locker.classpath"
- params="${scalac.args.quick}"
- srcdir="${src.dir}/swing"
- jvmargs="${scalacfork.jvmargs}">
- <include name="**/*.scala"/>
- <compilationpath refid="quick.compilation.path"/>
- </scalacfork>
<propertyfile file="${build-quick.dir}/classes/library/library.properties">
<entry key="version.number" value="${version.number}"/>
<entry key="maven.version.number" value="${maven.version.number}"/>
@@ -1001,12 +1054,26 @@ QUICK BUILD (QUICK)
<include name="**/*.css"/>
</fileset>
</copy>
- <touch file="${build-quick.dir}/library.complete" verbose="no"/>
- <stopwatch name="quick.lib.timer" action="total"/>
</target>
+ <target name="quick.swing" depends="quick.lib" if="has.java6" unless="quick.lib.available">
+ <scalacfork
+ destdir="${build-quick.dir}/classes/library"
+ compilerpathref="locker.classpath"
+ params="${scalac.args.quick}"
+ srcdir="${src.dir}/swing"
+ jvmargs="${scalacfork.jvmargs}">
+ <include name="**/*.scala"/>
+ <compilationpath refid="quick.compilation.path"/>
+ </scalacfork>
+ </target>
- <target name="quick.pre-reflect" depends="quick.lib">
+ <target name="quick.lib.done" depends="quick.swing, quick.lib">
+ <stopwatch name="quick.lib.timer" action="total"/>
+ <touch file="${build-quick.dir}/library.complete" verbose="no"/>
+ </target>
+
+ <target name="quick.pre-reflect" depends="quick.lib.done">
<uptodate property="quick.reflect.available" targetfile="${build-quick.dir}/reflect.complete">
<srcfiles dir="${src.dir}">
<include name="reflect/**"/>
@@ -1412,11 +1479,6 @@ PACKED QUICK BUILD (PACK)
</fileset>
<fileset dir="${build-libs.dir}/classes/forkjoin"/>
</jar>
- <jar destfile="${build-pack.dir}/lib/scala-swing.jar">
- <fileset dir="${build-quick.dir}/classes/library">
- <include name="scala/swing/**"/>
- </fileset>
- </jar>
<jar destfile="${build-pack.dir}/lib/scala-actors.jar">
<fileset dir="${build-quick.dir}/classes/library">
<include name="scala/actors/**"/>
@@ -1427,7 +1489,15 @@ PACKED QUICK BUILD (PACK)
</jar>
</target>
- <target name="pack.pre-reflect" depends="pack.lib">
+ <target name="pack.swing" depends="pack.lib" if="has.java6">
+ <jar destfile="${build-pack.dir}/lib/scala-swing.jar">
+ <fileset dir="${build-quick.dir}/classes/library">
+ <include name="scala/swing/**"/>
+ </fileset>
+ </jar>
+ </target>
+
+ <target name="pack.pre-reflect" depends="pack.lib, pack.swing">
<uptodate
property="pack.reflect.available"
targetfile="${build-pack.dir}/lib/scala-reflect.jar"
@@ -1643,15 +1713,6 @@ BOOTSTRAPPING BUILD (STRAP)
<include name="**/*.scala"/>
<compilationpath refid="strap.compilation.path"/>
</scalacfork>
- <scalacfork
- destdir="${build-strap.dir}/classes/library"
- compilerpathref="pack.classpath"
- params="${scalac.args.quick}"
- srcdir="${src.dir}/swing"
- jvmargs="${scalacfork.jvmargs}">
- <include name="**/*.scala"/>
- <compilationpath refid="strap.compilation.path"/>
- </scalacfork>
<propertyfile file="${build-strap.dir}/classes/library/library.properties">
<entry key="version.number" value="${version.number}"/>
<entry key="maven.version.number" value="${maven.version.number}"/>
@@ -1666,11 +1727,26 @@ BOOTSTRAPPING BUILD (STRAP)
<include name="**/*.css"/>
</fileset>
</copy>
+ </target>
+
+ <target name="strap.swing" if="has.java6" unless="strap.lib.available" depends="strap.lib">
+ <scalacfork
+ destdir="${build-strap.dir}/classes/library"
+ compilerpathref="pack.classpath"
+ params="${scalac.args.quick}"
+ srcdir="${src.dir}/swing"
+ jvmargs="${scalacfork.jvmargs}">
+ <include name="**/*.scala"/>
+ <compilationpath refid="strap.compilation.path"/>
+ </scalacfork>
+ </target>
+
+ <target name="strap.lib.done" depends="strap.swing, strap.lib">
<touch file="${build-strap.dir}/library.complete" verbose="no"/>
<stopwatch name="strap.lib.timer" action="total"/>
</target>
- <target name="strap.pre-reflect" depends="strap.lib">
+ <target name="strap.pre-reflect" depends="strap.lib.done">
<uptodate property="strap.reflect.available" targetfile="${build-strap.dir}/reflect.complete">
<srcfiles dir="${src.dir}/reflect"/>
</uptodate>
@@ -2348,7 +2424,7 @@ BOOTRAPING TEST AND TEST SUITE
DISTRIBUTION
============================================================================ -->
- <target name="dist.start" depends="docs.done, pack.done">
+ <target name="dist.start" depends="pack.done">
<property name="dist.name" value="scala-${version.number}"/>
<property name="dist.dir" value="${dists.dir}/${dist.name}"/>
</target>
@@ -2373,7 +2449,7 @@ DISTRIBUTION
</copy>
</target>
- <target name="dist.doc" depends="dist.base">
+ <target name="dist.doc" depends="dist.base, docs.done">
<mkdir dir="${dist.dir}/doc/scala-devel-docs"/>
<copy file="${docs.dir}/LICENSE" toDir="${dist.dir}/doc"/>
<copy file="${docs.dir}/README" toDir="${dist.dir}/doc"/>
@@ -2432,11 +2508,11 @@ DISTRIBUTION
</jar>
</target>
- <target name="dist.latest.unix" depends="dist.src" unless="os.win">
+ <target name="dist.latest.unix" depends="dist.base" unless="os.win">
<symlink link="${dists.dir}/latest" resource="${dist.name}" overwrite="yes"/>
</target>
- <target name="dist.latest.win" depends="dist.src" if="os.win">
+ <target name="dist.latest.win" depends="dist.base" if="os.win">
<copy todir="${dists.dir}/latest">
<fileset dir="${dist.dir}"/>
</copy>
@@ -2444,7 +2520,9 @@ DISTRIBUTION
<target name="dist.latest" depends="dist.latest.unix,dist.latest.win"/>
- <target name="dist.done" depends="dist.latest"/>
+ <target name="dist.partial" depends="dist.base, dist.latest"/>
+
+ <target name="dist.done" depends="dist.latest, dist.src"/>
<target name="dist.clean">
<delete dir="${dists.dir}" includeemptydirs="yes" quiet="yes" failonerror="no"/>
diff --git a/project/Build.scala b/project/Build.scala
index 610f756a34..58d322108b 100644
--- a/project/Build.scala
+++ b/project/Build.scala
@@ -1,12 +1,11 @@
import sbt._
import Keys._
import partest._
-import SameTest._
import ScalaBuildKeys._
+import Release._
-
-object ScalaBuild extends Build with Layers {
+object ScalaBuild extends Build with Layers with Packaging with Testing {
// Build wide settings:
override lazy val settings = super.settings ++ Versions.settings ++ Seq(
@@ -21,36 +20,13 @@ object ScalaBuild extends Build with Layers {
),
organization := "org.scala-lang",
version <<= Versions.mavenVersion,
- pomExtra := epflPomExtra,
- commands += Command.command("fix-uri-projects") { (state: State) =>
- if(state.get(buildFixed) getOrElse false) state
- else {
- // TODO -fix up scalacheck's dependencies!
- val extracted = Project.extract(state)
- import extracted._
- def fix(s: Setting[_]): Setting[_] = s match {
- case ScopedExternalSetting(`scalacheck`, scalaInstance.key, setting) => fullQuickScalaReference mapKey Project.mapScope(_ => s.key.scope)
- case s => s
- }
- val transformed = session.mergeSettings map ( s => fix(s) )
- val scopes = transformed collect { case ScopedExternalSetting(`scalacheck`, _, s) => s.key.scope } toSet
- // Create some fixers so we don't download scala or rely on it.
- val fixers = for { scope <- scopes
- setting <- Seq(autoScalaLibrary := false, crossPaths := false)
- } yield setting mapKey Project.mapScope(_ => scope)
- val newStructure = Load.reapply(transformed ++ fixers, structure)
- Project.setProject(session, newStructure, state).put(buildFixed, true)
- }
- },
- onLoad in Global <<= (onLoad in Global) apply (_ andThen { (state: State) =>
- "fix-uri-projects" :: state
- })
- )
+ pomExtra := epflPomExtra
+ )
// Collections of projects to run 'compile' on.
- lazy val compiledProjects = Seq(quickLib, quickComp, continuationsLibrary, actors, swing, forkjoin, fjbg)
+ lazy val compiledProjects = Seq(quickLib, quickComp, continuationsLibrary, actors, actorsMigration, swing, forkjoin, fjbg)
// Collection of projects to 'package' and 'publish' together.
- lazy val packagedBinaryProjects = Seq(scalaLibrary, scalaCompiler, swing, continuationsPlugin, jline, scalap)
+ lazy val packagedBinaryProjects = Seq(scalaLibrary, scalaCompiler, swing, actors, actorsMigration, continuationsPlugin, jline, scalap)
lazy val partestRunProjects = Seq(testsuite, continuationsTestsuite)
private def epflPomExtra = (
@@ -110,7 +86,6 @@ object ScalaBuild extends Build with Layers {
},
// TODO - Make exported products == makeDist so we can use this when creating a *real* distribution.
commands += Release.pushStarr
- //commands += Release.setStarrHome
)
// Note: Root project is determined by lowest-alphabetical project that has baseDirectory as file("."). we use aaa_ to 'win'.
lazy val aaa_root = Project("scala", file(".")) settings(projectSettings: _*) settings(ShaResolve.settings: _*)
@@ -123,6 +98,11 @@ object ScalaBuild extends Build with Layers {
)
)
+ def fixArtifactSrc(dir: File, name: String) = name match {
+ case x if x startsWith "scala-" => dir / "src" / (name drop 6)
+ case x => dir / "src" / name
+ }
+
// These are setting overrides for most artifacts in the Scala build file.
def settingOverrides: Seq[Setting[_]] = publishSettings ++ Seq(
crossPaths := false,
@@ -134,9 +114,8 @@ object ScalaBuild extends Build with Layers {
target <<= (baseDirectory, name) apply (_ / "target" / _),
(classDirectory in Compile) <<= target(_ / "classes"),
javacOptions ++= Seq("-target", "1.5", "-source", "1.5"),
- scalaSource in Compile <<= (baseDirectory, name) apply (_ / "src" / _),
- javaSource in Compile <<= (baseDirectory, name) apply (_ / "src" / _),
- autoScalaLibrary := false,
+ scalaSource in Compile <<= (baseDirectory, name) apply fixArtifactSrc,
+ javaSource in Compile <<= (baseDirectory, name) apply fixArtifactSrc,
unmanagedJars in Compile := Seq(),
// Most libs in the compiler use this order to build.
compileOrder in Compile := CompileOrder.JavaThenScala,
@@ -177,20 +156,21 @@ object ScalaBuild extends Build with Layers {
}
// Locker is a lockable Scala compiler that can be built of 'current' source to perform rapid development.
- lazy val (lockerLib, lockerComp) = makeLayer("locker", STARR, autoLock = true)
- lazy val locker = Project("locker", file(".")) aggregate(lockerLib, lockerComp)
+ lazy val (lockerLib, lockerReflect, lockerComp) = makeLayer("locker", STARR, autoLock = true)
+ lazy val locker = Project("locker", file(".")) aggregate(lockerLib, lockerReflect, lockerComp)
// Quick is the general purpose project layer for the Scala compiler.
- lazy val (quickLib, quickComp) = makeLayer("quick", makeScalaReference("locker", lockerLib, lockerComp))
- lazy val quick = Project("quick", file(".")) aggregate(quickLib, quickComp)
+ lazy val (quickLib, quickReflect, quickComp) = makeLayer("quick", makeScalaReference("locker", lockerLib, lockerReflect, lockerComp))
+ lazy val quick = Project("quick", file(".")) aggregate(quickLib, quickReflect, quickComp)
// Reference to quick scala instance.
- lazy val quickScalaInstance = makeScalaReference("quick", quickLib, quickComp)
+ lazy val quickScalaInstance = makeScalaReference("quick", quickLib, quickReflect, quickComp)
def quickScalaLibraryDependency = unmanagedClasspath in Compile <++= (exportedProducts in quickLib in Compile).identity
+ def quickScalaReflectDependency = unmanagedClasspath in Compile <++= (exportedProducts in quickReflect in Compile).identity
def quickScalaCompilerDependency = unmanagedClasspath in Compile <++= (exportedProducts in quickComp in Compile).identity
// Strapp is used to test binary 'sameness' between things built with locker and things built with quick.
- lazy val (strappLib, strappComp) = makeLayer("strapp", quickScalaInstance)
+ lazy val (strappLib, strappReflect, strappComp) = makeLayer("strapp", quickScalaInstance)
// --------------------------------------------------------------
// Projects dependent on layered compilation (quick)
@@ -222,17 +202,18 @@ object ScalaBuild extends Build with Layers {
}
// TODO - in sabbus, these all use locker to build... I think tihs way is better, but let's farm this idea around.
- // TODO - Actors + swing separate jars...
lazy val dependentProjectSettings = settingOverrides ++ Seq(quickScalaInstance, quickScalaLibraryDependency, addCheaterDependency("scala-library"))
- lazy val actors = Project("actors", file(".")) settings(dependentProjectSettings:_*) dependsOn(forkjoin % "provided")
- // TODO - Remove actors dependency from pom...
- lazy val swing = Project("swing", file(".")) settings(dependentProjectSettings:_*) dependsOn(actors % "provided")
+ lazy val actors = Project("scala-actors", file(".")) settings(dependentProjectSettings:_*) dependsOn(forkjoin % "provided")
+ lazy val swing = Project("scala-swing", file(".")) settings(dependentProjectSettings:_*) dependsOn(actors % "provided")
+ lazy val actorsMigration = Project("scala-actors-migration", file(".")) settings(dependentProjectSettings:_*) dependsOn(actors % "provided")
// This project will generate man pages (in man1 and html) for scala.
lazy val manmakerSettings: Seq[Setting[_]] = dependentProjectSettings :+ externalDeps
lazy val manmaker = Project("manual", file(".")) settings(manmakerSettings:_*)
// Things that compile against the compiler.
- lazy val compilerDependentProjectSettings = dependentProjectSettings ++ Seq(quickScalaCompilerDependency, addCheaterDependency("scala-compiler"))
+ lazy val compilerDependentProjectSettings = dependentProjectSettings ++ Seq(quickScalaReflectDependency, quickScalaCompilerDependency, addCheaterDependency("scala-compiler"))
+
+ lazy val scalacheck = Project("scalacheck", file(".")) settings(compilerDependentProjectSettings:_*) dependsOn(actors % "provided")
lazy val partestSettings = compilerDependentProjectSettings :+ externalDeps
lazy val partest = Project("partest", file(".")) settings(partestSettings:_*) dependsOn(actors,forkjoin,scalap)
lazy val scalapSettings = compilerDependentProjectSettings ++ Seq(
@@ -267,7 +248,7 @@ object ScalaBuild extends Build with Layers {
// --------------------------------------------------------------
val allSubpathsCopy = (dir: File) => (dir.*** --- dir) x (relativeTo(dir)|flat)
def productTaskToMapping(products : Seq[File]) = products flatMap { p => allSubpathsCopy(p) }
- lazy val packageScalaLibBinTask = Seq(quickLib, continuationsLibrary, forkjoin, actors).map(p => products in p in Compile).join.map(_.flatten).map(productTaskToMapping)
+ lazy val packageScalaLibBinTask = Seq(quickLib, continuationsLibrary, forkjoin).map(p => products in p in Compile).join.map(_.flatten).map(productTaskToMapping)
lazy val scalaLibArtifactSettings: Seq[Setting[_]] = inConfig(Compile)(Defaults.packageTasks(packageBin, packageScalaLibBinTask)) ++ Seq(
name := "scala-library",
crossPaths := false,
@@ -283,11 +264,12 @@ object ScalaBuild extends Build with Layers {
lazy val scalaLibrary = Project("scala-library", file(".")) settings(publishSettings:_*) settings(scalaLibArtifactSettings:_*)
// --------------------------------------------------------------
- // Real Compiler Artifact
+ // Real Reflect Artifact
// --------------------------------------------------------------
- lazy val packageScalaBinTask = Seq(quickComp, fjbg, asm).map(p => products in p in Compile).join.map(_.flatten).map(productTaskToMapping)
- lazy val scalaBinArtifactSettings : Seq[Setting[_]] = inConfig(Compile)(Defaults.packageTasks(packageBin, packageScalaBinTask)) ++ Seq(
- name := "scala-compiler",
+
+ lazy val packageScalaReflect = Seq(quickReflect).map(p => products in p in Compile).join.map(_.flatten).map(productTaskToMapping)
+ lazy val scalaReflectArtifactSettings : Seq[Setting[_]] = inConfig(Compile)(Defaults.packageTasks(packageBin, packageScalaReflect)) ++ Seq(
+ name := "scala-reflect",
crossPaths := false,
exportJars := true,
autoScalaLibrary := false,
@@ -296,52 +278,33 @@ object ScalaBuild extends Build with Layers {
quickScalaInstance,
target <<= (baseDirectory, name) apply (_ / "target" / _)
)
- lazy val scalaCompiler = Project("scala-compiler", file(".")) settings(publishSettings:_*) settings(scalaBinArtifactSettings:_*) dependsOn(scalaLibrary)
- lazy val fullQuickScalaReference = makeScalaReference("pack", scalaLibrary, scalaCompiler)
+ lazy val scalaReflect = Project("scala-reflect", file(".")) settings(publishSettings:_*) settings(scalaReflectArtifactSettings:_*) dependsOn(scalaLibrary)
+
// --------------------------------------------------------------
- // Testing
+ // Real Compiler Artifact
// --------------------------------------------------------------
- /* lazy val scalacheckSettings: Seq[Setting[_]] = Seq(fullQuickScalaReference, crossPaths := false)*/
- lazy val scalacheck = uri("git://github.com/jsuereth/scalacheck.git#scala-build")
-
- lazy val testsuiteSettings: Seq[Setting[_]] = compilerDependentProjectSettings ++ partestTaskSettings ++ VerifyClassLoad.settings ++ Seq(
- unmanagedBase <<= baseDirectory / "test/files/lib",
- fullClasspath in VerifyClassLoad.checkClassLoad <<= (fullClasspath in scalaLibrary in Runtime).identity,
+ lazy val packageScalaBinTask = Seq(quickComp, fjbg, asm).map(p => products in p in Compile).join.map(_.flatten).map(productTaskToMapping)
+ lazy val scalaBinArtifactSettings : Seq[Setting[_]] = inConfig(Compile)(Defaults.packageTasks(packageBin, packageScalaBinTask)) ++ Seq(
+ name := "scala-compiler",
+ crossPaths := false,
+ exportJars := true,
autoScalaLibrary := false,
- checkSameLibrary <<= checkSameBinaryProjects(quickLib, strappLib),
- checkSameCompiler <<= checkSameBinaryProjects(quickComp, strappComp),
- checkSame <<= (checkSameLibrary, checkSameCompiler) map ((a,b) => ()),
- autoScalaLibrary := false
- )
- lazy val continuationsTestsuiteSettings: Seq[Setting[_]] = testsuiteSettings ++ Seq(
- scalacOptions in Test <++= (exportedProducts in Compile in continuationsPlugin) map {
- case Seq(cpDir) => Seq("-Xplugin-require:continuations", "-P:continuations:enable", "-Xplugin:"+cpDir.data.getAbsolutePath)
- },
- partestDirs <<= baseDirectory apply { bd =>
- def mkFile(name: String) = bd / "test" / "files" / name
- def mkTestType(name: String) = name.drop("continuations-".length).toString
- Seq("continuations-neg", "continuations-run") map (t => mkTestType(t) -> mkFile(t)) toMap
- }
- )
- val testsuite = (
- Project("testsuite", file("."))
- settings (testsuiteSettings:_*)
- dependsOn (swing, scalaLibrary, scalaCompiler, fjbg, partest, scalacheck)
- )
- val continuationsTestsuite = (
- Project("continuations-testsuite", file("."))
- settings (continuationsTestsuiteSettings:_*)
- dependsOn (partest, swing, scalaLibrary, scalaCompiler, fjbg)
+ unmanagedJars in Compile := Seq(),
+ fullClasspath in Runtime <<= (exportedProducts in Compile).identity,
+ quickScalaInstance,
+ target <<= (baseDirectory, name) apply (_ / "target" / _)
)
+ lazy val scalaCompiler = Project("scala-compiler", file(".")) settings(publishSettings:_*) settings(scalaBinArtifactSettings:_*) dependsOn(scalaReflect)
+ lazy val fullQuickScalaReference = makeScalaReference("pack", scalaLibrary, scalaReflect, scalaCompiler)
+
// --------------------------------------------------------------
// Generating Documentation.
// --------------------------------------------------------------
// TODO - Migrate this into the dist project.
// Scaladocs
- def distScalaInstance = makeScalaReference("dist", scalaLibrary, scalaCompiler)
lazy val documentationSettings: Seq[Setting[_]] = dependentProjectSettings ++ Seq(
// TODO - Make these work for realz.
defaultExcludes in unmanagedSources in Compile := ((".*" - ".") || HiddenFileFilter ||
@@ -371,163 +334,4 @@ object ScalaBuild extends Build with Layers {
settings (documentationSettings: _*)
dependsOn(quickLib, quickComp, actors, fjbg, forkjoin, swing, continuationsLibrary)
)
-
- // --------------------------------------------------------------
- // Packaging a distro
- // --------------------------------------------------------------
-
- class ScalaToolRunner(classpath: Classpath) {
- // TODO - Don't use the ant task directly...
- lazy val classLoader = new java.net.URLClassLoader(classpath.map(_.data.toURI.toURL).toArray, null)
- lazy val mainClass = classLoader.loadClass("scala.tools.ant.ScalaTool")
- lazy val executeMethod = mainClass.getMethod("execute")
- lazy val setFileMethod = mainClass.getMethod("setFile", classOf[java.io.File])
- lazy val setClassMethod = mainClass.getMethod("setClass", classOf[String])
- lazy val setClasspathMethod = mainClass.getMethod("setClassPath", classOf[String])
- lazy val instance = mainClass.newInstance()
-
- def setClass(cls: String): Unit = setClassMethod.invoke(instance, cls)
- def setFile(file: File): Unit = setFileMethod.invoke(instance, file)
- def setClasspath(cp: String): Unit = setClasspathMethod.invoke(instance, cp)
- def execute(): Unit = executeMethod.invoke(instance)
- }
-
- def genBinTask(
- runner: ScopedTask[ScalaToolRunner],
- outputDir: ScopedSetting[File],
- classpath: ScopedTask[Classpath],
- useClasspath: Boolean
- ): Project.Initialize[sbt.Task[Map[File,String]]] = {
- (runner, outputDir, classpath, streams) map { (runner, outDir, cp, s) =>
- IO.createDirectory(outDir)
- val classToFilename = Map(
- "scala.tools.nsc.MainGenericRunner" -> "scala",
- "scala.tools.nsc.Main" -> "scalac",
- "scala.tools.nsc.ScalaDoc" -> "scaladoc",
- "scala.tools.nsc.CompileClient" -> "fsc",
- "scala.tools.scalap.Main" -> "scalap"
- )
- if (useClasspath) {
- val classpath = Build.data(cp).map(_.getCanonicalPath).distinct.mkString(",")
- s.log.debug("Setting classpath = " + classpath)
- runner setClasspath classpath
- }
- def genBinFiles(cls: String, dest: File) = {
- runner.setClass(cls)
- runner.setFile(dest)
- runner.execute()
- // TODO - Mark generated files as executable (755 or a+x) that is *not* JDK6 specific...
- dest.setExecutable(true)
- }
- def makeBinMappings(cls: String, binName: String): Map[File,String] = {
- val file = outDir / binName
- val winBinName = binName + ".bat"
- genBinFiles(cls, file)
- Map( file -> ("bin/"+binName), outDir / winBinName -> ("bin/"+winBinName) )
- }
- classToFilename.flatMap((makeBinMappings _).tupled).toMap
- }
- }
- def runManmakerTask(classpath: ScopedTask[Classpath], scalaRun: ScopedTask[ScalaRun], mainClass: String, dir: String, ext: String): Project.Initialize[Task[Map[File,String]]] =
- (classpath, scalaRun, streams, target) map { (cp, runner, s, target) =>
- val binaries = Seq("fsc", "scala", "scalac", "scaladoc", "scalap")
- binaries map { bin =>
- val file = target / "man" / dir / (bin + ext)
- val classname = "scala.man1." + bin
- IO.createDirectory(file.getParentFile)
- toError(runner.run(mainClass, Build.data(cp), Seq(classname, file.getAbsolutePath), s.log))
- file -> ("man/" + dir + "/" + bin + ext)
- } toMap
- }
-
- val genBinRunner = TaskKey[ScalaToolRunner]("gen-bin-runner",
- "Creates a utility to generate script files for Scala.")
- val genBin = TaskKey[Map[File,String]]("gen-bin",
- "Creates script files for Scala distribution.")
- val binDir = SettingKey[File]("binaries-directory",
- "Directory where binary scripts will be located.")
- val genBinQuick = TaskKey[Map[File,String]]("gen-quick-bin",
- "Creates script files for testing against current Scala build classfiles (not local dist).")
- val runManmakerMan = TaskKey[Map[File,String]]("make-man",
- "Runs the man maker project to generate man pages")
- val runManmakerHtml = TaskKey[Map[File,String]]("make-html",
- "Runs the man maker project to generate html pages")
-
- lazy val scalaDistSettings: Seq[Setting[_]] = Seq(
- crossPaths := false,
- target <<= (baseDirectory, name) apply (_ / "target" / _),
- scalaSource in Compile <<= (baseDirectory, name) apply (_ / "src" / _),
- autoScalaLibrary := false,
- unmanagedJars in Compile := Seq(),
- genBinRunner <<= (fullClasspath in quickComp in Runtime) map (new ScalaToolRunner(_)),
- binDir <<= target(_/"bin"),
- genBin <<= genBinTask(genBinRunner, binDir, fullClasspath in Runtime, false),
- binDir in genBinQuick <<= baseDirectory apply (_ / "target" / "bin"),
- // Configure the classpath this way to avoid having .jar files and previous layers on the classpath.
- fullClasspath in Runtime in genBinQuick <<= Seq(quickComp,quickLib,scalap,actors,swing,fjbg,jline,forkjoin).map(classDirectory in Compile in _).join.map(Attributed.blankSeq),
- fullClasspath in Runtime in genBinQuick <++= (fullClasspath in Compile in jline),
- genBinQuick <<= genBinTask(genBinRunner, binDir in genBinQuick, fullClasspath in Runtime in genBinQuick, true),
- runManmakerMan <<= runManmakerTask(fullClasspath in Runtime in manmaker, runner in manmaker, "scala.tools.docutil.EmitManPage", "man1", ".1"),
- runManmakerHtml <<= runManmakerTask(fullClasspath in Runtime in manmaker, runner in manmaker, "scala.tools.docutil.EmitHtml", "doc", ".html"),
- // TODO - We could *really* clean this up in many ways. Let's look into making a a Seq of "direct jars" (scalaLibrary, scalaCompiler, jline, scalap)
- // a seq of "plugin jars" (continuationsPlugin) and "binaries" (genBin) and "documentation" mappings (genBin) that this can aggregate.
- // really need to figure out a better way to pull jline + jansi.
- makeDistMappings <<= (genBin,
- runManmakerMan,
- runManmakerHtml,
- packageBin in scalaLibrary in Compile,
- packageBin in scalaCompiler in Compile,
- packageBin in jline in Compile,
- packageBin in continuationsPlugin in Compile,
- managedClasspath in jline in Compile,
- packageBin in scalap in Compile) map {
- (binaries, man, html, lib, comp, jline, continuations, jlineDeps, scalap) =>
- val jlineDepMap: Seq[(File, String)] = jlineDeps.map(_.data).flatMap(_ x Path.flat) map { case(a,b) => a -> ("lib/"+b) }
- binaries ++ man ++ html ++ jlineDepMap ++ Seq(
- lib -> "lib/scala-library.jar",
- comp -> "lib/scala-compiler.jar",
- jline -> "lib/jline.jar",
- continuations -> "misc/scala-devel/plugins/continuations.jar",
- scalap -> "lib/scalap.jar"
- ) toMap
- },
- // Add in some more dependencies
- makeDistMappings <<= (makeDistMappings,
- packageBin in swing in Compile) map {
- (dist, s) =>
- dist ++ Seq(s -> "lib/scala-swing.jar")
- },
- makeDist <<= (makeDistMappings, baseDirectory, streams) map { (maps, dir, s) =>
- s.log.debug("Map = " + maps.mkString("\n"))
- val file = dir / "target" / "scala-dist.zip"
- IO.zip(maps, file)
- s.log.info("Created " + file.getAbsolutePath)
- file
- },
- makeExplodedDist <<= (makeDistMappings, target, streams) map { (maps, dir, s) =>
- def sameFile(f: File, f2: File) = f.getCanonicalPath == f2.getCanonicalPath
- IO.createDirectory(dir)
- IO.copy(for {
- (file, name) <- maps
- val file2 = dir / name
- if !sameFile(file,file2)
- } yield (file, file2))
- // Hack to make binaries be executable. TODO - Fix for JDK 5 and below...
- maps.values filter (_ startsWith "bin/") foreach (dir / _ setExecutable true)
- dir
- }
- )
- lazy val scaladist = (
- Project("dist", file("."))
- settings (scalaDistSettings: _*)
- )
-}
-
-/** Matcher to make updated remote project references easier. */
-object ScopedExternalSetting {
- def unapply[T](s: Setting[_]): Option[(URI, AttributeKey[_], Setting[_])] =
- s.key.scope.project match {
- case Select(p @ ProjectRef(uri, _)) => Some((uri, s.key.key, s))
- case _ => None
- }
}
diff --git a/project/Layers.scala b/project/Layers.scala
index 6d0c68f2a4..35cc79c130 100644
--- a/project/Layers.scala
+++ b/project/Layers.scala
@@ -25,15 +25,16 @@ trait Layers extends Build {
/** Creates a reference Scala version that can be used to build other projects. This takes in the raw
* library, compiler and fjbg libraries as well as a string representing the layer name (used for compiling the compile-interface).
*/
- def makeScalaReference(layer: String, library: Project, compiler: Project) =
+ def makeScalaReference(layer: String, library: Project, reflect: Project, compiler: Project) =
scalaInstance <<= (appConfiguration in library,
version in library,
(exportedProducts in library in Compile),
+ (exportedProducts in reflect in Compile),
(exportedProducts in compiler in Compile),
(exportedProducts in fjbg in Compile),
(fullClasspath in jline in Runtime),
(exportedProducts in asm in Runtime)) map {
- (app, version: String, lib: Classpath, comp: Classpath, fjbg: Classpath, jline: Classpath, asm: Classpath) =>
+ (app, version: String, lib: Classpath, reflect: Classpath, comp: Classpath, fjbg: Classpath, jline: Classpath, asm: Classpath) =>
val launcher = app.provider.scalaProvider.launcher
(lib,comp) match {
case (Seq(libraryJar), Seq(compilerJar)) =>
@@ -42,7 +43,7 @@ trait Layers extends Build {
libraryJar.data,
compilerJar.data,
launcher,
- ((fjbg.files++jline.files ++ asm.files):_*))
+ ((fjbg.files ++ jline.files ++ asm.files ++ reflect.files):_*))
case _ => error("Cannot build a ScalaReference with more than one classpath element")
}
}
@@ -51,7 +52,7 @@ trait Layers extends Build {
* Returns the library project and compiler project from the next layer.
* Note: The library and compiler are not *complete* in the sense that they are missing things like "actors" and "fjbg".
*/
- def makeLayer(layer: String, referenceScala: Setting[Task[ScalaInstance]], autoLock: Boolean = false) : (Project, Project) = {
+ def makeLayer(layer: String, referenceScala: Setting[Task[ScalaInstance]], autoLock: Boolean = false) : (Project, Project, Project) = {
val autoLockSettings: Seq[Setting[_]] =
if(autoLock) Seq(compile in Compile <<= (compile in Compile, lock) apply { (c, l) =>
c flatMapR { cResult =>
@@ -76,6 +77,20 @@ trait Layers extends Build {
referenceScala
)
+ // Define the reflection
+ val reflect = Project(layer + "-reflect", file(".")) settings(settingOverrides:_*) settings(autoLockSettings:_*) settings(
+ version := layer,
+ scalaSource in Compile <<= (baseDirectory) apply (_ / "src" / "reflect"),
+ resourceDirectory in Compile <<= baseDirectory apply (_ / "src" / "reflect"),
+ defaultExcludes := ("tests"),
+ defaultExcludes in unmanagedResources := "*.scala",
+ resourceGenerators in Compile <+= (resourceManaged, Versions.scalaVersions, skip in Compile, streams) map Versions.generateVersionPropertiesFile("reflect.properties"),
+ // TODO - Use depends on *and* SBT's magic dependency mechanisms...
+ unmanagedClasspath in Compile <<= Seq(forkjoin, library).map(exportedProducts in Compile in _).join.map(_.flatten),
+ externalDeps,
+ referenceScala
+ )
+
// Define the compiler
val compiler = Project(layer + "-compiler", file(".")) settings(settingOverrides:_*) settings(autoLockSettings:_*) settings(
version := layer,
@@ -93,13 +108,13 @@ trait Layers extends Build {
dirs.descendentsExcept( ("*.xml" | "*.html" | "*.gif" | "*.png" | "*.js" | "*.css" | "*.tmpl" | "*.swf" | "*.properties" | "*.txt"),"*.scala").get
},
// TODO - Use depends on *and* SBT's magic dependency mechanisms...
- unmanagedClasspath in Compile <<= Seq(forkjoin, library, fjbg, jline, asm).map(exportedProducts in Compile in _).join.map(_.flatten),
+ unmanagedClasspath in Compile <<= Seq(forkjoin, library, reflect, fjbg, jline, asm).map(exportedProducts in Compile in _).join.map(_.flatten),
externalDeps,
referenceScala
)
// Return the generated projects.
- (library, compiler)
+ (library, reflect, compiler)
}
}
diff --git a/project/Packaging.scala b/project/Packaging.scala
new file mode 100644
index 0000000000..eb4e69f99e
--- /dev/null
+++ b/project/Packaging.scala
@@ -0,0 +1,129 @@
+import sbt._
+import Keys._
+import ScalaBuildKeys._
+
+/** All the settings related to *packaging* the built scala software. */
+trait Packaging { self: ScalaBuild.type =>
+
+ // --------------------------------------------------------------
+ // Packaging a distro
+ // --------------------------------------------------------------
+ lazy val scalaDistSettings: Seq[Setting[_]] = Seq(
+ crossPaths := false,
+ target <<= (baseDirectory, name) apply (_ / "target" / _),
+ scalaSource in Compile <<= (baseDirectory, name) apply (_ / "src" / _),
+ autoScalaLibrary := false,
+ unmanagedJars in Compile := Seq(),
+ genBinRunner <<= (fullClasspath in quickComp in Runtime) map (new ScalaToolRunner(_)),
+ binDir <<= target(_/"bin"),
+ genBin <<= genBinTask(genBinRunner, binDir, fullClasspath in Runtime, false),
+ binDir in genBinQuick <<= baseDirectory apply (_ / "target" / "bin"),
+ // Configure the classpath this way to avoid having .jar files and previous layers on the classpath.
+ fullClasspath in Runtime in genBinQuick <<= Seq(quickComp,quickLib,scalap,actors,swing,fjbg,jline,forkjoin).map(classDirectory in Compile in _).join.map(Attributed.blankSeq),
+ fullClasspath in Runtime in genBinQuick <++= (fullClasspath in Compile in jline),
+ genBinQuick <<= genBinTask(genBinRunner, binDir in genBinQuick, fullClasspath in Runtime in genBinQuick, true),
+ runManmakerMan <<= runManmakerTask(fullClasspath in Runtime in manmaker, runner in manmaker, "scala.tools.docutil.EmitManPage", "man1", ".1"),
+ runManmakerHtml <<= runManmakerTask(fullClasspath in Runtime in manmaker, runner in manmaker, "scala.tools.docutil.EmitHtml", "doc", ".html"),
+ // TODO - We could *really* clean this up in many ways. Let's look into making a a Seq of "direct jars" (scalaLibrary, scalaCompiler, jline, scalap)
+ // a seq of "plugin jars" (continuationsPlugin) and "binaries" (genBin) and "documentation" mappings (genBin) that this can aggregate.
+ // really need to figure out a better way to pull jline + jansi.
+ makeDistMappings <<= (genBin,
+ runManmakerMan,
+ runManmakerHtml,
+ packageBin in scalaLibrary in Compile,
+ packageBin in scalaCompiler in Compile,
+ packageBin in jline in Compile,
+ packageBin in continuationsPlugin in Compile,
+ managedClasspath in jline in Compile,
+ packageBin in scalap in Compile) map {
+ (binaries, man, html, lib, comp, jline, continuations, jlineDeps, scalap) =>
+ val jlineDepMap: Seq[(File, String)] = jlineDeps.map(_.data).flatMap(_ x Path.flat) map { case(a,b) => a -> ("lib/"+b) }
+ binaries ++ man ++ html ++ jlineDepMap ++ Seq(
+ lib -> "lib/scala-library.jar",
+ comp -> "lib/scala-compiler.jar",
+ jline -> "lib/jline.jar",
+ continuations -> "misc/scala-devel/plugins/continuations.jar",
+ scalap -> "lib/scalap.jar"
+ )
+ },
+ // Add in some more dependencies
+ makeDistMappings <+= (packageBin in swing in Compile) map (s => s -> "lib/scala-swing.jar"),
+ makeDistMappings <+= (packageBin in scalaReflect in Compile) map (s => s -> "lib/scala-reflect.jar"),
+ makeDist <<= (makeDistMappings, baseDirectory, streams) map { (maps, dir, s) =>
+ s.log.debug("Map = " + maps.mkString("\n"))
+ val file = dir / "target" / "scala-dist.zip"
+ IO.zip(maps, file)
+ s.log.info("Created " + file.getAbsolutePath)
+ file
+ },
+ makeExplodedDist <<= (makeDistMappings, target, streams) map { (maps, dir, s) =>
+ def sameFile(f: File, f2: File) = f.getCanonicalPath == f2.getCanonicalPath
+ IO.createDirectory(dir)
+ IO.copy(for {
+ (file, name) <- maps
+ val file2 = dir / name
+ if !sameFile(file,file2)
+ } yield (file, file2))
+ // Hack to make binaries be executable. TODO - Fix for JDK 5 and below...
+ maps map (_._2) filter (_ startsWith "bin/") foreach (dir / _ setExecutable true)
+ dir
+ }
+ )
+ lazy val scaladist = (
+ Project("dist", file("."))
+ settings (scalaDistSettings: _*)
+ )
+
+
+// Helpers to make a distribution
+
+ /** Generates runner scripts for distribution. */
+ def genBinTask(
+ runner: ScopedTask[ScalaToolRunner],
+ outputDir: ScopedSetting[File],
+ classpath: ScopedTask[Classpath],
+ useClasspath: Boolean
+ ): Project.Initialize[sbt.Task[Seq[(File,String)]]] = {
+ (runner, outputDir, classpath, streams) map { (runner, outDir, cp, s) =>
+ IO.createDirectory(outDir)
+ val classToFilename = Seq(
+ "scala.tools.nsc.MainGenericRunner" -> "scala",
+ "scala.tools.nsc.Main" -> "scalac",
+ "scala.tools.nsc.ScalaDoc" -> "scaladoc",
+ "scala.tools.nsc.CompileClient" -> "fsc",
+ "scala.tools.scalap.Main" -> "scalap"
+ )
+ if (useClasspath) {
+ val classpath = Build.data(cp).map(_.getCanonicalPath).distinct.mkString(",")
+ s.log.debug("Setting classpath = " + classpath)
+ runner setClasspath classpath
+ }
+ def genBinFiles(cls: String, dest: File) = {
+ runner.setClass(cls)
+ runner.setFile(dest)
+ runner.execute()
+ // TODO - Mark generated files as executable (755 or a+x) that is *not* JDK6 specific...
+ dest.setExecutable(true)
+ }
+ def makeBinMappings(cls: String, binName: String): Seq[(File,String)] = {
+ val file = outDir / binName
+ val winBinName = binName + ".bat"
+ genBinFiles(cls, file)
+ Seq( file -> ("bin/"+binName), outDir / winBinName -> ("bin/"+winBinName) )
+ }
+ classToFilename.flatMap((makeBinMappings _).tupled)
+ }
+ }
+ /** Creates man pages for distribution. */
+ def runManmakerTask(classpath: ScopedTask[Classpath], scalaRun: ScopedTask[ScalaRun], mainClass: String, dir: String, ext: String): Project.Initialize[Task[Seq[(File,String)]]] =
+ (classpath, scalaRun, streams, target) map { (cp, runner, s, target) =>
+ val binaries = Seq("fsc", "scala", "scalac", "scaladoc", "scalap")
+ binaries map { bin =>
+ val file = target / "man" / dir / (bin + ext)
+ val classname = "scala.man1." + bin
+ IO.createDirectory(file.getParentFile)
+ toError(runner.run(mainClass, Build.data(cp), Seq(classname, file.getAbsolutePath), s.log))
+ file -> ("man/" + dir + "/" + bin + ext)
+ }
+ }
+}
diff --git a/project/Release.scala b/project/Release.scala
index 1a17956c13..feab8bdc8c 100644
--- a/project/Release.scala
+++ b/project/Release.scala
@@ -5,11 +5,26 @@ object Release {
// TODO - Just make the STARR artifacts and dump the sha1 files.
+ val starrLibs = Seq("scala-library.jar", "scala-reflect.jar", "scala-compiler.jar", "jline.jar")
- lazy val pushStarr = Command.command("push-starr") { (state: State) =>
- // TODO do something
- // Revert to previous project state.
- state
- }
-
+ val pushStarr = Command.command("new-starr") { (state: State) =>
+ /*val extracted = Project.extract(state)
+ import extracted._
+ // First run tests
+ val (s1, result) = runTask(test in Test, state)
+ // If successful, package artifacts
+ val (s2, distDir) = runTask(makeExplodedDist, s1)
+ // Then copy new libs in place
+ val bd = extracted get baseDirectory
+ for {
+ jarName <- starrLibs
+ jar = distDir / "lib" / jarName
+ if jar.exists
+ } IO.copyFile(jar, bd / "lib" / jarName)
+ // Invalidate SHA1 files.
+ ShaResolve.removeInvalidShaFiles(bd)
+ // Now run tests *again*?
+ s2*/
+ state
+ }
}
diff --git a/project/RemoteDependencies.scala b/project/RemoteDependencies.scala
new file mode 100644
index 0000000000..705b9dc402
--- /dev/null
+++ b/project/RemoteDependencies.scala
@@ -0,0 +1,53 @@
+import sbt._
+import Keys._
+import ScalaBuildKeys._
+
+
+object RemoteDependencies {
+ def buildSettings(externalProjects: Set[URI], localScala: Setting[_]): Seq[Setting[_]] = Seq(
+ commands += Command.command("fix-uri-projects") { (state: State) =>
+ if(state.get(buildFixed) getOrElse false) state
+ else {
+ // TODO -fix up scalacheck's dependencies!
+ val extracted = Project.extract(state)
+ import extracted._
+ val scalaVersionString = extracted get version
+
+ def fix(s: Setting[_]): Setting[_] = s match {
+ case ScopedExternalSetting(p, scalaInstance.key, setting) if externalProjects(p) => localScala mapKey Project.mapScope(_ => s.key.scope)
+ // TODO - Fix Actors dependency...
+ //case ScopedExternalSetting(p, libraryDependencies.key, setting) if externalProjects(p) => fixProjectDeps(s)
+ case s => s
+ }
+ val transformed = session.mergeSettings map ( s => fix(s) )
+ val scopes = transformed collect { case ScopedExternalSetting(p, _, s) if externalProjects(p) => s.key.scope } toSet
+ // Create some fixers so we don't download scala or rely on it.
+ // Also add dependencies that disappear in 2.10 for now...
+ val fixers = for { scope <- scopes
+ setting <- Seq(autoScalaLibrary := false,
+ crossPaths := false,
+ scalaVersion := scalaVersionString)
+ } yield setting mapKey Project.mapScope(_ => scope)
+ val newStructure = Load.reapply(transformed ++ fixers, structure)
+ Project.setProject(session, newStructure, state).put(buildFixed, true)
+ }
+ },
+ onLoad in Global <<= (onLoad in Global) apply (_ andThen { (state: State) =>
+ "fix-uri-projects" :: state
+ })
+ )
+}
+
+
+
+/** Matcher to make updated remote project references easier. */
+object ScopedExternalSetting {
+ def unapply[T](s: Setting[_]): Option[(URI, AttributeKey[_], Setting[_])] =
+ s.key.scope.project match {
+ case Select(p @ ProjectRef(uri, _)) => Some((uri, s.key.key, s))
+ case _ => None
+ }
+}
+
+
+
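ScopedExternalSetting above is a plain custom extractor: unapply inspects a setting's scope and, when it belongs to a URI-referenced project, exposes the pieces the fix-uri-projects command matches on. A standalone sketch of that pattern with simplified stand-in types (FakeSetting replaces sbt's Setting/ProjectRef, and the URI is just an example):

object ExtractorSketch extends App {
  case class FakeSetting(projectUri: Option[String], key: String, value: Any)

  // Matches only settings scoped to an external (URI-based) project.
  object ScopedExternal {
    def unapply(s: FakeSetting): Option[(String, String, FakeSetting)] =
      s.projectUri map (uri => (uri, s.key, s))
  }

  val external = Set("git://github.com/rickynils/scalacheck.git")
  val settings = Seq(
    FakeSetting(Some("git://github.com/rickynils/scalacheck.git"), "scala-instance", "local"),
    FakeSetting(None, "version", "2.10.0-SNAPSHOT")
  )

  // Same shape as fix(...) in buildSettings: rewrite only the external projects' settings.
  settings foreach {
    case ScopedExternal(uri, key, _) if external(uri) => println(s"would rewrite $key of $uri")
    case other                                        => println(s"left alone: ${other.key}")
  }
}
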
diff --git a/project/Sametest.scala b/project/Sametest.scala
index f44fe8ec65..6f12eb24b3 100644
--- a/project/Sametest.scala
+++ b/project/Sametest.scala
@@ -5,9 +5,6 @@ import Keys._
// This code is adapted from scala.tools.ant.Same by Gilles Dubochet.
object SameTest {
- lazy val checkSame: TaskKey[Unit] = TaskKey("check-same-binaries", "checks whether or not the class files generated by scala are the same.")
- lazy val checkSameLibrary: TaskKey[Unit] = TaskKey("check-same-lib-binaries", "checks whether or not the librayr class files generated by scala are the same.")
- lazy val checkSameCompiler: TaskKey[Unit] = TaskKey("check-same-comp-binaries", "checks whether or not the compiler class files generated by scala are the same.")
def checkSameBinaryProjects(lhs: Project, rhs: Project): Project.Initialize[Task[Unit]] =
(classDirectory in Compile in lhs, classDirectory in Compile in rhs,
diff --git a/project/ScalaBuildKeys.scala b/project/ScalaBuildKeys.scala
new file mode 100644
index 0000000000..9e495de19f
--- /dev/null
+++ b/project/ScalaBuildKeys.scala
@@ -0,0 +1,23 @@
+import sbt._
+import Keys._
+
+object ScalaBuildKeys {
+ val lockerLock = TaskKey[Unit]("locker-lock", "Locks the locker layer of the compiler build such that it won't rebuild on changed source files.")
+ val lockerUnlock = TaskKey[Unit]("locker-unlock", "Unlocks the locker layer of the compiler so that it will be recompiled on changed source files.")
+ val lockFile = SettingKey[File]("lock-file", "Location of the lock file for compiling this project.")
+ val lock = TaskKey[Unit]("lock", "Locks this project so it won't be recompiled.")
+ val unlock = TaskKey[Unit]("unlock", "Unlocks this project so it will be recompiled.")
+ val makeDist = TaskKey[File]("make-dist", "Creates a mini-distribution (scala home directory) for this build in a zip file.")
+ val makeExplodedDist = TaskKey[File]("make-exploded-dist", "Creates a mini-distribution (scala home directory) for this build in a directory.")
+ val makeDistMappings = TaskKey[Seq[(File, String)]]("make-dist-mappings", "Creates distribution mappings for creating zips, jars, directories, etc.")
+ val buildFixed = AttributeKey[Boolean]("build-uri-fixed")
+ val genBinRunner = TaskKey[ScalaToolRunner]("gen-bin-runner", "Creates a utility to generate script files for Scala.")
+ val genBin = TaskKey[Seq[(File,String)]]("gen-bin", "Creates script files for Scala distribution.")
+ val binDir = SettingKey[File]("binaries-directory", "Directory where binary scripts will be located.")
+ val genBinQuick = TaskKey[Seq[(File,String)]]("gen-quick-bin", "Creates script files for testing against current Scala build classfiles (not local dist).")
+ val runManmakerMan = TaskKey[Seq[(File,String)]]("make-man", "Runs the man maker project to generate man pages.")
+ val runManmakerHtml = TaskKey[Seq[(File,String)]]("make-html", "Runs the man maker project to generate html pages.")
+ val checkSame = TaskKey[Unit]("check-same-binaries", "Checks whether the class files generated by scala are the same.")
+ val checkSameLibrary = TaskKey[Unit]("check-same-lib-binaries", "Checks whether the library class files generated by scala are the same.")
+ val checkSameCompiler = TaskKey[Unit]("check-same-comp-binaries", "Checks whether the compiler class files generated by scala are the same.")
+}
diff --git a/project/ScalaToolRunner.scala b/project/ScalaToolRunner.scala
new file mode 100644
index 0000000000..d7338a54b3
--- /dev/null
+++ b/project/ScalaToolRunner.scala
@@ -0,0 +1,21 @@
+import sbt._
+import Keys._
+
+/** Reflection helper that runs ScalaTool.
+ * TODO - When SBT is on 2.10.x try to use Dynamic + Reflection. COULD BE FUN.
+ */
+class ScalaToolRunner(classpath: Classpath) {
+ // TODO - Don't use the ant task directly...
+ lazy val classLoader = new java.net.URLClassLoader(classpath.map(_.data.toURI.toURL).toArray, null)
+ lazy val mainClass = classLoader.loadClass("scala.tools.ant.ScalaTool")
+ lazy val executeMethod = mainClass.getMethod("execute")
+ lazy val setFileMethod = mainClass.getMethod("setFile", classOf[java.io.File])
+ lazy val setClassMethod = mainClass.getMethod("setClass", classOf[String])
+ lazy val setClasspathMethod = mainClass.getMethod("setClassPath", classOf[String])
+ lazy val instance = mainClass.newInstance()
+
+ def setClass(cls: String): Unit = setClassMethod.invoke(instance, cls)
+ def setFile(file: File): Unit = setFileMethod.invoke(instance, file)
+ def setClasspath(cp: String): Unit = setClasspathMethod.invoke(instance, cp)
+ def execute(): Unit = executeMethod.invoke(instance)
+}
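
ScalaToolRunner drives the scala.tools.ant.ScalaTool task purely through reflection, so the build does not need that class on its own compile classpath. A minimal standalone sketch of the same load-then-invoke pattern, using java.lang.StringBuilder as a stand-in target class so it runs anywhere:

object ReflectiveInvokeSketch extends App {
  // Load a class by name, instantiate it, look up methods, and invoke them reflectively --
  // the same shape as ScalaToolRunner's lazy vals, but against a JDK class.
  val loader   = getClass.getClassLoader
  val cls      = loader.loadClass("java.lang.StringBuilder")
  val instance = cls.newInstance()
  val append   = cls.getMethod("append", classOf[String])
  val render   = cls.getMethod("toString")

  append.invoke(instance, "hello, ")
  append.invoke(instance, "reflection")
  println(render.invoke(instance))   // hello, reflection
}
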
diff --git a/project/ShaResolve.scala b/project/ShaResolve.scala
index e6824ee464..cea2b2d6cc 100644
--- a/project/ShaResolve.scala
+++ b/project/ShaResolve.scala
@@ -7,6 +7,7 @@ import scala.collection.{ mutable, immutable }
import scala.collection.parallel.CompositeThrowable
import java.security.MessageDigest
+case class Credentials(user: String, pw: String)
/** Helpers to resolve SHA artifacts from typesafe repo. */
object ShaResolve {
@@ -19,7 +20,8 @@ object ShaResolve {
def settings: Seq[Setting[_]] = Seq(
binaryLibCache in ThisBuild := file(System.getProperty("user.home")) / ".sbt" / "cache" / "scala",
- pullBinaryLibs in ThisBuild <<= (baseDirectory, binaryLibCache, streams) map resolveLibs
+ pullBinaryLibs in ThisBuild <<= (baseDirectory, binaryLibCache, streams) map resolveLibs,
+ pushBinaryLibs in ThisBuild <<= (baseDirectory, streams) map getCredentialsAndPushFiles
)
def resolveLibs(dir: File, cacheDir: File, s: TaskStreams): Unit = loggingParallelExceptions(s) {
@@ -33,6 +35,34 @@ object ShaResolve {
} pullFile(jar, sha + "/" + uri, cacheDir, sha, s)
}
+ /** This method removes all SHA1 files that don't match their corresponding JAR. */
+ def removeInvalidShaFiles(dir: File): Unit = {
+ val files = (dir / "test" / "files" ** "*.desired.sha1") +++ (dir / "lib" ** "*.desired.sha1")
+ for {
+ (file, name) <- (files x relativeTo(dir)).par
+ uri = name.dropRight(13).replace('\\', '/')
+ jar = dir / uri
+ if !jar.exists || !isValidSha(file)
+ } IO.delete(file)
+ }
+ def getCredentials: Credentials = System.out.synchronized {
+ val user = (SimpleReader.readLine("Please enter your STARR username> ") getOrElse error("No username provided."))
+ val password = (SimpleReader.readLine("Please enter your STARR password> ", Some('*')) getOrElse error("No password provided."))
+ Credentials(user, password)
+ }
+
+ def getCredentialsAndPushFiles(dir: File, s: TaskStreams): Unit =
+ pushFiles(dir, getCredentials, s)
+
+ def pushFiles(dir: File, cred: Credentials, s: TaskStreams): Unit = loggingParallelExceptions(s) {
+ val files = (dir / "test" / "files" ** "*.jar") +++ (dir / "lib" ** "*.jar")
+ for {
+ (jar, name) <- (files x relativeTo(dir)).par
+ shafile = dir / (name + ".desired.sha1")
+ if !shafile.exists || !isValidSha(shafile)
+ } pushFile(jar, name, cred, s)
+ }
+
@inline final def loggingParallelExceptions[U](s: TaskStreams)(f: => U): U = try f catch {
case t: CompositeThrowable =>
s.log.error("Error during parallel execution, GET READY FOR STACK TRACES!!")
@@ -60,24 +90,17 @@ object ShaResolve {
sha
}
- // TODO - Prettier way of doing this...
- private def convertToHex(data: Array[Byte]): String = {
+ def convertToHex(data: Array[Byte]): String = {
+ def byteToHex(b: Int) =
+ if ((0 <= b) && (b <= 9)) ('0' + b).toChar
+ else ('a' + (b-10)).toChar
val buf = new StringBuffer
- for (i <- 0 until data.length) {
- var halfbyte = (data(i) >>> 4) & 0x0F;
- var two_halfs = 0;
- while(two_halfs < 2) {
- if ((0 <= halfbyte) && (halfbyte <= 9))
- buf.append(('0' + halfbyte).toChar)
- else
- buf.append(('a' + (halfbyte - 10)).toChar);
- halfbyte = data(i) & 0x0F;
- two_halfs += 1
- }
- }
- return buf.toString
- }
-
+ for (i <- 0 until data.length) {
+ buf append byteToHex((data(i) >>> 4) & 0x0F)
+ buf append byteToHex(data(i) & 0x0F)
+ }
+ buf.toString
+ }
// Parses a sha file into a file and a sha.
def parseShaFile(file: File): (File, String) =
IO.read(file).split("\\s") match {
@@ -110,10 +133,15 @@ object ShaResolve {
IO.copyFile(cachedFile, file)
}
- def pushFile(file: File, uri: String, user: String, pw: String): Unit = {
- val url = remote_urlbase + "/" + uri
- val sender = dispatch.url(url).PUT.as(user,pw) <<< (file, "application/java-archive")
+ // Pushes a file and writes the new .desired.sha1 for git.
+ def pushFile(file: File, uri: String, cred: Credentials, s: TaskStreams): Unit = {
+ val sha = calculateSha(file)
+ val url = remote_urlbase + "/" + sha + "/" + uri
+ val sender = dispatch.url(url).PUT.as(cred.user,cred.pw) <<< (file, "application/java-archive")
// TODO - output to logger.
Http(sender >>> System.out)
+ val shafile = file.getParentFile / (file.getName + ".desired.sha1")
+ IO.touch(shafile)
+ IO.write(shafile, sha + " ?" + file.getName)
}
}
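
The rewritten convertToHex is the standard nibble-by-nibble hex encoding. A self-contained sketch of the whole digest-and-encode path (java.security.MessageDigest plus the same byteToHex helper), hashing an in-memory string instead of a jar file:

import java.security.MessageDigest

object Sha1HexSketch extends App {
  def byteToHex(b: Int): Char =
    if ((0 <= b) && (b <= 9)) ('0' + b).toChar
    else ('a' + (b - 10)).toChar

  def convertToHex(data: Array[Byte]): String = {
    val buf = new StringBuilder
    for (i <- 0 until data.length) {
      buf append byteToHex((data(i) >>> 4) & 0x0F)  // high nibble first
      buf append byteToHex(data(i) & 0x0F)          // then low nibble
    }
    buf.toString
  }

  val md  = MessageDigest.getInstance("SHA-1")
  val sha = convertToHex(md.digest("hello".getBytes("UTF-8")))
  println(sha)   // 40 lowercase hex characters
}
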
diff --git a/project/Testing.scala b/project/Testing.scala
new file mode 100644
index 0000000000..cec1d8c60b
--- /dev/null
+++ b/project/Testing.scala
@@ -0,0 +1,41 @@
+import sbt._
+import Keys._
+import partest._
+import SameTest._
+import ScalaBuildKeys._
+
+/** All settings/projects relating to testing. */
+trait Testing { self: ScalaBuild.type =>
+
+ lazy val testsuiteSettings: Seq[Setting[_]] = compilerDependentProjectSettings ++ partestTaskSettings ++ VerifyClassLoad.settings ++ Seq(
+ unmanagedBase <<= baseDirectory / "test/files/lib",
+ fullClasspath in VerifyClassLoad.checkClassLoad <<= (fullClasspath in scalaLibrary in Runtime).identity,
+ autoScalaLibrary := false,
+ checkSameLibrary <<= checkSameBinaryProjects(quickLib, strappLib),
+ checkSameCompiler <<= checkSameBinaryProjects(quickComp, strappComp),
+ checkSame <<= (checkSameLibrary, checkSameCompiler) map ((a,b) => ()),
+ autoScalaLibrary := false
+ )
+ lazy val continuationsTestsuiteSettings: Seq[Setting[_]] = testsuiteSettings ++ Seq(
+ scalacOptions in Test <++= (exportedProducts in Compile in continuationsPlugin) map {
+ case Seq(cpDir) => Seq("-Xplugin-require:continuations", "-P:continuations:enable", "-Xplugin:"+cpDir.data.getAbsolutePath)
+ },
+ partestDirs <<= baseDirectory apply { bd =>
+ def mkFile(name: String) = bd / "test" / "files" / name
+ def mkTestType(name: String) = name.drop("continuations-".length).toString
+ Seq("continuations-neg", "continuations-run") map (t => mkTestType(t) -> mkFile(t)) toMap
+ }
+ )
+ val testsuite = (
+ Project("testsuite", file("."))
+ settings (testsuiteSettings:_*)
+ dependsOn (scalaLibrary, scalaCompiler, fjbg, partest, scalacheck, actorsMigration)
+ )
+ val continuationsTestsuite = (
+ Project("continuations-testsuite", file("."))
+ settings (continuationsTestsuiteSettings:_*)
+ dependsOn (partest, scalaLibrary, scalaCompiler, fjbg, actorsMigration)
+ )
+
+}
+
diff --git a/src/compiler/scala/reflect/reify/Errors.scala b/src/compiler/scala/reflect/reify/Errors.scala
index 714795503b..1b72b3075b 100644
--- a/src/compiler/scala/reflect/reify/Errors.scala
+++ b/src/compiler/scala/reflect/reify/Errors.scala
@@ -17,11 +17,6 @@ trait Errors {
// expected errors: these can happen if the user casually writes whatever.reify(...)
// hence we don't crash here, but nicely report a typechecking error and bail out asap
- def CannotReifyReifeeThatHasTypeLocalToReifee(tree: Tree) = {
- val msg = "implementation restriction: cannot reify block of type %s that involves a type declared inside the block being reified. consider casting the return value to a suitable type".format(tree.tpe)
- throw new ReificationError(tree.pos, msg)
- }
-
def CannotReifyType(tpe: Type) = {
val msg = "implementation restriction: cannot reify type %s (%s)".format(tpe, tpe.kind)
throw new ReificationError(defaultErrorPosition, msg)
diff --git a/src/compiler/scala/reflect/reify/Reifier.scala b/src/compiler/scala/reflect/reify/Reifier.scala
index 00f25f0d8b..8fba7274be 100644
--- a/src/compiler/scala/reflect/reify/Reifier.scala
+++ b/src/compiler/scala/reflect/reify/Reifier.scala
@@ -68,20 +68,6 @@ abstract class Reifier extends States
val pipeline = mkReificationPipeline
val rtree = pipeline(tree)
- // consider the following code snippet
- //
- // val x = reify { class C; new C }
- //
- // inferred type for x will be C
- // but C ceases to exist after reification so this type is clearly incorrect
- // however, reify is "just" a library function, so it cannot affect type inference
- //
- // hence we crash here even though the reification itself goes well
- // fortunately, all that it takes to fix the error is to cast "new C" to Object
- // so I'm not very much worried about introducing this restriction
- if (tree.tpe exists (sub => sub.typeSymbol.isLocalToReifee))
- CannotReifyReifeeThatHasTypeLocalToReifee(tree)
-
val tpe = typer.packedType(tree, NoSymbol)
val ReifiedType(_, _, tpeSymtab, _, rtpe, tpeReificationIsConcrete) = `package`.reifyType(global)(typer, universe, mirror, tpe, concrete = false)
state.reificationIsConcrete &= tpeReificationIsConcrete
diff --git a/src/compiler/scala/reflect/reify/utils/NodePrinters.scala b/src/compiler/scala/reflect/reify/utils/NodePrinters.scala
index ce0ab2196a..7214da597e 100644
--- a/src/compiler/scala/reflect/reify/utils/NodePrinters.scala
+++ b/src/compiler/scala/reflect/reify/utils/NodePrinters.scala
@@ -15,41 +15,6 @@ trait NodePrinters {
import Flag._
object reifiedNodeToString extends (Tree => String) {
- // [Eugene++ to Martin] can we do better?
- // didn't want to invent anything myself in order not to interfere with your line of thought
- def bitsToFlags(bits: String): String = {
- val flags = bits.toLong
- if (flags == NoFlags) nme.NoFlags.toString
- else {
- val s_flags = new collection.mutable.ListBuffer[String]
- if (flags containsAll TRAIT) s_flags += "TRAIT"
- if (flags containsAll MODULE) s_flags += "MODULE"
- if (flags containsAll MUTABLE) s_flags += "MUTABLE"
- if (flags containsAll PACKAGE) s_flags += "PACKAGE"
- if (flags containsAll METHOD) s_flags += "METHOD"
- if (flags containsAll DEFERRED) s_flags += "DEFERRED"
- if (flags containsAll ABSTRACT) s_flags += "ABSTRACT"
- if (flags containsAll FINAL) s_flags += "FINAL"
- if (flags containsAll SEALED) s_flags += "SEALED"
- if (flags containsAll IMPLICIT) s_flags += "IMPLICIT"
- if (flags containsAll LAZY) s_flags += "LAZY"
- if (flags containsAll OVERRIDE) s_flags += "OVERRIDE"
- if (flags containsAll PRIVATE) s_flags += "PRIVATE"
- if (flags containsAll PROTECTED) s_flags += "PROTECTED"
- if (flags containsAll CASE) s_flags += "CASE"
- if (flags containsAll ABSOVERRIDE) s_flags += "ABSOVERRIDE"
- if (flags containsAll BYNAMEPARAM) s_flags += "BYNAMEPARAM"
- if (flags containsAll PARAM) s_flags += "PARAM"
- if (flags containsAll PARAMACCESSOR) s_flags += "PARAMACCESSOR"
- if (flags containsAll CASEACCESSOR) s_flags += "CASEACCESSOR"
- if (flags containsAll COVARIANT) s_flags += "COVARIANT"
- if (flags containsAll CONTRAVARIANT) s_flags += "CONTRAVARIANT"
- if (flags containsAll DEFAULTPARAM) s_flags += "DEFAULTPARAM"
- if (flags containsAll INTERFACE) s_flags += "INTERFACE"
- s_flags mkString " | "
- }
- }
-
def apply(tree: Tree): String = {
var mirrorIsUsed = false
var flagsAreUsed = false
@@ -70,7 +35,7 @@ trait NodePrinters {
s = s.replace("immutable.this.Nil", "List()")
s = """build\.flagsFromBits\((\d+)[lL]\)""".r.replaceAllIn(s, m => {
flagsAreUsed = true
- bitsToFlags(m.group(1))
+ show(m.group(1).toLong)
})
s = s.replace("Modifiers(0L, newTypeName(\"\"), List())", "Modifiers()")
s = """Modifiers\((\d+)[lL], newTypeName\("(.*?)"\), List\((.*?)\)\)""".r.replaceAllIn(s, m => {
@@ -87,7 +52,7 @@ trait NodePrinters {
val bits = m.group(1)
if (buf.nonEmpty || bits != "0L") {
flagsAreUsed = true
- buf.append(bitsToFlags(bits))
+ buf.append(show(bits.toLong))
}
val replacement = "Modifiers(" + buf.reverse.mkString(", ") + ")"
diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala
index a701e41153..35bf2dd288 100644
--- a/src/compiler/scala/tools/nsc/Global.scala
+++ b/src/compiler/scala/tools/nsc/Global.scala
@@ -12,7 +12,7 @@ import scala.tools.util.PathResolver
import scala.collection.{ mutable, immutable }
import io.{ SourceReader, AbstractFile, Path }
import reporters.{ Reporter, ConsoleReporter }
-import util.{ Exceptional, ClassPath, MergedClassPath, Statistics, StatisticsInfo, ScalaClassLoader, returning }
+import util.{ Exceptional, ClassPath, MergedClassPath, StatisticsInfo, ScalaClassLoader, returning }
import scala.reflect.internal.util.{ NoPosition, SourceFile, NoSourceFile, BatchSourceFile, ScriptSourceFile }
import scala.reflect.internal.pickling.{ PickleBuffer, PickleFormat }
import settings.{ AestheticSettings }
@@ -39,7 +39,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
with Plugins
with PhaseAssembly
with Trees
- with TreePrinters
+ with Printers
with DocComments
with Positions { self =>
diff --git a/src/compiler/scala/tools/nsc/ast/NodePrinters.scala b/src/compiler/scala/tools/nsc/ast/NodePrinters.scala
index ba1f3b2e3c..4afd3545b9 100644
--- a/src/compiler/scala/tools/nsc/ast/NodePrinters.scala
+++ b/src/compiler/scala/tools/nsc/ast/NodePrinters.scala
@@ -146,7 +146,7 @@ abstract class NodePrinters {
}
def printModifiers(tree: MemberDef) {
// [Eugene++] there's most likely a bug here (?)
- // see `TreePrinters.printAnnotations` for more information
+ // see `Printers.printAnnotations` for more information
val annots0 = tree.symbol.annotations match {
case Nil => tree.mods.annotations
case xs => xs map annotationInfoToString
diff --git a/src/compiler/scala/tools/nsc/ast/TreePrinters.scala b/src/compiler/scala/tools/nsc/ast/Printers.scala
index 3371353f25..94d0c4f45e 100644
--- a/src/compiler/scala/tools/nsc/ast/TreePrinters.scala
+++ b/src/compiler/scala/tools/nsc/ast/Printers.scala
@@ -10,7 +10,7 @@ import java.io.{ OutputStream, PrintWriter, StringWriter, Writer }
import symtab.Flags._
import symtab.SymbolTable
-trait TreePrinters extends reflect.internal.TreePrinters { this: Global =>
+trait Printers extends reflect.internal.Printers { this: Global =>
import treeInfo.{ IsTrue, IsFalse }
@@ -276,8 +276,8 @@ trait TreePrinters extends reflect.internal.TreePrinters { this: Global =>
}
}
- def asString(t: Tree): String = show(t, newStandardTreePrinter)
- def asCompactString(t: Tree): String = show(t, newCompactTreePrinter)
+ def asString(t: Tree): String = render(t, newStandardTreePrinter, settings.printtypes.value, settings.uniqid.value, settings.Yshowsymkinds.value)
+ def asCompactString(t: Tree): String = render(t, newCompactTreePrinter, settings.printtypes.value, settings.uniqid.value, settings.Yshowsymkinds.value)
def newStandardTreePrinter(writer: PrintWriter): TreePrinter = new TreePrinter(writer)
def newStandardTreePrinter(stream: OutputStream): TreePrinter = newStandardTreePrinter(new PrintWriter(stream))
diff --git a/src/compiler/scala/tools/nsc/javac/JavaParsers.scala b/src/compiler/scala/tools/nsc/javac/JavaParsers.scala
index e2203e07b3..3797d32d8b 100644
--- a/src/compiler/scala/tools/nsc/javac/JavaParsers.scala
+++ b/src/compiler/scala/tools/nsc/javac/JavaParsers.scala
@@ -911,7 +911,8 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
buf ++= importDecl()
while (in.token != EOF && in.token != RBRACE) {
while (in.token == SEMI) in.nextToken
- buf ++= typeDecl(modifiers(false))
+ if (in.token != EOF)
+ buf ++= typeDecl(modifiers(false))
}
accept(EOF)
atPos(pos) {
diff --git a/src/compiler/scala/tools/nsc/matching/MatchSupport.scala b/src/compiler/scala/tools/nsc/matching/MatchSupport.scala
index 72e6f32af1..16761144d7 100644
--- a/src/compiler/scala/tools/nsc/matching/MatchSupport.scala
+++ b/src/compiler/scala/tools/nsc/matching/MatchSupport.scala
@@ -7,7 +7,7 @@ package scala.tools.nsc
package matching
import transform.ExplicitOuter
-import ast.{ TreePrinters, Trees }
+import ast.{ Printers, Trees }
import java.io.{ StringWriter, PrintWriter }
import annotation.elidable
import language.postfixOps
diff --git a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
index 04acba06e8..9b223a13ba 100644
--- a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
+++ b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
@@ -160,7 +160,7 @@ trait ScalaSettings extends AbsScalaSettings
val Ygenjavap = StringSetting ("-Ygen-javap", "dir", "Generate a parallel output directory of .javap files.", "")
val Ydumpclasses = StringSetting ("-Ydump-classes", "dir", "Dump the generated bytecode to .class files (useful for reflective compilation that utilizes in-memory classloaders).", "")
val Ynosqueeze = BooleanSetting ("-Yno-squeeze", "Disable creation of compact code in matching.")
- val Ystatistics = BooleanSetting ("-Ystatistics", "Print compiler statistics.") andThen (util.Statistics.enabled = _)
+ val Ystatistics = BooleanSetting ("-Ystatistics", "Print compiler statistics.") andThen (scala.reflect.internal.util.Statistics.enabled = _)
val stopAfter = PhasesSetting ("-Ystop-after", "Stop after") withAbbreviation ("-stop") // backward compat
val stopBefore = PhasesSetting ("-Ystop-before", "Stop before")
val refinementMethodDispatch
diff --git a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala
index 15edac16d5..0c988ceae4 100644
--- a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala
+++ b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala
@@ -12,7 +12,7 @@ import scala.tools.nsc.util.{ ClassPath }
import classfile.ClassfileParser
import reflect.internal.Flags._
import reflect.internal.MissingRequirementError
-import util.Statistics._
+import reflect.internal.util.Statistics
import scala.tools.nsc.io.{ AbstractFile, MsilFile }
/** This class ...
@@ -23,6 +23,7 @@ import scala.tools.nsc.io.{ AbstractFile, MsilFile }
abstract class SymbolLoaders {
val global: Global
import global._
+ import SymbolLoadersStats._
protected def enterIfNew(owner: Symbol, member: Symbol, completer: SymbolLoader): Symbol = {
assert(owner.info.decls.lookup(member.name) == NoSymbol, owner.fullName + "." + member.name)
@@ -236,7 +237,7 @@ abstract class SymbolLoaders {
protected def description = "class file "+ classfile.toString
protected def doComplete(root: Symbol) {
- val start = startTimer(classReadNanos)
+ val start = Statistics.startTimer(classReadNanos)
classfileParser.parse(classfile, root)
if (root.associatedFile eq null) {
root match {
@@ -248,7 +249,7 @@ abstract class SymbolLoaders {
debuglog("Not setting associatedFile to %s because %s is a %s".format(classfile, root.name, root.shortSymbolClass))
}
}
- stopTimer(classReadNanos, start)
+ Statistics.stopTimer(classReadNanos, start)
}
override def sourcefile: Option[AbstractFile] = classfileParser.srcfile
}
@@ -284,3 +285,8 @@ abstract class SymbolLoaders {
var parentsLevel = 0
var pendingLoadActions: List[() => Unit] = Nil
}
+
+object SymbolLoadersStats {
+ import reflect.internal.TypesStats.typerNanos
+ val classReadNanos = Statistics.newSubTimer ("time classfilereading", typerNanos)
+}
diff --git a/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala b/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala
index 1c97eaad8b..5f66cadbc9 100644
--- a/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala
+++ b/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala
@@ -107,7 +107,7 @@ abstract class ExtensionMethods extends Transform with TypingTransformers {
def extensionMethInfo(extensionMeth: Symbol, origInfo: Type, clazz: Symbol): Type = {
var newTypeParams = cloneSymbolsAtOwner(clazz.typeParams, extensionMeth)
- val thisParamType = appliedType(clazz.typeConstructor, newTypeParams map (_.tpe))
+ val thisParamType = appliedType(clazz.typeConstructor, newTypeParams map (_.tpeHK))
val thisParam = extensionMeth.newValueParameter(nme.SELF, extensionMeth.pos) setInfo thisParamType
def transform(clonedType: Type): Type = clonedType match {
case MethodType(params, restpe) =>
@@ -159,7 +159,7 @@ abstract class ExtensionMethods extends Transform with TypingTransformers {
.changeOwner((origMeth, extensionMeth))
extensionDefs(companion) += atPos(tree.pos) { DefDef(extensionMeth, extensionBody) }
val extensionCallPrefix = Apply(
- gen.mkTypeApply(gen.mkAttributedRef(companion), extensionMeth, origTpeParams map (_.tpe)),
+ gen.mkTypeApply(gen.mkAttributedRef(companion), extensionMeth, origTpeParams map (_.tpeHK)),
List(This(currentOwner)))
val extensionCall = atOwner(origMeth) {
localTyper.typedPos(rhs.pos) {
diff --git a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
index 4b488a6437..c4c769d7cf 100644
--- a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
+++ b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
@@ -1448,20 +1448,29 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
case ddef @ DefDef(_, _, _, vparamss, _, _) if info.isDefinedAt(symbol) =>
// log("--> method: " + ddef + " in " + ddef.symbol.owner + ", " + info(symbol))
+ def reportTypeError(body: =>Tree) =
+ try body
+ catch {
+ case te: TypeError =>
+ reporter.error(te.pos, te.toString)
+ ddef
+ }
if (symbol.isConstructor) {
val t = atOwner(symbol)(forwardCtorCall(tree.pos, gen.mkSuperSelect, vparamss, symbol.owner))
if (symbol.isPrimaryConstructor)
localTyper.typedPos(symbol.pos)(deriveDefDef(tree)(_ => Block(List(t), Literal(Constant()))))
- else // duplicate the original constructor
- duplicateBody(ddef, info(symbol).target)
+ else // duplicate the original constructor
+ reportTypeError(duplicateBody(ddef, info(symbol).target))
}
else info(symbol) match {
case Implementation(target) =>
assert(body.isDefinedAt(target), "sym: " + symbol.fullName + " target: " + target.fullName)
// we have an rhs, specialize it
- val tree1 = duplicateBody(ddef, target)
+ val tree1 = reportTypeError {
+ duplicateBody(ddef, target)
+ }
debuglog("implementation: " + tree1)
deriveDefDef(tree1)(transform)
@@ -1472,7 +1481,9 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
}
else {
// we have an rhs, specialize it
- val tree1 = duplicateBody(ddef, target)
+ val tree1 = reportTypeError {
+ duplicateBody(ddef, target)
+ }
debuglog("implementation: " + tree1)
deriveDefDef(tree1)(transform)
}
diff --git a/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala b/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala
index a77df71312..2077ab0997 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala
@@ -6,7 +6,7 @@
package scala.tools.nsc
package typechecker
-import util.Statistics._
+import reflect.internal.util.Statistics
/** The main attribution phase.
*/
@@ -72,6 +72,7 @@ trait Analyzer extends AnyRef
}
object typerFactory extends SubComponent {
+ import reflect.internal.TypesStats.typerNanos
val global: Analyzer.this.global.type = Analyzer.this.global
val phaseName = "typer"
val runsAfter = List[String]()
@@ -84,13 +85,13 @@ trait Analyzer extends AnyRef
// compiler run). This is good enough for the resident compiler, which was the most affected.
undoLog.clear()
override def run() {
- val start = startTimer(typerNanos)
+ val start = Statistics.startTimer(typerNanos)
global.echoPhaseSummary(this)
currentRun.units foreach applyPhase
undoLog.clear()
// need to clear it after as well or 10K+ accumulated entries are
// uncollectable the rest of the way.
- stopTimer(typerNanos, start)
+ Statistics.stopTimer(typerNanos, start)
}
def apply(unit: CompilationUnit) {
try {
diff --git a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
index bcf2ba6b71..60cc9e5fb8 100644
--- a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
@@ -90,7 +90,7 @@ trait ContextErrors {
import infer.setError
object TyperErrorGen {
- implicit val context0: Context = infer.getContext
+ implicit val contextTyperErrorGen: Context = infer.getContext
def UnstableTreeError(tree: Tree) = {
def addendum = {
@@ -222,7 +222,13 @@ trait ContextErrors {
NormalTypeError(tree, "super constructor cannot be passed a self reference unless parameter is declared by-name")
def SuperConstrArgsThisReferenceError(tree: Tree) =
- NormalTypeError(tree, "super constructor arguments cannot reference unconstructed `this`")
+ ConstrArgsThisReferenceError("super", tree)
+
+ def SelfConstrArgsThisReferenceError(tree: Tree) =
+ ConstrArgsThisReferenceError("self", tree)
+
+ private def ConstrArgsThisReferenceError(prefix: String, tree: Tree) =
+ NormalTypeError(tree, s"$prefix constructor arguments cannot reference unconstructed `this`")
def TooManyArgumentListsForConstructor(tree: Tree) = {
issueNormalTypeError(tree, "too many argument lists for constructor invocation")
@@ -642,7 +648,7 @@ trait ContextErrors {
object InferErrorGen {
- implicit val context0 = getContext
+ implicit val contextInferErrorGen = getContext
object PolyAlternativeErrorKind extends Enumeration {
type ErrorType = Value
@@ -828,7 +834,7 @@ trait ContextErrors {
object NamerErrorGen {
- implicit val context0 = context
+ implicit val contextNamerErrorGen = context
object SymValidateErrors extends Enumeration {
val ImplicitConstr, ImplicitNotTermOrClass, ImplicitAtToplevel,
@@ -863,7 +869,7 @@ trait ContextErrors {
case CyclicReference(sym, info: TypeCompleter) =>
issueNormalTypeError(tree, typer.cyclicReferenceMessage(sym, info.tree) getOrElse ex.getMessage())
case _ =>
- context0.issue(TypeErrorWithUnderlyingTree(tree, ex))
+ contextNamerErrorGen.issue(TypeErrorWithUnderlyingTree(tree, ex))
}
}
diff --git a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
index ac3c94c47a..bcf529ecd2 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
@@ -100,6 +100,12 @@ trait Contexts { self: Analyzer =>
var outer: Context = _ // The next outer context
var enclClass: Context = _ // The next outer context whose tree is a
// template or package definition
+ @inline final def savingEnclClass[A](c: Context)(a: => A): A = {
+ val saved = enclClass
+ enclClass = c
+ try a finally enclClass = saved
+ }
+
var enclMethod: Context = _ // The next outer context whose tree is a method
var variance: Int = _ // Variance relative to enclosing class
private var _undetparams: List[Symbol] = List() // Undetermined type parameters,
@@ -638,11 +644,12 @@ trait Contexts { self: Analyzer =>
if (owner != nextOuter.owner && owner.isClass && !owner.isPackageClass && !inSelfSuperCall) {
if (!owner.isInitialized) return nextOuter.implicitss
// debuglog("collect member implicits " + owner + ", implicit members = " + owner.thisType.implicitMembers)//DEBUG
- val savedEnclClass = enclClass
- this.enclClass = this
- val res = collectImplicits(owner.thisType.implicitMembers, owner.thisType)
- this.enclClass = savedEnclClass
- res
+ savingEnclClass(this) {
+ // !!! In the body of `class C(implicit a: A) { }`, `implicitss` returns `List(List(a), List(a), List(<predef..)))`
+ // It is handled correctly by implicit search, which considers the second `a` to be shadowed, but it should be
+ // remedied nonetheless.
+ collectImplicits(owner.thisType.implicitMembers, owner.thisType)
+ }
} else if (scope != nextOuter.scope && !owner.isPackageClass) {
debuglog("collect local implicits " + scope.toList)//DEBUG
collectImplicits(scope.toList, NoPrefix)
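
savingEnclClass above wraps the save/mutate/restore of the enclClass field in a single try/finally, so the old value comes back even if collecting implicits throws. A generic standalone sketch of that pattern (the field name and the work done inside are placeholders, not compiler code):

object SaveRestoreSketch extends App {
  var enclosing: String = "outer"   // stands in for a mutable field like enclClass

  @inline final def savingEnclosing[A](temp: String)(body: => A): A = {
    val saved = enclosing
    enclosing = temp
    try body finally enclosing = saved   // restored even if body throws
  }

  val result = savingEnclosing("this-context") {
    s"worked with enclosing = $enclosing"
  }
  println(result)      // worked with enclosing = this-context
  println(enclosing)   // outer -- restored
}
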
diff --git a/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala b/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala
index b7a6ea677e..6386273c9d 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala
@@ -360,7 +360,7 @@ abstract class Duplicators extends Analyzer {
tree
case _ =>
- debuglog("Duplicators default case: " + tree.summaryString)
+ // log("Duplicators default case: " + tree.summaryString + " -> " + tree)
if (tree.hasSymbol && tree.symbol != NoSymbol && (tree.symbol.owner == definitions.AnyClass)) {
tree.symbol = NoSymbol // maybe we can find a more specific member in a subclass of Any (see AnyVal members, like ==)
}
diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
index aa63f3ec31..f7e00109ae 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
@@ -16,7 +16,7 @@ import scala.collection.{ mutable, immutable }
import mutable.{ LinkedHashMap, ListBuffer }
import scala.util.matching.Regex
import symtab.Flags._
-import util.Statistics._
+import scala.reflect.internal.util.Statistics
import language.implicitConversions
/** This trait provides methods to find various kinds of implicits.
@@ -29,6 +29,7 @@ trait Implicits {
import global._
import definitions._
+ import ImplicitsStats._
import typeDebug.{ ptTree, ptBlock, ptLine }
import global.typer.{ printTyping, deindentTyping, indentTyping, printInference }
@@ -71,10 +72,10 @@ trait Implicits {
)
indentTyping()
- val rawTypeStart = startCounter(rawTypeImpl)
- val findMemberStart = startCounter(findMemberImpl)
- val subtypeStart = startCounter(subtypeImpl)
- val start = startTimer(implicitNanos)
+ val rawTypeStart = Statistics.startCounter(rawTypeImpl)
+ val findMemberStart = Statistics.startCounter(findMemberImpl)
+ val subtypeStart = Statistics.startCounter(subtypeImpl)
+ val start = Statistics.startTimer(implicitNanos)
if (printInfers && !tree.isEmpty && !context.undetparams.isEmpty)
printTyping("typing implicit: %s %s".format(tree, context.undetparamsString))
val implicitSearchContext = context.makeImplicit(reportAmbiguous)
@@ -86,10 +87,10 @@ trait Implicits {
printInference("[infer implicit] inferred " + result)
context.undetparams = context.undetparams filterNot result.subst.from.contains
- stopTimer(implicitNanos, start)
- stopCounter(rawTypeImpl, rawTypeStart)
- stopCounter(findMemberImpl, findMemberStart)
- stopCounter(subtypeImpl, subtypeStart)
+ Statistics.stopTimer(implicitNanos, start)
+ Statistics.stopCounter(rawTypeImpl, rawTypeStart)
+ Statistics.stopCounter(findMemberImpl, findMemberStart)
+ Statistics.stopCounter(subtypeImpl, subtypeStart)
deindentTyping()
printTyping("Implicit search yielded: "+ result)
result
@@ -307,12 +308,12 @@ trait Implicits {
/** Is implicit info `info1` better than implicit info `info2`?
*/
def improves(info1: ImplicitInfo, info2: ImplicitInfo) = {
- incCounter(improvesCount)
+ Statistics.incCounter(improvesCount)
(info2 == NoImplicitInfo) ||
(info1 != NoImplicitInfo) && {
if (info1.sym.isStatic && info2.sym.isStatic) {
improvesCache get (info1, info2) match {
- case Some(b) => incCounter(improvesCachedCount); b
+ case Some(b) => Statistics.incCounter(improvesCachedCount); b
case None =>
val result = isStrictlyMoreSpecific(info1.tpe, info2.tpe, info1.sym, info2.sym)
improvesCache((info1, info2)) = result
@@ -376,7 +377,7 @@ trait Implicits {
overlaps(dtor1, dted1) && (dtor1 =:= dted1 || complexity(dtor1) > complexity(dted1))
}
- incCounter(implicitSearchCount)
+ Statistics.incCounter(implicitSearchCount)
/** The type parameters to instantiate */
val undetParams = if (isView) List() else context.outer.undetparams
@@ -390,9 +391,10 @@ trait Implicits {
* Detect infinite search trees for implicits.
*
* @param info The given implicit info describing the implicit definition
+ * @param isLocal Is the implicit in the local scope of the call site?
* @pre `info.tpe` does not contain an error
*/
- private def typedImplicit(info: ImplicitInfo, ptChecked: Boolean): SearchResult = {
+ private def typedImplicit(info: ImplicitInfo, ptChecked: Boolean, isLocal: Boolean): SearchResult = {
(context.openImplicits find { case (tp, tree1) => tree1.symbol == tree.symbol && dominates(pt, tp)}) match {
case Some(pending) =>
//println("Pending implicit "+pending+" dominates "+pt+"/"+undetParams) //@MDEBUG
@@ -401,7 +403,7 @@ trait Implicits {
try {
context.openImplicits = (pt, tree) :: context.openImplicits
// println(" "*context.openImplicits.length+"typed implicit "+info+" for "+pt) //@MDEBUG
- typedImplicit0(info, ptChecked)
+ typedImplicit0(info, ptChecked, isLocal)
} catch {
case ex: DivergentImplicit =>
//println("DivergentImplicit for pt:"+ pt +", open implicits:"+context.openImplicits) //@MDEBUG
@@ -427,7 +429,7 @@ trait Implicits {
* This method is performance critical: 5-8% of typechecking time.
*/
private def matchesPt(tp: Type, pt: Type, undet: List[Symbol]): Boolean = {
- val start = startTimer(matchesPtNanos)
+ val start = Statistics.startTimer(matchesPtNanos)
val result = normSubType(tp, pt) || isView && {
pt match {
case TypeRef(_, Function1.Sym, args) =>
@@ -436,7 +438,7 @@ trait Implicits {
false
}
}
- stopTimer(matchesPtNanos, start)
+ Statistics.stopTimer(matchesPtNanos, start)
result
}
private def matchesPt(info: ImplicitInfo): Boolean = (
@@ -534,8 +536,8 @@ trait Implicits {
case _ => false
}
- private def typedImplicit0(info: ImplicitInfo, ptChecked: Boolean): SearchResult = {
- incCounter(plausiblyCompatibleImplicits)
+ private def typedImplicit0(info: ImplicitInfo, ptChecked: Boolean, isLocal: Boolean): SearchResult = {
+ Statistics.incCounter(plausiblyCompatibleImplicits)
printTyping (
ptBlock("typedImplicit0",
"info.name" -> info.name,
@@ -549,17 +551,24 @@ trait Implicits {
)
if (ptChecked || matchesPt(info))
- typedImplicit1(info)
+ typedImplicit1(info, isLocal)
else
SearchFailure
}
- private def typedImplicit1(info: ImplicitInfo): SearchResult = {
- incCounter(matchingImplicits)
+ private def typedImplicit1(info: ImplicitInfo, isLocal: Boolean): SearchResult = {
+ Statistics.incCounter(matchingImplicits)
val itree = atPos(pos.focus) {
- if (info.pre == NoPrefix) Ident(info.name)
- else {
+ // workaround for deficient context provided by ModelFactoryImplicitSupport#makeImplicitConstraints
+ val isScalaDoc = context.tree == EmptyTree
+
+ if (isLocal && !isScalaDoc) {
+ // SI-4270 SI-5376 Always use an unattributed Ident for implicits in the local scope,
+ // rather than an attributed Select, to detect shadowing.
+ Ident(info.name)
+ } else {
+ assert(info.pre != NoPrefix, info)
// SI-2405 Not info.name, which might be an aliased import
val implicitMemberName = info.sym.name
Select(gen.mkAttributedQualifier(info.pre), implicitMemberName)
@@ -586,7 +595,7 @@ trait Implicits {
if (context.hasErrors)
return fail("typed implicit %s has errors".format(info.sym.fullLocationString))
- incCounter(typedImplicits)
+ Statistics.incCounter(typedImplicits)
printTyping("typed implicit %s:%s, pt=%s".format(itree1, itree1.tpe, wildPt))
val itree2 = if (isView) (itree1: @unchecked) match { case Apply(fun, _) => fun }
@@ -607,8 +616,8 @@ trait Implicits {
if (context.hasErrors)
fail("hasMatchingSymbol reported threw error(s)")
- else if (!hasMatchingSymbol(itree1))
- fail("candidate implicit %s is shadowed by other implicit %s".format(
+ else if (isLocal && !hasMatchingSymbol(itree1))
+ fail("candidate implicit %s is shadowed by %s".format(
info.sym.fullLocationString, itree1.symbol.fullLocationString))
else {
val tvars = undetParams map freshVar
@@ -669,7 +678,7 @@ trait Implicits {
fail("typing TypeApply reported errors for the implicit tree")
else {
val result = new SearchResult(itree2, subst)
- incCounter(foundImplicits)
+ Statistics.incCounter(foundImplicits)
printInference("[success] found %s for pt %s".format(result, ptInstantiated))
result
}
@@ -683,17 +692,6 @@ trait Implicits {
}
}
- // #3453: in addition to the implicit symbols that may shadow the implicit with
- // name `name`, this method tests whether there's a non-implicit symbol with name
- // `name` in scope. Inspired by logic in typedIdent.
- private def nonImplicitSynonymInScope(name: Name) = {
- // the implicit ones are handled by the `shadowed` set above
- context.scope.lookupEntry(name) match {
- case x: ScopeEntry => reallyExists(x.sym) && !x.sym.isImplicit
- case _ => false
- }
- }
-
/** Should implicit definition symbol `sym` be considered for applicability testing?
* This is the case if one of the following holds:
* - the symbol's type is initialized
@@ -737,19 +735,38 @@ trait Implicits {
/** Prune ImplicitInfos down to either all the eligible ones or the best one.
*
* @param iss list of list of infos
- * @param shadowed set in which to record names that are shadowed by implicit infos
- * If it is null, no shadowing.
+ * @param isLocal if true, `iss` represents in-scope implicits, which must respect the normal rules of
+ * shadowing. The head of the list `iss` must represent implicits from the closest
+ * enclosing scope, and so on.
*/
- class ImplicitComputation(iss: Infoss, shadowed: util.HashSet[Name]) {
+ class ImplicitComputation(iss: Infoss, isLocal: Boolean) {
+ abstract class Shadower {
+ def addInfos(infos: Infos)
+ def isShadowed(name: Name): Boolean
+ }
+ private val shadower: Shadower = {
+ /** Used to exclude implicits from outer scopes that are shadowed by same-named implicits. */
+ final class LocalShadower extends Shadower {
+ val shadowed = util.HashSet[Name](512)
+ def addInfos(infos: Infos) {
+ shadowed addEntries infos.map(_.name)
+ }
+ def isShadowed(name: Name) = shadowed(name)
+ }
+ /** Used for the implicits of expected type, when no shadowing checks are needed. */
+ object NoShadower extends Shadower {
+ def addInfos(infos: Infos) {}
+ def isShadowed(name: Name) = false
+ }
+ if (isLocal) new LocalShadower else NoShadower
+ }
+
private var best: SearchResult = SearchFailure
- private def isShadowed(name: Name) = (
- (shadowed != null)
- && (shadowed(name) || nonImplicitSynonymInScope(name))
- )
+
private def isIneligible(info: ImplicitInfo) = (
info.isCyclicOrErroneous
|| isView && isPredefMemberNamed(info.sym, nme.conforms)
- || isShadowed(info.name)
+ || shadower.isShadowed(info.name)
|| (!context.macrosEnabled && info.sym.isTermMacro)
)
@@ -788,9 +805,7 @@ trait Implicits {
val eligible = {
val matches = iss flatMap { is =>
val result = is filter (info => checkValid(info.sym) && survives(info))
- if (shadowed ne null)
- shadowed addEntries (is map (_.name))
-
+ shadower addInfos is
result
}
@@ -812,7 +827,7 @@ trait Implicits {
case Nil => acc
case i :: is =>
def tryImplicitInfo(i: ImplicitInfo) =
- try typedImplicit(i, true)
+ try typedImplicit(i, ptChecked = true, isLocal)
catch divergenceHandler
tryImplicitInfo(i) match {
@@ -842,7 +857,7 @@ trait Implicits {
/** Returns all eligible ImplicitInfos and their SearchResults in a map.
*/
- def findAll() = mapFrom(eligible)(typedImplicit(_, false))
+ def findAll() = mapFrom(eligible)(typedImplicit(_, ptChecked = false, isLocal))
/** Returns the SearchResult of the best match.
*/
@@ -890,11 +905,11 @@ trait Implicits {
* @return map from infos to search results
*/
def applicableInfos(iss: Infoss, isLocal: Boolean): Map[ImplicitInfo, SearchResult] = {
- val start = startCounter(subtypeAppInfos)
- val computation = new ImplicitComputation(iss, if (isLocal) util.HashSet[Name](512) else null) { }
+ val start = Statistics.startCounter(subtypeAppInfos)
+ val computation = new ImplicitComputation(iss, isLocal) { }
val applicable = computation.findAll()
- stopCounter(subtypeAppInfos, start)
+ Statistics.stopCounter(subtypeAppInfos, start)
applicable
}
@@ -909,7 +924,7 @@ trait Implicits {
*/
def searchImplicit(implicitInfoss: Infoss, isLocal: Boolean): SearchResult =
if (implicitInfoss.forall(_.isEmpty)) SearchFailure
- else new ImplicitComputation(implicitInfoss, if (isLocal) util.HashSet[Name](128) else null) findBest()
+ else new ImplicitComputation(implicitInfoss, isLocal) findBest()
/** Produce an implicit info map, i.e. a map from the class symbols C of all parts of this type to
* the implicit infos in the companion objects of these class symbols C.
@@ -1109,26 +1124,28 @@ trait Implicits {
* These are all implicits found in companion objects of classes C
* such that some part of `tp` has C as one of its superclasses.
*/
- private def implicitsOfExpectedType: Infoss = implicitsCache get pt match {
- case Some(implicitInfoss) =>
- incCounter(implicitCacheHits)
- implicitInfoss
- case None =>
- incCounter(implicitCacheMisses)
- val start = startTimer(subtypeETNanos)
-// val implicitInfoss = companionImplicits(pt)
- val implicitInfoss1 = companionImplicitMap(pt).valuesIterator.toList
-// val is1 = implicitInfoss.flatten.toSet
-// val is2 = implicitInfoss1.flatten.toSet
-// for (i <- is1)
-// if (!(is2 contains i)) println("!!! implicit infos of "+pt+" differ, new does not contain "+i+",\nold: "+implicitInfoss+",\nnew: "+implicitInfoss1)
-// for (i <- is2)
-// if (!(is1 contains i)) println("!!! implicit infos of "+pt+" differ, old does not contain "+i+",\nold: "+implicitInfoss+",\nnew: "+implicitInfoss1)
- stopTimer(subtypeETNanos, start)
- implicitsCache(pt) = implicitInfoss1
- if (implicitsCache.size >= sizeLimit)
- implicitsCache -= implicitsCache.keysIterator.next
- implicitInfoss1
+ private def implicitsOfExpectedType: Infoss = {
+ Statistics.incCounter(implicitCacheAccs)
+ implicitsCache get pt match {
+ case Some(implicitInfoss) =>
+ Statistics.incCounter(implicitCacheHits)
+ implicitInfoss
+ case None =>
+ val start = Statistics.startTimer(subtypeETNanos)
+ // val implicitInfoss = companionImplicits(pt)
+ val implicitInfoss1 = companionImplicitMap(pt).valuesIterator.toList
+ // val is1 = implicitInfoss.flatten.toSet
+ // val is2 = implicitInfoss1.flatten.toSet
+ // for (i <- is1)
+ // if (!(is2 contains i)) println("!!! implicit infos of "+pt+" differ, new does not contain "+i+",\nold: "+implicitInfoss+",\nnew: "+implicitInfoss1)
+ // for (i <- is2)
+ // if (!(is1 contains i)) println("!!! implicit infos of "+pt+" differ, old does not contain "+i+",\nold: "+implicitInfoss+",\nnew: "+implicitInfoss1)
+ Statistics.stopTimer(subtypeETNanos, start)
+ implicitsCache(pt) = implicitInfoss1
+ if (implicitsCache.size >= sizeLimit)
+ implicitsCache -= implicitsCache.keysIterator.next
+ implicitInfoss1
+ }
}
private def TagSymbols = TagMaterializers.keySet
@@ -1319,7 +1336,7 @@ trait Implicits {
if (full) {
val cm = typed(Ident(ReflectRuntimeCurrentMirror))
gen.mkMethodCall(ReflectRuntimeUniverse, nme.typeTagToManifest, List(tp), List(cm, tagInScope))
- } else gen.mkMethodCall(ReflectRuntimeUniverse, nme.classTagToClassManifest, List(tp), List(tagInScope))
+ } else gen.mkMethodCall(ReflectBasis, nme.classTagToClassManifest, List(tp), List(tagInScope))
wrapResult(interop)
}
}
@@ -1354,30 +1371,30 @@ trait Implicits {
* If all fails return SearchFailure
*/
def bestImplicit: SearchResult = {
- val failstart = startTimer(inscopeFailNanos)
- val succstart = startTimer(inscopeSucceedNanos)
+ val failstart = Statistics.startTimer(inscopeFailNanos)
+ val succstart = Statistics.startTimer(inscopeSucceedNanos)
var result = searchImplicit(context.implicitss, true)
if (result == SearchFailure) {
- stopTimer(inscopeFailNanos, failstart)
+ Statistics.stopTimer(inscopeFailNanos, failstart)
} else {
- stopTimer(inscopeSucceedNanos, succstart)
- incCounter(inscopeImplicitHits)
+ Statistics.stopTimer(inscopeSucceedNanos, succstart)
+ Statistics.incCounter(inscopeImplicitHits)
}
if (result == SearchFailure) {
val previousErrs = context.flushAndReturnBuffer()
- val failstart = startTimer(oftypeFailNanos)
- val succstart = startTimer(oftypeSucceedNanos)
+ val failstart = Statistics.startTimer(oftypeFailNanos)
+ val succstart = Statistics.startTimer(oftypeSucceedNanos)
result = implicitTagOrOfExpectedType(pt)
if (result == SearchFailure) {
context.updateBuffer(previousErrs)
- stopTimer(oftypeFailNanos, failstart)
+ Statistics.stopTimer(oftypeFailNanos, failstart)
} else {
- stopTimer(oftypeSucceedNanos, succstart)
- incCounter(oftypeImplicitHits)
+ Statistics.stopTimer(oftypeSucceedNanos, succstart)
+ Statistics.incCounter(oftypeImplicitHits)
}
}
@@ -1397,20 +1414,23 @@ trait Implicits {
def allImplicitsPoly(tvars: List[TypeVar]): List[(SearchResult, List[TypeConstraint])] = {
def resetTVars() = tvars foreach { _.constr = new TypeConstraint }
- def eligibleInfos(iss: Infoss, isLocal: Boolean) = new ImplicitComputation(iss, if (isLocal) util.HashSet[Name](512) else null).eligible
- val allEligibleInfos = (eligibleInfos(context.implicitss, true) ++ eligibleInfos(implicitsOfExpectedType, false)).toList
-
- allEligibleInfos flatMap { ii =>
+ def eligibleInfos(iss: Infoss, isLocal: Boolean) = {
+ val eligible = new ImplicitComputation(iss, isLocal).eligible
+ eligible.toList.flatMap {
+ (ii: ImplicitInfo) =>
// each ImplicitInfo contributes a distinct set of constraints (generated indirectly by typedImplicit)
// thus, start each type var off with a fresh for every typedImplicit
resetTVars()
// any previous errors should not affect us now
context.flushBuffer()
- val res = typedImplicit(ii, false)
+
+ val res = typedImplicit(ii, ptChecked = false, isLocal)
if (res.tree ne EmptyTree) List((res, tvars map (_.constr)))
else Nil
}
}
+ eligibleInfos(context.implicitss, isLocal = true) ++ eligibleInfos(implicitsOfExpectedType, isLocal = false)
+ }
}
object ImplicitNotFoundMsg {
@@ -1455,5 +1475,37 @@ trait Implicits {
}
}
}
+
+object ImplicitsStats {
+
+ import reflect.internal.TypesStats._
+
+ val rawTypeImpl = Statistics.newSubCounter (" of which in implicits", rawTypeCount)
+ val subtypeImpl = Statistics.newSubCounter(" of which in implicit", subtypeCount)
+ val findMemberImpl = Statistics.newSubCounter(" of which in implicit", findMemberCount)
+ val subtypeAppInfos = Statistics.newSubCounter(" of which in app impl", subtypeCount)
+ val subtypeImprovCount = Statistics.newSubCounter(" of which in improves", subtypeCount)
+ val implicitSearchCount = Statistics.newCounter ("#implicit searches", "typer")
+ val triedImplicits = Statistics.newSubCounter(" #tried", implicitSearchCount)
+ val plausiblyCompatibleImplicits
+ = Statistics.newSubCounter(" #plausibly compatible", implicitSearchCount)
+ val matchingImplicits = Statistics.newSubCounter(" #matching", implicitSearchCount)
+ val typedImplicits = Statistics.newSubCounter(" #typed", implicitSearchCount)
+ val foundImplicits = Statistics.newSubCounter(" #found", implicitSearchCount)
+ val improvesCount = Statistics.newSubCounter("implicit improves tests", implicitSearchCount)
+ val improvesCachedCount = Statistics.newSubCounter("#implicit improves cached ", implicitSearchCount)
+ val inscopeImplicitHits = Statistics.newSubCounter("#implicit inscope hits", implicitSearchCount)
+ val oftypeImplicitHits = Statistics.newSubCounter("#implicit oftype hits ", implicitSearchCount)
+ val implicitNanos = Statistics.newSubTimer ("time spent in implicits", typerNanos)
+ val inscopeSucceedNanos = Statistics.newSubTimer (" successful in scope", typerNanos)
+ val inscopeFailNanos = Statistics.newSubTimer (" failed in scope", typerNanos)
+ val oftypeSucceedNanos = Statistics.newSubTimer (" successful of type", typerNanos)
+ val oftypeFailNanos = Statistics.newSubTimer (" failed of type", typerNanos)
+ val subtypeETNanos = Statistics.newSubTimer (" assembling parts", typerNanos)
+ val matchesPtNanos = Statistics.newSubTimer (" matchesPT", typerNanos)
+ val implicitCacheAccs = Statistics.newCounter ("implicit cache accesses", "typer")
+ val implicitCacheHits = Statistics.newSubCounter("implicit cache hits", implicitCacheAccs)
+}
+
class DivergentImplicit extends Exception
object DivergentImplicit extends DivergentImplicit
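
The ImplicitComputation changes above replace the nullable util.HashSet[Name] with a small strategy object: LocalShadower records the names of implicits seen in nearer scopes so same-named implicits further out are dropped, while NoShadower (used for implicits of the expected type) never shadows anything. A standalone sketch of the same idea, with plain strings standing in for compiler Names and ImplicitInfos:

object ShadowerSketch extends App {
  abstract class Shadower {
    def addNames(names: Seq[String]): Unit
    def isShadowed(name: String): Boolean
  }
  // In-scope implicits: names from nearer scopes shadow same-named ones further out.
  final class LocalShadower extends Shadower {
    private val shadowed = collection.mutable.Set[String]()
    def addNames(names: Seq[String]): Unit = shadowed ++= names
    def isShadowed(name: String): Boolean  = shadowed(name)
  }
  // Implicits of the expected type: no shadowing rules apply.
  object NoShadower extends Shadower {
    def addNames(names: Seq[String]): Unit = ()
    def isShadowed(name: String): Boolean  = false
  }

  // Mirrors the eligible computation: filter each scope against what nearer scopes defined,
  // then record this scope's names before moving outward.
  def eligible(scopes: List[List[String]], isLocal: Boolean): List[String] = {
    val shadower: Shadower = if (isLocal) new LocalShadower else NoShadower
    scopes flatMap { names =>
      val survivors = names filterNot shadower.isShadowed
      shadower addNames names
      survivors
    }
  }

  val scopes = List(List("conv", "ord"), List("conv", "show"))
  println(eligible(scopes, isLocal = true))    // List(conv, ord, show) -- the outer conv is shadowed
  println(eligible(scopes, isLocal = false))   // List(conv, ord, conv, show)
}
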
diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala
index 9e371dd2dd..688dcd91ac 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala
@@ -1276,7 +1276,8 @@ trait Infer {
} else {
for (arg <- args) {
if (sym == ArrayClass) check(arg, bound)
- else if (arg.typeArgs.nonEmpty) () // avoid spurious warnings with higher-kinded types
+ else if (arg.typeArgs.nonEmpty) () // avoid spurious warnings with higher-kinded types
+ else if (sym == NonLocalReturnControlClass) () // no way to suppress unchecked warnings on try/catch
else arg match {
case TypeRef(_, sym, _) if isLocalBinding(sym) =>
;
diff --git a/src/compiler/scala/tools/nsc/typechecker/Macros.scala b/src/compiler/scala/tools/nsc/typechecker/Macros.scala
index ec14476d1a..d157666e47 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Macros.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Macros.scala
@@ -7,7 +7,7 @@ import scala.tools.nsc.util.ClassPath._
import scala.reflect.runtime.ReflectionUtils
import scala.collection.mutable.ListBuffer
import scala.compat.Platform.EOL
-import util.Statistics._
+import reflect.internal.util.Statistics
import scala.reflect.makro.util._
import java.lang.{Class => jClass}
import java.lang.reflect.{Array => jArray, Method => jMethod}
@@ -42,6 +42,7 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces {
import global._
import definitions._
+ import MacrosStats._
def globalSettings = global.settings
val globalMacroCache = collection.mutable.Map[Any, Any]()
@@ -945,8 +946,8 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces {
this.fail(typer, tree, err.errPos, "failed to %s: %s".format(what, err.errMsg))
return expandee
}
- val start = startTimer(macroExpandNanos)
- incCounter(macroExpandCount)
+ val start = Statistics.startTimer(macroExpandNanos)
+ Statistics.incCounter(macroExpandCount)
try {
macroExpand1(typer, expandee) match {
case Success(expanded0) =>
@@ -993,7 +994,7 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces {
result
}
} finally {
- stopTimer(macroExpandNanos, start)
+ Statistics.stopTimer(macroExpandNanos, start)
}
}
@@ -1292,3 +1293,9 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces {
})
}.transform(expandee)
}
+
+object MacrosStats {
+ import reflect.internal.TypesStats.typerNanos
+ val macroExpandCount = Statistics.newCounter ("#macro expansions", "typer")
+ val macroExpandNanos = Statistics.newSubTimer("time spent in macroExpand", typerNanos)
+}
diff --git a/src/compiler/scala/tools/nsc/typechecker/Modes.scala b/src/compiler/scala/tools/nsc/typechecker/Modes.scala
index 3eff5ef024..bde3ad98c9 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Modes.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Modes.scala
@@ -86,6 +86,10 @@ trait Modes {
*/
final val TYPEPATmode = 0x10000
+ /** RETmode is set when we are typing a return expression.
+ */
+ final val RETmode = 0x20000
+
final private val StickyModes = EXPRmode | PATTERNmode | TYPEmode | ALTmode
final def onlyStickyModes(mode: Int) =
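
RETmode above is one more bit in the typer's mode bitmask. A tiny standalone sketch of how such flag bits are combined, tested, and cleared; RETmode's value is the one quoted above, the rest of the code is purely illustrative:

object ModeBitsSketch extends App {
  final val EXPRmode = 0x0001    // illustrative companion bit
  final val RETmode  = 0x20000

  def inRetMode(mode: Int): Boolean = (mode & RETmode) != 0

  val typingReturn = EXPRmode | RETmode        // set the bit while typing a return expression
  val nestedExpr   = typingReturn & ~RETmode   // clear it again for nested sub-expressions

  println(inRetMode(typingReturn))   // true
  println(inRetMode(nestedExpr))     // false
}
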
diff --git a/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala b/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala
index 4e8f416b16..9b8ddffb49 100644
--- a/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala
@@ -14,7 +14,7 @@ import scala.tools.nsc.transform.TypingTransformers
import scala.tools.nsc.transform.Transform
import scala.collection.mutable.HashSet
import scala.collection.mutable.HashMap
-import scala.tools.nsc.util.Statistics
+import reflect.internal.util.Statistics
/** Translate pattern matching.
*
@@ -38,6 +38,7 @@ import scala.tools.nsc.util.Statistics
*/
trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL { // self: Analyzer =>
import Statistics._
+ import PatternMatchingStats._
val global: Global // need to repeat here because otherwise last mixin defines global as
// SymbolTable. If we had DOT this would not be an issue
@@ -183,7 +184,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
case _ => tp
}
- val start = startTimer(patmatNanos)
+ val start = Statistics.startTimer(patmatNanos)
val selectorTp = repeatedToSeq(elimAnonymousClass(selector.tpe.widen.withoutAnnotations))
@@ -210,7 +211,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
// pt = Any* occurs when compiling test/files/pos/annotDepMethType.scala with -Xexperimental
val combined = combineCases(selector, selectorSym, cases map translateCase(selectorSym, pt), pt, matchOwner, matchFailGenOverride)
- stopTimer(patmatNanos, start)
+ Statistics.stopTimer(patmatNanos, start)
combined
}
@@ -1694,7 +1695,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
// TODO: for V1 representing x1 and V2 standing for x1.head, encode that
// V1 = Nil implies -(V2 = Ci) for all Ci in V2's domain (i.e., it is unassignable)
def removeVarEq(props: List[Prop], considerNull: Boolean = false): (Prop, List[Prop]) = {
- val start = startTimer(patmatAnaVarEq)
+ val start = Statistics.startTimer(patmatAnaVarEq)
val vars = new collection.mutable.HashSet[Var]
@@ -1766,7 +1767,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
// patmatDebug("eqAxioms:\n"+ cnfString(eqFreePropToSolvable(eqAxioms)))
// patmatDebug("pure:\n"+ cnfString(eqFreePropToSolvable(pure)))
- stopTimer(patmatAnaVarEq, start)
+ Statistics.stopTimer(patmatAnaVarEq, start)
(eqAxioms, pure)
}
@@ -1865,10 +1866,10 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
}
}
- val start = startTimer(patmatCNF)
+ val start = Statistics.startTimer(patmatCNF)
val res = conjunctiveNormalForm(negationNormalForm(p))
- stopTimer(patmatCNF, start)
- patmatCNFSizes(res.size) += 1
+ Statistics.stopTimer(patmatCNF, start)
+ patmatCNFSizes(res.size).value += 1
// patmatDebug("cnf for\n"+ p +"\nis:\n"+cnfString(res))
res
@@ -1945,7 +1946,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
// patmatDebug("dpll\n"+ cnfString(f))
- val start = startTimer(patmatAnaDPLL)
+ val start = Statistics.startTimer(patmatAnaDPLL)
val satisfiableWithModel: Model =
if (f isEmpty) EmptyModel
@@ -1983,7 +1984,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
}
}
- stopTimer(patmatAnaDPLL, start)
+ Statistics.stopTimer(patmatAnaDPLL, start)
satisfiableWithModel
}
@@ -2291,7 +2292,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
def makeCondPessimistic(tm: TreeMaker)(recurse: TreeMaker => Cond): Cond = makeCond(tm)(recurse)
}
- val start = startTimer(patmatAnaReach)
+ val start = Statistics.startTimer(patmatAnaReach)
// use the same approximator so we share variables,
// but need different conditions depending on whether we're conservatively looking for failure or success
@@ -2340,7 +2341,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
}
}
- stopTimer(patmatAnaReach, start)
+ Statistics.stopTimer(patmatAnaReach, start)
if (reachable) None else Some(caseIndex)
} catch {
@@ -2428,7 +2429,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
// - back off (to avoid crying exhaustive too often) when:
// - there are guards -->
// - there are extractor calls (that we can't secretly/soundly) rewrite
- val start = startTimer(patmatAnaExhaust)
+ val start = Statistics.startTimer(patmatAnaExhaust)
var backoff = false
object exhaustivityApproximation extends TreeMakersToConds(prevBinder) {
def makeCondExhaustivity(tm: TreeMaker)(recurse: TreeMaker => Cond): Cond = tm match {
@@ -2503,7 +2504,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
val pruned = CounterExample.prune(counterExamples).map(_.toString).sorted
- stopTimer(patmatAnaExhaust, start)
+ Statistics.stopTimer(patmatAnaExhaust, start)
pruned
} catch {
case e : CNFBudgetExceeded =>
@@ -3186,3 +3187,13 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
}
}
}
+
+object PatternMatchingStats {
+ val patmatNanos = Statistics.newTimer ("time spent in patmat", "patmat")
+ val patmatAnaDPLL = Statistics.newSubTimer (" of which DPLL", patmatNanos)
+ val patmatCNF = Statistics.newSubTimer (" of which in CNF conversion", patmatNanos)
+ val patmatCNFSizes = Statistics.newQuantMap[Int, Statistics.Counter](" CNF size counts", "patmat")(Statistics.newCounter(""))
+ val patmatAnaVarEq = Statistics.newSubTimer (" of which variable equality", patmatNanos)
+ val patmatAnaExhaust = Statistics.newSubTimer (" of which in exhaustivity", patmatNanos)
+ val patmatAnaReach = Statistics.newSubTimer (" of which in unreachability", patmatNanos)
+}
diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
index 26cf246ed7..119bb0852c 100644
--- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
@@ -766,7 +766,16 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R
for (member <- clazz.info.decls)
if (member.isAnyOverride && !(clazz.thisType.baseClasses exists (hasMatchingSym(_, member)))) {
// for (bc <- clazz.info.baseClasses.tail) Console.println("" + bc + " has " + bc.info.decl(member.name) + ":" + bc.info.decl(member.name).tpe);//DEBUG
- unit.error(member.pos, member.toString() + " overrides nothing");
+
+ val nonMatching: List[Symbol] = clazz.info.member(member.name).alternatives.filterNot(_.owner == clazz).filterNot(_.isFinal)
+ def issueError(suffix: String) = unit.error(member.pos, member.toString() + " overrides nothing" + suffix);
+ nonMatching match {
+ case Nil =>
+ issueError("")
+ case ms =>
+ val superSigs = ms.map(m => m.defStringSeenAs(clazz.tpe memberType m)).mkString("\n")
+ issueError(s".\nNote: the super classes of ${member.owner} contain the following, non final members named ${member.name}:\n${superSigs}")
+ }
member resetFlag (OVERRIDE | ABSOVERRIDE) // Any Override
}
}
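
A small hypothetical example of the code this richer diagnostic targets: the `override` matches nothing because the parameter type differs, and the added note lists the near-miss, non-final members found in the base classes (message paraphrased):

    class Base {
      def show(x: Int): String = x.toString
    }
    class Derived extends Base {
      // error: method show overrides nothing.
      // Note: the super classes of class Derived contain the following,
      // non final members named show: def show(x: Int): String
      override def show(x: Long): String = x.toString
    }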
diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
index 2bdae4164a..b12ca4f0b4 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
@@ -16,8 +16,7 @@ import scala.collection.mutable
import scala.reflect.internal.util.BatchSourceFile
import mutable.ListBuffer
import symtab.Flags._
-import util.Statistics
-import util.Statistics._
+import reflect.internal.util.Statistics
// Suggestion check whether we can do without priming scopes with symbols of outer scopes,
// like the IDE does.
@@ -31,6 +30,7 @@ trait Typers extends Modes with Adaptations with Tags {
import global._
import definitions._
+ import TypersStats._
import patmat.DefaultOverrideMatchAttachment
final def forArgMode(fun: Tree, mode: Int) =
@@ -707,10 +707,16 @@ trait Typers extends Modes with Adaptations with Tags {
def silent[T](op: Typer => T,
reportAmbiguousErrors: Boolean = context.ambiguousErrors,
newtree: Tree = context.tree): SilentResult[T] = {
- val rawTypeStart = startCounter(rawTypeFailed)
- val findMemberStart = startCounter(findMemberFailed)
- val subtypeStart = startCounter(subtypeFailed)
- val failedSilentStart = startTimer(failedSilentNanos)
+ val rawTypeStart = Statistics.startCounter(rawTypeFailed)
+ val findMemberStart = Statistics.startCounter(findMemberFailed)
+ val subtypeStart = Statistics.startCounter(subtypeFailed)
+ val failedSilentStart = Statistics.startTimer(failedSilentNanos)
+ def stopStats() = {
+ Statistics.stopCounter(rawTypeFailed, rawTypeStart)
+ Statistics.stopCounter(findMemberFailed, findMemberStart)
+ Statistics.stopCounter(subtypeFailed, subtypeStart)
+ Statistics.stopTimer(failedSilentNanos, failedSilentStart)
+ }
try {
if (context.reportErrors ||
reportAmbiguousErrors != context.ambiguousErrors ||
@@ -724,8 +730,10 @@ trait Typers extends Modes with Adaptations with Tags {
context.undetparams = context1.undetparams
context.savedTypeBounds = context1.savedTypeBounds
context.namedApplyBlockInfo = context1.namedApplyBlockInfo
- if (context1.hasErrors) SilentTypeError(context1.errBuffer.head)
- else SilentResultValue(result)
+ if (context1.hasErrors) {
+ stopStats()
+ SilentTypeError(context1.errBuffer.head)
+ } else SilentResultValue(result)
} else {
assert(context.bufferErrors || isPastTyper, "silent mode is not available past typer")
withSavedContext(context){
@@ -739,10 +747,7 @@ trait Typers extends Modes with Adaptations with Tags {
case ex: TypeError =>
// fallback in case TypeError is still thrown
// @H this happens for example in cps annotation checker
- stopCounter(rawTypeFailed, rawTypeStart)
- stopCounter(findMemberFailed, findMemberStart)
- stopCounter(subtypeFailed, subtypeStart)
- stopTimer(failedSilentNanos, failedSilentStart)
+ stopStats()
SilentTypeError(TypeErrorWrapper(ex))
}
}
@@ -1841,16 +1846,13 @@ trait Typers extends Modes with Adaptations with Tags {
val pending = ListBuffer[AbsTypeError]()
// an object cannot be allowed to pass a reference to itself to a superconstructor
// because of initialization issues; bug #473
- for (arg <- superArgs ; tree <- arg) {
- val sym = tree.symbol
- if (sym != null && (sym.info.baseClasses contains clazz)) {
- if (sym.isModule)
- pending += SuperConstrReferenceError(tree)
- tree match {
- case This(qual) =>
- pending += SuperConstrArgsThisReferenceError(tree)
- case _ => ()
- }
+ foreachSubTreeBoundTo(superArgs, clazz) { tree =>
+ if (tree.symbol.isModule)
+ pending += SuperConstrReferenceError(tree)
+ tree match {
+ case This(qual) =>
+ pending += SuperConstrArgsThisReferenceError(tree)
+ case _ => ()
}
}
@@ -1884,7 +1886,39 @@ trait Typers extends Modes with Adaptations with Tags {
pending.foreach(ErrorUtils.issueTypeError)
}
- /** Check if a structurally defined method violates implementation restrictions.
+ // Check for SI-4842.
+ private def checkSelfConstructorArgs(ddef: DefDef, clazz: Symbol) {
+ val pending = ListBuffer[AbsTypeError]()
+ ddef.rhs match {
+ case Block(stats, expr) =>
+ val selfConstructorCall = stats.headOption.getOrElse(expr)
+ foreachSubTreeBoundTo(List(selfConstructorCall), clazz) {
+ case tree @ This(qual) =>
+ pending += SelfConstrArgsThisReferenceError(tree)
+ case _ => ()
+ }
+ case _ =>
+ }
+ pending.foreach(ErrorUtils.issueTypeError)
+ }
+
+ /**
+   * Run the provided function for each subtree of `trees` that
+   * is bound to a symbol with `clazz` as a base class.
+   *
+   * @param f This function can assume that `tree.symbol` is non-null.
+ */
+ private def foreachSubTreeBoundTo[A](trees: List[Tree], clazz: Symbol)(f: Tree => Unit): Unit =
+ for {
+ tree <- trees
+ subTree <- tree
+ } {
+ val sym = subTree.symbol
+ if (sym != null && sym.info.baseClasses.contains(clazz))
+ f(subTree)
+ }
+
+ /** Check if a structurally defined method violates implementation restrictions.
* A method cannot be called if it is a non-private member of a refinement type
* and if its parameter's types are any of:
* - the self-type of the refinement
@@ -2002,11 +2036,14 @@ trait Typers extends Modes with Adaptations with Tags {
transformedOrTyped(ddef.rhs, EXPRmode, tpt1.tpe)
}
- if (meth.isPrimaryConstructor && meth.isClassConstructor && !isPastTyper && !reporter.hasErrors && !meth.owner.isSubClass(AnyValClass)) {
- // At this point in AnyVal there is no supercall, which will blow up
- // in computeParamAliases; there's nothing to be computed for Anyval anyway.
+ if (meth.isClassConstructor && !isPastTyper && !reporter.hasErrors && !meth.owner.isSubClass(AnyValClass)) {
+ // At this point in AnyVal there is no supercall, which will blow up
+      // in computeParamAliases; there's nothing to be computed for AnyVal anyway.
+ if (meth.isPrimaryConstructor)
computeParamAliases(meth.owner, vparamss1, rhs1)
- }
+ else
+ checkSelfConstructorArgs(ddef, meth.owner)
+ }
if (tpt1.tpe.typeSymbol != NothingClass && !context.returnsSeen && rhs1.tpe.typeSymbol != NothingClass)
rhs1 = checkDead(rhs1)
@@ -3481,9 +3518,9 @@ trait Typers extends Modes with Adaptations with Tags {
def isCapturedExistential(sym: Symbol) =
(sym hasAllFlags (EXISTENTIAL | CAPTURED)) && {
- val start = startTimer(isReferencedNanos)
+ val start = Statistics.startTimer(isReferencedNanos)
try !isReferencedFrom(context, sym)
- finally stopTimer(isReferencedNanos, start)
+ finally Statistics.stopTimer(isReferencedNanos, start)
}
def packCaptured(tpe: Type): Type = {
@@ -3987,7 +4024,8 @@ trait Typers extends Modes with Adaptations with Tags {
ReturnWithoutTypeError(tree, enclMethod.owner)
} else {
context.enclMethod.returnsSeen = true
- val expr1: Tree = typed(expr, EXPRmode | BYVALmode, restpt.tpe)
+ val expr1: Tree = typed(expr, EXPRmode | BYVALmode | RETmode, restpt.tpe)
+
// Warn about returning a value if no value can be returned.
if (restpt.tpe.typeSymbol == UnitClass) {
// The typing in expr1 says expr is Unit (it has already been coerced if
@@ -4109,10 +4147,10 @@ trait Typers extends Modes with Adaptations with Tags {
* insert an implicit conversion.
*/
def tryTypedApply(fun: Tree, args: List[Tree]): Tree = {
- val start = startTimer(failedApplyNanos)
+ val start = Statistics.startTimer(failedApplyNanos)
def onError(typeError: AbsTypeError): Tree = {
- stopTimer(failedApplyNanos, start)
+ Statistics.stopTimer(failedApplyNanos, start)
// If the problem is with raw types, convert to existentials and try again.
// See #4712 for a case where this situation arises,
@@ -4175,8 +4213,8 @@ trait Typers extends Modes with Adaptations with Tags {
typed1(tree, mode & ~PATTERNmode | EXPRmode, pt)
} else {
val funpt = if (isPatternMode) pt else WildcardType
- val appStart = startTimer(failedApplyNanos)
- val opeqStart = startTimer(failedOpEqNanos)
+ val appStart = Statistics.startTimer(failedApplyNanos)
+ val opeqStart = Statistics.startTimer(failedOpEqNanos)
def onError(reportError: => Tree): Tree = {
fun match {
@@ -4184,14 +4222,14 @@ trait Typers extends Modes with Adaptations with Tags {
if !isPatternMode && nme.isOpAssignmentName(newTermName(name.decode)) =>
val qual1 = typedQualifier(qual)
if (treeInfo.isVariableOrGetter(qual1)) {
- stopTimer(failedOpEqNanos, opeqStart)
+ Statistics.stopTimer(failedOpEqNanos, opeqStart)
convertToAssignment(fun, qual1, name, args)
} else {
- stopTimer(failedApplyNanos, appStart)
+ Statistics.stopTimer(failedApplyNanos, appStart)
reportError
}
case _ =>
- stopTimer(failedApplyNanos, appStart)
+ Statistics.stopTimer(failedApplyNanos, appStart)
reportError
}
}
@@ -4200,7 +4238,7 @@ trait Typers extends Modes with Adaptations with Tags {
if ((mode & EXPRmode) != 0) tree else context.tree) match {
case SilentResultValue(fun1) =>
val fun2 = if (stableApplication) stabilizeFun(fun1, mode, pt) else fun1
- incCounter(typedApplyCount)
+ Statistics.incCounter(typedApplyCount)
def isImplicitMethod(tpe: Type) = tpe match {
case mt: MethodType => mt.isImplicit
case _ => false
@@ -4976,7 +5014,7 @@ trait Typers extends Modes with Adaptations with Tags {
typedSelect(qual1, nme.CONSTRUCTOR)
case Select(qual, name) =>
- incCounter(typedSelectCount)
+ Statistics.incCounter(typedSelectCount)
var qual1 = checkDead(typedQualifier(qual, mode))
if (name.isTypeName) qual1 = checkStable(qual1)
@@ -5008,7 +5046,7 @@ trait Typers extends Modes with Adaptations with Tags {
else tree1
case Ident(name) =>
- incCounter(typedIdentCount)
+ Statistics.incCounter(typedIdentCount)
if ((name == nme.WILDCARD && (mode & (PATTERNmode | FUNmode)) == PATTERNmode) ||
(name == tpnme.WILDCARD && (mode & TYPEmode) != 0))
tree setType makeFullyDefined(pt)
@@ -5083,15 +5121,9 @@ trait Typers extends Modes with Adaptations with Tags {
indentTyping()
var alreadyTyped = false
+ val startByType = Statistics.pushTimerClass(byTypeNanos, tree.getClass)
+ Statistics.incCounter(visitsByType, tree.getClass)
try {
- if (Statistics.enabled) {
- val t = currentTime()
- if (pendingTreeTypes.nonEmpty) {
- microsByType(pendingTreeTypes.head) += ((t - typerTime) / 1000).toInt
- }
- typerTime = t
- pendingTreeTypes = tree.getClass :: pendingTreeTypes
- }
if (context.retyping &&
(tree.tpe ne null) && (tree.tpe.isErroneous || !(tree.tpe <:< pt))) {
tree.tpe = null
@@ -5145,14 +5177,7 @@ trait Typers extends Modes with Adaptations with Tags {
}
finally {
deindentTyping()
-
- if (Statistics.enabled) {
- val t = currentTime()
- microsByType(pendingTreeTypes.head) += ((t - typerTime) / 1000).toInt
- visitsByType(pendingTreeTypes.head) += 1
- typerTime = t
- pendingTreeTypes = pendingTreeTypes.tail
- }
+ Statistics.popTimerClass(byTypeNanos, startByType)
}
}
@@ -5328,3 +5353,22 @@ trait Typers extends Modes with Adaptations with Tags {
}
}
+object TypersStats {
+ import reflect.internal.TypesStats._
+ import reflect.internal.BaseTypeSeqsStats._
+ val typedIdentCount = Statistics.newCounter("#typechecked identifiers")
+ val typedSelectCount = Statistics.newCounter("#typechecked selections")
+ val typedApplyCount = Statistics.newCounter("#typechecked applications")
+ val rawTypeFailed = Statistics.newSubCounter (" of which in failed", rawTypeCount)
+ val subtypeFailed = Statistics.newSubCounter(" of which in failed", subtypeCount)
+ val findMemberFailed = Statistics.newSubCounter(" of which in failed", findMemberCount)
+ val compoundBaseTypeSeqCount = Statistics.newSubCounter(" of which for compound types", baseTypeSeqCount)
+ val typerefBaseTypeSeqCount = Statistics.newSubCounter(" of which for typerefs", baseTypeSeqCount)
+ val singletonBaseTypeSeqCount = Statistics.newSubCounter(" of which for singletons", baseTypeSeqCount)
+ val failedSilentNanos = Statistics.newSubTimer ("time spent in failed", typerNanos)
+ val failedApplyNanos = Statistics.newSubTimer (" failed apply", typerNanos)
+ val failedOpEqNanos = Statistics.newSubTimer (" failed op=", typerNanos)
+ val isReferencedNanos = Statistics.newSubTimer ("time spent ref scanning", typerNanos)
+ val visitsByType = Statistics.newByClass ("#visits by tree node", "typer")(Statistics.newCounter(""))
+ val byTypeNanos = Statistics.newByClassTimerStack("time spent by tree node", typerNanos)
+}
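
The new `checkSelfConstructorArgs` above (SI-4842) extends the existing superconstructor restriction to self-constructor invocations: the arguments of `this(...)` may not refer to the instance being constructed. A hypothetical example of what is now rejected:

    class C(a: Any) {
      // error (roughly): self constructor arguments cannot reference unconstructed `this`
      def this() = this(this)
    }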
diff --git a/src/compiler/scala/tools/nsc/util/Statistics.scala b/src/compiler/scala/tools/nsc/util/Statistics.scala
deleted file mode 100644
index 087111a7ba..0000000000
--- a/src/compiler/scala/tools/nsc/util/Statistics.scala
+++ /dev/null
@@ -1,200 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
- * @author Martin Odersky
- */
-
-package scala.tools.nsc
-package util
-
-class Statistics extends scala.reflect.internal.util.Statistics {
-
- var nodeByType = new ClassCounts
-
- var microsByType = new ClassCounts
- var visitsByType = new ClassCounts
- var pendingTreeTypes: List[Class[_]] = List()
- var typerTime: Long = 0L
-
- val typedApplyCount = new Counter
- val typedIdentCount = new Counter
- val typedSelectCount = new Counter
- val typerNanos = new Timer
- val classReadNanos = new Timer
-
- val failedApplyNanos = new Timer
- val failedOpEqNanos = new Timer
- val failedSilentNanos = new Timer
-
- val implicitSearchCount = new Counter
- val implicitNanos = new Timer
- val oftypeImplicitHits = new Counter
- val inscopeImplicitHits = new Counter
-
- val triedImplicits = new Counter
- val plausiblyCompatibleImplicits = new Counter
- val matchingImplicits = new Counter
- val typedImplicits = new Counter
- val foundImplicits = new Counter
-
- val inscopeSucceedNanos = new Timer
- val inscopeFailNanos = new Timer
- val oftypeSucceedNanos = new Timer
- val oftypeFailNanos = new Timer
- val implicitCacheHits = new Counter
- val implicitCacheMisses = new Counter
- val improvesCount = new Counter
- val improvesCachedCount = new Counter
- val subtypeAppInfos = new SubCounter(subtypeCount)
- val subtypeImprovCount = new SubCounter(subtypeCount)
- val subtypeETNanos = new Timer
- val matchesPtNanos = new Timer
- val isReferencedNanos = new Timer
- val ctr1 = new Counter
- val ctr2 = new Counter
- val ctr3 = new Counter
- val ctr4 = new Counter
- val counter1: SubCounter = new SubCounter(subtypeCount)
- val counter2: SubCounter = new SubCounter(subtypeCount)
- val timer1: Timer = new Timer
- val timer2: Timer = new Timer
-
- val macroExpandCount = new Counter
- val macroExpandNanos = new Timer
-
- val patmatNanos = new Timer
- val patmatAnaDPLL = new Timer
- val patmatAnaVarEq = new Timer
- val patmatCNF = new Timer
- val patmatAnaExhaust = new Timer
- val patmatAnaReach = new Timer
- val patmatCNFSizes = new collection.mutable.HashMap[Int, Int] withDefaultValue 0
-}
-
-object Statistics extends Statistics
-
-abstract class StatisticsInfo {
-
- import Statistics._
-
- val global: Global
- import global._
-
- var phasesShown = List("parser", "typer", "patmat", "erasure", "cleanup")
-
- def countNodes(tree: Tree, counts: ClassCounts) {
- for (t <- tree) counts(t.getClass) += 1
- }
-
- def showRelative(base: Long)(value: Long) =
- value+showPercent(value, base)
-
- def showRelTyper(timer: Timer) =
- timer+showPercent(timer.nanos, typerNanos.nanos)
-
- def showRelPatmat(timer: Timer) =
- timer+showPercent(timer.nanos, patmatNanos.nanos)
-
- def showCounts[T](counts: scala.collection.mutable.Map[T, Int]) =
- counts.toSeq.sortWith(_._2 > _._2).map {
- case (cls: Class[_], cnt) =>
- cls.toString.substring(cls.toString.lastIndexOf("$") + 1)+": "+cnt
- case (o, cnt) =>
- o.toString +": "+cnt
- }
-
- def print(phase: Phase) = if (phasesShown contains phase.name) {
- inform("*** Cumulative statistics at phase " + phase)
- inform("#created tree nodes : " + nodeCount)
- inform("#created tree nodes by type: "+showCounts(nodeByType))
- if (phase.name != "parser") {
- val counts = new ClassCounts
- for (u <- currentRun.units; t <- u.body) counts(t.getClass) += 1
- inform("#retained nodes : " + counts.values.sum)
- inform("#retained nodes by type : " + showCounts(counts))
- inform("#typechecked identifiers : " + typedIdentCount)
- inform("#typechecked selections : " + typedSelectCount)
- inform("#typechecked applications: " + typedApplyCount)
- inform("#raw type creations : " + rawTypeCount)
- inform(" of which in failed : " + rawTypeFailed)
- inform(" of which in implicits : " + rawTypeImpl)
- inform("#unique types : " + uniqueTypeCount)
- inform("#symbols : " + symbolCount)
- inform(" of which type symbols : " + typeSymbolCount)
- inform(" of which class symbols : " + classSymbolCount)
- inform("#base type seqs : " + baseTypeSeqCount)
- inform("avg base type seq length : " + baseTypeSeqLenTotal.value.toFloat / baseTypeSeqCount.value)
- inform("#singleton base type seqs: " + singletonBaseTypeSeqCount)
- inform("#compound base type seqs : " + compoundBaseTypeSeqCount)
- inform("#typeref base type seqs : " + typerefBaseTypeSeqCount)
- inform("#findMember ops : " + findMemberCount)
- inform(" of which in failed : " + findMemberFailed)
- inform(" of which in implicits : " + findMemberImpl)
- inform("#notfound member : " + noMemberCount)
- inform("#multiple member : " + multMemberCount)
- inform("#asSeenFrom ops : " + asSeenFromCount)
- inform("#subtype : " + subtypeCount)
- inform(" of which in failed : " + subtypeFailed)
- inform(" of which in implicits : " + subtypeImpl)
- inform(" of which in app impl : " + subtypeAppInfos)
- inform(" of which in improv : " + subtypeImprovCount)
- inform("#sametype : " + sametypeCount)
- inform("#toplevel lub : " + lubCount)
- inform("#all lub : " + nestedLubCount)
- inform("ms type-flow-analysis: " + analysis.timer.millis)
-
- if (phase.name == "typer") {
- inform("time spent typechecking : " + showRelTyper(typerNanos))
- inform("time classfilereading : " + showRelTyper(classReadNanos))
- inform("time spent in implicits : " + showRelTyper(implicitNanos))
- inform(" successful in scope : " + showRelTyper(inscopeSucceedNanos))
- inform(" failed in scope : " + showRelTyper(inscopeFailNanos))
- inform(" successful of type : " + showRelTyper(oftypeSucceedNanos))
- inform(" failed of type : " + showRelTyper(oftypeFailNanos))
- inform(" assembling parts : " + showRelTyper(subtypeETNanos))
- inform(" matchesPT : " + showRelTyper(matchesPtNanos))
- inform("implicit cache hits : " + showRelative(implicitCacheHits.value + implicitCacheMisses.value)(implicitCacheHits.value))
- inform("time spent in failed : " + showRelTyper(failedSilentNanos))
- inform(" failed apply : " + showRelTyper(failedApplyNanos))
- inform(" failed op= : " + showRelTyper(failedOpEqNanos))
- inform("time spent ref scanning : " + showRelTyper(isReferencedNanos))
- inform("time spent in lubs : " + showRelTyper(lubNanos))
- inform("micros by tree node : " + showCounts(microsByType))
- inform("#visits by tree node : " + showCounts(visitsByType))
- val average = new ClassCounts
- for (c <- microsByType.keysIterator) average(c) = microsByType(c)/visitsByType(c)
- inform("avg micros by tree node : " + showCounts(average))
- inform("time spent in <:< : " + showRelTyper(subtypeNanos))
- inform("time spent in findmember : " + showRelTyper(findMemberNanos))
- inform("time spent in asSeenFrom : " + showRelTyper(asSeenFromNanos))
- inform("#implicit searches : " + implicitSearchCount)
- inform("#tried, plausible, matching, typed, found implicits: "+triedImplicits+", "+plausiblyCompatibleImplicits+", "+matchingImplicits+", "+typedImplicits+", "+foundImplicits)
- inform("#implicit improves tests : " + improvesCount)
- inform("#implicit improves cached : " + improvesCachedCount)
- inform("#implicit inscope hits : " + inscopeImplicitHits)
- inform("#implicit oftype hits : " + oftypeImplicitHits)
- inform("#macro expansions : " + macroExpandCount)
- inform("#time spent in macroExpand : " + showRelTyper(macroExpandNanos))
- }
-
- if (ctr1 != null) inform("#ctr1 : " + ctr1)
- if (ctr2 != null) inform("#ctr2 : " + ctr2)
- if (ctr3 != null) inform("#ctr3 : " + ctr3)
- if (ctr4 != null) inform("#ctr4 : " + ctr4)
- if (counter1 != null) inform("#counter1 : " + counter1)
- if (counter2 != null) inform("#counter2 : " + counter2)
- if (timer1 != null) inform("#timer1 : " + timer1)
- if (timer2 != null) inform("#timer2 : " + timer2)
- //for (t <- uniques.iterator) println("unique: "+t)
-
- if (phase.name == "patmat") {
- inform("time spent in patmat : " + patmatNanos )
- inform(" of which DPLL : " + showRelPatmat(patmatAnaDPLL ))
- inform("of which in CNF conversion : " + showRelPatmat(patmatCNF ))
- inform(" CNF size counts : " + showCounts(patmatCNFSizes ))
- inform("of which variable equality : " + showRelPatmat(patmatAnaVarEq ))
- inform(" of which in exhaustivity : " + showRelPatmat(patmatAnaExhaust))
- inform("of which in unreachability : " + showRelPatmat(patmatAnaReach ))
- }
- }
- }
-}
diff --git a/src/compiler/scala/tools/nsc/util/StatisticsInfo.scala b/src/compiler/scala/tools/nsc/util/StatisticsInfo.scala
new file mode 100644
index 0000000000..f6a1ae1414
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/util/StatisticsInfo.scala
@@ -0,0 +1,38 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2011 LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+package scala.tools.nsc
+package util
+
+import reflect.internal.util.Statistics
+
+abstract class StatisticsInfo {
+
+ val global: Global
+ import global._
+ import reflect.internal.TreesStats.nodeByType
+
+ val phasesShown = List("parser", "typer", "patmat", "erasure", "cleanup")
+
+ val retainedCount = Statistics.newCounter("#retained tree nodes")
+ val retainedByType = Statistics.newByClass("#retained tree nodes by type")(Statistics.newCounter(""))
+
+ def print(phase: Phase) = if (phasesShown contains phase.name) {
+ inform("*** Cumulative statistics at phase " + phase)
+ retainedCount.value = 0
+ for (c <- retainedByType.keys)
+ retainedByType(c).value = 0
+ for (u <- currentRun.units; t <- u.body) {
+ retainedCount.value += 1
+ retainedByType(t.getClass).value += 1
+ }
+
+ val quants =
+ if (phase.name == "parser") Seq(treeNodeCount, nodeByType, retainedCount, retainedByType)
+ else Statistics.allQuantities
+
+ for (q <- quants if q.showAt(phase.name)) inform(q.line)
+ }
+}
\ No newline at end of file
diff --git a/src/compiler/scala/tools/reflect/ToolBoxFactory.scala b/src/compiler/scala/tools/reflect/ToolBoxFactory.scala
index 7cf515425d..278f4e3ff7 100644
--- a/src/compiler/scala/tools/reflect/ToolBoxFactory.scala
+++ b/src/compiler/scala/tools/reflect/ToolBoxFactory.scala
@@ -126,7 +126,10 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
wrapper(currentTyper.silent(_.typed(expr, analyzer.EXPRmode, pt)) match {
case analyzer.SilentResultValue(result) =>
trace("success: ")(showAttributed(result, true, true, settings.Yshowsymkinds.value))
- var Block(dummies, unwrapped) = result
+ var (dummies, unwrapped) = result match {
+ case Block(dummies, unwrapped) => (dummies, unwrapped)
+ case unwrapped => (Nil, unwrapped)
+ }
var invertedIndex = freeTerms map (_.swap)
// todo. also fixup singleton types
unwrapped = new Transformer {
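
The change above replaces an irrefutable extractor pattern in a `var` definition with an explicit match: when the typechecked result is not a `Block`, the old `var Block(dummies, unwrapped) = result` threw a `MatchError` at runtime. A stand-alone illustration of the failure mode and of the safer shape, with hypothetical names:

    case class Block(stats: List[Int], expr: Int)

    val result: Any = 42                        // not a Block
    // val Block(stats, expr) = result          // would throw scala.MatchError

    val (stats, expr) = result match {          // the shape used in the patch
      case Block(ss, e) => (ss, e)
      case e            => (Nil, e)
    }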
diff --git a/src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala b/src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala
index 862b19d0a4..464ffc6fab 100644
--- a/src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala
+++ b/src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala
@@ -4,6 +4,7 @@ package scala.tools.selectivecps
import scala.tools.nsc.Global
import scala.tools.nsc.typechecker.Modes
+import scala.tools.nsc.MissingRequirementError
abstract class CPSAnnotationChecker extends CPSUtils with Modes {
val global: Global
@@ -170,6 +171,9 @@ abstract class CPSAnnotationChecker extends CPSUtils with Modes {
vprintln("yes we can!! (byval)")
return true
}
+ } else if ((mode & global.analyzer.RETmode) != 0) {
+ vprintln("yes we can!! (return)")
+ return true
}
}
false
@@ -183,6 +187,7 @@ abstract class CPSAnnotationChecker extends CPSUtils with Modes {
val patMode = (mode & global.analyzer.PATTERNmode) != 0
val exprMode = (mode & global.analyzer.EXPRmode) != 0
val byValMode = (mode & global.analyzer.BYVALmode) != 0
+ val retMode = (mode & global.analyzer.RETmode) != 0
val annotsTree = cpsParamAnnotation(tree.tpe)
val annotsExpected = cpsParamAnnotation(pt)
@@ -209,6 +214,12 @@ abstract class CPSAnnotationChecker extends CPSUtils with Modes {
val res = tree modifyType addMinusMarker
vprintln("adapted annotations (by val) of " + tree + " to " + res.tpe)
res
+ } else if (retMode && !hasPlusMarker(tree.tpe) && annotsTree.isEmpty && annotsExpected.nonEmpty) {
+ // add a marker annotation that will make tree.tpe behave as pt, subtyping wise
+ // tree will look like having no annotation
+ val res = tree modifyType (_ withAnnotations List(newPlusMarker()))
+ vprintln("adapted annotations (return) of " + tree + " to " + res.tpe)
+ res
} else tree
}
@@ -356,8 +367,9 @@ abstract class CPSAnnotationChecker extends CPSUtils with Modes {
* for a tree. All this should do is add annotations. */
override def addAnnotations(tree: Tree, tpe: Type): Type = {
+ import scala.util.control._
if (!cpsEnabled) {
- if (hasCpsParamTypes(tpe))
+ if (Exception.failAsValue(classOf[MissingRequirementError])(false)(hasCpsParamTypes(tpe)))
global.reporter.error(tree.pos, "this code must be compiled with the Scala continuations plugin enabled")
return tpe
}
@@ -464,6 +476,11 @@ abstract class CPSAnnotationChecker extends CPSUtils with Modes {
}
tpe
+ case ret @ Return(expr) =>
+ if (hasPlusMarker(expr.tpe))
+ ret setType expr.tpe
+ ret.tpe
+
case _ =>
tpe
}
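
`scala.util.control.Exception.failAsValue`, used above so that a `MissingRequirementError` (thrown when the continuations library classes are absent from the classpath) degrades to `false` instead of crashing the checker, evaluates its body and returns the supplied fallback when one of the listed exception classes is thrown. A stand-alone sketch:

    import scala.util.control.Exception._

    // yields -1 instead of propagating the NumberFormatException
    val n = failAsValue(classOf[NumberFormatException])(-1)("not a number".toInt)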
diff --git a/src/continuations/plugin/scala/tools/selectivecps/CPSUtils.scala b/src/continuations/plugin/scala/tools/selectivecps/CPSUtils.scala
index 3a1dc87a6a..765cde5a81 100644
--- a/src/continuations/plugin/scala/tools/selectivecps/CPSUtils.scala
+++ b/src/continuations/plugin/scala/tools/selectivecps/CPSUtils.scala
@@ -3,6 +3,7 @@
package scala.tools.selectivecps
import scala.tools.nsc.Global
+import scala.collection.mutable.ListBuffer
trait CPSUtils {
val global: Global
@@ -135,4 +136,43 @@ trait CPSUtils {
case _ => None
}
}
+
+ def isTailReturn(retExpr: Tree, body: Tree): Boolean = {
+ val removed = ListBuffer[Tree]()
+ removeTailReturn(body, removed)
+ removed contains retExpr
+ }
+
+ def removeTailReturn(tree: Tree, removed: ListBuffer[Tree]): Tree = tree match {
+ case Block(stms, r @ Return(expr)) =>
+ removed += r
+ treeCopy.Block(tree, stms, expr)
+
+ case Block(stms, expr) =>
+ treeCopy.Block(tree, stms, removeTailReturn(expr, removed))
+
+ case If(cond, r1 @ Return(thenExpr), r2 @ Return(elseExpr)) =>
+ removed ++= Seq(r1, r2)
+ treeCopy.If(tree, cond, removeTailReturn(thenExpr, removed), removeTailReturn(elseExpr, removed))
+
+ case If(cond, thenExpr, elseExpr) =>
+ treeCopy.If(tree, cond, removeTailReturn(thenExpr, removed), removeTailReturn(elseExpr, removed))
+
+ case Try(block, catches, finalizer) =>
+ treeCopy.Try(tree,
+ removeTailReturn(block, removed),
+ (catches map (t => removeTailReturn(t, removed))).asInstanceOf[List[CaseDef]],
+ removeTailReturn(finalizer, removed))
+
+ case CaseDef(pat, guard, r @ Return(expr)) =>
+ removed += r
+ treeCopy.CaseDef(tree, pat, guard, expr)
+
+ case CaseDef(pat, guard, body) =>
+ treeCopy.CaseDef(tree, pat, guard, removeTailReturn(body, removed))
+
+ case _ =>
+ tree
+ }
+
}
diff --git a/src/continuations/plugin/scala/tools/selectivecps/SelectiveANFTransform.scala b/src/continuations/plugin/scala/tools/selectivecps/SelectiveANFTransform.scala
index 017c8d24fd..fe465aad0d 100644
--- a/src/continuations/plugin/scala/tools/selectivecps/SelectiveANFTransform.scala
+++ b/src/continuations/plugin/scala/tools/selectivecps/SelectiveANFTransform.scala
@@ -9,6 +9,8 @@ import scala.tools.nsc.plugins._
import scala.tools.nsc.ast._
+import scala.collection.mutable.ListBuffer
+
/**
* In methods marked @cps, explicitly name results of calls to other @cps methods
*/
@@ -46,10 +48,20 @@ abstract class SelectiveANFTransform extends PluginComponent with Transform with
// this would cause infinite recursion. But we could remove the
// ValDef case here.
- case dd @ DefDef(mods, name, tparams, vparamss, tpt, rhs) =>
+ case dd @ DefDef(mods, name, tparams, vparamss, tpt, rhs0) =>
debuglog("transforming " + dd.symbol)
atOwner(dd.symbol) {
+ val tailReturns = ListBuffer[Tree]()
+ val rhs = removeTailReturn(rhs0, tailReturns)
+ // throw an error if there is a Return tree which is not in tail position
+ rhs0 foreach {
+ case r @ Return(_) =>
+ if (!tailReturns.contains(r))
+ unit.error(r.pos, "return expressions in CPS code must be in tail position")
+ case _ => /* do nothing */
+ }
+
val rhs1 = transExpr(rhs, None, getExternalAnswerTypeAnn(tpt.tpe))
debuglog("result "+rhs1)
@@ -153,7 +165,6 @@ abstract class SelectiveANFTransform extends PluginComponent with Transform with
}
}
-
def transExpr(tree: Tree, cpsA: CPSInfo, cpsR: CPSInfo): Tree = {
transTailValue(tree, cpsA, cpsR) match {
case (Nil, b) => b
diff --git a/src/library/scala/Predef.scala b/src/library/scala/Predef.scala
index 65e8549ae1..99bd7f0736 100644
--- a/src/library/scala/Predef.scala
+++ b/src/library/scala/Predef.scala
@@ -121,16 +121,16 @@ object Predef extends LowPriorityImplicits {
// Deprecated
- @deprecated("Use sys.error(message) instead", "2.9.0")
+ @deprecated("Use `sys.error(message)` instead", "2.9.0")
def error(message: String): Nothing = sys.error(message)
- @deprecated("Use sys.exit() instead", "2.9.0")
+ @deprecated("Use `sys.exit()` instead", "2.9.0")
def exit(): Nothing = sys.exit()
- @deprecated("Use sys.exit(status) instead", "2.9.0")
+ @deprecated("Use `sys.exit(status)` instead", "2.9.0")
def exit(status: Int): Nothing = sys.exit(status)
- @deprecated("Use formatString.format(args: _*) or arg.formatted(formatString) instead", "2.9.0")
+ @deprecated("Use `formatString.format(args: _*)` or `arg.formatted(formatString)` instead", "2.9.0")
def format(text: String, xs: Any*) = augmentString(text).format(xs: _*)
// errors and asserts -------------------------------------------------
@@ -219,7 +219,7 @@ object Predef extends LowPriorityImplicits {
final class Ensuring[A](val __resultOfEnsuring: A) extends AnyVal {
// `__resultOfEnsuring` must be a public val to allow inlining.
// See comments in ArrowAssoc for more.
- @deprecated("Use __resultOfEnsuring instead", "2.10.0")
+ @deprecated("Use `__resultOfEnsuring` instead", "2.10.0")
def x = __resultOfEnsuring
def ensuring(cond: Boolean): A = { assert(cond); __resultOfEnsuring }
@@ -255,7 +255,7 @@ object Predef extends LowPriorityImplicits {
// being confused why they get an ambiguous implicit conversion
// error. (`foo.x` used to produce this error since both
// any2Ensuring and any2ArrowAssoc pimped an `x` onto everything)
- @deprecated("Use __leftOfArrow instead", "2.10.0")
+ @deprecated("Use `__leftOfArrow` instead", "2.10.0")
def x = __leftOfArrow
@inline def -> [B](y: B): Tuple2[A, B] = Tuple2(__leftOfArrow, y)
@@ -320,30 +320,30 @@ object Predef extends LowPriorityImplicits {
// Primitive Widenings --------------------------------------------------------------
- @deprecated("Use a method in an AnyVal's companion object", "2.10.0") def byte2short(x: Byte): Short = x.toShort
- @deprecated("Use a method in an AnyVal's companion object", "2.10.0") def byte2int(x: Byte): Int = x.toInt
- @deprecated("Use a method in an AnyVal's companion object", "2.10.0") def byte2long(x: Byte): Long = x.toLong
- @deprecated("Use a method in an AnyVal's companion object", "2.10.0") def byte2float(x: Byte): Float = x.toFloat
- @deprecated("Use a method in an AnyVal's companion object", "2.10.0") def byte2double(x: Byte): Double = x.toDouble
+ @deprecated("Use `.toShort` for explicit conversion and `Byte.byte2short` for implicit conversion", "2.10.0") def byte2short(x: Byte): Short = x.toShort
+ @deprecated("Use `.toInt` for explicit conversion and `Byte.byte2int` for implicit conversion", "2.10.0") def byte2int(x: Byte): Int = x.toInt
+ @deprecated("Use `.toLong` for explicit conversion and `Byte.byte2long for implicit conversion", "2.10.0") def byte2long(x: Byte): Long = x.toLong
+ @deprecated("Use `.toFloat` for explicit conversion and `Byte.byte2float` for implicit conversion", "2.10.0") def byte2float(x: Byte): Float = x.toFloat
+ @deprecated("Use `.toDouble` for explicit conversion and `Byte.byte2double` for implicit conversion", "2.10.0") def byte2double(x: Byte): Double = x.toDouble
- @deprecated("Use a method in an AnyVal's companion object", "2.10.0") def short2int(x: Short): Int = x.toInt
- @deprecated("Use a method in an AnyVal's companion object", "2.10.0") def short2long(x: Short): Long = x.toLong
- @deprecated("Use a method in an AnyVal's companion object", "2.10.0") def short2float(x: Short): Float = x.toFloat
- @deprecated("Use a method in an AnyVal's companion object", "2.10.0") def short2double(x: Short): Double = x.toDouble
+ @deprecated("Use `.toInt` for explicit conversion and `Short.short2int` for implicit conversion", "2.10.0") def short2int(x: Short): Int = x.toInt
+ @deprecated("Use `.toLong` for explicit conversion and `Short.short2long` for implicit conversion", "2.10.0") def short2long(x: Short): Long = x.toLong
+ @deprecated("Use `.toFloat` for explicit conversion and `Short.short2float` for implicit conversion", "2.10.0") def short2float(x: Short): Float = x.toFloat
+ @deprecated("Use `.toDouble` for explicit conversion and `Short.short2double` for implicit conversion", "2.10.0") def short2double(x: Short): Double = x.toDouble
- @deprecated("Use a method in an AnyVal's companion object", "2.10.0") def char2int(x: Char): Int = x.toInt
- @deprecated("Use a method in an AnyVal's companion object", "2.10.0") def char2long(x: Char): Long = x.toLong
- @deprecated("Use a method in an AnyVal's companion object", "2.10.0") def char2float(x: Char): Float = x.toFloat
- @deprecated("Use a method in an AnyVal's companion object", "2.10.0") def char2double(x: Char): Double = x.toDouble
+ @deprecated("Use `.toInt` for explicit conversion and `Char.char2int` for implicit conversion", "2.10.0") def char2int(x: Char): Int = x.toInt
+ @deprecated("Use `.toLong` for explicit conversion and `Char.char2long` for implicit conversion", "2.10.0") def char2long(x: Char): Long = x.toLong
+ @deprecated("Use `.toFloat` for explicit conversion and `Char.char2float` for implicit conversion", "2.10.0") def char2float(x: Char): Float = x.toFloat
+ @deprecated("Use `.toDouble` for explicit conversion and `Char.char2double` for implicit conversion", "2.10.0") def char2double(x: Char): Double = x.toDouble
- @deprecated("Use a method in an AnyVal's companion object", "2.10.0") def int2long(x: Int): Long = x.toLong
- @deprecated("Use a method in an AnyVal's companion object", "2.10.0") def int2float(x: Int): Float = x.toFloat
- @deprecated("Use a method in an AnyVal's companion object", "2.10.0") def int2double(x: Int): Double = x.toDouble
+ @deprecated("Use `.toLong` for explicit conversion and `Int.int2long` for implicit conversion", "2.10.0") def int2long(x: Int): Long = x.toLong
+ @deprecated("Use `.toFloat` for explicit conversion and `Int.int2float` for implicit conversion", "2.10.0") def int2float(x: Int): Float = x.toFloat
+ @deprecated("Use `.toDouble` for explicit conversion and `Int.int2double` for implicit conversion", "2.10.0") def int2double(x: Int): Double = x.toDouble
- @deprecated("Use a method in an AnyVal's companion object", "2.10.0") def long2float(x: Long): Float = x.toFloat
- @deprecated("Use a method in an AnyVal's companion object", "2.10.0") def long2double(x: Long): Double = x.toDouble
+ @deprecated("Use `.toFloat` for explicit conversion and `Long.long2float` for implicit conversion", "2.10.0") def long2float(x: Long): Float = x.toFloat
+ @deprecated("Use `.toDouble` for explicit conversion and `Long.long2double` for implicit conversion", "2.10.0") def long2double(x: Long): Double = x.toDouble
- @deprecated("Use a method in an AnyVal's companion object", "2.10.0") def float2double(x: Float): Double = x.toDouble
+ @deprecated("Use `.toDouble` for explicit conversion and `Float.float2double` for implicit conversion", "2.10.0") def float2double(x: Float): Double = x.toDouble
// "Autoboxing" and "Autounboxing" ---------------------------------------------------
@@ -385,7 +385,7 @@ object Predef extends LowPriorityImplicits {
implicit def any2stringadd(x: Any) = new runtime.StringAdd(x)
implicit def unaugmentString(x: StringOps): String = x.repr
- @deprecated("Use StringCanBuildFrom", "2.10.0")
+ @deprecated("Use `StringCanBuildFrom`", "2.10.0")
def stringCanBuildFrom: CanBuildFrom[String, Char, String] = StringCanBuildFrom
implicit val StringCanBuildFrom: CanBuildFrom[String, Char, String] = new CanBuildFrom[String, Char, String] {
diff --git a/src/library/scala/collection/GenTraversableOnce.scala b/src/library/scala/collection/GenTraversableOnce.scala
index eadacd9209..e475865391 100644
--- a/src/library/scala/collection/GenTraversableOnce.scala
+++ b/src/library/scala/collection/GenTraversableOnce.scala
@@ -9,6 +9,8 @@
package scala.collection
import scala.reflect.ClassTag
+import scala.collection.generic.CanBuildFrom
+import scala.annotation.unchecked.{ uncheckedVariance => uV }
/** A template trait for all traversable-once objects which may be
* traversed in parallel.
@@ -552,4 +554,21 @@ trait GenTraversableOnce[+A] extends Any {
* containing all key/value pairs of type `(T, U)` of this $coll.
*/
def toMap[K, V](implicit ev: A <:< (K, V)): GenMap[K, V]
+
+ /** Converts this $coll to a Vector.
+ * $willNotTerminateInf
+ * @return a vector containing all elements of this $coll.
+ */
+ def toVector: Vector[A]
+
+ /** Converts this $coll into another by copying all elements.
+ * @tparam Col The collection type to build.
+ * @return a new collection containing all elements of this $coll.
+ *
+ * @usecase def convertTo[Col[_]]: Col[A]
+ * @inheritdoc
+ * $willNotTerminateInf
+ * @return a new collection containing all elements of this $coll.
+ */
+ def convertTo[Col[_]](implicit cbf: CanBuildFrom[Nothing, A, Col[A @uV]]): Col[A @uV]
}
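
A short usage sketch of the two conversions introduced above; `convertTo` builds the target collection through an implicitly supplied `CanBuildFrom`:

    import scala.collection.immutable

    val xs = List(1, 2, 3)

    val v  = xs.toVector                          // Vector(1, 2, 3)
    val s  = xs.convertTo[immutable.Set]          // Set(1, 2, 3), via CanBuildFrom[Nothing, Int, Set[Int]]
    val is = xs.convertTo[immutable.IndexedSeq]   // what toIndexedSeq now delegates to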
diff --git a/src/library/scala/collection/Iterator.scala b/src/library/scala/collection/Iterator.scala
index b2bbc8d888..5f369de3b7 100644
--- a/src/library/scala/collection/Iterator.scala
+++ b/src/library/scala/collection/Iterator.scala
@@ -11,6 +11,8 @@ package scala.collection
import mutable.ArrayBuffer
import annotation.migration
import immutable.Stream
+import scala.collection.generic.CanBuildFrom
+import scala.annotation.unchecked.{ uncheckedVariance => uV }
/** The `Iterator` object provides various functions for creating specialized iterators.
*
@@ -1138,6 +1140,7 @@ trait Iterator[+A] extends TraversableOnce[A] {
def toStream: Stream[A] =
if (self.hasNext) Stream.cons(self.next, self.toStream)
else Stream.empty[A]
+
/** Converts this iterator to a string.
*
diff --git a/src/library/scala/collection/MapLike.scala b/src/library/scala/collection/MapLike.scala
index b9b8f62574..75f9ff93db 100644
--- a/src/library/scala/collection/MapLike.scala
+++ b/src/library/scala/collection/MapLike.scala
@@ -227,30 +227,34 @@ self =>
def default(key: A): B =
throw new NoSuchElementException("key not found: " + key)
- /** Filters this map by retaining only keys satisfying a predicate.
- * @param p the predicate used to test keys
- * @return an immutable map consisting only of those key value pairs of this map where the key satisfies
- * the predicate `p`. The resulting map wraps the original map without copying any elements.
- */
- def filterKeys(p: A => Boolean): Map[A, B] = new AbstractMap[A, B] with DefaultMap[A, B] {
+ protected class FilteredKeys(p: A => Boolean) extends AbstractMap[A, B] with DefaultMap[A, B] {
override def foreach[C](f: ((A, B)) => C): Unit = for (kv <- self) if (p(kv._1)) f(kv)
def iterator = self.iterator.filter(kv => p(kv._1))
override def contains(key: A) = self.contains(key) && p(key)
def get(key: A) = if (!p(key)) None else self.get(key)
}
-
- /** Transforms this map by applying a function to every retrieved value.
- * @param f the function used to transform values of this map.
- * @return a map view which maps every key of this map
- * to `f(this(key))`. The resulting map wraps the original map without copying any elements.
+
+ /** Filters this map by retaining only keys satisfying a predicate.
+ * @param p the predicate used to test keys
+ * @return an immutable map consisting only of those key value pairs of this map where the key satisfies
+ * the predicate `p`. The resulting map wraps the original map without copying any elements.
*/
- def mapValues[C](f: B => C): Map[A, C] = new AbstractMap[A, C] with DefaultMap[A, C] {
+ def filterKeys(p: A => Boolean): Map[A, B] = new FilteredKeys(p)
+
+ protected class MappedValues[C](f: B => C) extends AbstractMap[A, C] with DefaultMap[A, C] {
override def foreach[D](g: ((A, C)) => D): Unit = for ((k, v) <- self) g((k, f(v)))
def iterator = for ((k, v) <- self.iterator) yield (k, f(v))
override def size = self.size
override def contains(key: A) = self.contains(key)
def get(key: A) = self.get(key).map(f)
}
+
+ /** Transforms this map by applying a function to every retrieved value.
+ * @param f the function used to transform values of this map.
+ * @return a map view which maps every key of this map
+ * to `f(this(key))`. The resulting map wraps the original map without copying any elements.
+ */
+ def mapValues[C](f: B => C): Map[A, C] = new MappedValues(f)
// The following 5 operations (updated, two times +, two times ++) should really be
// generic, returning This[B]. We need better covariance support to express that though.
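
The refactoring above does not change what `filterKeys` and `mapValues` return (both are still lazy wrappers over the original map), but naming the wrapper classes lets subclasses such as `LinkedHashMap`, later in this patch, refine them. Behaviour, for reference:

    val m = Map(1 -> "one", 2 -> "two", 3 -> "three")

    val odd  = m.filterKeys(_ % 2 == 1)   // view over m: Map(1 -> "one", 3 -> "three"), no copying
    val lens = m.mapValues(_.length)      // view over m: Map(1 -> 3, 2 -> 3, 3 -> 5), recomputed on access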
diff --git a/src/library/scala/collection/TraversableLike.scala b/src/library/scala/collection/TraversableLike.scala
index 3716a318d9..e5861f5760 100644
--- a/src/library/scala/collection/TraversableLike.scala
+++ b/src/library/scala/collection/TraversableLike.scala
@@ -616,6 +616,13 @@ trait TraversableLike[+A, +Repr] extends Any
def toTraversable: Traversable[A] = thisCollection
def toIterator: Iterator[A] = toStream.iterator
def toStream: Stream[A] = toBuffer.toStream
+ // Override to provide size hint.
+ override def convertTo[Col[_]](implicit cbf: CanBuildFrom[Nothing, A, Col[A @uV]]): Col[A @uV] = {
+ val b = cbf()
+ b.sizeHint(this)
+ b ++= thisCollection
+ b.result
+ }
/** Converts this $coll to a string.
*
diff --git a/src/library/scala/collection/TraversableOnce.scala b/src/library/scala/collection/TraversableOnce.scala
index 386ce2d95a..8dc6184d88 100644
--- a/src/library/scala/collection/TraversableOnce.scala
+++ b/src/library/scala/collection/TraversableOnce.scala
@@ -9,6 +9,7 @@
package scala.collection
import mutable.{ Buffer, Builder, ListBuffer, ArrayBuffer }
+import generic.CanBuildFrom
import annotation.unchecked.{ uncheckedVariance => uV }
import language.{implicitConversions, higherKinds}
import reflect.ClassTag
@@ -239,17 +240,25 @@ trait TraversableOnce[+A] extends Any with GenTraversableOnce[A] {
def toTraversable: Traversable[A]
- def toList: List[A] = (new ListBuffer[A] ++= seq).toList
+ def toList: List[A] = convertTo[List]
def toIterable: Iterable[A] = toStream
def toSeq: Seq[A] = toStream
- def toIndexedSeq: immutable.IndexedSeq[A] = immutable.IndexedSeq() ++ seq
+ def toIndexedSeq: immutable.IndexedSeq[A] = convertTo[immutable.IndexedSeq]
- def toBuffer[B >: A]: mutable.Buffer[B] = new ArrayBuffer[B] ++= seq
+ def toBuffer[B >: A]: mutable.Buffer[B] = convertTo[ArrayBuffer].asInstanceOf[mutable.Buffer[B]]
- def toSet[B >: A]: immutable.Set[B] = immutable.Set() ++ seq
+ def toSet[B >: A]: immutable.Set[B] = convertTo[immutable.Set].asInstanceOf[immutable.Set[B]]
+
+ def toVector: Vector[A] = convertTo[Vector]
+
+ def convertTo[Col[_]](implicit cbf: CanBuildFrom[Nothing, A, Col[A @uV]]): Col[A @uV] = {
+ val b = cbf()
+ b ++= seq
+ b.result
+ }
def toMap[T, U](implicit ev: A <:< (T, U)): immutable.Map[T, U] = {
val b = immutable.Map.newBuilder[T, U]
diff --git a/src/library/scala/collection/concurrent/TrieMap.scala b/src/library/scala/collection/concurrent/TrieMap.scala
index 08e9125bd8..2d8217551a 100644
--- a/src/library/scala/collection/concurrent/TrieMap.scala
+++ b/src/library/scala/collection/concurrent/TrieMap.scala
@@ -473,7 +473,11 @@ extends CNodeBase[K, V] {
private def computeSize(ct: TrieMap[K, V]): Int = {
var i = 0
var sz = 0
- val offset = math.abs(util.Random.nextInt()) % array.length
+ val offset =
+ if (array.length > 0)
+ //util.Random.nextInt(array.length) /* <-- benchmarks show that this causes observable contention */
+ scala.concurrent.forkjoin.ThreadLocalRandom.current.nextInt(0, array.length)
+ else 0
while (i < array.length) {
val pos = (i + offset) % array.length
array(pos) match {
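
The replacement above avoids the shared `util.Random` instance, whose single seed becomes a contention point when many threads size a `TrieMap` concurrently (as the benchmark note in the diff records), and also guards the empty-array case. Usage of the per-thread generator, for reference:

    import scala.concurrent.forkjoin.ThreadLocalRandom

    // per-thread generator, no shared state; nextInt(least, bound) is half-open
    val offset = ThreadLocalRandom.current.nextInt(0, 16)   // 0 <= offset < 16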
diff --git a/src/library/scala/collection/immutable/Vector.scala b/src/library/scala/collection/immutable/Vector.scala
index 1395a8f52d..d100bf93df 100644
--- a/src/library/scala/collection/immutable/Vector.scala
+++ b/src/library/scala/collection/immutable/Vector.scala
@@ -77,6 +77,8 @@ override def companion: GenericCompanion[Vector] = Vector
override def par = new ParVector(this)
+ override def toVector: Vector[A] = this
+
override def lengthCompare(len: Int): Int = length - len
private[collection] final def initIterator[B >: A](s: VectorIterator[B]) {
diff --git a/src/library/scala/collection/mutable/ArrayOps.scala b/src/library/scala/collection/mutable/ArrayOps.scala
index 01636eb54e..7a595f211d 100644
--- a/src/library/scala/collection/mutable/ArrayOps.scala
+++ b/src/library/scala/collection/mutable/ArrayOps.scala
@@ -64,7 +64,7 @@ abstract class ArrayOps[T] extends ArrayLike[T, Array[T]] with CustomParalleliza
* @param asTrav A function that converts elements of this array to rows - arrays of type `U`.
* @return An array obtained by concatenating rows of this array.
*/
- def flatten[U, To](implicit asTrav: T => collection.Traversable[U], m: ClassTag[U]): Array[U] = {
+ def flatten[U](implicit asTrav: T => collection.Traversable[U], m: ClassTag[U]): Array[U] = {
val b = Array.newBuilder[U]
b.sizeHint(map{case is: collection.IndexedSeq[_] => is.size case _ => 0}.sum)
for (xs <- this)
diff --git a/src/library/scala/collection/mutable/LinkedHashMap.scala b/src/library/scala/collection/mutable/LinkedHashMap.scala
index 4150cf9eba..5643e070f8 100644
--- a/src/library/scala/collection/mutable/LinkedHashMap.scala
+++ b/src/library/scala/collection/mutable/LinkedHashMap.scala
@@ -49,7 +49,8 @@ class LinkedHashMap[A, B] extends AbstractMap[A, B]
with Map[A, B]
with MapLike[A, B, LinkedHashMap[A, B]]
with HashTable[A, LinkedEntry[A, B]]
- with Serializable {
+ with Serializable
+{
override def empty = LinkedHashMap.empty[A, B]
override def size = tableSize
@@ -107,7 +108,25 @@ class LinkedHashMap[A, B] extends AbstractMap[A, B]
if (hasNext) { val res = (cur.key, cur.value); cur = cur.later; res }
else Iterator.empty.next
}
+
+ protected class FilteredKeys(p: A => Boolean) extends super.FilteredKeys(p) {
+ override def empty = LinkedHashMap.empty
+ }
+
+ override def filterKeys(p: A => Boolean): scala.collection.Map[A, B] = new FilteredKeys(p)
+ protected class MappedValues[C](f: B => C) extends super.MappedValues[C](f) {
+ override def empty = LinkedHashMap.empty
+ }
+
+ override def mapValues[C](f: B => C): scala.collection.Map[A, C] = new MappedValues(f)
+
+ protected class DefaultKeySet extends super.DefaultKeySet {
+ override def empty = LinkedHashSet.empty
+ }
+
+ override def keySet: scala.collection.Set[A] = new DefaultKeySet
+
override def keysIterator: Iterator[A] = new AbstractIterator[A] {
private var cur = firstEntry
def hasNext = cur ne null
diff --git a/src/library/scala/collection/parallel/ParIterableLike.scala b/src/library/scala/collection/parallel/ParIterableLike.scala
index a447f1b5e4..a7ec833193 100644
--- a/src/library/scala/collection/parallel/ParIterableLike.scala
+++ b/src/library/scala/collection/parallel/ParIterableLike.scala
@@ -851,6 +851,12 @@ self: ParIterableLike[T, Repr, Sequential] =>
override def toMap[K, V](implicit ev: T <:< (K, V)): immutable.ParMap[K, V] = toParMap[K, V, immutable.ParMap[K, V]](() => immutable.ParMap.newCombiner[K, V])
+ // TODO(@alex22): make these better
+ override def toVector: Vector[T] = seq.toVector
+
+ override def convertTo[Col[_]](implicit cbf: CanBuildFrom[Nothing, T, Col[T @uncheckedVariance]]): Col[T @uncheckedVariance] = seq.convertTo[Col]
+
+
/* tasks */
protected trait StrictSplitterCheckTask[R, Tp] extends Task[R, Tp] {
diff --git a/src/library/scala/collection/parallel/immutable/ParVector.scala b/src/library/scala/collection/parallel/immutable/ParVector.scala
index 1ece663a1d..e4099f1809 100644
--- a/src/library/scala/collection/parallel/immutable/ParVector.scala
+++ b/src/library/scala/collection/parallel/immutable/ParVector.scala
@@ -62,6 +62,8 @@ extends ParSeq[T]
override def seq: Vector[T] = vector
+ override def toVector: Vector[T] = vector
+
class ParVectorIterator(_start: Int, _end: Int) extends VectorIterator[T](_start, _end) with SeqSplitter[T] {
def remaining: Int = remainingElementCount
def dup: SeqSplitter[T] = (new ParVector(remainingVector)).splitter
diff --git a/src/library/scala/concurrent/impl/Future.scala b/src/library/scala/concurrent/impl/Future.scala
index 47534e398b..6a3487adde 100644
--- a/src/library/scala/concurrent/impl/Future.scala
+++ b/src/library/scala/concurrent/impl/Future.scala
@@ -44,13 +44,13 @@ private[concurrent] object Future {
}
def boxedType(c: Class[_]): Class[_] = if (c.isPrimitive) toBoxed(c) else c
-
- def apply[T](body: =>T)(implicit executor: ExecutionContext): Future[T] = {
+
+ private[impl] class PromiseCompletingTask[T](override val executor: ExecutionContext, body: => T)
+ extends Task {
val promise = new Promise.DefaultPromise[T]()
- //TODO: use `dispatchFuture`?
- executor.execute(new Runnable {
- def run = promise complete {
+ protected override def task() = {
+ promise complete {
try Right(body) catch {
case NonFatal(e) =>
// Commenting out reporting for now, since it produces too much output in the tests
@@ -58,9 +58,14 @@ private[concurrent] object Future {
Left(e)
}
}
- })
-
- promise.future
+ }
+ }
+
+ def apply[T](body: =>T)(implicit executor: ExecutionContext): Future[T] = {
+ val task = new PromiseCompletingTask(executor, body)
+ task.dispatch()
+
+ task.promise.future
}
private[impl] val throwableId: Throwable => Throwable = identity _
@@ -70,38 +75,77 @@ private[concurrent] object Future {
// so that it can be stolen from
// OR: a push to the local task queue should be so cheap that this is
// not even needed, but stealing is still possible
- private val _taskStack = new ThreadLocal[Stack[() => Unit]]()
+
+ private[impl] case class TaskStack(stack: Stack[Task], executor: ExecutionContext)
+
+ private val _taskStack = new ThreadLocal[TaskStack]()
+
+ private[impl] trait Task extends Runnable {
+ def executor: ExecutionContext
+
+ // run the original callback (no dispatch)
+ protected def task(): Unit
+
+ // we implement Runnable to avoid creating
+ // an extra object. run() runs ourselves with
+ // a TaskStack pushed, and then runs any
+ // other tasks that show up in the stack.
+ final override def run() = {
+ try {
+ val taskStack = TaskStack(Stack[Task](this), executor)
+ _taskStack set taskStack
+ while (taskStack.stack.nonEmpty) {
+ val next = taskStack.stack.pop()
+ require(next.executor eq executor)
+ try next.task() catch { case NonFatal(e) => executor reportFailure e }
+ }
+ } finally {
+ _taskStack.remove()
+ }
+ }
+
+ // send the task to the running executor.execute() via
+ // _taskStack, or start a new executor.execute()
+ def dispatch(force: Boolean = false): Unit =
+ _taskStack.get match {
+ case stack if (stack ne null) && (executor eq stack.executor) && !force => stack.stack push this
+ case _ => executor.execute(this)
+ }
+ }
+
+ private[impl] class ReleaseTask(override val executor: ExecutionContext, val elems: List[Task])
+ extends Task {
+ protected override def task() = {
+ _taskStack.get.stack.elems = elems
+ }
+ }
private[impl] def releaseStack(executor: ExecutionContext): Unit =
_taskStack.get match {
- case stack if (stack ne null) && stack.nonEmpty =>
- val tasks = stack.elems
- stack.clear()
+ case stack if (stack ne null) && stack.stack.nonEmpty =>
+ val tasks = stack.stack.elems
+ stack.stack.clear()
_taskStack.remove()
- dispatchFuture(executor, () => _taskStack.get.elems = tasks, true)
+ val release = new ReleaseTask(executor, tasks)
+ release.dispatch(force=true)
case null =>
// do nothing - there is no local batching stack anymore
case _ =>
_taskStack.remove()
}
- private[impl] def dispatchFuture(executor: ExecutionContext, task: () => Unit, force: Boolean = false): Unit =
- _taskStack.get match {
- case stack if (stack ne null) && !force => stack push task // FIXME we can't mix tasks aimed for different ExecutionContexts see: https://github.com/akka/akka/blob/v2.0.1/akka-actor/src/main/scala/akka/dispatch/Future.scala#L373
- case _ => executor.execute(new Runnable {
- def run() {
- try {
- val taskStack = Stack[() => Unit](task)
- _taskStack set taskStack
- while (taskStack.nonEmpty) {
- val next = taskStack.pop()
- try next() catch { case NonFatal(e) => executor reportFailure e }
- }
- } finally {
- _taskStack.remove()
- }
- }
- })
+ private[impl] class OnCompleteTask[T](override val executor: ExecutionContext, val onComplete: (Either[Throwable, T]) => Any)
+ extends Task {
+ private var value: Either[Throwable, T] = null
+
+ protected override def task() = {
+ require(value ne null) // dispatch(value) must be called before dispatch()
+ onComplete(value)
}
-
+
+ def dispatch(value: Either[Throwable, T]): Unit = {
+ this.value = value
+ dispatch()
+ }
+ }
}
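
The rewrite above replaces ad-hoc Runnables and the old dispatchFuture with a small Task abstraction: run() installs a thread-local TaskStack and drains it, while dispatch() either batches the task onto that stack (when already running for the same executor) or submits a fresh run(). A simplified sketch of the batching idea, not the actual implementation (it omits the executor-identity check and the force flag, and Batching is a made-up name):

  import java.util.concurrent.Executor
  import scala.collection.mutable.Stack

  object Batching {
    private val local = new ThreadLocal[Stack[Runnable]]

    def dispatch(executor: Executor)(body: => Unit): Unit = {
      val task = new Runnable { def run(): Unit = body }
      local.get match {
        case stack if stack ne null =>
          stack push task                       // already draining on this thread: batch it
        case _ =>
          executor.execute(new Runnable {
            def run(): Unit = {
              val stack = Stack[Runnable](task)  // seed a fresh thread-local batch
              local set stack
              try while (stack.nonEmpty) stack.pop().run()
              finally local.remove()
            }
          })
      }
    }
  }
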
diff --git a/src/library/scala/concurrent/impl/Promise.scala b/src/library/scala/concurrent/impl/Promise.scala
index 1d573ef818..c5060a2368 100644
--- a/src/library/scala/concurrent/impl/Promise.scala
+++ b/src/library/scala/concurrent/impl/Promise.scala
@@ -94,10 +94,10 @@ object Promise {
val resolved = resolveEither(value)
(try {
@tailrec
- def tryComplete(v: Either[Throwable, T]): List[Either[Throwable, T] => Unit] = {
+ def tryComplete(v: Either[Throwable, T]): List[Future.OnCompleteTask[T]] = {
getState match {
case raw: List[_] =>
- val cur = raw.asInstanceOf[List[Either[Throwable, T] => Unit]]
+ val cur = raw.asInstanceOf[List[Future.OnCompleteTask[T]]]
if (updateState(cur, v)) cur else tryComplete(v)
case _ => null
}
@@ -108,32 +108,21 @@ object Promise {
}) match {
case null => false
case cs if cs.isEmpty => true
- // this assumes that f(resolved) will go via dispatchFuture
- // and notifyCompleted (see onComplete below)
- case cs => cs.foreach(f => f(resolved)); true
+ case cs => cs.foreach(c => c.dispatch(resolved)); true
}
}
def onComplete[U](func: Either[Throwable, T] => U)(implicit executor: ExecutionContext): Unit = {
- val bound: Either[Throwable, T] => Unit = (either: Either[Throwable, T]) =>
- Future.dispatchFuture(executor, () => notifyCompleted(func, either))
+ val bound = new Future.OnCompleteTask[T](executor, func)
@tailrec //Tries to add the callback, if already completed, it dispatches the callback to be executed
def dispatchOrAddCallback(): Unit =
getState match {
- case r: Either[_, _] => bound(r.asInstanceOf[Either[Throwable, T]])
+ case r: Either[_, _] => bound.dispatch(r.asInstanceOf[Either[Throwable, T]])
case listeners: List[_] => if (updateState(listeners, bound :: listeners)) () else dispatchOrAddCallback()
}
dispatchOrAddCallback()
}
-
- private final def notifyCompleted(func: Either[Throwable, T] => Any, result: Either[Throwable, T])(implicit executor: ExecutionContext) {
- try {
- func(result)
- } catch {
- case NonFatal(e) => executor reportFailure e
- }
- }
}
/** An already completed Future is given its result at creation.
@@ -149,8 +138,8 @@ object Promise {
def tryComplete(value: Either[Throwable, T]): Boolean = false
def onComplete[U](func: Either[Throwable, T] => U)(implicit executor: ExecutionContext): Unit = {
- val completedAs = value.get // Avoid closing over "this"
- Future.dispatchFuture(executor, () => func(completedAs))
+ val completedAs = value.get
+ (new Future.OnCompleteTask(executor, func)).dispatch(completedAs)
}
def ready(atMost: Duration)(implicit permit: CanAwait): this.type = this
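
With callbacks now stored as Future.OnCompleteTask values, tryComplete and onComplete meet in the same lock-free pattern: read the state, then either dispatch immediately (already completed) or CAS the callback into the listener list and retry on contention. An illustrative stand-alone sketch of that add-or-dispatch loop, using a plain AtomicReference and a made-up CallbackCell class rather than the promise's internal state:

  import java.util.concurrent.atomic.AtomicReference
  import scala.annotation.tailrec

  final class CallbackCell[T] {
    // Left(listeners) while pending, Right(result) once completed.
    private val state = new AtomicReference[Either[List[T => Unit], T]](Left(Nil))

    @tailrec final def onComplete(cb: T => Unit): Unit = state.get match {
      case Right(result)       => cb(result)               // already completed: run now
      case pending @ Left(cbs) =>
        if (!state.compareAndSet(pending, Left(cb :: cbs))) onComplete(cb)  // lost a race: retry
    }

    @tailrec final def complete(result: T): Unit = state.get match {
      case Right(_)            => ()                         // completing twice is a no-op here
      case pending @ Left(cbs) =>
        if (state.compareAndSet(pending, Right(result))) cbs foreach (_(result))
        else complete(result)
    }
  }
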
diff --git a/src/library/scala/reflect/base/Base.scala b/src/library/scala/reflect/base/Base.scala
index 461eaa2e9e..490a9e8c03 100644
--- a/src/library/scala/reflect/base/Base.scala
+++ b/src/library/scala/reflect/base/Base.scala
@@ -451,7 +451,7 @@ class Base extends Universe { self =>
}
}
- def show(tree: Tree) = s"<tree ${tree.getClass}>"
+ def treeToString(tree: Tree) = s"<tree ${tree.getClass}>"
trait TermTree extends Tree
diff --git a/src/library/scala/reflect/base/Trees.scala b/src/library/scala/reflect/base/Trees.scala
index 298d229570..2814450ae3 100644
--- a/src/library/scala/reflect/base/Trees.scala
+++ b/src/library/scala/reflect/base/Trees.scala
@@ -28,11 +28,11 @@ trait Trees { self: Universe =>
def isType: Boolean
/** Obtains string representation of a tree */
- override def toString: String = show(this)
+ override def toString: String = treeToString(this)
}
/** Obtains string representation of a tree */
- def show(tree: Tree): String
+ protected def treeToString(tree: Tree): String
/** Tree is the basis for scala's abstract syntax. The nodes are
* implemented as case classes, and the parameters which initialize
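
Switching from the public show to a protected treeToString hook keeps Tree.toString meaningful in the bare base universe while letting richer layers, such as the api.Printers trait introduced below, override how trees render. The pattern in miniature, with made-up names:

  trait Core {
    class Item { override def toString = itemToString(this) }
    protected def itemToString(i: Item): String = s"<item ${i.getClass}>"
  }

  trait Pretty extends Core {
    override protected def itemToString(i: Item): String = "prettified item"
  }
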
diff --git a/src/library/scala/util/control/Breaks.scala b/src/library/scala/util/control/Breaks.scala
index bb84fcb5fe..d7f5a57f50 100644
--- a/src/library/scala/util/control/Breaks.scala
+++ b/src/library/scala/util/control/Breaks.scala
@@ -73,7 +73,7 @@ class Breaks {
*
* @note This might be different than the statically closest enclosing block!
*/
- def break() { throw breakException }
+ def break(): Nothing = { throw breakException }
}
/** An object that can be used for the break control abstraction.
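
Since a throw already has type Nothing, declaring break() with that result type lets it sit in value position without widening the surrounding expression. A REPL-style example: with the old Unit-returning signature the conditional below would have typed as AnyVal and the Int annotation would not compile.

  import scala.util.control.Breaks._

  breakable {
    val it = Iterator(1, 2, 3)
    while (true) {
      val x: Int = if (it.hasNext) it.next() else break()  // Int, because break(): Nothing
      println(x)
    }
  }
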
diff --git a/src/reflect/scala/reflect/api/Printers.scala b/src/reflect/scala/reflect/api/Printers.scala
new file mode 100644
index 0000000000..7f4ff8a7fb
--- /dev/null
+++ b/src/reflect/scala/reflect/api/Printers.scala
@@ -0,0 +1,94 @@
+package scala.reflect
+package api
+
+import java.io.{ PrintWriter, StringWriter }
+
+trait Printers { self: Universe =>
+
+ trait TreePrinter {
+ def print(args: Any*)
+ protected var printTypes = false
+ protected var printIds = false
+ protected var printKinds = false
+ def withTypes: this.type = { printTypes = true; this }
+ def withoutTypes: this.type = { printTypes = false; this }
+ def withIds: this.type = { printIds = true; this }
+ def withoutIds: this.type = { printIds = false; this }
+ def withKinds: this.type = { printKinds = true; this }
+ def withoutKinds: this.type = { printKinds = false; this }
+ }
+
+ case class BooleanFlag(val value: Option[Boolean])
+ object BooleanFlag {
+ import language.implicitConversions
+ implicit def booleanToBooleanFlag(value: Boolean): BooleanFlag = BooleanFlag(Some(value))
+ implicit def optionToBooleanFlag(value: Option[Boolean]): BooleanFlag = BooleanFlag(value)
+ }
+
+ protected def render(what: Any, mkPrinter: PrintWriter => TreePrinter, printTypes: BooleanFlag = None, printIds: BooleanFlag = None, printKinds: BooleanFlag = None): String = {
+ val buffer = new StringWriter()
+ val writer = new PrintWriter(buffer)
+ var printer = mkPrinter(writer)
+ printTypes.value.map(printTypes => if (printTypes) printer.withTypes else printer.withoutTypes)
+ printIds.value.map(printIds => if (printIds) printer.withIds else printer.withoutIds)
+ printKinds.value.map(printKinds => if (printKinds) printer.withKinds else printer.withoutKinds)
+ printer.print(what)
+ writer.flush()
+ buffer.toString
+ }
+
+ /** By default trees are printed with `show` */
+ override protected def treeToString(tree: Tree) = show(tree)
+
+ /** Renders a prettified representation of a tree.
+ * Typically it looks very close to the Scala code it represents.
+ * This function is used in Tree.toString.
+ */
+ def show(tree: Tree, printTypes: BooleanFlag = None, printIds: BooleanFlag = None, printKinds: BooleanFlag = None): String =
+ render(tree, newTreePrinter(_), printTypes, printIds, printKinds)
+
+ /** Hook to define what `show(tree)` means.
+ */
+ def newTreePrinter(out: PrintWriter): TreePrinter
+
+ /** Renders internal structure of a tree.
+ */
+ def showRaw(tree: Tree, printTypes: BooleanFlag = None, printIds: BooleanFlag = None, printKinds: BooleanFlag = None): String =
+ render(tree, newRawTreePrinter(_), printTypes, printIds, printKinds)
+
+ /** Hook to define what `showRaw(tree)` means.
+ */
+ def newRawTreePrinter(out: PrintWriter): TreePrinter
+
+ /** Renders a prettified representation of a symbol.
+ */
+ def show(sym: Symbol): String = sym.toString
+
+ /** Renders internal structure of a symbol.
+ */
+ def showRaw(sym: Symbol): String = render(sym, newRawTreePrinter(_))
+
+ /** Renders a prettified representation of a type.
+ */
+ def show(tpe: Type): String = tpe.toString
+
+ /** Renders internal structure of a type.
+ */
+ def showRaw(tpe: Type): String = render(tpe, newRawTreePrinter(_))
+
+ /** Renders a prettified representation of a name.
+ */
+ def show(name: Name): String
+
+ /** Renders internal structure of a name.
+ */
+ def showRaw(name: Name): String = name.toString
+
+ /** Renders a prettified representation of a flag set.
+ */
+ def show(flags: FlagSet): String
+
+ /** Renders internal structure of a flag set.
+ */
+ def showRaw(flags: FlagSet): String = flags.toString
+}
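
The BooleanFlag wrapper and its implicit conversions let callers pass plain Booleans (or Options) for the printTypes/printIds/printKinds knobs, with None meaning "keep the printer's default". A hedged, REPL-style usage sketch written against an arbitrary universe so it assumes no particular entry point; dump is a made-up helper:

  def dump(u: scala.reflect.api.Universe)(tree: u.Tree): Unit = {
    println(u.show(tree))                                        // pretty form, close to Scala source
    println(u.showRaw(tree, printIds = true, printKinds = true)) // raw structure with symbol ids/kinds
  }
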
diff --git a/src/reflect/scala/reflect/api/Symbols.scala b/src/reflect/scala/reflect/api/Symbols.scala
index 1d266dc778..eb9921a31a 100644
--- a/src/reflect/scala/reflect/api/Symbols.scala
+++ b/src/reflect/scala/reflect/api/Symbols.scala
@@ -116,11 +116,28 @@ trait Symbols extends base.Symbols { self: Universe =>
*/
def isValue: Boolean
+ /** Does this symbol denote a stable value? */
+ def isStable: Boolean
+
/** Does this symbol represent a mutable value?
* If yes, `isTerm` and `isValue` are also guaranteed to be true.
*/
def isVariable: Boolean
+ /** Does this symbol represent a getter or a setter?
+ */
+ def isAccessor: Boolean
+
+ /** Does this symbol represent a getter of a field?
+ * If yes, `isTerm` and `isMethod` are also guaranteed to be true.
+ */
+ def isGetter: Boolean
+
+ /** Does this symbol represent a setter of a field?
+ * If yes, `isTerm` and `isMethod` are also guaranteed to be true.
+ */
+ def isSetter: Boolean
+
/** Does this symbol represent the definition of a package?
* If yes, `isTerm` is also guaranteed to be true.
*/
@@ -177,6 +194,25 @@ trait Symbols extends base.Symbols { self: Universe =>
*/
def isErroneous : Boolean
+ /** Can this symbol be loaded by a reflective mirror?
+ *
+ * Scalac relies on `ScalaSignature' annotation to retain symbols across compilation runs.
+ * Such annotations (also called "pickles") are applied on top-level classes and include information
+ * about all symbols reachable from the annotee. However, local symbols (e.g. classes or definitions local to a block)
+ * are typically unreachable and information about them gets lost.
+ *
+ * This method is useful for macro writers who wish to save certain ASTs to be used at runtime.
+ * With `isLocatable' it's possible to check whether a tree can be retained as is, or it needs special treatment.
+ */
+ def isLocatable: Boolean
+
+ /** Is this symbol static (i.e. with no outer instance)?
+ * Q: When exactly is a sym marked as STATIC?
+ * A: If it's a member of a toplevel object, or of an object contained in a toplevel object, or any number of levels deep.
+ * http://groups.google.com/group/scala-internals/browse_thread/thread/d385bcd60b08faf6
+ */
+ def isStatic: Boolean
+
/** The type signature of this symbol seen as a member of given type `site`.
*/
def typeSignatureIn(site: Type): Type
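
These predicates surface information the compiler's internal Symbol already tracks, so macro and reflection clients can query it directly. A heavily hedged sketch of how a universe-generic client might consume them; describe is a made-up helper and it assumes the predicates are reachable from Universe#Symbol as declared above:

  def describe(u: scala.reflect.api.Universe)(sym: u.Symbol): String = {
    val kind =
      if (sym.isGetter) "getter"
      else if (sym.isSetter) "setter"
      else if (sym.isVariable) "variable"
      else if (sym.isStable) "stable value"
      else "other"
    s"$kind (static=${sym.isStatic}, locatable=${sym.isLocatable})"
  }
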
diff --git a/src/reflect/scala/reflect/api/TreePrinters.scala b/src/reflect/scala/reflect/api/TreePrinters.scala
deleted file mode 100644
index 08a08e7b90..0000000000
--- a/src/reflect/scala/reflect/api/TreePrinters.scala
+++ /dev/null
@@ -1,87 +0,0 @@
-package scala.reflect
-package api
-
-import java.io.{ PrintWriter, StringWriter }
-
-trait TreePrinters { self: Universe =>
-
- trait TreePrinter {
- def print(args: Any*)
- protected var typesPrinted = false
- protected var uniqueIds = false
- def withTypesPrinted: this.type = { typesPrinted = true; this }
- def withUniqueIds: this.type = { uniqueIds = true; this }
- }
-
- def show(tree: Tree): String = show(tree, newTreePrinter)
-
- def show(tree: Tree, mkPrinter: PrintWriter => TreePrinter): String = {
- val buffer = new StringWriter()
- val writer = new PrintWriter(buffer)
- val printer = mkPrinter(writer)
- printer.print(tree)
- writer.flush()
- buffer.toString
- }
-
- def showRaw(tree: Tree): String = show(tree, new RawTreePrinter(_))
-
- /** Hook to define what `show(tree)` means.
- */
- def newTreePrinter(out: PrintWriter): TreePrinter
-
- // emits more or less verbatim representation of the provided tree
- // [Eugene] todo. needs to be refined
- // http://groups.google.com/group/scala-user/browse_thread/thread/de5a5be2e083cf8e
- class RawTreePrinter(out: PrintWriter) extends TreePrinter {
- def print(args: Any*): Unit = args foreach {
- case EmptyTree =>
- print("EmptyTree")
- case tree @ TypeTree() =>
- print("TypeTree()")
- if (tree.tpe != null)
- print(".setType(", tree.tpe, ")")
- else if (tree.original != null)
- print(".setOriginal(", tree.original, ")")
- case Literal(Constant(s: String)) =>
- print("Literal(Constant(\"" + s + "\"))")
- case tree: Tree =>
- print(tree.productPrefix+"(")
- val it = tree.productIterator
- while (it.hasNext) {
- it.next() match {
- case name: Name if uniqueIds && tree.hasSymbol && tree.symbol != NoSymbol =>
- print(tree.symbol.name, "#", tree.symbol.id)
- case arg =>
- print(arg)
- }
- print(if (it.hasNext) ", " else "")
- }
- print(")")
- if (typesPrinted)
- print(".setType(", tree.tpe, ")")
- case list: List[_] =>
- print("List(")
- val it = list.iterator
- while (it.hasNext) {
- print(it.next())
- print(if (it.hasNext) ", " else "")
- }
- print(")")
- case mods: Modifiers =>
- val parts = collection.mutable.ListBuffer[String]()
- parts += mods.flagString
- if (mods.privateWithin.toString.nonEmpty)
- parts += "newTypeName(\"" + mods.privateWithin.toString + "\")"
- if (mods.annotations.nonEmpty)
- parts += mods.annotations map showRaw mkString ("List(", ", ", ")")
- print(parts mkString ("Modifiers(", ", ", ")"))
- case name: Name =>
- if (name.isTermName) print("newTermName(\"") else print("newTypeName(\"")
- print(name.toString)
- print("\")")
- case arg =>
- out.print(arg)
- }
- }
-}
diff --git a/src/reflect/scala/reflect/api/Universe.scala b/src/reflect/scala/reflect/api/Universe.scala
index 002cd2e673..85d8adc44f 100644
--- a/src/reflect/scala/reflect/api/Universe.scala
+++ b/src/reflect/scala/reflect/api/Universe.scala
@@ -9,7 +9,7 @@ abstract class Universe extends base.Universe
with FlagSets
with Names
with Trees
- with TreePrinters
+ with Printers
with Constants
with Positions
with Mirrors
diff --git a/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala b/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala
index 5f78671012..fa758edf05 100644
--- a/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala
+++ b/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala
@@ -8,7 +8,7 @@ package internal
// todo implement in terms of BitSet
import scala.collection.{ mutable, immutable }
import math.max
-import util.Statistics._
+import util.Statistics
/** A base type sequence (BaseTypeSeq) is an ordered sequence spanning all the base types
* of a type. It characterized by the following two laws:
@@ -28,6 +28,7 @@ import util.Statistics._
trait BaseTypeSeqs {
this: SymbolTable =>
import definitions._
+ import BaseTypeSeqsStats._
protected def newBaseTypeSeq(parents: List[Type], elems: Array[Type]) =
new BaseTypeSeq(parents, elems)
@@ -38,8 +39,8 @@ trait BaseTypeSeqs {
*/
class BaseTypeSeq protected[BaseTypeSeqs] (private[BaseTypeSeqs] val parents: List[Type], private[BaseTypeSeqs] val elems: Array[Type]) {
self =>
- incCounter(baseTypeSeqCount)
- incCounter(baseTypeSeqLenTotal, elems.length)
+ Statistics.incCounter(baseTypeSeqCount)
+ Statistics.incCounter(baseTypeSeqLenTotal, elems.length)
/** The number of types in the sequence */
def length: Int = elems.length
@@ -231,3 +232,8 @@ trait BaseTypeSeqs {
val CyclicInheritance = new Throwable
}
+
+object BaseTypeSeqsStats {
+ val baseTypeSeqCount = Statistics.newCounter("#base type seqs")
+ val baseTypeSeqLenTotal = Statistics.newRelCounter("avg base type seq length", baseTypeSeqCount)
+}
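
This is the shape the statistics rework gives each client file: the quantities move out of the instantiable SymbolTable cake into a standalone *Stats object, and call sites are qualified with Statistics so they stay cheap (and null-tolerant) when statistics are disabled. The same pattern, sketched with made-up names:

  import scala.reflect.internal.util.Statistics

  object FooStats {
    val fooCount    = Statistics.newCounter("#foo ops")
    val fooLenTotal = Statistics.newRelCounter("avg foo length", fooCount)
  }

  class Foo(elems: Array[Int]) {
    import FooStats._
    Statistics.incCounter(fooCount)
    Statistics.incCounter(fooLenTotal, elems.length)
  }
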
diff --git a/src/reflect/scala/reflect/internal/Definitions.scala b/src/reflect/scala/reflect/internal/Definitions.scala
index a9d9b06621..320cd3ddae 100644
--- a/src/reflect/scala/reflect/internal/Definitions.scala
+++ b/src/reflect/scala/reflect/internal/Definitions.scala
@@ -179,7 +179,7 @@ trait Definitions extends api.StandardDefinitions {
val EmptyPackage: ModuleSymbol = rootMirror.EmptyPackage
@deprecated("Moved to rootMirror.EmptyPackageClass", "2.10.0")
- val EmptyPackageClass: ClassSymbol = rootMirror.RootClass
+ val EmptyPackageClass: ClassSymbol = rootMirror.EmptyPackageClass
// It becomes tricky to create dedicated objects for other symbols because
// of initialization order issues.
diff --git a/src/reflect/scala/reflect/internal/TreePrinters.scala b/src/reflect/scala/reflect/internal/Printers.scala
index 6d035c8b9d..82a8c42f7c 100644
--- a/src/reflect/scala/reflect/internal/TreePrinters.scala
+++ b/src/reflect/scala/reflect/internal/Printers.scala
@@ -11,7 +11,7 @@ package internal
import java.io.{ OutputStream, PrintWriter, StringWriter, Writer }
import Flags._
-trait TreePrinters extends api.TreePrinters { self: SymbolTable =>
+trait Printers extends api.Printers { self: SymbolTable =>
//nsc import treeInfo.{ IsTrue, IsFalse }
@@ -62,8 +62,9 @@ trait TreePrinters extends api.TreePrinters { self: SymbolTable =>
protected val indentStep = 2
protected var indentString = " " // 40
- typesPrinted = settings.printtypes.value
- uniqueIds = settings.uniqid.value
+ printTypes = settings.printtypes.value
+ printIds = settings.uniqid.value
+ printKinds = settings.Yshowsymkinds.value
protected def doPrintPositions = settings.Xprintpos.value
def indent() = indentMargin += indentStep
@@ -320,7 +321,7 @@ trait TreePrinters extends api.TreePrinters { self: SymbolTable =>
case Function(vparams, body) =>
print("("); printValueParams(vparams); print(" => ", body, ")")
- if (uniqueIds && tree.symbol != null) print("#"+tree.symbol.id)
+ if (printIds && tree.symbol != null) print("#"+tree.symbol.id)
case Assign(lhs, rhs) =>
print(lhs, " = ", rhs)
@@ -429,7 +430,7 @@ trait TreePrinters extends api.TreePrinters { self: SymbolTable =>
printColumn(whereClauses, " forSome { ", ";", "}")
// SelectFromArray is no longer visible in reflect.internal.
-// eliminated until we figure out what we will do with both TreePrinters and
+// eliminated until we figure out what we will do with both Printers and
// SelectFromArray.
// case SelectFromArray(qualifier, name, _) =>
// print(qualifier); print(".<arr>"); print(symName(tree, name))
@@ -437,7 +438,7 @@ trait TreePrinters extends api.TreePrinters { self: SymbolTable =>
case tree =>
xprintTree(this, tree)
}
- if (typesPrinted && tree.isTerm && !tree.isEmpty) {
+ if (printTypes && tree.isTerm && !tree.isEmpty) {
print("{", if (tree.tpe eq null) "<null>" else tree.tpe.toString, "}")
}
}
@@ -475,4 +476,167 @@ trait TreePrinters extends api.TreePrinters { self: SymbolTable =>
def close = { /* do nothing */ }
def flush = { /* do nothing */ }
}
+
+ // provides footnotes for types
+ private var typeCounter = 0
+ private val typeMap = collection.mutable.WeakHashMap[Type, Int]()
+
+ def newRawTreePrinter(writer: PrintWriter): RawTreePrinter = new RawTreePrinter(writer)
+ def newRawTreePrinter(stream: OutputStream): RawTreePrinter = newRawTreePrinter(new PrintWriter(stream))
+ def newRawTreePrinter(): RawTreePrinter = newRawTreePrinter(new PrintWriter(ConsoleWriter))
+
+ // emits more or less verbatim representation of the provided tree
+ class RawTreePrinter(out: PrintWriter) extends super.TreePrinter {
+ private var depth = 0
+ private var footnotes = collection.mutable.Map[Int, Type]()
+ private var printingFootnotes = false
+ private var printTypesInFootnotes = true
+
+ def print(args: Any*): Unit = {
+ if (depth == 0 && args.length == 1 && args(0) != null && args(0).isInstanceOf[Type])
+ printTypesInFootnotes = false
+
+ depth += 1
+ args foreach {
+ case EmptyTree =>
+ print("EmptyTree")
+ case emptyValDef: AnyRef if emptyValDef eq self.emptyValDef =>
+ print("emptyValDef")
+ case Literal(Constant(value)) =>
+ def print(s: String) = this.print("Literal(Constant(" + s + "))")
+ value match {
+ case s: String => print("\"" + s + "\"")
+ case null => print(null)
+ case _ => print(value.toString)
+ }
+ case tree: Tree =>
+ val hasSymbol = tree.hasSymbol && tree.symbol != NoSymbol
+ val isError = hasSymbol && tree.symbol.name.toString == nme.ERROR.toString
+ printProduct(
+ tree,
+ preamble = _ => {
+ print(tree.productPrefix)
+ if (printTypes && tree.tpe != null) print(tree.tpe)
+ },
+ body = {
+ case name: Name =>
+ if (isError) {
+ if (isError) print("<")
+ print(name)
+ if (isError) print(": error>")
+ } else if (hasSymbol) {
+ tree match {
+ case _: Ident | _: Select | _: SelectFromTypeTree => print(tree.symbol)
+ case _ => print(tree.symbol.name)
+ }
+ } else {
+ print(name)
+ }
+ case arg =>
+ print(arg)
+ },
+ postamble = {
+ case tree @ TypeTree() if tree.original != null => print(".setOriginal(", tree.original, ")")
+ case _ => // do nothing
+ })
+ case sym: Symbol =>
+ if (sym.isStatic && (sym.isClass || sym.isModule)) print(sym.fullName)
+ else print(sym.name)
+ if (printIds) print("#", sym.id)
+ if (printKinds) print("#", sym.abbreviatedKindString)
+ case NoType =>
+ print("NoType")
+ case NoPrefix =>
+ print("NoPrefix")
+ case tpe: Type if printTypesInFootnotes && !printingFootnotes =>
+ val index = typeMap.getOrElseUpdate(tpe, { typeCounter += 1; typeCounter })
+ footnotes(index) = tpe
+ print("[", index, "]")
+ case mods: Modifiers =>
+ print("Modifiers(")
+ if (mods.flags != NoFlags || mods.privateWithin != tpnme.EMPTY || mods.annotations.nonEmpty) print(show(mods.flags))
+ if (mods.privateWithin != tpnme.EMPTY || mods.annotations.nonEmpty) { print(", "); print(mods.privateWithin) }
+ if (mods.annotations.nonEmpty) { print(", "); print(mods.annotations); }
+ print(")")
+ case name: Name =>
+ print(show(name))
+ case list: List[_] =>
+ print("List")
+ printIterable(list)
+ case product: Product =>
+ printProduct(product)
+ case arg =>
+ out.print(arg)
+ }
+ depth -= 1
+ if (depth == 0 && footnotes.nonEmpty && !printingFootnotes) {
+ printingFootnotes = true
+ out.println()
+ val typeIndices = footnotes.keys.toList.sorted
+ typeIndices.zipWithIndex foreach {
+ case (typeIndex, i) =>
+ print("[" + typeIndex + "] ")
+ print(footnotes(typeIndex))
+ if (i < typeIndices.length - 1) out.println()
+ }
+ }
+ }
+
+ def printProduct(
+ p: Product,
+ preamble: Product => Unit = p => print(p.productPrefix),
+ body: Any => Unit = print(_),
+ postamble: Product => Unit = p => print("")): Unit =
+ {
+ preamble(p)
+ printIterable(p.productIterator.toList, body = body)
+ postamble(p)
+ }
+
+ def printIterable(
+ iterable: List[_],
+ preamble: => Unit = print(""),
+ body: Any => Unit = print(_),
+ postamble: => Unit = print("")): Unit =
+ {
+ preamble
+ print("(")
+ val it = iterable.iterator
+ while (it.hasNext) {
+ body(it.next)
+ print(if (it.hasNext) ", " else "")
+ }
+ print(")")
+ postamble
+ }
+ }
+
+ def show(name: Name): String = name match {
+ // base.StandardNames
+ case tpnme.EMPTY => "tpnme.EMPTY"
+ case tpnme.ROOT => "tpnme.ROOT"
+ case tpnme.EMPTY_PACKAGE_NAME => "tpnme.EMPTY_PACKAGE_NAME"
+ case tpnme.WILDCARD => "tpnme.WILDCARD"
+ case nme.CONSTRUCTOR => "nme.CONSTRUCTOR"
+ case nme.NO_NAME => "nme.NO_NAME"
+ // api.StandardNames
+ case tpnme.ERROR => "tpnme.ERROR"
+ case nme.ERROR => "nme.ERROR"
+ case nme.EMPTY => "nme.EMPTY"
+ case tpnme.PACKAGE => "tpnme.PACKAGE"
+ case nme.PACKAGE => "nme.PACKAGE"
+ case _ =>
+ val prefix = if (name.isTermName) "newTermName(\"" else "newTypeName(\""
+ prefix + name.toString + "\")"
+ }
+
+ def show(flags: FlagSet): String = {
+ if (flags == NoFlags) nme.NoFlags.toString
+ else {
+ val s_flags = new collection.mutable.ListBuffer[String]
+ for (i <- 0 to 63 if (flags containsAll (1L << i)))
+ s_flags += flagToString(1L << i).replace("<", "").replace(">", "").toUpperCase
+ s_flags mkString " | "
+ }
+ }
}
diff --git a/src/reflect/scala/reflect/internal/SymbolTable.scala b/src/reflect/scala/reflect/internal/SymbolTable.scala
index cadd76b1ba..18adab7c68 100644
--- a/src/reflect/scala/reflect/internal/SymbolTable.scala
+++ b/src/reflect/scala/reflect/internal/SymbolTable.scala
@@ -28,7 +28,7 @@ abstract class SymbolTable extends makro.Universe
with AnnotationInfos
with AnnotationCheckers
with Trees
- with TreePrinters
+ with Printers
with Positions
with TypeDebugging
with Importers
@@ -40,7 +40,7 @@ abstract class SymbolTable extends makro.Universe
{
val gen = new TreeGen { val global: SymbolTable.this.type = SymbolTable.this }
- val treeBuild = gen
+ lazy val treeBuild = gen
def log(msg: => AnyRef): Unit
def abort(msg: String): Nothing = throw new FatalError(supplementErrorMessage(msg))
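
Making treeBuild a lazy val defers evaluation of gen until first use, presumably to sidestep an initialization-order problem where an eager val could capture a not-yet-initialized field during construction. A generic illustration with made-up classes:

  class Eager { val dependent = helper; val helper = "ready" }      // (new Eager).dependent == null
  class Lazy  { lazy val dependent = helper; val helper = "ready" } // (new Lazy).dependent == "ready"
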
diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala
index 288eb63332..4b0ceeb86b 100644
--- a/src/reflect/scala/reflect/internal/Symbols.scala
+++ b/src/reflect/scala/reflect/internal/Symbols.scala
@@ -8,18 +8,17 @@ package internal
import scala.collection.{ mutable, immutable }
import scala.collection.mutable.ListBuffer
-import util.Statistics._
+import util.Statistics
import Flags._
trait Symbols extends api.Symbols { self: SymbolTable =>
import definitions._
+ import SymbolsStats._
protected var ids = 0
val emptySymbolArray = new Array[Symbol](0)
- def symbolCount = ids // statistics
-
protected def nextId() = { ids += 1; ids }
/** Used for deciding in the IDE whether we can interrupt the compiler */
@@ -666,7 +665,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
def isClassLocalToConstructor = false
final def isDerivedValueClass =
- isClass && !hasFlag(PACKAGE | TRAIT) &&
+ isClass && !hasFlag(PACKAGE | TRAIT) &&
info.firstParent.typeSymbol == AnyValClass && !isPrimitiveValueClass
final def isMethodWithExtension =
@@ -2713,7 +2712,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
}
}
- incCounter(typeSymbolCount)
+ Statistics.incCounter(typeSymbolCount)
}
implicit val TypeSymbolTag = ClassTag[TypeSymbol](classOf[TypeSymbol])
@@ -2929,7 +2928,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
override def children = childSet
override def addChild(sym: Symbol) { childSet = childSet + sym }
- incCounter(classSymbolCount)
+ Statistics.incCounter(classSymbolCount)
}
implicit val ClassSymbolTag = ClassTag[ClassSymbol](classOf[ClassSymbol])
@@ -3188,4 +3187,11 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
def toList: List[TypeHistory] = this :: ( if (prev eq null) Nil else prev.toList )
}
+
+ Statistics.newView("#symbols")(ids)
+}
+
+object SymbolsStats {
+ val typeSymbolCount = Statistics.newCounter("#type symbols")
+ val classSymbolCount = Statistics.newCounter("#class symbols")
}
diff --git a/src/reflect/scala/reflect/internal/Trees.scala b/src/reflect/scala/reflect/internal/Trees.scala
index 75bb0e6d49..dd13dd4c4c 100644
--- a/src/reflect/scala/reflect/internal/Trees.scala
+++ b/src/reflect/scala/reflect/internal/Trees.scala
@@ -9,6 +9,7 @@ package internal
import Flags._
import base.Attachments
import collection.mutable.{ListBuffer, LinkedHashSet}
+import util.Statistics
trait Trees extends api.Trees { self: SymbolTable =>
@@ -18,6 +19,8 @@ trait Trees extends api.Trees { self: SymbolTable =>
val id = nodeCount // TODO: add to attachment?
nodeCount += 1
+ Statistics.incCounter(TreesStats.nodeByType, getClass)
+
@inline final def pos: Position = rawatt.pos
def pos_=(pos: Position): Unit = rawatt = (rawatt withPos pos)
def setPos(newpos: Position): this.type = { pos = newpos; this }
@@ -809,7 +812,7 @@ trait Trees extends api.Trees { self: SymbolTable =>
}
}
- // Belongs in TreeInfo but then I can't reach it from TreePrinters.
+ // Belongs in TreeInfo but then I can't reach it from Printers.
def isReferenceToScalaMember(t: Tree, Id: Name) = t match {
case Ident(Id) => true
case Select(Ident(nme.scala_), Id) => true
@@ -1592,4 +1595,11 @@ trait Trees extends api.Trees { self: SymbolTable =>
implicit val TypeBoundsTreeTag = ClassTag[TypeBoundsTree](classOf[TypeBoundsTree])
implicit val ExistentialTypeTreeTag = ClassTag[ExistentialTypeTree](classOf[ExistentialTypeTree])
implicit val TypeTreeTag = ClassTag[TypeTree](classOf[TypeTree])
+
+ val treeNodeCount = Statistics.newView("#created tree nodes")(nodeCount)
+}
+
+object TreesStats {
+ // statistics
+ val nodeByType = Statistics.newByClass("#created tree nodes by type")(Statistics.newCounter(""))
}
diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala
index 8a4394bf1d..d4b895bcb4 100644
--- a/src/reflect/scala/reflect/internal/Types.scala
+++ b/src/reflect/scala/reflect/internal/Types.scala
@@ -13,8 +13,7 @@ import mutable.ListBuffer
import Flags._
import scala.util.control.ControlThrowable
import scala.annotation.tailrec
-import util.Statistics._
-import language.postfixOps
+import util.Statistics
/* A standard type pattern match:
case ErrorType =>
@@ -73,9 +72,7 @@ import language.postfixOps
trait Types extends api.Types { self: SymbolTable =>
import definitions._
-
- //statistics
- def uniqueTypeCount = if (uniques == null) 0 else uniques.size
+ import TypesStats._
private var explainSwitch = false
private final val emptySymbolSet = immutable.Set.empty[Symbol]
@@ -681,8 +678,8 @@ trait Types extends api.Types { self: SymbolTable =>
if (isTrivial || phase.erasedTypes && pre.typeSymbol != ArrayClass) this
else {
// scala.tools.nsc.util.trace.when(pre.isInstanceOf[ExistentialType])("X "+this+".asSeenfrom("+pre+","+clazz+" = ") {
- incCounter(asSeenFromCount)
- val start = startTimer(asSeenFromNanos)
+ Statistics.incCounter(asSeenFromCount)
+ val start = Statistics.startTimer(asSeenFromNanos)
val m = new AsSeenFromMap(pre.normalize, clazz)
val tp = m apply this
val tp1 = existentialAbstraction(m.capturedParams, tp)
@@ -690,7 +687,7 @@ trait Types extends api.Types { self: SymbolTable =>
if (m.capturedSkolems.isEmpty) tp1
else deriveType(m.capturedSkolems, _.cloneSymbol setFlag CAPTURED)(tp1)
- stopTimer(asSeenFromNanos, start)
+ Statistics.stopTimer(asSeenFromNanos, start)
result
}
}
@@ -828,26 +825,26 @@ trait Types extends api.Types { self: SymbolTable =>
}
def stat_<:<(that: Type): Boolean = {
- incCounter(subtypeCount)
- val start = startTimer(subtypeNanos)
+ Statistics.incCounter(subtypeCount)
+ val start = Statistics.startTimer(subtypeNanos)
val result =
(this eq that) ||
(if (explainSwitch) explain("<:", isSubType, this, that)
else isSubType(this, that, AnyDepth))
- stopTimer(subtypeNanos, start)
+ Statistics.stopTimer(subtypeNanos, start)
result
}
/** Is this type a weak subtype of that type? True also for numeric types, i.e. Int weak_<:< Long.
*/
def weak_<:<(that: Type): Boolean = {
- incCounter(subtypeCount)
- val start = startTimer(subtypeNanos)
+ Statistics.incCounter(subtypeCount)
+ val start = Statistics.startTimer(subtypeNanos)
val result =
((this eq that) ||
(if (explainSwitch) explain("weak_<:", isWeakSubType, this, that)
else isWeakSubType(this, that)))
- stopTimer(subtypeNanos, start)
+ Statistics.stopTimer(subtypeNanos, start)
result
}
@@ -1020,8 +1017,8 @@ trait Types extends api.Types { self: SymbolTable =>
// See (t0851) for a situation where this happens.
val suspension: List[TypeVar] = if (this.isGround) null else suspendTypeVarsInType(this)
- incCounter(findMemberCount)
- val start = startTimer(findMemberNanos)
+ Statistics.incCounter(findMemberCount)
+ val start = Statistics.startTimer(findMemberNanos)
//Console.println("find member " + name.decode + " in " + this + ":" + this.baseClasses)//DEBUG
var members: Scope = null
@@ -1048,7 +1045,7 @@ trait Types extends api.Types { self: SymbolTable =>
!sym.isPrivateLocal ||
(bcs0.head.hasTransOwner(bcs.head)))) {
if (name.isTypeName || stableOnly && sym.isStable) {
- stopTimer(findMemberNanos, start)
+ Statistics.stopTimer(findMemberNanos, start)
if (suspension ne null) suspension foreach (_.suspended = false)
return sym
} else if (member == NoSymbol) {
@@ -1094,13 +1091,13 @@ trait Types extends api.Types { self: SymbolTable =>
} // while (!bcs.isEmpty)
excluded = excludedFlags
} // while (continue)
- stopTimer(findMemberNanos, start)
+ Statistics.stopTimer(findMemberNanos, start)
if (suspension ne null) suspension foreach (_.suspended = false)
if (members eq null) {
- if (member == NoSymbol) incCounter(noMemberCount)
+ if (member == NoSymbol) Statistics.incCounter(noMemberCount)
member
} else {
- incCounter(multMemberCount)
+ Statistics.incCounter(multMemberCount)
baseClasses.head.newOverloaded(this, members.toList)
}
}
@@ -1185,7 +1182,7 @@ trait Types extends api.Types { self: SymbolTable =>
override def isVolatile = underlying.isVolatile
override def widen: Type = underlying.widen
override def baseTypeSeq: BaseTypeSeq = {
- incCounter(singletonBaseTypeSeqCount)
+ Statistics.incCounter(singletonBaseTypeSeqCount)
underlying.baseTypeSeq prepend this
}
override def isHigherKinded = false // singleton type classifies objects, thus must be kind *
@@ -1536,7 +1533,7 @@ trait Types extends api.Types { self: SymbolTable =>
val bts = copyRefinedType(tpe.asInstanceOf[RefinedType], tpe.parents map varToParam, varToParam mapOver tpe.decls).baseTypeSeq
tpe.baseTypeSeqCache = bts lateMap paramToVar
} else {
- incCounter(compoundBaseTypeSeqCount)
+ Statistics.incCounter(compoundBaseTypeSeqCount)
tpe.baseTypeSeqCache = undetBaseTypeSeq
tpe.baseTypeSeqCache = if (tpe.typeSymbol.isRefinementClass)
tpe.memo(compoundBaseTypeSeq(tpe))(_.baseTypeSeq updateHead tpe.typeSymbol.tpe)
@@ -2392,7 +2389,7 @@ trait Types extends api.Types { self: SymbolTable =>
if (period != currentPeriod) {
tpe.baseTypeSeqPeriod = currentPeriod
if (!isValidForBaseClasses(period)) {
- incCounter(typerefBaseTypeSeqCount)
+ Statistics.incCounter(typerefBaseTypeSeqCount)
tpe.baseTypeSeqCache = undetBaseTypeSeq
tpe.baseTypeSeqCache = tpe.baseTypeSeqImpl
}
@@ -3702,7 +3699,7 @@ trait Types extends api.Types { self: SymbolTable =>
private var uniqueRunId = NoRunId
protected def unique[T <: Type](tp: T): T = {
- incCounter(rawTypeCount)
+ Statistics.incCounter(rawTypeCount)
if (uniqueRunId != currentRunId) {
uniques = util.HashSet[Type]("uniques", initialUniquesCapacity)
uniqueRunId = currentRunId
@@ -5104,7 +5101,7 @@ trait Types extends api.Types { self: SymbolTable =>
/** Do `tp1` and `tp2` denote equivalent types? */
def isSameType(tp1: Type, tp2: Type): Boolean = try {
- incCounter(sametypeCount)
+ Statistics.incCounter(sametypeCount)
subsametypeRecursions += 1
undoLog undoUnless {
isSameType1(tp1, tp2)
@@ -6308,14 +6305,14 @@ trait Types extends api.Types { self: SymbolTable =>
case List() => NothingClass.tpe
case List(t) => t
case _ =>
- incCounter(lubCount)
- val start = startTimer(lubNanos)
+ Statistics.incCounter(lubCount)
+ val start = Statistics.startTimer(lubNanos)
try {
lub(ts, lubDepth(ts))
} finally {
lubResults.clear()
glbResults.clear()
- stopTimer(lubNanos, start)
+ Statistics.stopTimer(lubNanos, start)
}
}
@@ -6431,7 +6428,7 @@ trait Types extends api.Types { self: SymbolTable =>
indent = indent + " "
assert(indent.length <= 100)
}
- incCounter(nestedLubCount)
+ Statistics.incCounter(nestedLubCount)
val res = lub0(ts)
if (printLubs) {
indent = indent stripSuffix " "
@@ -6456,14 +6453,14 @@ trait Types extends api.Types { self: SymbolTable =>
case List() => AnyClass.tpe
case List(t) => t
case ts0 =>
- incCounter(lubCount)
- val start = startTimer(lubNanos)
+ Statistics.incCounter(lubCount)
+ val start = Statistics.startTimer(lubNanos)
try {
glbNorm(ts0, lubDepth(ts0))
} finally {
lubResults.clear()
glbResults.clear()
- stopTimer(lubNanos, start)
+ Statistics.stopTimer(lubNanos, start)
}
}
@@ -6578,7 +6575,7 @@ trait Types extends api.Types { self: SymbolTable =>
}
// if (settings.debug.value) { println(indent + "glb of " + ts + " at depth "+depth); indent = indent + " " } //DEBUG
- incCounter(nestedLubCount)
+ Statistics.incCounter(nestedLubCount)
val res = glb0(ts)
// if (settings.debug.value) { indent = indent.substring(0, indent.length() - 2); log(indent + "glb of " + ts + " is " + res) }//DEBUG
@@ -6871,4 +6868,27 @@ trait Types extends api.Types { self: SymbolTable =>
implicit val TypeBoundsTag = ClassTag[TypeBounds](classOf[TypeBounds])
implicit val TypeRefTag = ClassTag[TypeRef](classOf[TypeRef])
implicit val TypeTagg = ClassTag[Type](classOf[Type])
+
+ Statistics.newView("#unique types") { if (uniques == null) 0 else uniques.size }
+}
+
+object TypesStats {
+ import BaseTypeSeqsStats._
+ val rawTypeCount = Statistics.newCounter ("#raw type creations")
+ val asSeenFromCount = Statistics.newCounter ("#asSeenFrom ops")
+ val subtypeCount = Statistics.newCounter ("#subtype ops")
+ val sametypeCount = Statistics.newCounter ("#sametype ops")
+ val lubCount = Statistics.newCounter ("#toplevel lubs/glbs")
+ val nestedLubCount = Statistics.newCounter ("#all lubs/glbs")
+ val findMemberCount = Statistics.newCounter ("#findMember ops")
+ val noMemberCount = Statistics.newSubCounter(" of which not found", findMemberCount)
+ val multMemberCount = Statistics.newSubCounter(" of which multiple overloaded", findMemberCount)
+ val typerNanos = Statistics.newTimer ("time spent typechecking", "typer")
+ val lubNanos = Statistics.newSubTimer ("time spent in lubs", typerNanos)
+ val subtypeNanos = Statistics.newSubTimer ("time spent in <:<", typerNanos)
+ val findMemberNanos = Statistics.newSubTimer ("time spent in findmember", typerNanos)
+ val asSeenFromNanos = Statistics.newSubTimer ("time spent in asSeenFrom", typerNanos)
+ val compoundBaseTypeSeqCount = Statistics.newSubCounter(" of which for compound types", baseTypeSeqCount)
+ val typerefBaseTypeSeqCount = Statistics.newSubCounter(" of which for typerefs", baseTypeSeqCount)
+ val singletonBaseTypeSeqCount = Statistics.newSubCounter(" of which for singletons", baseTypeSeqCount)
}
diff --git a/src/reflect/scala/reflect/internal/util/StatBase.scala b/src/reflect/scala/reflect/internal/util/StatBase.scala
deleted file mode 100644
index b033ff98bc..0000000000
--- a/src/reflect/scala/reflect/internal/util/StatBase.scala
+++ /dev/null
@@ -1,97 +0,0 @@
-package scala.reflect.internal.util
-
-class StatBase {
-
- private var _enabled = false
-
- def enabled = _enabled
- def enabled_=(cond: Boolean) = {
- if (cond && !_enabled) {
- val test = new Timer()
- val start = System.nanoTime()
- var total = 0L
- for (i <- 1 to 10000) {
- val time = System.nanoTime()
- total += System.nanoTime() - time
- }
- val total2 = System.nanoTime() - start
- println("Enabling statistics, measuring overhead = "+
- total/10000.0+"ns to "+total2/10000.0+"ns per timer")
- _enabled = true
- }
- }
-
- def currentTime() =
- if (_enabled) System.nanoTime() else 0L
-
- def showPercent(x: Double, base: Double) =
- if (base == 0) "" else " ("+"%2.1f".format(x / base * 100)+"%)"
-
- def incCounter(c: Counter) {
- if (_enabled) c.value += 1
- }
-
- def incCounter(c: Counter, delta: Int) {
- if (_enabled) c.value += delta
- }
-
- def startCounter(sc: SubCounter): IntPair =
- if (_enabled) sc.start() else null
-
- def stopCounter(sc: SubCounter, start: IntPair) {
- if (_enabled) sc.stop(start)
- }
-
- def startTimer(tm: Timer): LongPair =
- if (_enabled) tm.start() else null
-
- def stopTimer(tm: Timer, start: LongPair) {
- if (_enabled) tm.stop(start)
- }
-
- case class IntPair(x: Int, y: Int)
- case class LongPair(x: Long, y: Long)
-
- class Counter {
- var value: Int = 0
- override def toString = value.toString
- }
-
- class SubCounter(c: Counter) {
- var value: Int = 0
- def start(): IntPair =
- if (_enabled) IntPair(value, c.value) else null
- def stop(prev: IntPair) {
- if (_enabled) {
- val IntPair(value0, cvalue0) = prev
- value = value0 + c.value - cvalue0
- }
- }
- override def toString =
- value+showPercent(value, c.value)
- }
-
- class Timer {
- var nanos: Long = 0
- var timings = 0
- def start(): LongPair =
- if (_enabled) {
- timings += 1
- LongPair(nanos, System.nanoTime())
- } else null
- def stop(prev: LongPair) {
- if (_enabled) {
- val LongPair(nanos0, start) = prev
- nanos = nanos0 + System.nanoTime() - start
- timings += 1
- }
- }
- override def toString = (timings/2)+" spans, "+nanos.toString+"ns"
- }
-
- import Predef.Class
-
- class ClassCounts extends scala.collection.mutable.HashMap[Class[_], Int] {
- override def default(key: Class[_]) = 0
- }
-}
\ No newline at end of file
diff --git a/src/reflect/scala/reflect/internal/util/Statistics.scala b/src/reflect/scala/reflect/internal/util/Statistics.scala
index de0830aa3a..57c9e98174 100644
--- a/src/reflect/scala/reflect/internal/util/Statistics.scala
+++ b/src/reflect/scala/reflect/internal/util/Statistics.scala
@@ -1,37 +1,232 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
- * @author Martin Odersky
- */
package scala.reflect.internal.util
-class Statistics extends StatBase {
- val singletonBaseTypeSeqCount = new Counter
- val compoundBaseTypeSeqCount = new Counter
- val typerefBaseTypeSeqCount = new Counter
- val findMemberCount = new Counter
- val noMemberCount = new Counter
- val multMemberCount = new Counter
- val findMemberNanos = new Timer
- val asSeenFromCount = new Counter
- val asSeenFromNanos = new Timer
- val subtypeCount = new Counter
- val subtypeNanos = new Timer
- val sametypeCount = new Counter
- val rawTypeCount = new Counter
- val rawTypeFailed = new SubCounter(rawTypeCount)
- val findMemberFailed = new SubCounter(findMemberCount)
- val subtypeFailed = new SubCounter(subtypeCount)
- val rawTypeImpl = new SubCounter(rawTypeCount)
- val findMemberImpl = new SubCounter(findMemberCount)
- val subtypeImpl = new SubCounter(subtypeCount)
- val baseTypeSeqCount = new Counter
- val baseTypeSeqLenTotal = new Counter
- val typeSymbolCount = new Counter
- val classSymbolCount = new Counter
- val lubCount = new Counter
- val nestedLubCount = new Counter
- val lubNanos = new Timer
-}
+import collection.mutable
+
+object Statistics {
+
+ /** If enabled, increment counter by one */
+ @inline final def incCounter(c: Counter) {
+ if (_enabled && c != null) c.value += 1
+ }
+
+ /** If enabled, increment counter by given delta */
+ @inline final def incCounter(c: Counter, delta: Int) {
+ if (_enabled && c != null) c.value += delta
+ }
+
+ /** If enabled, increment counter in map `ctrs` at index `key` by one */
+ @inline final def incCounter[K](ctrs: QuantMap[K, Counter], key: K) =
+ if (_enabled && ctrs != null) ctrs(key).value += 1
+
+ /** If enabled, start subcounter. While active it will track all increments of
+ * its base counter.
+ */
+ @inline final def startCounter(sc: SubCounter): (Int, Int) =
+ if (_enabled && sc != null) sc.start() else null
+
+ /** If enabled, stop subcounter from tracking its base counter. */
+ @inline final def stopCounter(sc: SubCounter, start: (Int, Int)) {
+ if (_enabled && sc != null) sc.stop(start)
+ }
+
+ /** If enabled, start timer */
+ @inline final def startTimer(tm: Timer): (Long, Long) =
+ if (_enabled && tm != null) tm.start() else null
+
+ /** If enabled, stop timer */
+ @inline final def stopTimer(tm: Timer, start: (Long, Long)) {
+ if (_enabled && tm != null) tm.stop(start)
+ }
+
+ /** If enabled, push and start a new timer in timer stack */
+ @inline final def pushTimerClass(timers: ByClassTimerStack, cls: Class[_]): (Long, Long) =
+ if (_enabled && timers != null) timers.push(cls) else null
+
+ /** If enabled, stop and pop timer from timer stack */
+ @inline final def popTimerClass(timers: ByClassTimerStack, prev: (Long, Long)) {
+ if (_enabled && timers != null) timers.pop(prev)
+ }
+
+ /** Create a new counter that shows as `prefix` and is active in given phases */
+ def newCounter(prefix: String, phases: String*) = new Counter(prefix, phases)
+
+ /** Create a new relative counter that shows as `prefix` and is active
+ * in the same phases as its base counter. Relative counters print as percentages
+ * of their base counters.
+ */
+ def newRelCounter(prefix: String, ctr: Counter): Counter = new RelCounter(prefix, ctr)
+
+ /** Create a new subcounter that shows as `prefix` and is active
+ * in the same phases as its base counter. Subcounters can track
+ * increments of their base counters and print as percentages
+ * of their base counters.
+ */
+ def newSubCounter(prefix: String, ctr: Counter): SubCounter = new SubCounter(prefix, ctr)
+
+ /** Create a new counter that shows as `prefix` and is active in given phases */
+ def newTimer(prefix: String, phases: String*): Timer = new Timer(prefix, phases)
+
+ /** Create a new subtimer that shows as `prefix` and is active
+ * in the same phases as its base timer. Subtimers can track
+ * increments of their base timers and print as percentages
+ * of their base timers.
+ */
+ def newSubTimer(prefix: String, timer: Timer): Timer = new SubTimer(prefix, timer)
+
+ /** Create a new view that shows as `prefix` and is active in given phases.
+ * The view always reflects the current value of `quant` as a quantity.
+ */
+  def newView(prefix: String, phases: String*)(quant: => Any): View = new View(prefix, phases, quant)
-object Statistics extends Statistics
+ /** Create a new quantity map that shows as `prefix` and is active in given phases.
+ */
+ def newQuantMap[K, V <% Ordered[V]](prefix: String, phases: String*)(initValue: => V): QuantMap[K, V] = new QuantMap(prefix, phases, initValue)
+ /** Same as newQuantMap, where the key type is fixed to be Class[_] */
+ def newByClass[V <% Ordered[V]](prefix: String, phases: String*)(initValue: => V): QuantMap[Class[_], V] = new QuantMap(prefix, phases, initValue)
+
+ /** Create a new timer stack map, indexed by Class[_]. */
+ def newByClassTimerStack(prefix: String, underlying: Timer) = new ByClassTimerStack(prefix, underlying)
+
+ def allQuantities: Iterable[Quantity] =
+ for ((q, _) <- qs if !q.isInstanceOf[SubQuantity];
+ r <- q :: q.children.toList if r.prefix.nonEmpty) yield r
+
+ private def showPercent(x: Double, base: Double) =
+    if (base == 0) "" else f" (${x / base * 100}%2.1f%%)"
+
+ trait Quantity {
+ qs += (this -> ())
+ val prefix: String
+ val phases: Seq[String]
+ def showAt(phase: String) = phases.isEmpty || (phases contains phase)
+ def line = f"$prefix%-30s: ${this}"
+ val children = new mutable.ListBuffer[Quantity]
+ }
+
+ trait SubQuantity extends Quantity {
+ protected def underlying: Quantity
+ underlying.children += this
+ }
+
+ class Counter(val prefix: String, val phases: Seq[String]) extends Quantity with Ordered[Counter] {
+ var value: Int = 0
+ def compare(that: Counter): Int =
+ if (this.value < that.value) -1
+ else if (this.value > that.value) 1
+ else 0
+ override def toString = value.toString
+ }
+
+ class View(val prefix: String, val phases: Seq[String], quant: => Any) extends Quantity {
+ override def toString = quant.toString
+ }
+
+ private class RelCounter(prefix: String, val underlying: Counter) extends Counter(prefix, underlying.phases) with SubQuantity {
+ override def toString =
+ if (value == 0) "0"
+ else {
+ assert(underlying.value != 0, prefix+"/"+underlying.line)
+ f"${value.toFloat / underlying.value}%2.1f"
+ }
+ }
+
+ class SubCounter(prefix: String, override val underlying: Counter) extends Counter(prefix, underlying.phases) with SubQuantity {
+ def start() = (value, underlying.value)
+ def stop(prev: (Int, Int)) {
+ val (value0, uvalue0) = prev
+ value = value0 + underlying.value - uvalue0
+ }
+ override def toString =
+ value + showPercent(value, underlying.value)
+ }
+
+ class Timer(val prefix: String, val phases: Seq[String]) extends Quantity with Ordered[Timer] {
+ var nanos: Long = 0
+ var timings = 0
+ def compare(that: Timer): Int =
+ if (this.nanos < that.nanos) -1
+ else if (this.nanos > that.nanos) 1
+ else 0
+ def start() = {
+ (nanos, System.nanoTime())
+ }
+ def stop(prev: (Long, Long)) {
+ val (nanos0, start) = prev
+ nanos = nanos0 + System.nanoTime() - start
+ timings += 1
+ }
+ override def toString = s"$timings spans, ${nanos/1000}ms"
+ }
+
+ private class SubTimer(prefix: String, override val underlying: Timer) extends Timer(prefix, underlying.phases) with SubQuantity {
+ override def toString: String = super.toString + showPercent(nanos, underlying.nanos)
+ }
+
+ /** A mutable map quantity where missing elements are automatically inserted
+ * on access by executing `initValue`.
+ */
+ class QuantMap[K, V <% Ordered[V]](val prefix: String, val phases: Seq[String], initValue: => V)
+ extends scala.collection.mutable.HashMap[K, V] with Quantity {
+ override def default(key: K) = {
+ val elem = initValue
+ this(key) = elem
+ elem
+ }
+ override def toString =
+ this.toSeq.sortWith(_._2 > _._2).map {
+ case (cls: Class[_], elem) =>
+ s"${cls.toString.substring(cls.toString.lastIndexOf("$") + 1)}: $elem"
+ case (key, elem) =>
+ s"$key: $elem"
+ }.mkString(", ")
+ }
+
+ /** A mutable map quantity that takes class keys to subtimer values, relative to
+ * some `underlying` timer. In addition, class timers can be pushed and popped.
+ * Pushing the timer for a class means stopping the currently active timer.
+ */
+ class ByClassTimerStack(prefix: String, val underlying: Timer)
+ extends QuantMap[Class[_], Timer](prefix, underlying.phases, new SubTimer("", underlying)) with SubQuantity {
+ private var elems: List[(Timer, Long)] = Nil
+ def push(cls: Class[_]): (Long, Long) = {
+ val topTimer = this(cls)
+ elems = (topTimer, 0L) :: elems
+ topTimer.start()
+ }
+ def pop(prev: (Long, Long)) = {
+ val (nanos0, start) = prev
+ val duration = System.nanoTime() - start
+ val (topTimer, nestedNanos) :: rest = elems
+ topTimer.nanos = nanos0 + duration - nestedNanos
+ topTimer.timings += 1
+ elems = rest match {
+ case (outerTimer, outerNested) :: elems1 =>
+ (outerTimer, outerNested + duration) :: elems1
+ case Nil =>
+ Nil
+ }
+ }
+ }
+
+ private var _enabled = false
+ private val qs = new mutable.WeakHashMap[Quantity, Unit]
+
+ def enabled = _enabled
+ def enabled_=(cond: Boolean) = {
+ if (cond && !_enabled) {
+ val test = new Timer("", Nil)
+ val start = System.nanoTime()
+ var total = 0L
+ for (i <- 1 to 10000) {
+ val time = System.nanoTime()
+ total += System.nanoTime() - time
+ }
+ val total2 = System.nanoTime() - start
+ println("Enabling statistics, measuring overhead = "+
+ total/10000.0+"ns to "+total2/10000.0+"ns per timer")
+ _enabled = true
+ }
+ }
+}
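
Putting it together, the new Statistics object lets any component declare its quantities once and report them uniformly. A usage sketch built only from the API above; ParserStats, Demo and parse are made-up names and the parsing body is elided:

  import scala.reflect.internal.util.Statistics

  object ParserStats {
    val parseCount  = Statistics.newCounter("#parses", "parser")
    val parseNanos  = Statistics.newTimer("time spent parsing", "parser")
    val parseErrors = Statistics.newSubCounter("  of which failed", parseCount)
  }

  object Demo extends App {
    def parse(source: String): Unit = {
      Statistics.incCounter(ParserStats.parseCount)
      val start = Statistics.startTimer(ParserStats.parseNanos)
      try {
        // ... real parsing work would go here ...
      } finally Statistics.stopTimer(ParserStats.parseNanos, start)
    }

    Statistics.enabled = true
    parse("object O")
    Statistics.allQuantities foreach (q => println(q.line))
  }
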
diff --git a/src/reflect/scala/reflect/makro/Universe.scala b/src/reflect/scala/reflect/makro/Universe.scala
index ffc4042a0a..98046be555 100644
--- a/src/reflect/scala/reflect/makro/Universe.scala
+++ b/src/reflect/scala/reflect/makro/Universe.scala
@@ -15,25 +15,6 @@ abstract class Universe extends scala.reflect.api.Universe {
// [Eugene++ to Martin] should we also add mutability methods here (similarly to what's done below for trees)?
// I'm talking about `setAnnotations` and friends
-
- /** Can this symbol be loaded by a reflective mirror?
- *
- * Scalac relies on `ScalaSignature' annotation to retain symbols across compilation runs.
- * Such annotations (also called "pickles") are applied on top-level classes and include information
- * about all symbols reachable from the annotee. However, local symbols (e.g. classes or definitions local to a block)
- * are typically unreachable and information about them gets lost.
- *
- * This method is useful for macro writers who wish to save certain ASTs to be used at runtime.
- * With `isLocatable' it's possible to check whether a tree can be retained as is, or it needs special treatment.
- */
- def isLocatable: Boolean
-
- /** Is this symbol static (i.e. with no outer instance)?
- * Q: When exactly is a sym marked as STATIC?
- * A: If it's a member of a toplevel object, or of an object contained in a toplevel object, or any number of levels deep.
- * http://groups.google.com/group/scala-internals/browse_thread/thread/d385bcd60b08faf6
- */
- def isStatic: Boolean
}
// Tree extensions ---------------------------------------------------------------
diff --git a/src/reflect/scala/reflect/runtime/JavaUniverse.scala b/src/reflect/scala/reflect/runtime/JavaUniverse.scala
index d4a83b960d..629df76178 100644
--- a/src/reflect/scala/reflect/runtime/JavaUniverse.scala
+++ b/src/reflect/scala/reflect/runtime/JavaUniverse.scala
@@ -12,12 +12,6 @@ class JavaUniverse extends internal.SymbolTable with ReflectSetup with runtime.S
def picklerPhase = SomePhase
- type TreeGen = internal.TreeGen
-
- override type Position = scala.reflect.internal.util.Position
-
- override val gen = new TreeGen { val global: self.type = self }
-
lazy val settings = new Settings
def forInteractive = false
def forScaladoc = false
diff --git a/src/scalacheck/org/scalacheck/Arbitrary.scala b/src/scalacheck/org/scalacheck/Arbitrary.scala
index 28e116b479..8c43cdaafe 100644
--- a/src/scalacheck/org/scalacheck/Arbitrary.scala
+++ b/src/scalacheck/org/scalacheck/Arbitrary.scala
@@ -5,7 +5,7 @@
** **
** This software is released under the terms of the Revised BSD License. **
** There is NO WARRANTY. See the file LICENSE for the full text. **
-\*-------------------------------------------------------------------------*/
+\*------------------------------------------------------------------------ */
package org.scalacheck
@@ -178,9 +178,10 @@ object Arbitrary {
import java.math.MathContext._
val mcGen = oneOf(UNLIMITED, DECIMAL32, DECIMAL64, DECIMAL128)
val bdGen = for {
- mc <- mcGen
- scale <- arbInt.arbitrary
x <- arbBigInt.arbitrary
+ mc <- mcGen
+ limit <- value(if(mc == UNLIMITED) 0 else math.max(x.abs.toString.length - mc.getPrecision, 0))
+ scale <- Gen.chooseNum(Int.MinValue + limit , Int.MaxValue)
} yield BigDecimal(x, scale, mc)
Arbitrary(bdGen)
}
@@ -197,24 +198,37 @@ object Arbitrary {
}
/** Generates an arbitrary property */
- implicit lazy val arbProp: Arbitrary[Prop] =
+ implicit lazy val arbProp: Arbitrary[Prop] = {
+ import Prop._
+ val undecidedOrPassed = forAll { b: Boolean =>
+ b ==> true
+ }
Arbitrary(frequency(
- (5, Prop.proved),
- (4, Prop.falsified),
- (2, Prop.undecided),
- (1, Prop.exception(null))
+ (4, falsified),
+ (4, passed),
+ (3, proved),
+ (3, undecidedOrPassed),
+ (2, undecided),
+ (1, exception(null))
))
+ }
/** Arbitrary instance of test params */
implicit lazy val arbTestParams: Arbitrary[Test.Params] =
Arbitrary(for {
- minSuccTests <- choose(10,150)
- maxDiscTests <- choose(100,500)
+ minSuccTests <- choose(10,200)
+ maxDiscardRatio <- choose(0.2f,10f)
minSize <- choose(0,500)
sizeDiff <- choose(0,500)
maxSize <- choose(minSize, minSize + sizeDiff)
ws <- choose(1,4)
- } yield Test.Params(minSuccTests,maxDiscTests,minSize,maxSize,workers = ws))
+ } yield Test.Params(
+ minSuccessfulTests = minSuccTests,
+ maxDiscardRatio = maxDiscardRatio,
+ minSize = minSize,
+ maxSize = maxSize,
+ workers = ws
+ ))
/** Arbitrary instance of gen params */
implicit lazy val arbGenParams: Arbitrary[Gen.Params] =
diff --git a/src/scalacheck/org/scalacheck/Arg.scala b/src/scalacheck/org/scalacheck/Arg.scala
index 908bce2a81..8959211f09 100644
--- a/src/scalacheck/org/scalacheck/Arg.scala
+++ b/src/scalacheck/org/scalacheck/Arg.scala
@@ -5,7 +5,7 @@
** **
** This software is released under the terms of the Revised BSD License. **
** There is NO WARRANTY. See the file LICENSE for the full text. **
-\*-------------------------------------------------------------------------*/
+\*------------------------------------------------------------------------ */
package org.scalacheck
diff --git a/src/scalacheck/org/scalacheck/Commands.scala b/src/scalacheck/org/scalacheck/Commands.scala
index 112dda28a7..5ad82c513d 100644
--- a/src/scalacheck/org/scalacheck/Commands.scala
+++ b/src/scalacheck/org/scalacheck/Commands.scala
@@ -5,7 +5,7 @@
** **
** This software is released under the terms of the Revised BSD License. **
** There is NO WARRANTY. See the file LICENSE for the full text. **
-\*-------------------------------------------------------------------------*/
+\*------------------------------------------------------------------------ */
package org.scalacheck
@@ -46,13 +46,6 @@ trait Commands extends Prop {
def run(s: State): Any
def nextState(s: State): State
- /** @deprecated Use <code>preConditions += ...</code> instead. */
- @deprecated("Use 'preConditions += ...' instead.", "1.6")
- def preCondition_=(f: State => Boolean) = {
- preConditions.clear
- preConditions += f
- }
-
/** Returns all preconditions merged into a single function */
def preCondition: (State => Boolean) = s => preConditions.toList.forall(_.apply(s))
@@ -62,20 +55,6 @@ trait Commands extends Prop {
* conditions to the precondition list */
val preConditions = new collection.mutable.ListBuffer[State => Boolean]
- /** @deprecated Use <code>postConditions += ...</code> instead. */
- @deprecated("Use 'postConditions += ...' instead.", "1.6")
- def postCondition_=(f: (State,Any) => Prop) = {
- postConditions.clear
- postConditions += ((s0,s1,r) => f(s0,r))
- }
-
- /** @deprecated Use <code>postConditions += ...</code> instead. */
- @deprecated("Use 'postConditions += ...' instead.", "1.6")
- def postCondition_=(f: (State,State,Any) => Prop) = {
- postConditions.clear
- postConditions += f
- }
-
/** Returns all postconditions merged into a single function */
def postCondition: (State,State,Any) => Prop = (s0,s1,r) => all(postConditions.map(_.apply(s0,s1,r)): _*)
diff --git a/src/scalacheck/org/scalacheck/ConsoleReporter.scala b/src/scalacheck/org/scalacheck/ConsoleReporter.scala
index c3af6c83a3..93f1dc222e 100644
--- a/src/scalacheck/org/scalacheck/ConsoleReporter.scala
+++ b/src/scalacheck/org/scalacheck/ConsoleReporter.scala
@@ -5,7 +5,7 @@
** **
** This software is released under the terms of the Revised BSD License. **
** There is NO WARRANTY. See the file LICENSE for the full text. **
-\*-------------------------------------------------------------------------*/
+\*------------------------------------------------------------------------ */
package org.scalacheck
@@ -37,31 +37,6 @@ object ConsoleReporter {
* the given verbosity */
def apply(verbosity: Int = 0) = new ConsoleReporter(verbosity)
- @deprecated("(v1.8)", "1.8")
- def propReport(s: Int, d: Int) = {
- if(d == 0) printf("\rPassed %s tests\r", s)
- else printf("\rPassed %s tests; %s discarded\r", s, d)
- Console.flush
- }
-
- @deprecated("(v1.8)", "1.8")
- def propReport(pName: String, s: Int, d: Int) = {
- if(d == 0) printf("\r %s: Passed %s tests\r", pName, s)
- else printf("\r %s: Passed %s tests; %s discarded\r", pName, s, d)
- Console.flush
- }
-
- @deprecated("(v1.8)", "1.8")
- def testReport(res: Test.Result) = {
- print(List.fill(78)(' ').mkString)
- val s = (if(res.passed) "+ " else "! ") + pretty(res, Params(0))
- printf("\r%s\n", format(s, "", "", 75))
- res
- }
-
- @deprecated("(v1.8)", "1.8")
- def testStatsEx(res: Test.Result): Unit = testStatsEx("", res)
-
def testStatsEx(msg: String, res: Test.Result) = {
lazy val m = if(msg.length == 0) "" else msg + ": "
res.status match {
diff --git a/src/scalacheck/org/scalacheck/Gen.scala b/src/scalacheck/org/scalacheck/Gen.scala
index a253b040cd..64bb61c2d3 100644
--- a/src/scalacheck/org/scalacheck/Gen.scala
+++ b/src/scalacheck/org/scalacheck/Gen.scala
@@ -5,7 +5,7 @@
** **
** This software is released under the terms of the Revised BSD License. **
** There is NO WARRANTY. See the file LICENSE for the full text. **
-\*-------------------------------------------------------------------------*/
+\*------------------------------------------------------------------------ */
package org.scalacheck
@@ -59,6 +59,12 @@ object Choose {
}
}
+case class FiniteGenRes[+T](
+ r: T
+)
+
+sealed trait FiniteGen[+T] extends Gen[FiniteGenRes[T]]
+
/** Class that represents a generator. */
sealed trait Gen[+T] {
@@ -150,13 +156,6 @@ sealed trait Gen[+T] {
/** Returns a new property that holds if and only if both this
* and the given generator generates the same result, or both
- * generators generate no result.
- * @deprecated Use <code>==</code> instead */
- @deprecated("Use == instead", "1.7")
- def ===[U](g: Gen[U]): Prop = this == g
-
- /** Returns a new property that holds if and only if both this
- * and the given generator generates the same result, or both
* generators generate no result. */
def ==[U](g: Gen[U]) = Prop(prms =>
(this(prms.genPrms), g(prms.genPrms)) match {
@@ -221,11 +220,6 @@ object Gen {
}
}
- /* Default generator parameters
- * @deprecated Use <code>Gen.Params()</code> instead */
- @deprecated("Use Gen.Params() instead", "1.8")
- val defaultParams = Params()
-
/* Generator factory method */
def apply[T](g: Gen.Params => Option[T]) = new Gen[T] {
def apply(p: Gen.Params) = g(p)
@@ -310,20 +304,6 @@ object Gen {
x <- if(i == 0) g1 else if(i == 1) g2 else gs(i-2)
} yield x
- /** Chooses one of the given values, with a weighted random distribution.
- * @deprecated Use <code>frequency</code> with constant generators
- * instead. */
- @deprecated("Use 'frequency' with constant generators instead.", "1.6")
- def elementsFreq[T](vs: (Int, T)*): Gen[T] =
- frequency(vs.map { case (w,v) => (w, value(v)) } : _*)
-
- /** A generator that returns a random element from a list
- * @deprecated Use <code>oneOf</code> with constant generators instead. */
- @deprecated("Use 'oneOf' with constant generators instead.", "1.6")
- def elements[T](xs: T*): Gen[T] = if(xs.isEmpty) fail else for {
- i <- choose(0,xs.length-1)
- } yield xs(i)
-
//// List Generators ////
@@ -368,12 +348,6 @@ object Gen {
* <code>containerOfN[List,T](n,g)</code>. */
def listOfN[T](n: Int, g: Gen[T]) = containerOfN[List,T](n,g)
- /** Generates a list of the given length. This method is equal to calling
- * <code>containerOfN[List,T](n,g)</code>.
- * @deprecated Use the method <code>listOfN</code> instead. */
- @deprecated("Use 'listOfN' instead.", "1.6")
- def vectorOf[T](n: Int, g: Gen[T]) = containerOfN[List,T](n,g)
-
/** A generator that picks a random number of elements from a list */
def someOf[T](l: Iterable[T]) = choose(0,l.size) flatMap (pick(_,l))
@@ -438,16 +412,6 @@ object Gen {
//// Number Generators ////
- /* Generates positive integers
- * @deprecated Use <code>posNum[Int]code> instead */
- @deprecated("Use posNum[Int] instead", "1.7")
- def posInt: Gen[Int] = sized(max => choose(1, max))
-
- /* Generates negative integers
- * @deprecated Use <code>negNum[Int]code> instead */
- @deprecated("Use negNum[Int] instead", "1.7")
- def negInt: Gen[Int] = sized(max => choose(-max, -1))
-
/** Generates positive numbers of uniform distribution, with an
* upper bound of the generation size parameter. */
def posNum[T](implicit num: Numeric[T], c: Choose[T]): Gen[T] = {
diff --git a/src/scalacheck/org/scalacheck/Pretty.scala b/src/scalacheck/org/scalacheck/Pretty.scala
index f59ac315c7..c40e4aa718 100644
--- a/src/scalacheck/org/scalacheck/Pretty.scala
+++ b/src/scalacheck/org/scalacheck/Pretty.scala
@@ -5,7 +5,7 @@
** **
** This software is released under the terms of the Revised BSD License. **
** There is NO WARRANTY. See the file LICENSE for the full text. **
-\*-------------------------------------------------------------------------*/
+\*------------------------------------------------------------------------ */
package org.scalacheck
@@ -49,6 +49,8 @@ object Pretty {
implicit def prettyAny(t: Any) = Pretty { p => t.toString }
+ implicit def prettyString(t: String) = Pretty { p => "\""++t++"\"" }
+
implicit def prettyList(l: List[Any]) = Pretty { p =>
l.map("\""+_+"\"").mkString("List(", ", ", ")")
}
diff --git a/src/scalacheck/org/scalacheck/Prop.scala b/src/scalacheck/org/scalacheck/Prop.scala
index 3ae9f22234..dfd85a832a 100644
--- a/src/scalacheck/org/scalacheck/Prop.scala
+++ b/src/scalacheck/org/scalacheck/Prop.scala
@@ -5,12 +5,13 @@
** **
** This software is released under the terms of the Revised BSD License. **
** There is NO WARRANTY. See the file LICENSE for the full text. **
-\*-------------------------------------------------------------------------*/
+\*------------------------------------------------------------------------ */
package org.scalacheck
import util.{FreqMap,Buildable}
import scala.collection._
+import scala.annotation.tailrec
/** A property is a generator that generates a property result */
trait Prop {
@@ -102,15 +103,6 @@ trait Prop {
}
}
- /** Returns a new property that holds if and only if both this
- * and the given property generates a result with the exact
- * same status. Note that this means that if one of the properties is
- * proved, and the other one passed, then the resulting property
- * will fail.
- * @deprecated Use <code>==</code> instead */
- @deprecated("Use == instead.", "1.7")
- def ===(p: Prop): Prop = this == p
-
override def toString = "Prop"
/** Put a label on the property to make test reports clearer */
@@ -201,7 +193,7 @@ object Prop {
case (_,Undecided) => r
case (_,Proof) => merge(this, r, this.status)
- case (Proof,_) => merge(this, r, this.status)
+ case (Proof,_) => merge(this, r, r.status)
case (True,True) => merge(this, r, True)
}
@@ -337,15 +329,12 @@ object Prop {
/** A property that depends on the generator size */
def sizedProp(f: Int => Prop): Prop = Prop { prms =>
+ // provedToTrue since if the property is proved for
+ // one size, it shouldn't be regarded as proved for
+ // all sizes.
provedToTrue(f(prms.genPrms.size)(prms))
}
- /** Implication
- * @deprecated Use the implication operator of the Prop class instead
- */
- @deprecated("Use the implication operator of the Prop class instead", "1.7")
- def ==>(b: => Boolean, p: => Prop): Prop = (b: Prop) ==> p
-
/** Implication with several conditions */
def imply[T](x: T, f: PartialFunction[T,Prop]): Prop =
secure(if(f.isDefinedAt(x)) f(x) else undecided)
@@ -758,4 +747,17 @@ object Prop {
a8: Arbitrary[A8], s8: Shrink[A8], pp8: A8 => Pretty
): Prop = forAll((a: A1) => forAll(f(a, _:A2, _:A3, _:A4, _:A5, _:A6, _:A7, _:A8)))
+ /** Ensures that the property expression passed in completes within the given space of time. */
+ def within(maximumMs: Long)(wrappedProp: => Prop): Prop = new Prop {
+ @tailrec private def attempt(prms: Params, endTime: Long): Result = {
+ val result = wrappedProp.apply(prms)
+ if (System.currentTimeMillis > endTime) {
+ (if (result.failure) result else Result(False)).label("Timeout")
+ } else {
+ if (result.success) result
+ else attempt(prms, endTime)
+ }
+ }
+ def apply(prms: Params) = attempt(prms, System.currentTimeMillis + maximumMs)
+ }
}
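A minimal usage sketch of the within combinator added above (illustrative only, not part of the patch; it merely exercises the signature shown in the hunk):

    import org.scalacheck.Prop

    // Re-evaluates the wrapped property until it passes or the time budget
    // (in milliseconds) runs out; on timeout the result is labelled "Timeout".
    val eventuallyTrue: Prop = Prop.within(1000) {
      Prop.forAll { (n: Int) => n + 0 == n }
    }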
diff --git a/src/scalacheck/org/scalacheck/Properties.scala b/src/scalacheck/org/scalacheck/Properties.scala
index 8a5b3febc9..26059231d6 100644
--- a/src/scalacheck/org/scalacheck/Properties.scala
+++ b/src/scalacheck/org/scalacheck/Properties.scala
@@ -5,7 +5,7 @@
** **
** This software is released under the terms of the Revised BSD License. **
** There is NO WARRANTY. See the file LICENSE for the full text. **
-\*-------------------------------------------------------------------------*/
+\*------------------------------------------------------------------------ */
package org.scalacheck
diff --git a/src/scalacheck/org/scalacheck/Shrink.scala b/src/scalacheck/org/scalacheck/Shrink.scala
index a077f21573..ae15bd9616 100644
--- a/src/scalacheck/org/scalacheck/Shrink.scala
+++ b/src/scalacheck/org/scalacheck/Shrink.scala
@@ -5,7 +5,7 @@
** **
** This software is released under the terms of the Revised BSD License. **
** There is NO WARRANTY. See the file LICENSE for the full text. **
-\*-------------------------------------------------------------------------*/
+\*------------------------------------------------------------------------ */
package org.scalacheck
diff --git a/src/scalacheck/org/scalacheck/Test.scala b/src/scalacheck/org/scalacheck/Test.scala
index 48b0a151a1..4368184823 100644
--- a/src/scalacheck/org/scalacheck/Test.scala
+++ b/src/scalacheck/org/scalacheck/Test.scala
@@ -5,7 +5,7 @@
** **
** This software is released under the terms of the Revised BSD License. **
** There is NO WARRANTY. See the file LICENSE for the full text. **
-\*-------------------------------------------------------------------------*/
+\*------------------------------------------------------------------------ */
package org.scalacheck
@@ -19,12 +19,18 @@ object Test {
/** Test parameters */
case class Params(
minSuccessfulTests: Int = 100,
- maxDiscardedTests: Int = 500,
+
+ /** @deprecated Use maxDiscardRatio instead. */
+ @deprecated("Use maxDiscardRatio instead.", "1.10")
+ maxDiscardedTests: Int = -1,
+
minSize: Int = 0,
maxSize: Int = Gen.Params().size,
rng: java.util.Random = Gen.Params().rng,
workers: Int = 1,
- testCallback: TestCallback = new TestCallback {}
+ testCallback: TestCallback = new TestCallback {},
+ maxDiscardRatio: Float = 5,
+ customClassLoader: Option[ClassLoader] = None
)
/** Test statistics */
@@ -90,7 +96,7 @@ object Test {
import prms._
if(
minSuccessfulTests <= 0 ||
- maxDiscardedTests < 0 ||
+ maxDiscardRatio <= 0 ||
minSize < 0 ||
maxSize < minSize ||
workers <= 0
@@ -106,12 +112,13 @@ object Test {
val names = Set("minSuccessfulTests", "s")
val help = "Number of tests that must succeed in order to pass a property"
}
- object OptMaxDiscarded extends IntOpt {
- val default = Test.Params().maxDiscardedTests
- val names = Set("maxDiscardedTests", "d")
+ object OptMaxDiscardRatio extends FloatOpt {
+ val default = Test.Params().maxDiscardRatio
+ val names = Set("maxDiscardRatio", "r")
val help =
- "Number of tests that can be discarded before ScalaCheck stops " +
- "testing a property"
+ "The maximum ratio between discarded and succeeded tests " +
+ "allowed before ScalaCheck stops testing a property. At " +
+ "least minSuccessfulTests will always be tested, though."
}
object OptMinSize extends IntOpt {
val default = Test.Params().minSize
@@ -135,45 +142,54 @@ object Test {
}
val opts = Set[Opt[_]](
- OptMinSuccess, OptMaxDiscarded, OptMinSize,
+ OptMinSuccess, OptMaxDiscardRatio, OptMinSize,
OptMaxSize, OptWorkers, OptVerbosity
)
def parseParams(args: Array[String]) = parseArgs(args) {
optMap => Test.Params(
- optMap(OptMinSuccess),
- optMap(OptMaxDiscarded),
- optMap(OptMinSize),
- optMap(OptMaxSize),
- Test.Params().rng,
- optMap(OptWorkers),
- ConsoleReporter(optMap(OptVerbosity))
+ minSuccessfulTests = optMap(OptMinSuccess),
+ maxDiscardRatio = optMap(OptMaxDiscardRatio),
+ minSize = optMap(OptMinSize),
+ maxSize = optMap(OptMaxSize),
+ rng = Test.Params().rng,
+ workers = optMap(OptWorkers),
+ testCallback = ConsoleReporter(optMap(OptVerbosity))
)
}
}
/** Tests a property with the given testing parameters, and returns
* the test results. */
- def check(prms: Params, p: Prop): Result = {
+ def check(params: Params, p: Prop): Result = {
+
+ // maxDiscardedTests is deprecated, but if someone
+ // uses it let it override maxDiscardRatio
+ val mdr =
+ if(params.maxDiscardedTests < 0) params.maxDiscardRatio
+ else (params.maxDiscardedTests: Float)/(params.minSuccessfulTests: Float)
+ val prms = params.copy( maxDiscardRatio = mdr)
+
import prms._
- import actors.Futures.future
+ import scala.actors.Futures.future
assertParams(prms)
if(workers > 1)
assert(!p.isInstanceOf[Commands], "Commands cannot be checked multi-threaded")
- val iterations = minSuccessfulTests / workers
- val sizeStep = (maxSize-minSize) / (minSuccessfulTests: Float)
+ val iterations = math.ceil(minSuccessfulTests / (workers: Double))
+ val sizeStep = (maxSize-minSize) / (iterations*workers)
var stop = false
- def worker(workerdIdx: Int) = future {
- var n = 0
- var d = 0
- var size = minSize + (workerdIdx*sizeStep*iterations)
+ def worker(workerIdx: Int) = future {
+ params.customClassLoader.map(Thread.currentThread.setContextClassLoader(_))
+ var n = 0 // passed tests
+ var d = 0 // discarded tests
var res: Result = null
var fm = FreqMap.empty[immutable.Set[Any]]
while(!stop && res == null && n < iterations) {
- val propPrms = Prop.Params(Gen.Params(size.round, prms.rng), fm)
+ val size = (minSize: Double) + (sizeStep * (workerIdx + (workers*(n+d))))
+ val propPrms = Prop.Params(Gen.Params(size.round.toInt, prms.rng), fm)
secure(p(propPrms)) match {
case Right(e) => res =
Result(GenException(e), n, d, FreqMap.empty[immutable.Set[Any]])
@@ -184,35 +200,48 @@ object Test {
propRes.status match {
case Prop.Undecided =>
d += 1
- testCallback.onPropEval("", workerdIdx, n, d)
- if(d >= maxDiscardedTests) res = Result(Exhausted, n, d, fm)
+ testCallback.onPropEval("", workerIdx, n, d)
+ // The below condition is kind of hacky. We have to have
+ // some margin, otherwise workers might stop testing too
+ // early because they have been exhausted, but the overall
+ // test has not.
+ if (n+d > minSuccessfulTests && 1+workers*maxDiscardRatio*n < d)
+ res = Result(Exhausted, n, d, fm)
case Prop.True =>
n += 1
- testCallback.onPropEval("", workerdIdx, n, d)
+ testCallback.onPropEval("", workerIdx, n, d)
case Prop.Proof =>
n += 1
res = Result(Proved(propRes.args), n, d, fm)
- case Prop.False => res =
- Result(Failed(propRes.args, propRes.labels), n, d, fm)
- case Prop.Exception(e) => res =
- Result(PropException(propRes.args, e, propRes.labels), n, d, fm)
+ stop = true
+ case Prop.False =>
+ res = Result(Failed(propRes.args,propRes.labels), n, d, fm)
+ stop = true
+ case Prop.Exception(e) =>
+ res = Result(PropException(propRes.args,e,propRes.labels), n, d, fm)
+ stop = true
}
}
- size += sizeStep
}
- if(res != null) stop = true
- else res = Result(Passed, n, d, fm)
- res
+ if (res == null) {
+ if (maxDiscardRatio*n > d) Result(Passed, n, d, fm)
+ else Result(Exhausted, n, d, fm)
+ } else res
}
- def mergeResults(r1: () => Result, r2: () => Result) = r1() match {
- case Result(Passed, s1, d1, fm1, t) => r2() match {
- case Result(Passed, s2, d2, fm2, t) if d1+d2 >= maxDiscardedTests =>
- () => Result(Exhausted, s1+s2, d1+d2, fm1++fm2, t)
- case Result(st, s2, d2, fm2, t) =>
- () => Result(st, s1+s2, d1+d2, fm1++fm2, t)
+ def mergeResults(r1: () => Result, r2: () => Result) = {
+ val Result(st1, s1, d1, fm1, _) = r1()
+ val Result(st2, s2, d2, fm2, _) = r2()
+ if (st1 != Passed && st1 != Exhausted)
+ () => Result(st1, s1+s2, d1+d2, fm1++fm2, 0)
+ else if (st2 != Passed && st2 != Exhausted)
+ () => Result(st2, s1+s2, d1+d2, fm1++fm2, 0)
+ else {
+ if (s1+s2 >= minSuccessfulTests && maxDiscardRatio*(s1+s2) >= (d1+d2))
+ () => Result(Passed, s1+s2, d1+d2, fm1++fm2, 0)
+ else
+ () => Result(Exhausted, s1+s2, d1+d2, fm1++fm2, 0)
}
- case r => () => r
}
val start = System.currentTimeMillis
@@ -237,78 +266,4 @@ object Test {
(name,res)
}
-
- // Deprecated methods //
-
- /** Default testing parameters
- * @deprecated Use <code>Test.Params()</code> instead */
- @deprecated("Use Test.Params() instead", "1.8")
- val defaultParams = Params()
-
- /** Property evaluation callback. Takes number of passed and
- * discarded tests, respectively */
- @deprecated("(v1.8)", "1.8")
- type PropEvalCallback = (Int,Int) => Unit
-
- /** Property evaluation callback. Takes property name, and number of passed
- * and discarded tests, respectively */
- @deprecated("(v1.8)", "1.8")
- type NamedPropEvalCallback = (String,Int,Int) => Unit
-
- /** Test callback. Takes property name, and test results. */
- @deprecated("(v1.8)", "1.8")
- type TestResCallback = (String,Result) => Unit
-
- /** @deprecated (v1.8) Use <code>check(prms.copy(testCallback = myCallback), p)</code> instead. */
- @deprecated("Use check(prms.copy(testCallback = myCallback), p) instead", "1.8")
- def check(prms: Params, p: Prop, propCallb: PropEvalCallback): Result = {
- val testCallback = new TestCallback {
- override def onPropEval(n: String, t: Int, s: Int, d: Int) = propCallb(s,d)
- }
- check(prms copy (testCallback = testCallback), p)
- }
-
- /** Tests a property and prints results to the console. The
- * <code>maxDiscarded</code> parameter specifies how many
- * discarded tests that should be allowed before ScalaCheck
- * @deprecated (v1.8) Use <code>check(Params(maxDiscardedTests = n, testCallback = ConsoleReporter()), p)</code> instead. */
- @deprecated("Use check(Params(maxDiscardedTests = n, testCallback = ConsoleReporter()), p) instead.", "1.8")
- def check(p: Prop, maxDiscarded: Int): Result =
- check(Params(maxDiscardedTests = maxDiscarded, testCallback = ConsoleReporter()), p)
-
- /** Tests a property and prints results to the console
- * @deprecated (v1.8) Use <code>check(Params(testCallback = ConsoleReporter()), p)</code> instead. */
- @deprecated("Use check(Params(testCallback = ConsoleReporter()), p) instead.", "1.8")
- def check(p: Prop): Result = check(Params(testCallback = ConsoleReporter()), p)
-
- /** Tests all properties with the given testing parameters, and returns
- * the test results. <code>f</code> is a function which is called each
- * time a property is evaluted. <code>g</code> is a function called each
- * time a property has been fully tested.
- * @deprecated (v1.8) Use <code>checkProperties(prms.copy(testCallback = myCallback), ps)</code> instead. */
- @deprecated("Use checkProperties(prms.copy(testCallback = myCallback), ps) instead.", "1.8")
- def checkProperties(ps: Properties, prms: Params,
- propCallb: NamedPropEvalCallback, testCallb: TestResCallback
- ): Seq[(String,Result)] = {
- val testCallback = new TestCallback {
- override def onPropEval(n: String, t: Int, s: Int, d: Int) = propCallb(n,s,d)
- override def onTestResult(n: String, r: Result) = testCallb(n,r)
- }
- checkProperties(prms copy (testCallback = testCallback), ps)
- }
-
- /** Tests all properties with the given testing parameters, and returns
- * the test results.
- * @deprecated (v1.8) Use checkProperties(prms, ps) instead */
- @deprecated("Use checkProperties(prms, ps) instead", "1.8")
- def checkProperties(ps: Properties, prms: Params): Seq[(String,Result)] =
- checkProperties(ps, prms, (n,s,d) => (), (n,s) => ())
-
- /** Tests all properties with default testing parameters, and returns
- * the test results. The results are also printed on the console during
- * testing.
- * @deprecated (v1.8) Use <code>checkProperties(Params(), ps)</code> instead. */
- @deprecated("Use checkProperties(Params(), ps) instead.", "1.8")
- def checkProperties(ps: Properties): Seq[(String,Result)] =
- checkProperties(Params(), ps)
}
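The parameter change above replaces the absolute maxDiscardedTests limit with a discarded-to-successful ratio. A minimal sketch of the new call shape (illustrative only, not part of the patch):

    import org.scalacheck.{ Prop, Test }

    val prop = Prop.forAll { (n: Int) => n + 0 == n }
    // maxDiscardRatio bounds discarded/successful tests; maxDiscardedTests
    // survives only as a deprecated override that is translated into a ratio.
    val params = Test.Params(minSuccessfulTests = 200, maxDiscardRatio = 5)
    val result = Test.check(params, prop)
    println(result.passed)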
diff --git a/src/scalacheck/org/scalacheck/util/Buildable.scala b/src/scalacheck/org/scalacheck/util/Buildable.scala
index 5c960c3ba8..221b8a61c3 100644
--- a/src/scalacheck/org/scalacheck/util/Buildable.scala
+++ b/src/scalacheck/org/scalacheck/util/Buildable.scala
@@ -5,7 +5,7 @@
** **
** This software is released under the terms of the Revised BSD License. **
** There is NO WARRANTY. See the file LICENSE for the full text. **
-\*-------------------------------------------------------------------------*/
+\*------------------------------------------------------------------------ */
package org.scalacheck.util
@@ -31,7 +31,7 @@ object Buildable {
def builder = (new mutable.ListBuffer[T]).mapResult(_.toStream)
}
- implicit def buildableArray[T](implicit t: ClassTag[T]) =
+ implicit def buildableArray[T](implicit cm: ClassTag[T]) =
new Buildable[T,Array] {
def builder = mutable.ArrayBuilder.make[T]
}
diff --git a/src/scalacheck/org/scalacheck/util/CmdLineParser.scala b/src/scalacheck/org/scalacheck/util/CmdLineParser.scala
index a63e4ba10e..16ac1940b2 100644
--- a/src/scalacheck/org/scalacheck/util/CmdLineParser.scala
+++ b/src/scalacheck/org/scalacheck/util/CmdLineParser.scala
@@ -5,7 +5,7 @@
** **
** This software is released under the terms of the Revised BSD License. **
** There is NO WARRANTY. See the file LICENSE for the full text. **
-\*-------------------------------------------------------------------------*/
+\*------------------------------------------------------------------------ */
package org.scalacheck.util
@@ -26,6 +26,7 @@ trait CmdLineParser extends Parsers {
}
trait Flag extends Opt[Unit]
trait IntOpt extends Opt[Int]
+ trait FloatOpt extends Opt[Float]
trait StrOpt extends Opt[String]
class OptMap {
@@ -68,11 +69,17 @@ trait CmdLineParser extends Parsers {
case s if s != null && s.length > 0 && s.forall(_.isDigit) => s.toInt
})
+ private val floatVal: Parser[Float] = accept("float", {
+ case s if s != null && s.matches("[0987654321]+\\.?[0987654321]*")
+ => s.toFloat
+ })
+
private case class OptVal[T](o: Opt[T], v: T)
private val optVal: Parser[OptVal[Any]] = opt into {
case o: Flag => success(OptVal(o, ()))
case o: IntOpt => intVal ^^ (v => OptVal(o, v))
+ case o: FloatOpt => floatVal ^^ (v => OptVal(o, v))
case o: StrOpt => strVal ^^ (v => OptVal(o, v))
}
diff --git a/src/scalacheck/org/scalacheck/util/FreqMap.scala b/src/scalacheck/org/scalacheck/util/FreqMap.scala
index 902c148d67..c7474d3b87 100644
--- a/src/scalacheck/org/scalacheck/util/FreqMap.scala
+++ b/src/scalacheck/org/scalacheck/util/FreqMap.scala
@@ -5,7 +5,7 @@
** **
** This software is released under the terms of the Revised BSD License. **
** There is NO WARRANTY. See the file LICENSE for the full text. **
-\*-------------------------------------------------------------------------*/
+\*------------------------------------------------------------------------ */
package org.scalacheck.util
diff --git a/src/scalacheck/org/scalacheck/util/StdRand.scala b/src/scalacheck/org/scalacheck/util/StdRand.scala
index 4cc83a4172..317b0ccd10 100644
--- a/src/scalacheck/org/scalacheck/util/StdRand.scala
+++ b/src/scalacheck/org/scalacheck/util/StdRand.scala
@@ -5,7 +5,7 @@
** **
** This software is released under the terms of the Revised BSD License. **
** There is NO WARRANTY. See the file LICENSE for the full text. **
-\*-------------------------------------------------------------------------*/
+\*------------------------------------------------------------------------ */
package org.scalacheck.util
diff --git a/test/benchmarking/ParCtrie-size.scala b/test/benchmarking/ParCtrie-size.scala
index 5a6191fb62..3f47dc23fd 100644
--- a/test/benchmarking/ParCtrie-size.scala
+++ b/test/benchmarking/ParCtrie-size.scala
@@ -2,16 +2,18 @@
-import collection.parallel.mutable.ParCtrie
+import collection.parallel.mutable.ParTrieMap
object Size extends testing.Benchmark {
val length = sys.props("length").toInt
val par = sys.props("par").toInt
- var parctrie = ParCtrie((0 until length) zip (0 until length): _*)
+ var parctrie = ParTrieMap((0 until length) zip (0 until length): _*)
- collection.parallel.ForkJoinTasks.defaultForkJoinPool.setParallelism(par)
+ //collection.parallel.ForkJoinTasks.defaultForkJoinPool.setParallelism(par)
+ val ts = new collection.parallel.ForkJoinTaskSupport(new concurrent.forkjoin.ForkJoinPool(par))
+ parctrie.tasksupport = ts
def run = {
parctrie.size
@@ -21,7 +23,8 @@ object Size extends testing.Benchmark {
override def tearDown() {
iteration += 1
- if (iteration % 4 == 0) parctrie = ParCtrie((0 until length) zip (0 until length): _*)
+ if (iteration % 4 == 0) parctrie = ParTrieMap((0 until length) zip (0 until length): _*)
+ parctrie.tasksupport = ts
}
}
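The benchmark now configures parallelism per collection through tasksupport rather than the removed global ForkJoinTasks setting. A minimal sketch of the same pattern outside the benchmark (illustrative only, assuming a 2.10-era standard library):

    import scala.collection.parallel.ForkJoinTaskSupport
    import scala.collection.parallel.mutable.ParTrieMap
    import scala.concurrent.forkjoin.ForkJoinPool

    val m = ParTrieMap((0 until 1000) zip (0 until 1000): _*)
    // Each parallel collection carries its own task support and pool.
    m.tasksupport = new ForkJoinTaskSupport(new ForkJoinPool(4))
    println(m.size)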
diff --git a/test/files/continuations-neg/ts-1681-nontail-return.check b/test/files/continuations-neg/ts-1681-nontail-return.check
new file mode 100644
index 0000000000..8fe15f154b
--- /dev/null
+++ b/test/files/continuations-neg/ts-1681-nontail-return.check
@@ -0,0 +1,4 @@
+ts-1681-nontail-return.scala:10: error: return expressions in CPS code must be in tail position
+ return v
+ ^
+one error found
diff --git a/test/files/continuations-neg/ts-1681-nontail-return.scala b/test/files/continuations-neg/ts-1681-nontail-return.scala
new file mode 100644
index 0000000000..af86ad304f
--- /dev/null
+++ b/test/files/continuations-neg/ts-1681-nontail-return.scala
@@ -0,0 +1,18 @@
+import scala.util.continuations._
+
+class ReturnRepro {
+ def s1: Int @cpsParam[Any, Unit] = shift { k => k(5) }
+ def caller = reset { println(p(3)) }
+
+ def p(i: Int): Int @cpsParam[Unit, Any] = {
+ val v= s1 + 3
+ if (v == 8)
+ return v
+ v + 1
+ }
+}
+
+object Test extends App {
+ val repro = new ReturnRepro
+ repro.caller
+}
diff --git a/test/files/continuations-run/ts-1681-2.check b/test/files/continuations-run/ts-1681-2.check
new file mode 100644
index 0000000000..35b3c93780
--- /dev/null
+++ b/test/files/continuations-run/ts-1681-2.check
@@ -0,0 +1,5 @@
+8
+hi
+8
+from try
+8
diff --git a/test/files/continuations-run/ts-1681-2.scala b/test/files/continuations-run/ts-1681-2.scala
new file mode 100644
index 0000000000..8a896dec2c
--- /dev/null
+++ b/test/files/continuations-run/ts-1681-2.scala
@@ -0,0 +1,44 @@
+import scala.util.continuations._
+
+class ReturnRepro {
+ def s1: Int @cps[Any] = shift { k => k(5) }
+ def caller = reset { println(p(3)) }
+ def caller2 = reset { println(p2(3)) }
+ def caller3 = reset { println(p3(3)) }
+
+ def p(i: Int): Int @cps[Any] = {
+ val v= s1 + 3
+ return v
+ }
+
+ def p2(i: Int): Int @cps[Any] = {
+ val v = s1 + 3
+ if (v > 0) {
+ println("hi")
+ return v
+ } else {
+ println("hi")
+ return 8
+ }
+ }
+
+ def p3(i: Int): Int @cps[Any] = {
+ val v = s1 + 3
+ try {
+ println("from try")
+ return v
+ } catch {
+ case e: Exception =>
+ println("from catch")
+ return 7
+ }
+ }
+
+}
+
+object Test extends App {
+ val repro = new ReturnRepro
+ repro.caller
+ repro.caller2
+ repro.caller3
+}
diff --git a/test/files/continuations-run/ts-1681-3.check b/test/files/continuations-run/ts-1681-3.check
new file mode 100644
index 0000000000..71489f097c
--- /dev/null
+++ b/test/files/continuations-run/ts-1681-3.check
@@ -0,0 +1,4 @@
+enter return expr
+8
+hi
+8
diff --git a/test/files/continuations-run/ts-1681-3.scala b/test/files/continuations-run/ts-1681-3.scala
new file mode 100644
index 0000000000..62c547f5a2
--- /dev/null
+++ b/test/files/continuations-run/ts-1681-3.scala
@@ -0,0 +1,27 @@
+import scala.util.continuations._
+
+class ReturnRepro {
+ def s1: Int @cpsParam[Any, Unit] = shift { k => k(5) }
+ def caller = reset { println(p(3)) }
+ def caller2 = reset { println(p2(3)) }
+
+ def p(i: Int): Int @cpsParam[Unit, Any] = {
+ val v= s1 + 3
+ return { println("enter return expr"); v }
+ }
+
+ def p2(i: Int): Int @cpsParam[Unit, Any] = {
+ val v = s1 + 3
+ if (v > 0) {
+ return { println("hi"); v }
+ } else {
+ return { println("hi"); 8 }
+ }
+ }
+}
+
+object Test extends App {
+ val repro = new ReturnRepro
+ repro.caller
+ repro.caller2
+}
diff --git a/test/files/continuations-run/ts-1681.check b/test/files/continuations-run/ts-1681.check
new file mode 100644
index 0000000000..85176d8e66
--- /dev/null
+++ b/test/files/continuations-run/ts-1681.check
@@ -0,0 +1,3 @@
+8
+hi
+8
diff --git a/test/files/continuations-run/ts-1681.scala b/test/files/continuations-run/ts-1681.scala
new file mode 100644
index 0000000000..efb1abae15
--- /dev/null
+++ b/test/files/continuations-run/ts-1681.scala
@@ -0,0 +1,29 @@
+import scala.util.continuations._
+
+class ReturnRepro {
+ def s1: Int @cpsParam[Any, Unit] = shift { k => k(5) }
+ def caller = reset { println(p(3)) }
+ def caller2 = reset { println(p2(3)) }
+
+ def p(i: Int): Int @cpsParam[Unit, Any] = {
+ val v= s1 + 3
+ return v
+ }
+
+ def p2(i: Int): Int @cpsParam[Unit, Any] = {
+ val v = s1 + 3
+ if (v > 0) {
+ println("hi")
+ return v
+ } else {
+ println("hi")
+ return 8
+ }
+ }
+}
+
+object Test extends App {
+ val repro = new ReturnRepro
+ repro.caller
+ repro.caller2
+}
diff --git a/test/files/jvm/actmig-PinS.check b/test/files/jvm/actmig-PinS.check
new file mode 100644
index 0000000000..bdbdf8a692
--- /dev/null
+++ b/test/files/jvm/actmig-PinS.check
@@ -0,0 +1,19 @@
+I'm acting!
+I'm acting!
+I'm acting!
+I'm acting!
+I'm acting!
+Post stop
+To be or not to be.
+To be or not to be.
+To be or not to be.
+To be or not to be.
+To be or not to be.
+That is the question.
+That is the question.
+That is the question.
+That is the question.
+That is the question.
+received message: hi there
+received message: 15
+Got an Int: 12
diff --git a/test/files/jvm/actmig-PinS.scala b/test/files/jvm/actmig-PinS.scala
new file mode 100644
index 0000000000..39f8f04b3b
--- /dev/null
+++ b/test/files/jvm/actmig-PinS.scala
@@ -0,0 +1,112 @@
+import scala.actors._
+import scala.concurrent.util.duration._
+import scala.concurrent.{ Promise, Await }
+
+import scala.actors.Actor._
+
+/* PinS, Listing 32.1: A simple actor
+ */
+object SillyActor extends Actor {
+ def act() {
+ for (i <- 1 to 5)
+ println("I'm acting!")
+
+ println("Post stop")
+ }
+}
+
+object SeriousActor extends Actor {
+ def act() {
+ for (i <- 1 to 5)
+ println("To be or not to be.")
+ }
+}
+
+/* PinS, Listing 32.3: An actor that calls react
+ */
+object NameResolver extends Actor {
+ import java.net.{InetAddress, UnknownHostException}
+
+ def act() {
+ react {
+ case (name: String, actor: Actor) =>
+ actor ! getIp(name)
+ act()
+ case "EXIT" =>
+ println("Name resolver exiting.")
+ // quit
+ case msg =>
+ println("Unhandled message: " + msg)
+ act()
+ }
+ }
+
+ def getIp(name: String): Option[InetAddress] = {
+ try {
+ Some(InetAddress.getByName(name))
+ } catch {
+ case _: UnknownHostException => None
+ }
+ }
+
+}
+
+object Test extends App {
+ /* PinS, Listing 32.2: An actor that calls receive
+ */
+ def makeEchoActor(): Actor = actor {
+ while (true) {
+ receive {
+ case 'stop =>
+ exit()
+ case msg =>
+ println("received message: " + msg)
+ }
+ }
+ }
+
+ /* PinS, page 696
+ */
+ def makeIntActor(): Actor = actor {
+ receive {
+ case x: Int => // I only want Ints
+ println("Got an Int: " + x)
+ }
+ }
+
+ actor {
+ self.trapExit = true
+ self.link(SillyActor)
+ SillyActor.start()
+ react {
+ case Exit(SillyActor, _) =>
+ self.link(SeriousActor)
+ SeriousActor.start()
+ react {
+ case Exit(SeriousActor, _) =>
+ val seriousPromise2 = Promise[Boolean]
+ // PinS, page 694
+ val seriousActor2 = actor {
+ for (i <- 1 to 5)
+ println("That is the question.")
+ seriousPromise2.success(true)
+ }
+
+ Await.ready(seriousPromise2.future, 5 seconds)
+ val echoActor = makeEchoActor()
+ self.link(echoActor)
+ echoActor ! "hi there"
+ echoActor ! 15
+ echoActor ! 'stop
+ react {
+ case Exit(_, _) =>
+ val intActor = makeIntActor()
+ intActor ! "hello"
+ intActor ! math.Pi
+ // only the following send leads to output
+ intActor ! 12
+ }
+ }
+ }
+ }
+}
diff --git a/test/files/jvm/actmig-PinS_1.check b/test/files/jvm/actmig-PinS_1.check
new file mode 100644
index 0000000000..bdbdf8a692
--- /dev/null
+++ b/test/files/jvm/actmig-PinS_1.check
@@ -0,0 +1,19 @@
+I'm acting!
+I'm acting!
+I'm acting!
+I'm acting!
+I'm acting!
+Post stop
+To be or not to be.
+To be or not to be.
+To be or not to be.
+To be or not to be.
+To be or not to be.
+That is the question.
+That is the question.
+That is the question.
+That is the question.
+That is the question.
+received message: hi there
+received message: 15
+Got an Int: 12
diff --git a/test/files/jvm/actmig-PinS_1.scala b/test/files/jvm/actmig-PinS_1.scala
new file mode 100644
index 0000000000..1fb50567b9
--- /dev/null
+++ b/test/files/jvm/actmig-PinS_1.scala
@@ -0,0 +1,135 @@
+import scala.actors._
+import scala.concurrent.util.duration._
+import scala.concurrent.{ Promise, Await }
+
+object SillyActor {
+ val startPromise = Promise[Boolean]()
+ val ref = MigrationSystem.actorOf(Props(() => new SillyActor, "akka.actor.default-stash-dispatcher"))
+}
+
+/* PinS, Listing 32.1: A simple actor
+ */
+class SillyActor extends Actor {
+
+ def act() {
+ Await.ready(SillyActor.startPromise.future, 5 seconds)
+ for (i <- 1 to 5)
+ println("I'm acting!")
+
+ println("Post stop")
+ }
+}
+
+object SeriousActor {
+ val startPromise = Promise[Boolean]()
+ val ref = MigrationSystem.actorOf(Props(() => new SeriousActor, "akka.actor.default-stash-dispatcher"))
+}
+
+class SeriousActor extends Actor {
+ def act() {
+ // used to make this test deterministic
+ Await.ready(SeriousActor.startPromise.future, 5 seconds)
+ for (i <- 1 to 5)
+ println("To be or not to be.")
+ }
+}
+
+/* PinS, Listing 32.3: An actor that calls react
+ */
+object NameResolver extends Actor {
+ import java.net.{ InetAddress, UnknownHostException }
+
+ def act() {
+ react {
+ case (name: String, actor: Actor) =>
+ actor ! getIp(name)
+ act()
+ case "EXIT" =>
+ println("Name resolver exiting.")
+ // quit
+ case msg =>
+ println("Unhandled message: " + msg)
+ act()
+ }
+ }
+
+ def getIp(name: String): Option[InetAddress] = {
+ try {
+ Some(InetAddress.getByName(name))
+ } catch {
+ case _: UnknownHostException => None
+ }
+ }
+
+}
+
+object Test extends App {
+
+ /* PinS, Listing 32.2: An actor that calls receive
+ */
+ def makeEchoActor(): ActorRef = MigrationSystem.actorOf(Props(() => new Actor {
+ def act() {
+ while (true) {
+ receive {
+ case 'stop =>
+ exit()
+ case msg =>
+ println("received message: " + msg)
+ }
+ }
+ }
+ }, "akka.actor.default-stash-dispatcher"))
+
+ /* PinS, page 696
+ */
+ def makeIntActor(): ActorRef = MigrationSystem.actorOf(Props(() => new Actor {
+ def act() {
+ receive {
+ case x: Int => // I only want Ints
+ println("Got an Int: " + x)
+ }
+ }
+ }, "akka.actor.default-stash-dispatcher"))
+
+ MigrationSystem.actorOf(Props(() => new Actor {
+ def act() {
+ trapExit = true
+ link(SillyActor.ref)
+ SillyActor.startPromise.success(true)
+ react {
+ case Exit(_: SillyActor, _) =>
+ link(SeriousActor.ref)
+ SeriousActor.startPromise.success(true)
+ react {
+ case Exit(_: SeriousActor, _) =>
+ val seriousPromise2 = Promise[Boolean]()
+ // PinS, page 694
+ val seriousActor2 = MigrationSystem.actorOf(Props(() =>
+ new Actor {
+ def act() {
+ for (i <- 1 to 5)
+ println("That is the question.")
+ seriousPromise2.success(true)
+ }
+ }
+ , "akka.actor.default-stash-dispatcher"))
+
+ Await.ready(seriousPromise2.future, 5 seconds)
+ val echoActor = makeEchoActor()
+ link(echoActor)
+ echoActor ! "hi there"
+ echoActor ! 15
+ echoActor ! 'stop
+ react {
+ case Exit(_, _) =>
+ val intActor = makeIntActor()
+ intActor ! "hello"
+ intActor ! math.Pi
+ // only the following send leads to output
+ intActor ! 12
+ }
+ }
+ }
+ }
+ }, "akka.actor.default-stash-dispatcher"))
+}
diff --git a/test/files/jvm/actmig-PinS_2.check b/test/files/jvm/actmig-PinS_2.check
new file mode 100644
index 0000000000..bdbdf8a692
--- /dev/null
+++ b/test/files/jvm/actmig-PinS_2.check
@@ -0,0 +1,19 @@
+I'm acting!
+I'm acting!
+I'm acting!
+I'm acting!
+I'm acting!
+Post stop
+To be or not to be.
+To be or not to be.
+To be or not to be.
+To be or not to be.
+To be or not to be.
+That is the question.
+That is the question.
+That is the question.
+That is the question.
+That is the question.
+received message: hi there
+received message: 15
+Got an Int: 12
diff --git a/test/files/jvm/actmig-PinS_2.scala b/test/files/jvm/actmig-PinS_2.scala
new file mode 100644
index 0000000000..46277efd43
--- /dev/null
+++ b/test/files/jvm/actmig-PinS_2.scala
@@ -0,0 +1,155 @@
+import scala.actors.{ MigrationSystem, StashingActor, ActorRef, Props, Exit }
+import scala.concurrent.util.duration._
+import scala.concurrent.{ Promise, Await }
+
+object SillyActor {
+ val startPromise = Promise[Boolean]()
+ val ref = MigrationSystem.actorOf(Props(() => new SillyActor, "default-stash-dispatcher"))
+}
+
+/* PinS, Listing 32.1: A simple actor
+ */
+class SillyActor extends StashingActor {
+
+ def receive = { case _ => println("Nop") }
+
+ override def act() {
+ Await.ready(SillyActor.startPromise.future, 5 seconds)
+ for (i <- 1 to 5)
+ println("I'm acting!")
+
+ println("Post stop")
+ }
+}
+
+object SeriousActor {
+ val startPromise = Promise[Boolean]()
+ val ref = MigrationSystem.actorOf(Props(() => new SeriousActor, "default-stash-dispatcher"))
+}
+
+class SeriousActor extends StashingActor {
+ def receive = { case _ => println("Nop") }
+ override def act() {
+ Await.ready(SeriousActor.startPromise.future, 5 seconds)
+ for (i <- 1 to 5)
+ println("To be or not to be.")
+ }
+}
+
+/* PinS, Listing 32.3: An actor that calls react
+ */
+object NameResolver {
+ val ref = MigrationSystem.actorOf(Props(() => new NameResolver, "default-stash-dispatcher"))
+}
+
+class NameResolver extends StashingActor {
+ import java.net.{ InetAddress, UnknownHostException }
+
+ def receive = { case _ => println("Nop") }
+
+ override def act() {
+ react {
+ case (name: String, actor: ActorRef) =>
+ actor ! getIp(name)
+ act()
+ case "EXIT" =>
+ println("Name resolver exiting.")
+ // quit
+ case msg =>
+ println("Unhandled message: " + msg)
+ act()
+ }
+ }
+
+ def getIp(name: String): Option[InetAddress] = {
+ try {
+ Some(InetAddress.getByName(name))
+ } catch {
+ case _: UnknownHostException => None
+ }
+ }
+
+}
+
+object Test extends App {
+
+ /* PinS, Listing 32.2: An actor that calls receive
+ */
+ def makeEchoActor(): ActorRef = MigrationSystem.actorOf(Props(() =>
+ new StashingActor {
+ def receive = { case _ => println("Nop") }
+
+ override def act() {
+ loop {
+ react {
+ case 'stop =>
+ exit()
+ case msg =>
+ println("received message: " + msg)
+ }
+ }
+ }
+ }, "default-stash-dispatcher"))
+
+ /* PinS, page 696
+ */
+ def makeIntActor(): ActorRef = MigrationSystem.actorOf(Props(() =>new StashingActor {
+
+ def receive = { case _ => println("Nop") }
+
+ override def act() {
+ react {
+ case x: Int => // I only want Ints
+ println("Got an Int: " + x)
+ }
+ }
+ }, "default-stash-dispatcher"))
+
+ MigrationSystem.actorOf(Props(() => new StashingActor {
+
+ def receive = { case _ => println("Nop") }
+
+ override def act() {
+ trapExit = true
+ link(SillyActor.ref)
+ SillyActor.startPromise.success(true)
+ react {
+ case Exit(_: SillyActor, _) =>
+ link(SeriousActor.ref)
+ SeriousActor.startPromise.success(true)
+ react {
+ case Exit(_: SeriousActor, _) =>
+ val seriousPromise2 = Promise[Boolean]()
+ // PinS, page 694
+ val seriousActor2 = MigrationSystem.actorOf(Props(() =>{
+ new StashingActor {
+
+ def receive = { case _ => println("Nop") }
+
+ override def act() {
+ for (i <- 1 to 5)
+ println("That is the question.")
+ seriousPromise2.success(true)
+ }
+ }
+ }, "default-stash-dispatcher"))
+
+ Await.ready(seriousPromise2.future, 5 seconds)
+ val echoActor = makeEchoActor()
+ link(echoActor)
+ echoActor ! "hi there"
+ echoActor ! 15
+ echoActor ! 'stop
+ react {
+ case Exit(_, _) =>
+ val intActor = makeIntActor()
+ intActor ! "hello"
+ intActor ! math.Pi
+ // only the following send leads to output
+ intActor ! 12
+ }
+ }
+ }
+ }
+ }, "default-stash-dispatcher"))
+}
diff --git a/test/files/jvm/actmig-PinS_3.check b/test/files/jvm/actmig-PinS_3.check
new file mode 100644
index 0000000000..bdbdf8a692
--- /dev/null
+++ b/test/files/jvm/actmig-PinS_3.check
@@ -0,0 +1,19 @@
+I'm acting!
+I'm acting!
+I'm acting!
+I'm acting!
+I'm acting!
+Post stop
+To be or not to be.
+To be or not to be.
+To be or not to be.
+To be or not to be.
+To be or not to be.
+That is the question.
+That is the question.
+That is the question.
+That is the question.
+That is the question.
+received message: hi there
+received message: 15
+Got an Int: 12
diff --git a/test/files/jvm/actmig-PinS_3.scala b/test/files/jvm/actmig-PinS_3.scala
new file mode 100644
index 0000000000..321e99b1c2
--- /dev/null
+++ b/test/files/jvm/actmig-PinS_3.scala
@@ -0,0 +1,161 @@
+import scala.actors.{ MigrationSystem, StashingActor, ActorRef, Terminated, Props }
+import scala.concurrent.util.duration._
+import scala.concurrent.{ Promise, Await }
+
+
+object SillyActor {
+ val startPromise = Promise[Boolean]()
+ val ref = MigrationSystem.actorOf(Props(() => new SillyActor, "default-stash-dispatcher"))
+}
+
+/* PinS, Listing 32.1: A simple actor
+ */
+class SillyActor extends StashingActor {
+ def receive = { case _ => println("Why are you not dead"); context.stop(self) }
+
+ override def preStart() {
+ Await.ready(SillyActor.startPromise.future, 5 seconds)
+ for (i <- 1 to 5)
+ println("I'm acting!")
+ context.stop(self)
+ }
+
+ override def postStop() {
+ println("Post stop")
+ }
+}
+
+object SeriousActor {
+ val startPromise = Promise[Boolean]()
+ val ref = MigrationSystem.actorOf(Props(() => new SeriousActor, "default-stash-dispatcher"))
+}
+
+class SeriousActor extends StashingActor {
+ def receive = { case _ => println("Nop") }
+ override def preStart() {
+ Await.ready(SeriousActor.startPromise.future, 5 seconds)
+ for (i <- 1 to 5)
+ println("To be or not to be.")
+ context.stop(self)
+ }
+}
+
+/* PinS, Listing 32.3: An actor that calls react
+ */
+object NameResolver {
+ val ref = MigrationSystem.actorOf(Props(() => new NameResolver, "default-stash-dispatcher"))
+}
+
+class NameResolver extends StashingActor {
+ import java.net.{ InetAddress, UnknownHostException }
+
+ def receive = {
+ case (name: String, actor: ActorRef) =>
+ actor ! getIp(name)
+ case "EXIT" =>
+ println("Name resolver exiting.")
+ context.stop(self) // quit
+ case msg =>
+ println("Unhandled message: " + msg)
+ }
+
+ def getIp(name: String): Option[InetAddress] = {
+ try {
+ Some(InetAddress.getByName(name))
+ } catch {
+ case _: UnknownHostException => None
+ }
+ }
+
+}
+
+object Test extends App {
+
+ /* PinS, Listing 32.2: An actor that calls receive
+ */
+ def makeEchoActor(): ActorRef = MigrationSystem.actorOf(Props(() => new StashingActor {
+
+ def receive = { // how to handle receive
+ case 'stop =>
+ context.stop(self)
+ case msg =>
+ println("received message: " + msg)
+ }
+ }, "default-stash-dispatcher"))
+
+ /* PinS, page 696
+ */
+ def makeIntActor(): ActorRef = MigrationSystem.actorOf(Props(() => new StashingActor {
+
+ def receive = {
+ case x: Int => // I only want Ints
+ unstashAll()
+ println("Got an Int: " + x)
+ context.stop(self)
+ case _ => stash()
+ }
+ }, "default-stash-dispatcher"))
+
+ MigrationSystem.actorOf(Props(() => new StashingActor {
+ val silly = SillyActor.ref
+
+ override def preStart() {
+ context.watch(SillyActor.ref)
+ SillyActor.startPromise.success(true)
+ }
+
+ def receive = {
+ case Terminated(`silly`) =>
+ unstashAll()
+ val serious = SeriousActor.ref
+ context.watch(SeriousActor.ref)
+ SeriousActor.startPromise.success(true)
+ context.become {
+ case Terminated(`serious`) =>
+ val seriousPromise2 = Promise[Boolean]()
+ // PinS, page 694
+ val seriousActor2 = MigrationSystem.actorOf(Props(() => {
+ new StashingActor {
+
+ def receive = { case _ => context.stop(self) }
+
+ override def preStart() = {
+ for (i <- 1 to 5)
+ println("That is the question.")
+ seriousPromise2.success(true)
+ context.stop(self)
+ }
+ }
+ }, "default-stash-dispatcher"))
+
+ Await.ready(seriousPromise2.future, 5 seconds)
+ val echoActor = makeEchoActor()
+ context.watch(echoActor)
+ echoActor ! "hi there"
+ echoActor ! 15
+ echoActor ! 'stop
+ context.become {
+ case Terminated(_) =>
+ unstashAll()
+ val intActor = makeIntActor()
+ intActor ! "hello"
+ intActor ! math.Pi
+ // only the following send leads to output
+ intActor ! 12
+ context.unbecome()
+ context.unbecome()
+ context.stop(self)
+ case m =>
+ println("Stash 1 " + m)
+ stash(m)
+ }
+ case m =>
+ println("Stash 2 " + m)
+ stash(m)
+ }
+ case m =>
+ println("Stash 3 " + m)
+ stash(m)
+ }
+ }, "default-stash-dispatcher"))
+} \ No newline at end of file
diff --git a/test/files/jvm/actmig-hierarchy.check b/test/files/jvm/actmig-hierarchy.check
new file mode 100644
index 0000000000..317e9677c3
--- /dev/null
+++ b/test/files/jvm/actmig-hierarchy.check
@@ -0,0 +1,2 @@
+hello
+hello
diff --git a/test/files/jvm/actmig-hierarchy_1.check b/test/files/jvm/actmig-hierarchy_1.check
new file mode 100644
index 0000000000..317e9677c3
--- /dev/null
+++ b/test/files/jvm/actmig-hierarchy_1.check
@@ -0,0 +1,2 @@
+hello
+hello
diff --git a/test/files/jvm/actmig-instantiation.check b/test/files/jvm/actmig-instantiation.check
new file mode 100644
index 0000000000..4c13d5c0a1
--- /dev/null
+++ b/test/files/jvm/actmig-instantiation.check
@@ -0,0 +1,8 @@
+OK error: java.lang.RuntimeException: In order to create StashingActor one must use actorOf.
+OK error: java.lang.RuntimeException: Only one actor can be created per actorOf call.
+0
+100
+200
+300
+400
+500
diff --git a/test/files/jvm/actmig-loop-react.check b/test/files/jvm/actmig-loop-react.check
new file mode 100644
index 0000000000..54cbe942c0
--- /dev/null
+++ b/test/files/jvm/actmig-loop-react.check
@@ -0,0 +1,15 @@
+do task
+do task
+do task
+do task
+working
+scala got exception
+working
+akka got exception
+do task 1
+do string I am a String
+do task 42
+after react
+do task 1
+do string I am a String
+do task 42
diff --git a/test/files/jvm/actmig-loop-react.scala b/test/files/jvm/actmig-loop-react.scala
new file mode 100644
index 0000000000..d714b26594
--- /dev/null
+++ b/test/files/jvm/actmig-loop-react.scala
@@ -0,0 +1,188 @@
+import scala.actors.MigrationSystem._
+import scala.actors.Actor._
+import scala.actors.{ Actor, StashingActor, ActorRef, Props, MigrationSystem, PoisonPill }
+import java.util.concurrent.{ TimeUnit, CountDownLatch }
+import scala.collection.mutable.ArrayBuffer
+import scala.concurrent.util.duration._
+import scala.concurrent.{ Promise, Await }
+
+
+object Test {
+ val finishedLWCR, finishedTNR, finishedEH = Promise[Boolean]
+ val finishedLWCR1, finishedTNR1, finishedEH1 = Promise[Boolean]
+
+ def testLoopWithConditionReact() = {
+ // Snippet showing composition of receives
+ // Loop with Condition Snippet - before
+ val myActor = actor {
+ var c = true
+ loopWhile(c) {
+ react {
+ case x: Int =>
+ // do task
+ println("do task")
+ if (x == 42) {
+ c = false
+ finishedLWCR1.success(true)
+ }
+ }
+ }
+ }
+
+ myActor.start()
+ myActor ! 1
+ myActor ! 42
+
+ Await.ready(finishedLWCR1.future, 5 seconds)
+
+ // Loop with Condition Snippet - migrated
+ val myAkkaActor = MigrationSystem.actorOf(Props(() => new StashingActor {
+
+ def receive = {
+ case x: Int =>
+ // do task
+ println("do task")
+ if (x == 42) {
+ finishedLWCR.success(true)
+ context.stop(self)
+ }
+ }
+ }, "default-stashing-dispatcher"))
+ myAkkaActor ! 1
+ myAkkaActor ! 42
+ }
+
+ def testNestedReact() = {
+ // Snippet showing composition of receives
+ // Loop with Condition Snippet - before
+ val myActor = actor {
+ var c = true
+ loopWhile(c) {
+ react {
+ case x: Int =>
+ // do task
+ println("do task " + x)
+ if (x == 42) {
+ c = false
+ finishedTNR1.success(true)
+ } else
+ react {
+ case y: String =>
+ println("do string " + y)
+ }
+ println("after react")
+ }
+ }
+ }
+ myActor.start()
+
+ myActor ! 1
+ myActor ! "I am a String"
+ myActor ! 42
+
+ Await.ready(finishedTNR1.future, 5 seconds)
+
+ // Loop with Condition Snippet - migrated
+ val myAkkaActor = MigrationSystem.actorOf(Props(() => new StashingActor {
+
+ def receive = {
+ case x: Int =>
+ // do task
+ println("do task " + x)
+ if (x == 42) {
+ finishedTNR.success(true)
+ context.stop(self)
+ } else
+ context.become(({
+ case y: String =>
+ println("do string " + y)
+ }: Receive).andThen(x => {
+ unstashAll()
+ context.unbecome()
+ }).orElse { case x => stash() })
+ }
+ }, "default-stashing-dispatcher"))
+
+ myAkkaActor ! 1
+ myAkkaActor ! "I am a String"
+ myAkkaActor ! 42
+
+ }
+
+ def exceptionHandling() = {
+ // Stashing actor with act and exception handler
+ val myActor = MigrationSystem.actorOf(Props(() => new StashingActor {
+
+ def receive = { case _ => println("Dummy method.") }
+ override def act() = {
+ loop {
+ react {
+ case "fail" =>
+ throw new Exception("failed")
+ case "work" =>
+ println("working")
+ case "die" =>
+ finishedEH1.success(true)
+ exit()
+ }
+ }
+ }
+
+ override def exceptionHandler = {
+ case x: Exception => println("scala got exception")
+ }
+
+ }, "default-stashing-dispatcher"))
+
+ myActor ! "work"
+ myActor ! "fail"
+ myActor ! "die"
+
+ Await.ready(finishedEH1.future, 5 seconds)
+ // Stashing actor in Akka style
+ val myAkkaActor = MigrationSystem.actorOf(Props(() => new StashingActor {
+ def receive = PFCatch({
+ case "fail" =>
+ throw new Exception("failed")
+ case "work" =>
+ println("working")
+ case "die" =>
+ finishedEH.success(true)
+ context.stop(self)
+ }, { case x: Exception => println("akka got exception") })
+ }, "default-stashing-dispatcher"))
+
+ myAkkaActor ! "work"
+ myAkkaActor ! "fail"
+ myAkkaActor ! "die"
+ }
+
+ def main(args: Array[String]) = {
+ testLoopWithConditionReact()
+ Await.ready(finishedLWCR.future, 5 seconds)
+ exceptionHandling()
+ Await.ready(finishedEH.future, 5 seconds)
+ testNestedReact()
+ Await.ready(finishedTNR.future, 5 seconds)
+ }
+
+}
+
+// As per Jim Mcbeath's blog (http://jim-mcbeath.blogspot.com/2008/07/actor-exceptions.html)
+class PFCatch(f: PartialFunction[Any, Unit], handler: PartialFunction[Exception, Unit])
+ extends PartialFunction[Any, Unit] {
+
+ def apply(x: Any) = {
+ try {
+ f(x)
+ } catch {
+ case e: Exception if handler.isDefinedAt(e) => handler(e)
+ }
+ }
+
+ def isDefinedAt(x: Any) = f.isDefinedAt(x)
+}
+
+object PFCatch {
+ def apply(f: PartialFunction[Any, Unit], handler: PartialFunction[Exception, Unit]) = new PFCatch(f, handler)
+}
diff --git a/test/files/jvm/actmig-public-methods.check b/test/files/jvm/actmig-public-methods.check
new file mode 100644
index 0000000000..bb6530c926
--- /dev/null
+++ b/test/files/jvm/actmig-public-methods.check
@@ -0,0 +1,6 @@
+None
+Some(bang qmark after 1)
+bang
+bang qmark after 0
+bang qmark in future after 0
+typed bang qmark in future after 0
diff --git a/test/files/jvm/actmig-public-methods_1.check b/test/files/jvm/actmig-public-methods_1.check
new file mode 100644
index 0000000000..bb6530c926
--- /dev/null
+++ b/test/files/jvm/actmig-public-methods_1.check
@@ -0,0 +1,6 @@
+None
+Some(bang qmark after 1)
+bang
+bang qmark after 0
+bang qmark in future after 0
+typed bang qmark in future after 0
diff --git a/test/files/jvm/actmig-public-methods_1.scala b/test/files/jvm/actmig-public-methods_1.scala
new file mode 100644
index 0000000000..7e5bc24210
--- /dev/null
+++ b/test/files/jvm/actmig-public-methods_1.scala
@@ -0,0 +1,88 @@
+import scala.collection.mutable.ArrayBuffer
+import scala.actors.Actor._
+import scala.actors._
+import scala.util._
+import java.util.concurrent.{ TimeUnit, CountDownLatch }
+import scala.concurrent.util.Duration
+import scala.actors.pattern._
+
+object Test {
+ val NUMBER_OF_TESTS = 6
+
+ // used for sorting non-deterministic output
+ val buff = ArrayBuffer[String]()
+ val latch = new CountDownLatch(NUMBER_OF_TESTS)
+ val toStop = ArrayBuffer[ActorRef]()
+
+ def append(v: String) = synchronized {
+ buff += v
+ }
+
+ def main(args: Array[String]) = {
+
+ val respActor = MigrationSystem.actorOf(Props(() => actor {
+ loop {
+ react {
+ case (x: String, time: Long) =>
+ Thread.sleep(time)
+ reply(x + " after " + time)
+ case str: String =>
+ append(str)
+ latch.countDown()
+ case x =>
+ exit()
+ }
+ }
+ }, "akka.actor.default-stash-dispatcher"))
+
+ toStop += respActor
+
+ respActor ! "bang"
+
+ implicit val timeout = Timeout(Duration(500, TimeUnit.MILLISECONDS))
+ val msg = ("bang qmark", 0L)
+ val res1 = respActor.?(msg)(Timeout(Duration.Inf))
+ append(res1().toString)
+ latch.countDown()
+
+ val msg1 = ("bang qmark", 1L)
+ val res2 = respActor.?(msg1)(Timeout(Duration(500, TimeUnit.MILLISECONDS)))
+ append((res2() match {
+ case x: AskTimeoutException => None
+ case v => Some(v)
+ }).toString)
+ latch.countDown()
+
+ // this one should time out
+ val msg11 = ("bang qmark", 500L)
+ val res21 = respActor.?(msg11)(Timeout(Duration(1, TimeUnit.MILLISECONDS)))
+ append((res21() match {
+ case x: AskTimeoutException => None
+ case v => Some(v)
+ }).toString)
+ latch.countDown()
+
+ val msg2 = ("bang qmark in future", 0L)
+ val fut1 = respActor.?(msg2)(Duration.Inf)
+ append(fut1().toString())
+ latch.countDown()
+
+ val handler: PartialFunction[Any, String] = {
+ case x: String => x.toString
+ }
+
+ val msg3 = ("typed bang qmark in future", 0L)
+ val fut2 = (respActor.?(msg3)(Duration.Inf))
+ append(Futures.future { handler.apply(fut2()) }().toString)
+ latch.countDown()
+
+ // output
+ latch.await(200, TimeUnit.MILLISECONDS)
+ if (latch.getCount() > 0) {
+ println("Error: Tasks have not finished!!!")
+ }
+
+ buff.sorted.foreach(println)
+ toStop.foreach(_ ! PoisonPill)
+ }
+}
diff --git a/test/files/jvm/actmig-react-receive.check b/test/files/jvm/actmig-react-receive.check
new file mode 100644
index 0000000000..cc2a426e68
--- /dev/null
+++ b/test/files/jvm/actmig-react-receive.check
@@ -0,0 +1,16 @@
+do before
+do task
+do after
+do before
+do task
+do after
+do before
+do task
+do in between
+do string
+do after
+do before
+do task
+do in between
+do string
+do after
diff --git a/test/files/jvm/actmig-react-receive.scala b/test/files/jvm/actmig-react-receive.scala
new file mode 100644
index 0000000000..8464a2af79
--- /dev/null
+++ b/test/files/jvm/actmig-react-receive.scala
@@ -0,0 +1,111 @@
+import scala.actors.MigrationSystem._
+import scala.actors.Actor._
+import scala.actors.{ Actor, StashingActor, ActorRef, Props, MigrationSystem, PoisonPill }
+import java.util.concurrent.{ TimeUnit, CountDownLatch }
+import scala.collection.mutable.ArrayBuffer
+import scala.concurrent.util.duration._
+import scala.concurrent.{ Promise, Await }
+
+object Test {
+ val finishedRS, finishedRS1, finishedRSC, finishedRSC1 = Promise[Boolean]
+ def testComposition() = {
+ // Snippet showing composition of receives
+ // React Snippet - before
+ val myActor = actor {
+ // do before
+ println("do before")
+ receive {
+ case x: Int =>
+ // do task
+ println("do task")
+ }
+ println("do in between")
+ receive {
+ case x: String =>
+ // do string now
+ println("do string")
+ }
+ println("do after")
+ finishedRSC1.success(true)
+ }
+ myActor.start()
+ myActor ! 1
+ myActor ! "1"
+ Await.ready(finishedRSC1.future, 5 seconds)
+
+ // React Snippet - migrated
+ val myAkkaActor = MigrationSystem.actorOf(Props(() => new StashingActor {
+ override def preStart() = {
+ println("do before")
+ }
+
+ def receive = ({
+ case x: Int =>
+ // do task
+ println("do task")
+ }: Receive) andThen { v =>
+ context.become {
+ case x: String =>
+ //do string
+ println("do string")
+ context.stop(self)
+ }
+ println("do in between")
+ }
+
+ override def postStop() = {
+ println("do after")
+ finishedRSC.success(true)
+ }
+
+ }, "default-stashing-dispatcher"))
+ myAkkaActor ! 1
+ myAkkaActor ! "1"
+ Await.ready(finishedRSC.future, 5 seconds)
+ }
+
+ def main(args: Array[String]) = {
+ // React Snippet - before
+ val myActor = actor {
+ // do before
+ println("do before")
+ receive {
+ case x: Int =>
+ // do task
+ println("do task")
+ }
+ println("do after")
+ finishedRS1.success(true)
+ }
+ myActor.start()
+ myActor ! 1
+
+ Await.ready(finishedRS1.future, 5 seconds)
+
+ // React Snippet - migrated
+ val myAkkaActor = MigrationSystem.actorOf(Props(() => new StashingActor {
+ override def preStart() = {
+ println("do before")
+ }
+
+ def receive = {
+ case x: Int =>
+ // do task
+ println("do task")
+ context.stop(self)
+ }
+
+ override def postStop() = {
+ println("do after")
+ finishedRS.success(true)
+ }
+
+ }, "default-stashing-dispatcher"))
+ myAkkaActor ! 1
+
+ Await.ready(finishedRS.future, 5 seconds)
+ // Starting composition test
+ testComposition()
+
+ }
+}
diff --git a/test/files/lib/scalacheck.jar.desired.sha1 b/test/files/lib/scalacheck.jar.desired.sha1
index 2be7479415..e6ed543d73 100644
--- a/test/files/lib/scalacheck.jar.desired.sha1
+++ b/test/files/lib/scalacheck.jar.desired.sha1
@@ -1 +1 @@
-f8cd51e0f78e30b3ac444b741b0b2249ac8248bb ?scalacheck.jar
+b6f4dbb29f0c2ec1eba682414f60d52fea84f703 ?scalacheck.jar
diff --git a/test/files/neg/t4270.check b/test/files/neg/t4270.check
new file mode 100644
index 0000000000..cfe0a93e00
--- /dev/null
+++ b/test/files/neg/t4270.check
@@ -0,0 +1,4 @@
+t4270.scala:5: error: could not find implicit value for parameter e: Int
+ implicitly[Int]
+ ^
+one error found
diff --git a/test/files/neg/t4270.scala b/test/files/neg/t4270.scala
new file mode 100644
index 0000000000..2c7c71d8c2
--- /dev/null
+++ b/test/files/neg/t4270.scala
@@ -0,0 +1,6 @@
+object Test1 {
+ object A { implicit val x: Int = 1 }
+ import A.x
+ def x: Int = 0
+ implicitly[Int]
+}
diff --git a/test/files/neg/t4541.check b/test/files/neg/t4541.check
new file mode 100644
index 0000000000..c01226685f
--- /dev/null
+++ b/test/files/neg/t4541.check
@@ -0,0 +1,7 @@
+t4541.scala:11: error: scala.reflect.internal.Types$TypeError: variable data in class Sparse cannot be accessed in Sparse[Int]
+ Access to protected method data not permitted because
+ prefix type Sparse[Int] does not conform to
+ class Sparse$mcI$sp where the access take place
+ that.data
+ ^
+one error found \ No newline at end of file
diff --git a/test/files/neg/t4541.scala b/test/files/neg/t4541.scala
new file mode 100644
index 0000000000..744af1c288
--- /dev/null
+++ b/test/files/neg/t4541.scala
@@ -0,0 +1,16 @@
+
+
+
+
+
+
+@SerialVersionUID(1L)
+final class Sparse[@specialized(Int) T](d: Array[T]) extends Serializable {
+ protected var data: Array[T] = d
+ def set(that: Sparse[T]) = {
+ that.data
+ }
+}
+
+
+
diff --git a/test/files/neg/t4541b.check b/test/files/neg/t4541b.check
new file mode 100644
index 0000000000..54d9c3d1ee
--- /dev/null
+++ b/test/files/neg/t4541b.check
@@ -0,0 +1,7 @@
+t4541b.scala:13: error: scala.reflect.internal.Types$TypeError: variable data in class SparseArray cannot be accessed in SparseArray[Int]
+ Access to protected method data not permitted because
+ prefix type SparseArray[Int] does not conform to
+ class SparseArray$mcI$sp where the access take place
+ use(that.data.clone)
+ ^
+one error found \ No newline at end of file
diff --git a/test/files/neg/t4541b.scala b/test/files/neg/t4541b.scala
new file mode 100644
index 0000000000..7a21ffc156
--- /dev/null
+++ b/test/files/neg/t4541b.scala
@@ -0,0 +1,16 @@
+
+
+
+
+
+@SerialVersionUID(1L)
+final class SparseArray[@specialized(Int) T](private var data: Array[T]) extends Serializable {
+ def use(inData: Array[T]) = {
+ data = inData;
+ }
+
+ def set(that: SparseArray[T]) = {
+ use(that.data.clone)
+ }
+}
+
diff --git a/test/files/neg/t4842a.check b/test/files/neg/t4842a.check
new file mode 100644
index 0000000000..39d77bfc48
--- /dev/null
+++ b/test/files/neg/t4842a.check
@@ -0,0 +1,4 @@
+t4842a.scala:2: error: self constructor arguments cannot reference unconstructed `this`
+ def this(x: Int) = this(new { println(Foo.this)}) // error
+ ^
+one error found
diff --git a/test/files/neg/t4842a.scala b/test/files/neg/t4842a.scala
new file mode 100644
index 0000000000..78360effb4
--- /dev/null
+++ b/test/files/neg/t4842a.scala
@@ -0,0 +1,3 @@
+class Foo (x: AnyRef) {
+ def this(x: Int) = this(new { println(Foo.this)}) // error
+}
diff --git a/test/files/neg/t4842b.check b/test/files/neg/t4842b.check
new file mode 100644
index 0000000000..c7ccd5e059
--- /dev/null
+++ b/test/files/neg/t4842b.check
@@ -0,0 +1,4 @@
+t4842b.scala:2: error: self constructor arguments cannot reference unconstructed `this`
+ def this() = { this(???)(new { println(TypeArg.this.x) } ); println("next") } // error
+ ^
+one error found
diff --git a/test/files/neg/t4842b.scala b/test/files/neg/t4842b.scala
new file mode 100644
index 0000000000..a7996cc061
--- /dev/null
+++ b/test/files/neg/t4842b.scala
@@ -0,0 +1,3 @@
+class TypeArg[X](val x: X)(a: AnyRef) {
+ def this() = { this(???)(new { println(TypeArg.this.x) } ); println("next") } // error
+}
diff --git a/test/files/neg/t5334_1.check b/test/files/neg/t5334_1.check
deleted file mode 100644
index eca854964a..0000000000
--- a/test/files/neg/t5334_1.check
+++ /dev/null
@@ -1,4 +0,0 @@
-t5334_1.scala:5: error: implementation restriction: cannot reify block of type C that involves a type declared inside the block being reified. consider casting the return value to a suitable type
- reify {
- ^
-one error found
diff --git a/test/files/neg/t5334_2.check b/test/files/neg/t5334_2.check
deleted file mode 100644
index e21f0d5967..0000000000
--- a/test/files/neg/t5334_2.check
+++ /dev/null
@@ -1,4 +0,0 @@
-t5334_2.scala:5: error: implementation restriction: cannot reify block of type List[(C, C)] that involves a type declared inside the block being reified. consider casting the return value to a suitable type
- reify {
- ^
-one error found
diff --git a/test/files/neg/t5376.check b/test/files/neg/t5376.check
new file mode 100644
index 0000000000..0376163c35
--- /dev/null
+++ b/test/files/neg/t5376.check
@@ -0,0 +1,11 @@
+t5376.scala:12: error: type mismatch;
+ found : String("a")
+ required: Int
+ "a": Int
+ ^
+t5376.scala:22: error: type mismatch;
+ found : String("a")
+ required: Int
+ "a": Int
+ ^
+two errors found
diff --git a/test/files/neg/t5376.scala b/test/files/neg/t5376.scala
new file mode 100644
index 0000000000..8da3868566
--- /dev/null
+++ b/test/files/neg/t5376.scala
@@ -0,0 +1,24 @@
+object Test {
+ object O1 { implicit def f(s: String): Int = 1 }
+ object O2 { implicit def f(s: String): Int = 2 }
+ object O3 { def f(s: String): Int = 3 }
+
+ // Import two implicits with the same name in the same scope.
+ def m1 = {
+ import O1._
+ import O2._
+
+ // Implicit usage compiles.
+ "a": Int
+ }
+
+  // Import one implicit and one non-implicit method with the
+ // same name in the same scope.
+ def m2 = {
+ import O1._
+ import O3._
+
+ // Implicit usage compiles.
+ "a": Int
+ }
+} \ No newline at end of file
diff --git a/test/files/neg/t5429.check b/test/files/neg/t5429.check
index 1b89c59587..4350696bc8 100644
--- a/test/files/neg/t5429.check
+++ b/test/files/neg/t5429.check
@@ -46,14 +46,18 @@ t5429.scala:38: error: overriding method emptyArg in class A of type ()Int;
object emptyArg has incompatible type
override object emptyArg // fail
^
-t5429.scala:39: error: object oneArg overrides nothing
+t5429.scala:39: error: object oneArg overrides nothing.
+Note: the super classes of class C contain the following, non final members named oneArg:
+def oneArg(x: String): Int
override object oneArg // fail
^
t5429.scala:43: error: overriding lazy value lazyvalue in class A0 of type Any;
object lazyvalue must be declared lazy to override a concrete lazy value
override object lazyvalue // !!! this fails, but should succeed (lazy over lazy)
^
-t5429.scala:46: error: object oneArg overrides nothing
+t5429.scala:46: error: object oneArg overrides nothing.
+Note: the super classes of class C0 contain the following, non final members named oneArg:
+def oneArg(x: String): Any
override object oneArg // fail
^
t5429.scala:50: error: overriding value value in class A of type Int;
@@ -76,7 +80,9 @@ t5429.scala:58: error: overriding lazy value lazyvalue in class A0 of type Any;
value lazyvalue must be declared lazy to override a concrete lazy value
override val lazyvalue = 0 // fail (non-lazy)
^
-t5429.scala:61: error: value oneArg overrides nothing
+t5429.scala:61: error: value oneArg overrides nothing.
+Note: the super classes of class D0 contain the following, non final members named oneArg:
+def oneArg(x: String): Any
override val oneArg = 15 // fail
^
t5429.scala:65: error: overriding value value in class A of type Int;
@@ -103,7 +109,9 @@ t5429.scala:73: error: overriding lazy value lazyvalue in class A0 of type Any;
method lazyvalue needs to be a stable, immutable value
override def lazyvalue = 2 // fail
^
-t5429.scala:76: error: method oneArg overrides nothing
+t5429.scala:76: error: method oneArg overrides nothing.
+Note: the super classes of class E0 contain the following, non final members named oneArg:
+def oneArg(x: String): Any
override def oneArg = 15 // fail
^
t5429.scala:80: error: overriding value value in class A of type Int;
@@ -126,7 +134,9 @@ t5429.scala:87: error: overriding value value in class A0 of type Any;
lazy value value cannot override a concrete non-lazy value
override lazy val value = 0 // fail (strict over lazy)
^
-t5429.scala:91: error: value oneArg overrides nothing
+t5429.scala:91: error: value oneArg overrides nothing.
+Note: the super classes of class F0 contain the following, non final members named oneArg:
+def oneArg(x: String): Any
override lazy val oneArg = 15 // fail
^
34 errors found
diff --git a/test/files/neg/t5617.check b/test/files/neg/t5617.check
new file mode 100644
index 0000000000..79cc3a1e32
--- /dev/null
+++ b/test/files/neg/t5617.check
@@ -0,0 +1,8 @@
+t5617.scala:12: error: method foo overrides nothing.
+Note: the super classes of trait C contain the following, non final members named foo:
+def foo(u: Unit): Int
+def foo(x: Boolean): Int
+def foo(i: Int)(b: String): Int
+ override def foo(s: String): Int
+ ^
+one error found
diff --git a/test/files/neg/t5617.scala b/test/files/neg/t5617.scala
new file mode 100644
index 0000000000..41541b5e90
--- /dev/null
+++ b/test/files/neg/t5617.scala
@@ -0,0 +1,14 @@
+trait A {
+ def foo(i: Int)(b: String): Int
+ def foo(u: Unit): Int // not reported
+ def foo(x: Float): Int // not reported
+}
+trait B[X] extends A {
+ def foo(x: X): Int
+ def foo(u: Unit): Int
+ final def foo(x: Float): Int = 0 // not reported
+}
+trait C extends B[Boolean] {
+ override def foo(s: String): Int
+ val foo = 0 // not reported
+}
diff --git a/test/files/pos/nonlocal-unchecked.flags b/test/files/pos/nonlocal-unchecked.flags
new file mode 100644
index 0000000000..144ddac9d3
--- /dev/null
+++ b/test/files/pos/nonlocal-unchecked.flags
@@ -0,0 +1 @@
+-unchecked -Xfatal-warnings
diff --git a/test/files/pos/nonlocal-unchecked.scala b/test/files/pos/nonlocal-unchecked.scala
new file mode 100644
index 0000000000..6bd3dc479e
--- /dev/null
+++ b/test/files/pos/nonlocal-unchecked.scala
@@ -0,0 +1,6 @@
+class A {
+ def f: Boolean = {
+ val xs = Nil map (_ => return false)
+ true
+ }
+}
diff --git a/test/files/pos/t4842.scala b/test/files/pos/t4842.scala
new file mode 100644
index 0000000000..17ff684833
--- /dev/null
+++ b/test/files/pos/t4842.scala
@@ -0,0 +1,26 @@
+class Foo (x: AnyRef) {
+ def this() = {
+ this(new { } ) // okay
+ }
+}
+
+
+class Blerg (x: AnyRef) {
+ def this() = {
+ this(new { class Bar { println(Bar.this); new { println(Bar.this) } }; new Bar } ) // okay
+ }
+}
+
+
+class Outer {
+ class Inner (x: AnyRef) {
+ def this() = {
+ this(new { class Bar { println(Bar.this); new { println(Bar.this) } }; new Bar } ) // okay
+ }
+
+ def this(x: Boolean) = {
+ this(new { println(Outer.this) } ) // okay
+ }
+ }
+}
+
diff --git a/test/files/pos/t5910.java b/test/files/pos/t5910.java
new file mode 100644
index 0000000000..e007a1fbb5
--- /dev/null
+++ b/test/files/pos/t5910.java
@@ -0,0 +1,2 @@
+class Foo {
+};;;;;;; \ No newline at end of file
diff --git a/test/files/pos/t5953.scala b/test/files/pos/t5953.scala
new file mode 100644
index 0000000000..90e7d84646
--- /dev/null
+++ b/test/files/pos/t5953.scala
@@ -0,0 +1,16 @@
+import scala.collection.{ mutable, immutable, generic, GenTraversableOnce }
+
+package object foo {
+ @inline implicit class TravOps[A, CC[A] <: GenTraversableOnce[A]](val coll: CC[A]) extends AnyVal {
+ def build[CC2[X]](implicit cbf: generic.CanBuildFrom[Nothing, A, CC2[A]]): CC2[A] = {
+ cbf() ++= coll.toIterator result
+ }
+ }
+}
+
+package foo {
+ object Test {
+ def f1[T](xs: Traversable[T]) = xs.convertTo[immutable.Vector]
+ def f2[T](xs: Traversable[T]) = xs.build[immutable.Vector]
+ }
+}
diff --git a/test/files/presentation/ide-bug-1000531.check b/test/files/presentation/ide-bug-1000531.check
index e813ce119b..9d4674d7c7 100644
--- a/test/files/presentation/ide-bug-1000531.check
+++ b/test/files/presentation/ide-bug-1000531.check
@@ -3,7 +3,7 @@ reload: CrashOnLoad.scala
askTypeCompletion at CrashOnLoad.scala(6,12)
================================================================================
[response] aksTypeCompletion at (6,12)
-retrieved 124 members
+retrieved 126 members
[accessible: true] `class GroupedIteratorIterator[B]#GroupedIterator`
[accessible: true] `method !=(x$1: Any)Boolean`
[accessible: true] `method !=(x$1: AnyRef)Boolean`
@@ -25,6 +25,7 @@ retrieved 124 members
[accessible: true] `method collectFirst[B](pf: PartialFunction[B,B])Option[B]`
[accessible: true] `method collect[B](pf: PartialFunction[B,B])Iterator[B]`
[accessible: true] `method contains(elem: Any)Boolean`
+[accessible: true] `method convertTo[Col[_]](implicit cbf: scala.collection.generic.CanBuildFrom[Nothing,B,Col[B]])Col[B]`
[accessible: true] `method copyToArray[B >: B](xs: Array[B])Unit`
[accessible: true] `method copyToArray[B >: B](xs: Array[B], start: Int)Unit`
[accessible: true] `method copyToArray[B >: B](xs: Array[B], start: Int, len: Int)Unit`
@@ -109,6 +110,7 @@ retrieved 124 members
[accessible: true] `method toStream=> scala.collection.immutable.Stream[B]`
[accessible: true] `method toString()String`
[accessible: true] `method toTraversable=> Traversable[B]`
+[accessible: true] `method toVector=> Vector[B]`
[accessible: true] `method wait()Unit`
[accessible: true] `method wait(x$1: Long)Unit`
[accessible: true] `method wait(x$1: Long, x$2: Int)Unit`
diff --git a/test/files/run/collection-conversions.check b/test/files/run/collection-conversions.check
new file mode 100644
index 0000000000..08d0fa32c5
--- /dev/null
+++ b/test/files/run/collection-conversions.check
@@ -0,0 +1,104 @@
+-- Testing iterator ---
+ :[Direct] Vector : OK
+ :[Copy] Vector : OK
+ :[Direct] Buffer : OK
+ :[Copy] Buffer : OK
+ :[Direct] GenSeq : OK
+ :[Copy] GenSeq : OK
+ :[Copy] Seq : OK
+ :[Direct] Stream : OK
+ :[Copy] Stream : OK
+ :[Direct] Array : OK
+ :[Copy] Array : OK
+ :[Copy] ParVector: OK
+-- Testing Vector ---
+ :[Direct] Vector : OK
+ :[Copy] Vector : OK
+ :[Direct] Buffer : OK
+ :[Copy] Buffer : OK
+ :[Direct] GenSeq : OK
+ :[Copy] GenSeq : OK
+ :[Copy] Seq : OK
+ :[Direct] Stream : OK
+ :[Copy] Stream : OK
+ :[Direct] Array : OK
+ :[Copy] Array : OK
+ :[Copy] ParVector: OK
+-- Testing List ---
+ :[Direct] Vector : OK
+ :[Copy] Vector : OK
+ :[Direct] Buffer : OK
+ :[Copy] Buffer : OK
+ :[Direct] GenSeq : OK
+ :[Copy] GenSeq : OK
+ :[Copy] Seq : OK
+ :[Direct] Stream : OK
+ :[Copy] Stream : OK
+ :[Direct] Array : OK
+ :[Copy] Array : OK
+ :[Copy] ParVector: OK
+-- Testing Buffer ---
+ :[Direct] Vector : OK
+ :[Copy] Vector : OK
+ :[Direct] Buffer : OK
+ :[Copy] Buffer : OK
+ :[Direct] GenSeq : OK
+ :[Copy] GenSeq : OK
+ :[Copy] Seq : OK
+ :[Direct] Stream : OK
+ :[Copy] Stream : OK
+ :[Direct] Array : OK
+ :[Copy] Array : OK
+ :[Copy] ParVector: OK
+-- Testing ParVector ---
+ :[Direct] Vector : OK
+ :[Copy] Vector : OK
+ :[Direct] Buffer : OK
+ :[Copy] Buffer : OK
+ :[Direct] GenSeq : OK
+ :[Copy] GenSeq : OK
+ :[Copy] Seq : OK
+ :[Direct] Stream : OK
+ :[Copy] Stream : OK
+ :[Direct] Array : OK
+ :[Copy] Array : OK
+ :[Copy] ParVector: OK
+-- Testing Set ---
+ :[Direct] Vector : OK
+ :[Copy] Vector : OK
+ :[Direct] Buffer : OK
+ :[Copy] Buffer : OK
+ :[Direct] GenSeq : OK
+ :[Copy] GenSeq : OK
+ :[Copy] Seq : OK
+ :[Direct] Stream : OK
+ :[Copy] Stream : OK
+ :[Direct] Array : OK
+ :[Copy] Array : OK
+ :[Copy] ParVector: OK
+-- Testing SetView ---
+ :[Direct] Vector : OK
+ :[Copy] Vector : OK
+ :[Direct] Buffer : OK
+ :[Copy] Buffer : OK
+ :[Direct] GenSeq : OK
+ :[Copy] GenSeq : OK
+ :[Copy] Seq : OK
+ :[Direct] Stream : OK
+ :[Copy] Stream : OK
+ :[Direct] Array : OK
+ :[Copy] Array : OK
+ :[Copy] ParVector: OK
+-- Testing BufferView ---
+ :[Direct] Vector : OK
+ :[Copy] Vector : OK
+ :[Direct] Buffer : OK
+ :[Copy] Buffer : OK
+ :[Direct] GenSeq : OK
+ :[Copy] GenSeq : OK
+ :[Copy] Seq : OK
+ :[Direct] Stream : OK
+ :[Copy] Stream : OK
+ :[Direct] Array : OK
+ :[Copy] Array : OK
+ :[Copy] ParVector: OK
diff --git a/test/files/run/collection-conversions.scala b/test/files/run/collection-conversions.scala
new file mode 100644
index 0000000000..b5c4d8e261
--- /dev/null
+++ b/test/files/run/collection-conversions.scala
@@ -0,0 +1,60 @@
+import collection._
+import mutable.Buffer
+import parallel.immutable.ParVector
+import reflect.ClassTag
+
+object Test {
+
+ def printResult[A,B](msg: String, obj: A, expected: B)(implicit tag: ClassTag[A], tag2: ClassTag[B]) = {
+ print(" :" + msg +": ")
+ val isArray = obj match {
+ case x: Array[Int] => true
+ case _ => false
+ }
+ val expectedEquals =
+ if(isArray) obj.asInstanceOf[Array[Int]].toSeq == expected.asInstanceOf[Array[Int]].toSeq
+ else obj == expected
+ val tagEquals = tag == tag2
+ if(expectedEquals && tagEquals) print("OK")
+ else print("FAILED")
+ if(!expectedEquals) print(", " + obj + " != " + expected)
+ if(!tagEquals) print(", " + tag + " != " + tag2)
+ println("")
+ }
+
+ val testVector = Vector(1,2,3)
+ val testBuffer = Buffer(1,2,3)
+ val testGenSeq = GenSeq(1,2,3)
+ val testSeq = Seq(1,2,3)
+ val testStream = Stream(1,2,3)
+ val testArray = Array(1,2,3)
+ val testParVector = ParVector(1,2,3)
+
+ def testConversion[A: ClassTag](name: String, col: => GenTraversableOnce[A]): Unit = {
+ val tmp = col
+ println("-- Testing " + name + " ---")
+ printResult("[Direct] Vector ", col.toVector, testVector)
+ printResult("[Copy] Vector ", col.convertTo[Vector], testVector)
+ printResult("[Direct] Buffer ", col.toBuffer, testBuffer)
+ printResult("[Copy] Buffer ", col.convertTo[Buffer], testBuffer)
+ printResult("[Direct] GenSeq ", col.toSeq, testGenSeq)
+ printResult("[Copy] GenSeq ", col.convertTo[GenSeq], testGenSeq)
+ printResult("[Copy] Seq ", col.convertTo[Seq], testSeq)
+ printResult("[Direct] Stream ", col.toStream, testStream)
+ printResult("[Copy] Stream ", col.convertTo[Stream], testStream)
+ printResult("[Direct] Array ", col.toArray, testArray)
+ printResult("[Copy] Array ", col.convertTo[Array], testArray)
+ printResult("[Copy] ParVector", col.convertTo[ParVector], testParVector)
+ }
+
+ def main(args: Array[String]): Unit = {
+ testConversion("iterator", (1 to 3).iterator)
+ testConversion("Vector", Vector(1,2,3))
+ testConversion("List", List(1,2,3))
+ testConversion("Buffer", Buffer(1,2,3))
+ testConversion("ParVector", ParVector(1,2,3))
+ testConversion("Set", Set(1,2,3))
+ testConversion("SetView", Set(1,2,3).view)
+ testConversion("BufferView", Buffer(1,2,3).view)
+ }
+}
diff --git a/test/files/run/showraw_mods.check b/test/files/run/showraw_mods.check
new file mode 100644
index 0000000000..83055f2b70
--- /dev/null
+++ b/test/files/run/showraw_mods.check
@@ -0,0 +1 @@
+Block(List(ClassDef(Modifiers(ABSTRACT | DEFAULTPARAM/TRAIT), newTypeName("C"), List(), Template(List(Ident(java.lang.Object)), emptyValDef, List(DefDef(Modifiers(), newTermName("$init$"), List(), List(List()), TypeTree(), Block(List(), Literal(Constant(())))), ValDef(Modifiers(PRIVATE | LOCAL), newTermName("x"), TypeTree(), Literal(Constant(2))), ValDef(Modifiers(MUTABLE), newTermName("y"), TypeTree(), Select(This(newTypeName("C")), newTermName("x"))), ValDef(Modifiers(LAZY), newTermName("z"), TypeTree(), Select(This(newTypeName("C")), newTermName("y"))))))), Literal(Constant(())))
diff --git a/test/files/run/showraw_mods.scala b/test/files/run/showraw_mods.scala
new file mode 100644
index 0000000000..a10e4821dc
--- /dev/null
+++ b/test/files/run/showraw_mods.scala
@@ -0,0 +1,6 @@
+import scala.reflect.runtime.universe._
+
+object Test extends App {
+ val tree = reify{trait C { private[this] val x = 2; var y = x; lazy val z = y }}
+ println(showRaw(tree.tree))
+} \ No newline at end of file
diff --git a/test/files/run/showraw_tree.check b/test/files/run/showraw_tree.check
new file mode 100644
index 0000000000..82724cae44
--- /dev/null
+++ b/test/files/run/showraw_tree.check
@@ -0,0 +1,2 @@
+Apply(Select(New(AppliedTypeTree(Ident(scala.collection.immutable.HashMap), List(Ident(java.lang.String), Ident(java.lang.String)))), nme.CONSTRUCTOR), List())
+Apply(Select(New(AppliedTypeTree(Ident(scala.collection.mutable.HashMap), List(Ident(java.lang.String), Ident(java.lang.String)))), nme.CONSTRUCTOR), List())
diff --git a/test/files/run/showraw_tree.scala b/test/files/run/showraw_tree.scala
new file mode 100644
index 0000000000..3624a24d6a
--- /dev/null
+++ b/test/files/run/showraw_tree.scala
@@ -0,0 +1,8 @@
+import scala.reflect.runtime.universe._
+
+object Test extends App {
+ val tree1 = reify(new collection.immutable.HashMap[String, String])
+ val tree2 = reify(new collection.mutable.HashMap[String, String])
+ println(showRaw(tree1.tree))
+ println(showRaw(tree2.tree))
+} \ No newline at end of file
diff --git a/test/files/run/showraw_tree_ids.check b/test/files/run/showraw_tree_ids.check
new file mode 100644
index 0000000000..c6dbd6f1ce
--- /dev/null
+++ b/test/files/run/showraw_tree_ids.check
@@ -0,0 +1,2 @@
+Apply(Select(New(AppliedTypeTree(Ident(scala.collection.immutable.HashMap#1903), List(Ident(java.lang.String#129), Ident(java.lang.String#129)))), nme.CONSTRUCTOR), List())
+Apply(Select(New(AppliedTypeTree(Ident(scala.collection.mutable.HashMap#1908), List(Ident(java.lang.String#129), Ident(java.lang.String#129)))), nme.CONSTRUCTOR), List())
diff --git a/test/files/run/showraw_tree_ids.scala b/test/files/run/showraw_tree_ids.scala
new file mode 100644
index 0000000000..b56b8b4476
--- /dev/null
+++ b/test/files/run/showraw_tree_ids.scala
@@ -0,0 +1,8 @@
+import scala.reflect.runtime.universe._
+
+object Test extends App {
+ val tree1 = reify(new collection.immutable.HashMap[String, String])
+ val tree2 = reify(new collection.mutable.HashMap[String, String])
+ println(showRaw(tree1.tree, printIds = true))
+ println(showRaw(tree2.tree, printIds = true))
+} \ No newline at end of file
diff --git a/test/files/run/showraw_tree_kinds.check b/test/files/run/showraw_tree_kinds.check
new file mode 100644
index 0000000000..a12e21c611
--- /dev/null
+++ b/test/files/run/showraw_tree_kinds.check
@@ -0,0 +1,2 @@
+Apply(Select(New(AppliedTypeTree(Ident(scala.collection.immutable.HashMap#CLS), List(Ident(java.lang.String#CLS), Ident(java.lang.String#CLS)))), nme.CONSTRUCTOR), List())
+Apply(Select(New(AppliedTypeTree(Ident(scala.collection.mutable.HashMap#CLS), List(Ident(java.lang.String#CLS), Ident(java.lang.String#CLS)))), nme.CONSTRUCTOR), List())
diff --git a/test/files/run/showraw_tree_kinds.scala b/test/files/run/showraw_tree_kinds.scala
new file mode 100644
index 0000000000..0ca5a387da
--- /dev/null
+++ b/test/files/run/showraw_tree_kinds.scala
@@ -0,0 +1,8 @@
+import scala.reflect.runtime.universe._
+
+object Test extends App {
+ val tree1 = reify(new collection.immutable.HashMap[String, String])
+ val tree2 = reify(new collection.mutable.HashMap[String, String])
+ println(showRaw(tree1.tree, printKinds = true))
+ println(showRaw(tree2.tree, printKinds = true))
+} \ No newline at end of file
diff --git a/test/files/run/showraw_tree_types_ids.check b/test/files/run/showraw_tree_types_ids.check
new file mode 100644
index 0000000000..02e7aeed7c
--- /dev/null
+++ b/test/files/run/showraw_tree_types_ids.check
@@ -0,0 +1,10 @@
+Apply[1](Select[2](New[1](TypeTree[1]().setOriginal(AppliedTypeTree(Ident[3](scala.collection.immutable.HashMap#1903), List(TypeTree[4]().setOriginal(Ident[4](java.lang.String#129)), TypeTree[4]().setOriginal(Ident[4](java.lang.String#129)))))), nme.CONSTRUCTOR#1913), List())
+[1] TypeRef(ThisType(scala.collection.immutable#1898), scala.collection.immutable.HashMap#1903, List(TypeRef(ThisType(java.lang#128), java.lang.String#129, List()), TypeRef(ThisType(java.lang#128), java.lang.String#129, List())))
+[2] MethodType(List(), TypeRef(ThisType(scala.collection.immutable#1898), scala.collection.immutable.HashMap#1903, List(TypeRef(ThisType(java.lang#128), java.lang.String#129, List()), TypeRef(ThisType(java.lang#128), java.lang.String#129, List()))))
+[3] TypeRef(ThisType(scala.collection.immutable#1898), scala.collection.immutable.HashMap#1903, List())
+[4] TypeRef(ThisType(java.lang#128), java.lang.String#129, List())
+Apply[5](Select[6](New[5](TypeTree[5]().setOriginal(AppliedTypeTree(Ident[7](scala.collection.mutable.HashMap#1908), List(TypeTree[4]().setOriginal(Ident[4](java.lang.String#129)), TypeTree[4]().setOriginal(Ident[4](java.lang.String#129)))))), nme.CONSTRUCTOR#2231), List())
+[4] TypeRef(ThisType(java.lang#128), java.lang.String#129, List())
+[5] TypeRef(ThisType(scala.collection.mutable#1907), scala.collection.mutable.HashMap#1908, List(TypeRef(ThisType(java.lang#128), java.lang.String#129, List()), TypeRef(ThisType(java.lang#128), java.lang.String#129, List())))
+[6] MethodType(List(), TypeRef(ThisType(scala.collection.mutable#1907), scala.collection.mutable.HashMap#1908, List(TypeRef(ThisType(java.lang#128), java.lang.String#129, List()), TypeRef(ThisType(java.lang#128), java.lang.String#129, List()))))
+[7] TypeRef(ThisType(scala.collection.mutable#1907), scala.collection.mutable.HashMap#1908, List())
diff --git a/test/files/run/showraw_tree_types_ids.scala b/test/files/run/showraw_tree_types_ids.scala
new file mode 100644
index 0000000000..cb2c2bfb0f
--- /dev/null
+++ b/test/files/run/showraw_tree_types_ids.scala
@@ -0,0 +1,10 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.ToolBox
+
+object Test extends App {
+ val tb = runtimeMirror(getClass.getClassLoader).mkToolBox()
+ val tree1 = reify(new collection.immutable.HashMap[String, String])
+ val tree2 = reify(new collection.mutable.HashMap[String, String])
+ println(showRaw(tb.typeCheck(tree1.tree), printIds = true, printTypes = true))
+ println(showRaw(tb.typeCheck(tree2.tree), printIds = true, printTypes = true))
+} \ No newline at end of file
diff --git a/test/files/run/showraw_tree_types_typed.check b/test/files/run/showraw_tree_types_typed.check
new file mode 100644
index 0000000000..60176c7192
--- /dev/null
+++ b/test/files/run/showraw_tree_types_typed.check
@@ -0,0 +1,10 @@
+Apply[1](Select[2](New[1](TypeTree[1]().setOriginal(AppliedTypeTree(Ident[3](scala.collection.immutable.HashMap), List(TypeTree[4]().setOriginal(Ident[4](java.lang.String)), TypeTree[4]().setOriginal(Ident[4](java.lang.String)))))), nme.CONSTRUCTOR), List())
+[1] TypeRef(ThisType(scala.collection.immutable), scala.collection.immutable.HashMap, List(TypeRef(ThisType(java.lang), java.lang.String, List()), TypeRef(ThisType(java.lang), java.lang.String, List())))
+[2] MethodType(List(), TypeRef(ThisType(scala.collection.immutable), scala.collection.immutable.HashMap, List(TypeRef(ThisType(java.lang), java.lang.String, List()), TypeRef(ThisType(java.lang), java.lang.String, List()))))
+[3] TypeRef(ThisType(scala.collection.immutable), scala.collection.immutable.HashMap, List())
+[4] TypeRef(ThisType(java.lang), java.lang.String, List())
+Apply[5](Select[6](New[5](TypeTree[5]().setOriginal(AppliedTypeTree(Ident[7](scala.collection.mutable.HashMap), List(TypeTree[4]().setOriginal(Ident[4](java.lang.String)), TypeTree[4]().setOriginal(Ident[4](java.lang.String)))))), nme.CONSTRUCTOR), List())
+[4] TypeRef(ThisType(java.lang), java.lang.String, List())
+[5] TypeRef(ThisType(scala.collection.mutable), scala.collection.mutable.HashMap, List(TypeRef(ThisType(java.lang), java.lang.String, List()), TypeRef(ThisType(java.lang), java.lang.String, List())))
+[6] MethodType(List(), TypeRef(ThisType(scala.collection.mutable), scala.collection.mutable.HashMap, List(TypeRef(ThisType(java.lang), java.lang.String, List()), TypeRef(ThisType(java.lang), java.lang.String, List()))))
+[7] TypeRef(ThisType(scala.collection.mutable), scala.collection.mutable.HashMap, List())
diff --git a/test/files/run/showraw_tree_types_typed.scala b/test/files/run/showraw_tree_types_typed.scala
new file mode 100644
index 0000000000..d7ccc84ea3
--- /dev/null
+++ b/test/files/run/showraw_tree_types_typed.scala
@@ -0,0 +1,10 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.ToolBox
+
+object Test extends App {
+ val tb = runtimeMirror(getClass.getClassLoader).mkToolBox()
+ val tree1 = reify(new collection.immutable.HashMap[String, String])
+ val tree2 = reify(new collection.mutable.HashMap[String, String])
+ println(showRaw(tb.typeCheck(tree1.tree), printTypes = true))
+ println(showRaw(tb.typeCheck(tree2.tree), printTypes = true))
+} \ No newline at end of file
diff --git a/test/files/run/showraw_tree_types_untyped.check b/test/files/run/showraw_tree_types_untyped.check
new file mode 100644
index 0000000000..82724cae44
--- /dev/null
+++ b/test/files/run/showraw_tree_types_untyped.check
@@ -0,0 +1,2 @@
+Apply(Select(New(AppliedTypeTree(Ident(scala.collection.immutable.HashMap), List(Ident(java.lang.String), Ident(java.lang.String)))), nme.CONSTRUCTOR), List())
+Apply(Select(New(AppliedTypeTree(Ident(scala.collection.mutable.HashMap), List(Ident(java.lang.String), Ident(java.lang.String)))), nme.CONSTRUCTOR), List())
diff --git a/test/files/run/showraw_tree_types_untyped.scala b/test/files/run/showraw_tree_types_untyped.scala
new file mode 100644
index 0000000000..4df2eb66b2
--- /dev/null
+++ b/test/files/run/showraw_tree_types_untyped.scala
@@ -0,0 +1,8 @@
+import scala.reflect.runtime.universe._
+
+object Test extends App {
+ val tree1 = reify(new collection.immutable.HashMap[String, String])
+ val tree2 = reify(new collection.mutable.HashMap[String, String])
+ println(showRaw(tree1.tree, printTypes = true))
+ println(showRaw(tree2.tree, printTypes = true))
+} \ No newline at end of file
diff --git a/test/files/run/showraw_tree_ultimate.check b/test/files/run/showraw_tree_ultimate.check
new file mode 100644
index 0000000000..0b409554a0
--- /dev/null
+++ b/test/files/run/showraw_tree_ultimate.check
@@ -0,0 +1,10 @@
+Apply[1](Select[2](New[1](TypeTree[1]().setOriginal(AppliedTypeTree(Ident[3](scala.collection.immutable.HashMap#1903#CLS), List(TypeTree[4]().setOriginal(Ident[4](java.lang.String#129#CLS)), TypeTree[4]().setOriginal(Ident[4](java.lang.String#129#CLS)))))), nme.CONSTRUCTOR#1913#PCTOR), List())
+[1] TypeRef(ThisType(scala.collection.immutable#1898#PK), scala.collection.immutable.HashMap#1903#CLS, List(TypeRef(ThisType(java.lang#128#PK), java.lang.String#129#CLS, List()), TypeRef(ThisType(java.lang#128#PK), java.lang.String#129#CLS, List())))
+[2] MethodType(List(), TypeRef(ThisType(scala.collection.immutable#1898#PK), scala.collection.immutable.HashMap#1903#CLS, List(TypeRef(ThisType(java.lang#128#PK), java.lang.String#129#CLS, List()), TypeRef(ThisType(java.lang#128#PK), java.lang.String#129#CLS, List()))))
+[3] TypeRef(ThisType(scala.collection.immutable#1898#PK), scala.collection.immutable.HashMap#1903#CLS, List())
+[4] TypeRef(ThisType(java.lang#128#PK), java.lang.String#129#CLS, List())
+Apply[5](Select[6](New[5](TypeTree[5]().setOriginal(AppliedTypeTree(Ident[7](scala.collection.mutable.HashMap#1908#CLS), List(TypeTree[4]().setOriginal(Ident[4](java.lang.String#129#CLS)), TypeTree[4]().setOriginal(Ident[4](java.lang.String#129#CLS)))))), nme.CONSTRUCTOR#2231#CTOR), List())
+[4] TypeRef(ThisType(java.lang#128#PK), java.lang.String#129#CLS, List())
+[5] TypeRef(ThisType(scala.collection.mutable#1907#PK), scala.collection.mutable.HashMap#1908#CLS, List(TypeRef(ThisType(java.lang#128#PK), java.lang.String#129#CLS, List()), TypeRef(ThisType(java.lang#128#PK), java.lang.String#129#CLS, List())))
+[6] MethodType(List(), TypeRef(ThisType(scala.collection.mutable#1907#PK), scala.collection.mutable.HashMap#1908#CLS, List(TypeRef(ThisType(java.lang#128#PK), java.lang.String#129#CLS, List()), TypeRef(ThisType(java.lang#128#PK), java.lang.String#129#CLS, List()))))
+[7] TypeRef(ThisType(scala.collection.mutable#1907#PK), scala.collection.mutable.HashMap#1908#CLS, List())
diff --git a/test/files/run/showraw_tree_ultimate.scala b/test/files/run/showraw_tree_ultimate.scala
new file mode 100644
index 0000000000..dfd7abde52
--- /dev/null
+++ b/test/files/run/showraw_tree_ultimate.scala
@@ -0,0 +1,10 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.ToolBox
+
+object Test extends App {
+ val tb = runtimeMirror(getClass.getClassLoader).mkToolBox()
+ val tree1 = reify(new collection.immutable.HashMap[String, String])
+ val tree2 = reify(new collection.mutable.HashMap[String, String])
+ println(showRaw(tb.typeCheck(tree1.tree), printIds = true, printKinds = true, printTypes = true))
+ println(showRaw(tb.typeCheck(tree2.tree), printIds = true, printKinds = true, printTypes = true))
+} \ No newline at end of file
diff --git a/test/files/run/t4954.scala b/test/files/run/t4954.scala
new file mode 100644
index 0000000000..b4916e651d
--- /dev/null
+++ b/test/files/run/t4954.scala
@@ -0,0 +1,45 @@
+
+
+import collection._
+
+
+object Test {
+
+ def main(args: Array[String]) {
+ val m = scala.collection.mutable.LinkedHashMap("one" -> 1, "two" -> 2, "three" -> 3, "four" -> 4, "five" -> 5)
+ val expected = List("one", "two", "three", "four", "five")
+ assert(m.keys.iterator.toList == expected)
+ assert(m.keys.drop(0).iterator.toList == expected)
+ assert(m.keys.drop(1).iterator.toList == expected.drop(1))
+ assert(m.keys.drop(2).iterator.toList == expected.drop(2))
+ assert(m.keys.drop(3).iterator.toList == expected.drop(3))
+ assert(m.keys.drop(4).iterator.toList == expected.drop(4))
+ assert(m.keys.drop(5).iterator.toList == expected.drop(5))
+
+ val expvals = List(1, 2, 3, 4, 5)
+ assert(m.values.iterator.toList == expvals)
+ assert(m.values.drop(0).iterator.toList == expvals)
+ assert(m.values.drop(1).iterator.toList == expvals.drop(1))
+ assert(m.values.drop(2).iterator.toList == expvals.drop(2))
+ assert(m.values.drop(3).iterator.toList == expvals.drop(3))
+ assert(m.values.drop(4).iterator.toList == expvals.drop(4))
+ assert(m.values.drop(5).iterator.toList == expvals.drop(5))
+
+ val pred = (x: String) => x.length < 6
+ val filtered = m.filterKeys(pred)
+ assert(filtered.drop(0).keys.toList == expected.filter(pred))
+ assert(filtered.drop(1).keys.toList == expected.filter(pred).drop(1))
+ assert(filtered.drop(2).keys.toList == expected.filter(pred).drop(2))
+ assert(filtered.drop(3).keys.toList == expected.filter(pred).drop(3))
+ assert(filtered.drop(4).keys.toList == expected.filter(pred).drop(4))
+
+ val mapped = m.mapValues(-_)
+ assert(mapped.drop(0).keys.toList == expected)
+ assert(mapped.drop(1).keys.toList == expected.drop(1))
+ assert(mapped.drop(2).keys.toList == expected.drop(2))
+ assert(mapped.drop(3).keys.toList == expected.drop(3))
+ assert(mapped.drop(4).keys.toList == expected.drop(4))
+ assert(mapped.drop(5).keys.toList == expected.drop(5))
+ }
+
+}
diff --git a/test/files/run/t5912.scala b/test/files/run/t5912.scala
new file mode 100644
index 0000000000..7710d04396
--- /dev/null
+++ b/test/files/run/t5912.scala
@@ -0,0 +1,6 @@
+object Test extends App{
+ import scala.reflect.runtime.{currentMirror=>cm}
+ import scala.tools.reflect._
+ import scala.reflect.runtime.universe._
+ val tree = cm.mkToolBox().typeCheck( Literal(Constant("test")) )
+} \ No newline at end of file
diff --git a/test/files/scalacheck/CheckEither.scala b/test/files/scalacheck/CheckEither.scala
index 0145d3321f..4e8480d72e 100644
--- a/test/files/scalacheck/CheckEither.scala
+++ b/test/files/scalacheck/CheckEither.scala
@@ -3,7 +3,6 @@ import org.scalacheck.Arbitrary.{arbitrary, arbThrowable}
import org.scalacheck.Gen.oneOf
import org.scalacheck.util.StdRand
import org.scalacheck.Prop._
-import org.scalacheck.ConsoleReporter.{testReport, propReport}
import org.scalacheck.Test.{Params, check}
import org.scalacheck.ConsoleReporter.testStatsEx
import Function.tupled
diff --git a/test/files/neg/t5334_1.scala b/test/pending/run/t5334_1.scala
index b75badb145..b75badb145 100644
--- a/test/files/neg/t5334_1.scala
+++ b/test/pending/run/t5334_1.scala
diff --git a/test/files/neg/t5334_2.scala b/test/pending/run/t5334_2.scala
index e082e3b8e3..e082e3b8e3 100644
--- a/test/files/neg/t5334_2.scala
+++ b/test/pending/run/t5334_2.scala