Diffstat
-rw-r--r--  build.xml | 64
-rw-r--r--  docs/README | 14
-rw-r--r--  project/Build.scala | 4
-rw-r--r--  src/actors/scala/actors/Actor.scala | 484
-rw-r--r--  src/actors/scala/actors/ActorCanReply.scala | 2
-rw-r--r--  src/actors/scala/actors/ActorTask.scala | 9
-rw-r--r--  src/actors/scala/actors/Channel.scala | 2
-rw-r--r--  src/actors/scala/actors/Combinators.scala | 2
-rw-r--r--  src/actors/scala/actors/InternalActor.scala | 509
-rw-r--r--  src/actors/scala/actors/InternalReplyReactor.scala | 161
-rw-r--r--  src/actors/scala/actors/OutputChannel.scala | 2
-rw-r--r--  src/actors/scala/actors/ReactChannel.scala | 2
-rw-r--r--  src/actors/scala/actors/Reactor.scala | 2
-rw-r--r--  src/actors/scala/actors/ReactorCanReply.scala | 2
-rw-r--r--  src/actors/scala/actors/ReplyReactor.scala | 165
-rw-r--r--  src/actors/scala/actors/ReplyReactorTask.scala | 4
-rw-r--r--  src/actors/scala/actors/UncaughtException.scala | 2
-rw-r--r--  src/build/maven/continuations-plugin-pom.xml | 9
-rw-r--r--  src/build/maven/jline-pom.xml | 9
-rw-r--r--  src/build/maven/maven-deploy.xml | 23
-rw-r--r--  src/build/maven/scala-actors-pom.xml | 61
-rw-r--r--  src/build/maven/scala-compiler-pom.xml | 9
-rw-r--r--  src/build/maven/scala-dbc-pom.xml | 9
-rw-r--r--  src/build/maven/scala-dotnet-library-pom.xml | 9
-rw-r--r--  src/build/maven/scala-library-pom.xml | 9
-rw-r--r--  src/build/maven/scala-partest-pom.xml | 9
-rw-r--r--  src/build/maven/scala-swing-pom.xml | 9
-rw-r--r--  src/build/maven/scalap-pom.xml | 9
-rw-r--r--  src/build/pack.xml | 26
-rw-r--r--  src/compiler/scala/reflect/internal/Definitions.scala | 18
-rw-r--r--  src/compiler/scala/reflect/internal/Flags.scala | 2
-rw-r--r--  src/compiler/scala/reflect/internal/Importers.scala | 2
-rw-r--r--  src/compiler/scala/reflect/internal/Kinds.scala | 23
-rw-r--r--  src/compiler/scala/reflect/internal/StdNames.scala | 2
-rw-r--r--  src/compiler/scala/reflect/internal/Symbols.scala | 83
-rw-r--r--  src/compiler/scala/reflect/internal/TreeInfo.scala | 20
-rw-r--r--  src/compiler/scala/reflect/internal/TreePrinters.scala | 2
-rw-r--r--  src/compiler/scala/reflect/internal/Trees.scala | 5
-rw-r--r--  src/compiler/scala/reflect/internal/Types.scala | 50
-rw-r--r--  src/compiler/scala/reflect/internal/settings/MutableSettings.scala | 1
-rw-r--r--  src/compiler/scala/reflect/runtime/Settings.scala | 3
-rw-r--r--  src/compiler/scala/reflect/runtime/ToolBoxes.scala | 4
-rw-r--r--  src/compiler/scala/reflect/runtime/Universe.scala | 5
-rwxr-xr-x  src/compiler/scala/tools/nsc/ast/DocComments.scala | 59
-rw-r--r--  src/compiler/scala/tools/nsc/ast/NodePrinters.scala | 565
-rw-r--r--  src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala | 147
-rw-r--r--  src/compiler/scala/tools/nsc/ast/TreeDSL.scala | 5
-rw-r--r--  src/compiler/scala/tools/nsc/ast/TreeGen.scala | 8
-rw-r--r--  src/compiler/scala/tools/nsc/ast/parser/Parsers.scala | 8
-rw-r--r--  src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala | 65
-rw-r--r--  src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala | 14
-rw-r--r--  src/compiler/scala/tools/nsc/backend/icode/GenICode.scala | 60
-rw-r--r--  src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/backend/opt/Inliners.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/doc/DocFactory.scala | 18
-rw-r--r--  src/compiler/scala/tools/nsc/doc/Settings.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/doc/html/HtmlFactory.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/doc/html/HtmlPage.scala | 12
-rw-r--r--  src/compiler/scala/tools/nsc/doc/html/SyntaxHigh.scala | 12
-rw-r--r--  src/compiler/scala/tools/nsc/doc/html/page/Template.scala | 29
-rw-r--r--  src/compiler/scala/tools/nsc/doc/html/resource/lib/template.css | 24
-rw-r--r--  src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala | 5
-rw-r--r--  src/compiler/scala/tools/nsc/doc/model/comment/CommentFactory.scala | 60
-rw-r--r--  src/compiler/scala/tools/nsc/interactive/RangePositions.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/interpreter/Naming.scala | 20
-rw-r--r--  src/compiler/scala/tools/nsc/matching/ParallelMatching.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/reporters/ConsoleReporter.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/settings/ScalaSettings.scala | 75
-rw-r--r--  src/compiler/scala/tools/nsc/symtab/Positions.scala | 9
-rw-r--r--  src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala | 10
-rw-r--r--  src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala | 6
-rw-r--r--  src/compiler/scala/tools/nsc/symtab/clr/TypeParser.scala | 24
-rw-r--r--  src/compiler/scala/tools/nsc/transform/Erasure.scala | 184
-rw-r--r--  src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/transform/Mixin.scala | 286
-rw-r--r--  src/compiler/scala/tools/nsc/transform/TailCalls.scala | 131
-rw-r--r--  src/compiler/scala/tools/nsc/transform/UnCurry.scala | 88
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala | 25
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Duplicators.scala | 12
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Infer.scala | 12
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala | 66
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Namers.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/PatMatVirtualiser.scala | 432
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/RefChecks.scala | 10
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala | 50
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Typers.scala | 288
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Unapplies.scala | 5
-rw-r--r--  src/compiler/scala/tools/nsc/util/Position.scala | 13
-rw-r--r--  src/compiler/scala/tools/util/color/CString.scala | 19
-rw-r--r--  src/compiler/scala/tools/util/color/package.scala | 1
-rw-r--r--  src/library/scala/Specializable.scala | 12
-rw-r--r--  src/library/scala/collection/GenIterableLike.scala | 64
-rw-r--r--  src/library/scala/collection/GenMapLike.scala | 2
-rw-r--r--  src/library/scala/collection/GenSeqLike.scala | 157
-rw-r--r--  src/library/scala/collection/GenTraversableLike.scala | 112
-rw-r--r--  src/library/scala/collection/GenTraversableOnce.scala | 65
-rw-r--r--  src/library/scala/collection/IterableLike.scala | 1
-rw-r--r--  src/library/scala/collection/Iterator.scala | 14
-rw-r--r--  src/library/scala/collection/JavaConversions.scala | 883
-rwxr-xr-x  src/library/scala/collection/JavaConverters.scala | 486
-rw-r--r--  src/library/scala/collection/MapLike.scala | 14
-rw-r--r--  src/library/scala/collection/SeqLike.scala | 49
-rw-r--r--  src/library/scala/collection/TraversableLike.scala | 76
-rw-r--r--  src/library/scala/collection/concurrent/TrieMap.scala | 2
-rw-r--r--  src/library/scala/collection/convert/DecorateAsJava.scala | 296
-rw-r--r--  src/library/scala/collection/convert/DecorateAsScala.scala | 189
-rw-r--r--  src/library/scala/collection/convert/Decorators.scala | 46
-rw-r--r--  src/library/scala/collection/convert/WrapAsJava.scala | 256
-rw-r--r--  src/library/scala/collection/convert/WrapAsScala.scala | 193
-rw-r--r--  src/library/scala/collection/convert/Wrappers.scala | 422
-rw-r--r--  src/library/scala/collection/convert/package.scala | 18
-rw-r--r--  src/library/scala/collection/generic/GenericTraversableTemplate.scala | 25
-rw-r--r--  src/library/scala/collection/immutable/List.scala | 16
-rw-r--r--  src/library/scala/collection/immutable/StringLike.scala | 15
-rw-r--r--  src/library/scala/collection/mutable/FlatArray.scala | 11
-rw-r--r--  src/library/scala/collection/mutable/WeakHashMap.scala | 5
-rw-r--r--  src/library/scala/collection/parallel/ParIterableLike.scala | 3
-rw-r--r--  src/library/scala/collection/parallel/ParSeqLike.scala | 16
-rw-r--r--  src/library/scala/concurrent/Awaitable.scala | 2
-rw-r--r--  src/library/scala/concurrent/ConcurrentPackageObject.scala | 7
-rw-r--r--  src/library/scala/concurrent/DelayedLazyVal.scala | 3
-rw-r--r--  src/library/scala/concurrent/ExecutionContext.scala | 2
-rw-r--r--  src/library/scala/concurrent/Future.scala | 3
-rw-r--r--  src/library/scala/concurrent/Scheduler.scala | 2
-rw-r--r--  src/library/scala/concurrent/default/SchedulerImpl.scala.disabled | 2
-rw-r--r--  src/library/scala/concurrent/default/TaskImpl.scala.disabled | 2
-rw-r--r--  src/library/scala/concurrent/impl/ExecutionContextImpl.scala | 3
-rw-r--r--  src/library/scala/concurrent/impl/Promise.scala | 2
-rw-r--r--  src/library/scala/concurrent/package.scala | 3
-rw-r--r--  src/library/scala/concurrent/util/Duration.scala | 578
-rw-r--r--  src/library/scala/reflect/api/Positions.scala | 18
-rw-r--r--  src/library/scala/reflect/api/TreePrinters.scala | 2
-rw-r--r--  src/library/scala/reflect/api/Trees.scala | 45
-rw-r--r--  src/library/scala/testing/Show.scala | 2
-rw-r--r--  src/library/scala/util/Duration.scala | 485
-rw-r--r--  src/library/scala/util/Timeout.scala | 33
-rw-r--r--  src/library/scala/util/matching/Regex.scala | 58
-rwxr-xr-x  src/library/scala/xml/Elem.scala | 8
-rwxr-xr-x  src/library/scala/xml/XML.scala | 17
-rw-r--r--  src/library/scala/xml/factory/LoggedNodeFactory.scala | 2
-rwxr-xr-x  src/library/scala/xml/parsing/MarkupParser.scala | 22
-rw-r--r--  src/partest/scala/tools/partest/PartestTask.scala | 11
-rw-r--r--  src/partest/scala/tools/partest/ScaladocModelTest.scala | 124
-rw-r--r--  src/partest/scala/tools/partest/nest/AntRunner.scala | 1
-rw-r--r--  src/partest/scala/tools/partest/nest/ConsoleFileManager.scala | 9
-rw-r--r--  src/partest/scala/tools/partest/nest/DirectRunner.scala | 9
-rw-r--r--  src/partest/scala/tools/partest/nest/FileManager.scala | 1
-rw-r--r--  src/partest/scala/tools/partest/nest/ReflectiveRunner.scala | 4
-rw-r--r--  src/partest/scala/tools/partest/nest/SBTRunner.scala | 4
-rw-r--r--  src/partest/scala/tools/partest/nest/Worker.scala | 1
-rw-r--r--  test/files/jvm/scala-concurrent-tck.scala | 2
-rw-r--r--  test/files/neg/saferJavaConversions.check | 6
-rw-r--r--  test/files/neg/saferJavaConversions.scala | 20
-rw-r--r--  test/files/neg/t1364.check | 2
-rw-r--r--  test/files/neg/t1477.check | 2
-rw-r--r--  test/files/neg/t2070.check | 3
-rw-r--r--  test/files/neg/t3015.check | 2
-rw-r--r--  test/files/neg/t3481.check | 6
-rw-r--r--  test/files/neg/t4044.check | 9
-rw-r--r--  test/files/neg/t4515.check | 2
-rw-r--r--  test/files/neg/t4987.check | 4
-rw-r--r--  test/files/neg/t4987.scala | 2
-rw-r--r--  test/files/neg/t5063.check | 4
-rw-r--r--  test/files/neg/t5063.scala | 3
-rw-r--r--  test/files/neg/t5152.check | 6
-rw-r--r--  test/files/neg/t5189b.check | 11
-rw-r--r--  test/files/neg/t5189b.scala | 18
-rw-r--r--  test/files/neg/t5580a.check | 6
-rw-r--r--  test/files/neg/t5580a.scala | 11
-rw-r--r--  test/files/neg/t5589neg.check | 37
-rw-r--r--  test/files/neg/t5589neg.scala | 6
-rw-r--r--  test/files/neg/t5589neg2.check | 9
-rw-r--r--  test/files/neg/t5589neg2.scala | 13
-rw-r--r--  test/files/neg/t708.check | 2
-rw-r--r--  test/files/neg/t742.check | 3
-rw-r--r--  test/files/neg/tcpoly_override.check | 3
-rw-r--r--  test/files/neg/tcpoly_typealias.check | 9
-rw-r--r--  test/files/neg/tcpoly_variance_enforce.check | 34
-rw-r--r--  test/files/neg/valueclasses.check | 5
-rw-r--r--  test/files/neg/valueclasses.scala | 23
-rw-r--r--  test/files/pos/irrefutable.scala | 22
-rw-r--r--  test/files/pos/lookupswitch.scala | 37
-rw-r--r--  test/files/pos/t1336.scala | 10
-rw-r--r--  test/files/pos/t5580b.scala | 19
-rw-r--r--  test/files/pos/t5589.scala | 22
-rw-r--r--  test/files/pos/t5604/ReplConfig.scala | 53
-rw-r--r--  test/files/pos/t5604/ReplReporter.scala | 30
-rw-r--r--  test/files/pos/virtpatmat_anonfun_for.flags | 1
-rw-r--r--  test/files/pos/virtpatmat_anonfun_for.scala | 8
-rw-r--r--  test/files/pos/virtpatmat_exist4.scala | 35
-rw-r--r--  test/files/pos/virtpatmat_instof_valuetype.flags | 1
-rw-r--r--  test/files/pos/virtpatmat_instof_valuetype.scala | 8
-rw-r--r--  test/files/presentation/shutdown-deadlock/ShutdownDeadlockTest.scala | 2
-rw-r--r--  test/files/run/delay-bad.check | 47
-rw-r--r--  test/files/run/delay-bad.scala | 77
-rw-r--r--  test/files/run/delay-good.check | 41
-rw-r--r--  test/files/run/delay-good.scala | 77
-rw-r--r--  test/files/run/existentials-in-compiler.check | 104
-rw-r--r--  test/files/run/reify_ann1a.check | 4
-rw-r--r--  test/files/run/reify_ann1b.check | 4
-rw-r--r--  test/files/run/si4750.check | 1
-rw-r--r--  test/files/run/si4750.scala | 7
-rw-r--r--  test/files/run/t4574.check | 2
-rw-r--r--  test/files/run/t4574.scala | 13
-rw-r--r--  test/files/run/valueclasses-constr.check | 10
-rw-r--r--  test/files/run/valueclasses-constr.scala | 84
-rw-r--r--  test/files/run/virtpatmat_extends_product.check | 1
-rw-r--r--  test/files/run/virtpatmat_extends_product.flags | 1
-rw-r--r--  test/files/run/virtpatmat_extends_product.scala | 11
-rw-r--r--  test/files/run/virtpatmat_partial.check | 17
-rw-r--r--  test/files/run/virtpatmat_partial.scala | 257
-rw-r--r--  test/files/run/virtpatmat_tailcalls_verifyerror.check | 1
-rw-r--r--  test/files/run/virtpatmat_tailcalls_verifyerror.flags | 1
-rw-r--r--  test/files/run/virtpatmat_tailcalls_verifyerror.scala | 14
-rw-r--r--  test/files/scalacheck/t2460.scala | 32
-rw-r--r--  test/pending/run/implicit-classes.scala | 17
-rw-r--r--  test/pending/run/virtpatmat_anonfun_underscore.check | 0
-rw-r--r--  test/pending/run/virtpatmat_anonfun_underscore.flags | 1
-rw-r--r--  test/pending/run/virtpatmat_anonfun_underscore.scala | 4
-rw-r--r--  test/scaladoc/resources/code-indent.scala | 37
-rw-r--r--  test/scaladoc/run/SI-5373.check | 1
-rw-r--r--  test/scaladoc/run/SI-5373.scala | 34
-rw-r--r--  test/scaladoc/scala/html.flags | 1
-rw-r--r--  test/scaladoc/scalacheck/CommentFactoryTest.scala (renamed from test/scaladoc/scala/model/CommentFactoryTest.scala) | 0
-rw-r--r--  test/scaladoc/scalacheck/HtmlFactoryTest.flags (renamed from test/scaladoc/scala/html/HtmlFactoryTest.flags) | 0
-rw-r--r--  test/scaladoc/scalacheck/HtmlFactoryTest.scala (renamed from test/scaladoc/scala/html/HtmlFactoryTest.scala) | 36
-rw-r--r--  test/scaladoc/scalacheck/IndexScriptTest.scala (renamed from test/scaladoc/scala/IndexScriptTest.scala) | 2
-rw-r--r--  test/scaladoc/scalacheck/IndexTest.scala (renamed from test/scaladoc/scala/IndexTest.scala) | 2
-rwxr-xr-x  tools/get-scala-commit-date | 3
-rwxr-xr-x  tools/get-scala-commit-drift | 40
-rw-r--r--  tools/get-scala-commit-drift.bat | 21
-rwxr-xr-x  tools/get-scala-commit-sha | 26
-rw-r--r--  tools/get-scala-commit-sha.bat | 2
-rwxr-xr-x  tools/make-release-notes | 49
237 files changed, 6724 insertions, 4514 deletions
diff --git a/build.xml b/build.xml
index 53bd9c3011..d4a6bbfbaa 100644
--- a/build.xml
+++ b/build.xml
@@ -266,7 +266,7 @@ INITIALISATION
<property name="scalac.args.optimise" value=""/>
<!-- scalac.args.quickonly are added to quick.* targets but not others (particularly, locker.)
This is to facilitate testing new command line options which do not yet exist in starr. -->
- <property name="scalac.args.quickonly" value=""/>
+ <property name="scalac.args.quickonly" value=""/>
<property name="scalac.args.all" value="${scalac.args} ${scalac.args.optimise}"/>
<property name="scalac.args.quick" value="${scalac.args.all} ${scalac.args.quickonly}"/>
<!-- Setting-up Ant contrib tasks -->
@@ -282,13 +282,10 @@ INITIALISATION
</condition>
<exec osfamily="unix" executable="tools/get-scala-commit-sha" outputproperty="git.commit.sha" failifexecutionfails="false" />
- <exec osfamily="windows" executable="tools/get-scala-commit-sha.bat" outputproperty="git.commit.sha" failifexecutionfails="false" />
<exec osfamily="unix" executable="tools/get-scala-commit-date" outputproperty="git.commit.date" failifexecutionfails="false" />
- <exec osfamily="windows" executable="tools/get-scala-commit-date.bat" outputproperty="git.commit.date" failifexecutionfails="false" />
- <exec osfamily="unix" executable="tools/get-scala-commit-drift" outputproperty="git.commit.drift" failifexecutionfails="false" />
- <exec osfamily="windows" executable="tools/get-scala-commit-drift.bat" outputproperty="git.commit.drift" failifexecutionfails="false" />
<!-- some default in case something went wrong getting the revision -->
- <property name="git.describe" value="-unknown-"/>
+ <property name="git.commit.sha" value="unknown"/>
+ <property name="git.commit.date" value="unknown"/>
<!-- We use the git describe to determine the OSGi modifier for our build. -->
<property
@@ -296,7 +293,7 @@ INITIALISATION
value="${version.major}.${version.minor}.${version.patch}${version.suffix}${maven.version.suffix}"/>
<property
name="version.number"
- value="${maven.version.number}-${git.commit.date}-${git.commit.drift}-${git.commit.sha}"/>
+ value="${version.major}.${version.minor}.${version.patch}${version.suffix}-${git.commit.date}-${git.commit.sha}"/>
<property
name="osgi.version.number"
value="${version.major}.${version.minor}.${version.patch}.v${git.commit.date}${version.suffix}-${git.commit.sha}"/>
@@ -331,7 +328,9 @@ INITIALISATION
<echo message=" java version: ${java.vm.name} ${java.version}" />
<echo message=" java args: ${env.ANT_OPTS} ${jvm.opts}" />
<echo message=" javac args: ${javac.args}" />
- <echo message=" scalac args: ${scalac.args}" />
+ <echo message=" scalac args: ${scalac.args.all}" />
+ <echo message=" git date: ${git.commit.date}" />
+ <echo message=" git hash: ${git.commit.sha}" />
<echo message=" maven version: ${maven.version.number}"/>
<echo message=" OSGi version: ${osgi.version.number}" />
<echo message="canonical version: ${version.number}" />
@@ -610,9 +609,9 @@ QUICK BUILD (QUICK)
<srcfiles dir="${src.dir}">
<include name="library/**"/>
<include name="dbc/**"/>
- <include name="actors/**"/>
<include name="continuations/**"/>
<include name="swing/**"/>
+ <include name="actors/**"/>
</srcfiles>
</uptodate>
</target>
@@ -991,6 +990,7 @@ PACKED QUICK BUILD (PACK)
<fileset dir="${build-quick.dir}/classes/library">
<exclude name="scala/dbc/**"/>
<exclude name="scala/swing/**"/>
+ <exclude name="scala/actors/**"/>
</fileset>
<zipfileset dirmode="755" filemode="644" src="${forkjoin.jar}"/>
</jar>
@@ -1004,6 +1004,11 @@ PACKED QUICK BUILD (PACK)
<include name="scala/swing/**"/>
</fileset>
</jar>
+ <jar destfile="${build-pack.dir}/lib/scala-actors.jar">
+ <fileset dir="${build-quick.dir}/classes/library">
+ <include name="scala/actors/**"/>
+ </fileset>
+ </jar>
</target>
<target name="pack.pre-comp" depends="pack.lib">
@@ -1133,6 +1138,7 @@ PACKED QUICK BUILD (PACK)
<pathelement location="${build-pack.dir}/lib/scala-compiler.jar"/>
<pathelement location="${build-pack.dir}/lib/scala-partest.jar"/>
<pathelement location="${build-pack.dir}/lib/scalap.jar"/>
+ <pathelement location="${build-pack.dir}/lib/scala-actors.jar"/>
<pathelement location="${ant.jar}"/>
<pathelement location="${jline.jar}"/>
<path refid="lib.extra"/>
@@ -1156,8 +1162,8 @@ BOOTSTRAPPING BUILD (STRAP)
<srcfiles dir="${src.dir}">
<include name="library/**"/>
<include name="dbc/**"/>
- <include name="actors/**"/>
<include name="swing/**"/>
+ <include name="actors/**"/>
</srcfiles>
</uptodate>
</target>
@@ -1578,6 +1584,21 @@ DOCUMENTATION
</uptodate>
</sequential>
</macrodef>
+
+ <!-- Set the github commit scaladoc sources point to -->
+ <!-- For releases, look for the tag with the same name as the maven version -->
+ <condition property="scaladoc.git.commit" value="v${maven.version.number}">
+ <isset property="build.release"/>
+ </condition>
+ <!-- For snapshots, if we know the commit, point scaladoc to that particular commit instead of master -->
+ <condition property="scaladoc.git.commit" value="${git.commit.sha}">
+ <not><equals arg1="${git.commit.sha}" arg2="unknown"/></not>
+ </condition>
+ <!-- Fallback: point scaladoc to master -->
+ <property name="scaladoc.git.commit" value="master"/>
+ <!-- Compute the URL and show it -->
+ <property name="scaladoc.url" value="https://github.com/scala/scala/tree/${scaladoc.git.commit}/src"/>
+ <echo message="Scaladoc will point to ${scaladoc.url} for source files."/>
</target>
<target name="docs.pre-lib" depends="docs.start">
@@ -1585,8 +1606,8 @@ DOCUMENTATION
<source-includes>
<include name="library/**"/>
<include name="dbc/**"/>
- <include name="actors/**"/>
<include name="swing/**"/>
+ <include name="actors/**"/>
</source-includes>
</doc-uptodate-check>
</target>
@@ -1596,15 +1617,15 @@ DOCUMENTATION
<mkdir dir="${build-docs.dir}/library"/>
<scaladoc
destdir="${build-docs.dir}/library"
- doctitle="Scala Standard Library"
+ doctitle="Scala Standard Library API (Scaladoc)"
docversion="${version.number}"
docfooter="epfl"
- docsourceurl="https://lampsvn.epfl.ch/trac/scala/browser/scala/trunk/src/€{FILE_PATH}.scala#L1"
+ docsourceurl="${scaladoc.url}€{FILE_PATH}.scala#L1"
docUncompilable="${src.dir}/library-aux"
sourcepath="${src.dir}"
classpathref="pack.classpath"
addparams="${scalac.args.all}"
- docRootContent="${src.dir}/library/rootdoc.txt">
+ docRootContent="${src.dir}/library/rootdoc.txt">
<src>
<files includes="${src.dir}/actors"/>
<files includes="${src.dir}/library/scala"/>
@@ -1650,7 +1671,7 @@ DOCUMENTATION
<taskdef name="genman"
classname="scala.tools.docutil.ManMaker"
classpathref="manual.classpath"/>
- <genman command="fsc, sbaz, scala, scalac, scaladoc, scalap"
+ <genman command="fsc, scala, scalac, scaladoc, scalap"
htmlout="${build-docs.dir}/manual/html"
manout="${build-docs.dir}/manual/genman"/>
<!-- On Windows source and target files can't be the same ! -->
@@ -1680,7 +1701,7 @@ DOCUMENTATION
destdir="${build-docs.dir}/compiler"
doctitle="Scala Compiler"
docversion="${version.number}"
- docsourceurl="https://lampsvn.epfl.ch/trac/scala/browser/scala/trunk/src/€{FILE_PATH}.scala#L1"
+ docsourceurl="${scaladoc.url}€{FILE_PATH}.scala#L1"
sourcepath="${src.dir}"
classpathref="pack.classpath"
srcdir="${src.dir}/compiler"
@@ -1725,7 +1746,7 @@ DOCUMENTATION
destdir="${build-docs.dir}/scalap"
doctitle="Scalap"
docversion="${version.number}"
- docsourceurl="https://lampsvn.epfl.ch/trac/scala/browser/scala/trunk/src/€{FILE_PATH}.scala#L1"
+ docsourceurl="${scaladoc.url}€{FILE_PATH}.scala#L1"
sourcepath="${src.dir}"
classpathref="pack.classpath"
srcdir="${src.dir}/scalap"
@@ -1895,7 +1916,8 @@ BOOTRAPING TEST AND TEST SUITE
<path refid="pack.classpath"/>
<fileset dir="${partest.dir}/files/lib" includes="*.jar" />
</compilationpath>
- <scalachecktests dir="${partest.dir}/scaladoc/scala" includes="**/*.scala" />
+ <runtests dir="${partest.dir}/scaladoc/run" includes="*.scala" />
+ <scalachecktests dir="${partest.dir}/scaladoc/scalacheck" includes="*.scala" />
</partest>
</target>
@@ -1963,7 +1985,6 @@ DISTRIBUTION
<mkdir dir="${dist.dir}/src"/>
<jar destfile="${dist.dir}/src/scala-library-src.jar">
<fileset dir="${src.dir}/library"/>
- <fileset dir="${src.dir}/actors"/>
<fileset dir="${src.dir}/continuations/library"/>
</jar>
<jar destfile="${dist.dir}/src/scala-dbc-src.jar">
@@ -1975,6 +1996,9 @@ DISTRIBUTION
<jar destfile="${dist.dir}/src/scala-compiler-src.jar">
<fileset dir="${src.dir}/compiler"/>
</jar>
+ <jar destfile="${dist.dir}/src/scala-actors-src.jar">
+ <fileset dir="${src.dir}/actors"/>
+ </jar>
<jar destfile="${dist.dir}/src/scalap-src.jar">
<fileset dir="${src.dir}/scalap"/>
</jar>
@@ -2051,8 +2075,8 @@ STABLE REFERENCE (STARR)
<target name="starr.src" depends="starr.comp">
<jar destfile="${basedir}/lib/scala-library-src.jar">
<fileset dir="${basedir}/src/library"/>
- <fileset dir="${basedir}/src/actors"/>
<fileset dir="${basedir}/src/swing"/>
+ <fileset dir="${basedir}/src/actors"/>
<fileset dir="${basedir}/src/dbc"/>
</jar>
</target>
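
The build.xml changes above drop the git.commit.drift machinery and retarget the Scaladoc source links from the old lampsvn/Trac URLs to GitHub: releases point at the tag named after the Maven version, snapshots point at the concrete commit SHA when it is known, and otherwise the links fall back to master. A minimal Scala sketch of that fallback chain, using the property names from the hunks above (illustrative only, not part of the Ant build):

    // Sketch of the scaladoc.git.commit / scaladoc.url resolution performed by the Ant conditions above.
    def scaladocCommit(isRelease: Boolean, mavenVersionNumber: String, gitCommitSha: String): String =
      if (isRelease) "v" + mavenVersionNumber            // releases: tag carrying the Maven version
      else if (gitCommitSha != "unknown") gitCommitSha   // snapshots: pin links to the exact commit
      else "master"                                      // fallback when the SHA could not be determined

    def scaladocUrl(commit: String): String =
      "https://github.com/scala/scala/tree/" + commit + "/src"
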
diff --git a/docs/README b/docs/README
index bcc7108399..6af633444d 100644
--- a/docs/README
+++ b/docs/README
@@ -2,8 +2,7 @@
Scala Software Distributions
----------------------------
-- scala-<major>.<minor>.<patch>.tar.bz2 Unix distribution
-- scala-<major>.<minor>.<patch>.tar.gz Unix distribution
+- scala-<major>.<minor>.<patch>.tgz Unix distribution
- scala-<major>.<minor>.<patch>.zip Windows distribution
The standard distributions require Java 1.5 or above. If you don't
@@ -32,15 +31,10 @@ directly accessible.
You may test the distribution by running the following commands:
-$ ./bin/sbaz install scala-devel-docs
-$ ./bin/scalac doc/scala-devel-docs/examples/sort.scala
-$ ./bin/scala examples.sort
-[6,2,8,5,1]
-[1,2,5,6,8]
$ ./bin/scala
-scala> examples.sort.main(null)
-[6,2,8,5,1]
-[1,2,5,6,8]
+scala> Array(4,3,2,1).sorted
+res0: Array[Int] = Array(1, 2, 3, 4)
+
scala>:quit
$
diff --git a/project/Build.scala b/project/Build.scala
index 5b09e053f0..9f73563f8e 100644
--- a/project/Build.scala
+++ b/project/Build.scala
@@ -44,7 +44,7 @@ object ScalaBuild extends Build with Layers {
</license>
</licenses>
<scm>
- <connection>scm:svn:http://lampsvn.epfl.ch/svn-repos/scala/scala/trunk</connection>
+ <connection>scm:git:git://github.com/scala/scala.git</connection>
</scm>
<issueManagement>
<system>jira</system>
@@ -92,7 +92,7 @@ object ScalaBuild extends Build with Layers {
</license>
</licenses>
<scm>
- <connection>scm:svn:http://lampsvn.epfl.ch/svn-repos/scala/scala/trunk</connection>
+ <connection>scm:git:git://github.com/scala/scala.git</connection>
</scm>
<issueManagement>
<system>jira</system>
diff --git a/src/actors/scala/actors/Actor.scala b/src/actors/scala/actors/Actor.scala
index b746f68268..aab533ae8d 100644
--- a/src/actors/scala/actors/Actor.scala
+++ b/src/actors/scala/actors/Actor.scala
@@ -46,7 +46,7 @@ object Actor extends Combinators {
Terminated = Value
}
- private[actors] val tl = new ThreadLocal[ReplyReactor]
+ private[actors] val tl = new ThreadLocal[InternalReplyReactor]
// timer thread runs as daemon
private[actors] val timer = new Timer(true)
@@ -59,15 +59,15 @@ object Actor extends Combinators {
*
* @return returns the currently executing actor.
*/
- def self: Actor = self(Scheduler)
+ def self: Actor = self(Scheduler).asInstanceOf[Actor]
- private[actors] def self(sched: IScheduler): Actor =
- rawSelf(sched).asInstanceOf[Actor]
+ private[actors] def self(sched: IScheduler): InternalActor =
+ rawSelf(sched).asInstanceOf[InternalActor]
- private[actors] def rawSelf: ReplyReactor =
+ private[actors] def rawSelf: InternalReplyReactor =
rawSelf(Scheduler)
- private[actors] def rawSelf(sched: IScheduler): ReplyReactor = {
+ private[actors] def rawSelf(sched: IScheduler): InternalReplyReactor = {
val s = tl.get
if (s eq null) {
val r = new ActorProxy(Thread.currentThread, sched)
@@ -245,7 +245,7 @@ object Actor extends Combinators {
def eventloop(f: PartialFunction[Any, Unit]): Nothing =
rawSelf.react(new RecursiveProxyHandler(rawSelf, f))
- private class RecursiveProxyHandler(a: ReplyReactor, f: PartialFunction[Any, Unit])
+ private class RecursiveProxyHandler(a: InternalReplyReactor, f: PartialFunction[Any, Unit])
extends scala.runtime.AbstractPartialFunction[Any, Unit] {
def _isDefinedAt(m: Any): Boolean =
true // events are immediately removed from the mailbox
@@ -259,7 +259,7 @@ object Actor extends Combinators {
* Returns the actor which sent the last received message.
*/
def sender: OutputChannel[Any] =
- rawSelf.sender
+ rawSelf.internalSender
/**
* Sends `msg` to the actor waiting in a call to `!?`.
@@ -302,7 +302,7 @@ object Actor extends Combinators {
def andThen[b](other: => b): Unit
}
- implicit def mkBody[a](body: => a) = new Body[a] {
+ implicit def mkBody[a](body: => a) = new InternalActor.Body[a] {
def andThen[b](other: => b): Unit = rawSelf.seq(body, other)
}
@@ -397,476 +397,12 @@ object Actor extends Combinators {
* @define channel actor's mailbox
*/
@SerialVersionUID(-781154067877019505L)
-trait Actor extends AbstractActor with ReplyReactor with ActorCanReply with InputChannel[Any] with Serializable {
-
- /* The following two fields are only used when the actor
- * suspends by blocking its underlying thread, for example,
- * when waiting in a receive or synchronous send.
- */
- @volatile
- private var isSuspended = false
-
- /* This field is used to communicate the received message from
- * the invocation of send to the place where the thread of
- * the receiving actor resumes inside receive/receiveWithin.
- */
- @volatile
- private var received: Option[Any] = None
-
- protected[actors] override def scheduler: IScheduler = Scheduler
-
- private[actors] override def startSearch(msg: Any, replyTo: OutputChannel[Any], handler: PartialFunction[Any, Any]) =
- if (isSuspended) {
- () => synchronized {
- mailbox.append(msg, replyTo)
- resumeActor()
- }
- } else super.startSearch(msg, replyTo, handler)
-
- // we override this method to check `shouldExit` before suspending
- private[actors] override def searchMailbox(startMbox: MQueue[Any],
- handler: PartialFunction[Any, Any],
- resumeOnSameThread: Boolean) {
- var tmpMbox = startMbox
- var done = false
- while (!done) {
- val qel = tmpMbox.extractFirst((msg: Any, replyTo: OutputChannel[Any]) => {
- senders = List(replyTo)
- handler.isDefinedAt(msg)
- })
- if (tmpMbox ne mailbox)
- tmpMbox.foreach((m, s) => mailbox.append(m, s))
- if (null eq qel) {
- synchronized {
- // in mean time new stuff might have arrived
- if (!sendBuffer.isEmpty) {
- tmpMbox = new MQueue[Any]("Temp")
- drainSendBuffer(tmpMbox)
- // keep going
- } else {
- // very important to check for `shouldExit` at this point
- // since linked actors might have set it after we checked
- // last time (e.g., at the beginning of `react`)
- if (shouldExit) exit()
- waitingFor = handler
- // see Reactor.searchMailbox
- throw Actor.suspendException
- }
- }
- } else {
- resumeReceiver((qel.msg, qel.session), handler, resumeOnSameThread)
- done = true
- }
- }
- }
-
- private[actors] override def makeReaction(fun: () => Unit, handler: PartialFunction[Any, Any], msg: Any): Runnable =
- new ActorTask(this, fun, handler, msg)
-
- /** See the companion object's `receive` method. */
- def receive[R](f: PartialFunction[Any, R]): R = {
- assert(Actor.self(scheduler) == this, "receive from channel belonging to other actor")
-
- synchronized {
- if (shouldExit) exit() // links
- drainSendBuffer(mailbox)
- }
-
- var done = false
- while (!done) {
- val qel = mailbox.extractFirst((m: Any, replyTo: OutputChannel[Any]) => {
- senders = replyTo :: senders
- val matches = f.isDefinedAt(m)
- senders = senders.tail
- matches
- })
- if (null eq qel) {
- synchronized {
- // in mean time new stuff might have arrived
- if (!sendBuffer.isEmpty) {
- drainSendBuffer(mailbox)
- // keep going
- } else {
- waitingFor = f
- isSuspended = true
- scheduler.managedBlock(blocker)
- drainSendBuffer(mailbox)
- // keep going
- }
- }
- } else {
- received = Some(qel.msg)
- senders = qel.session :: senders
- done = true
- }
- }
-
- val result = f(received.get)
- received = None
- senders = senders.tail
- result
- }
-
- /** See the companion object's `receiveWithin` method. */
- def receiveWithin[R](msec: Long)(f: PartialFunction[Any, R]): R = {
- assert(Actor.self(scheduler) == this, "receive from channel belonging to other actor")
-
- synchronized {
- if (shouldExit) exit() // links
- drainSendBuffer(mailbox)
- }
-
- // first, remove spurious TIMEOUT message from mailbox if any
- mailbox.extractFirst((m: Any, replyTo: OutputChannel[Any]) => m == TIMEOUT)
-
- val receiveTimeout = () => {
- if (f.isDefinedAt(TIMEOUT)) {
- received = Some(TIMEOUT)
- senders = this :: senders
- } else
- sys.error("unhandled timeout")
- }
-
- var done = false
- while (!done) {
- val qel = mailbox.extractFirst((m: Any, replyTo: OutputChannel[Any]) => {
- senders = replyTo :: senders
- val matches = f.isDefinedAt(m)
- senders = senders.tail
- matches
- })
- if (null eq qel) {
- val todo = synchronized {
- // in mean time new stuff might have arrived
- if (!sendBuffer.isEmpty) {
- drainSendBuffer(mailbox)
- // keep going
- () => {}
- } else if (msec == 0L) {
- done = true
- receiveTimeout
- } else {
- if (onTimeout.isEmpty) {
- if (!f.isDefinedAt(TIMEOUT))
- sys.error("unhandled timeout")
-
- val thisActor = this
- onTimeout = Some(new TimerTask {
- def run() {
- thisActor.send(TIMEOUT, thisActor)
- }
- })
- Actor.timer.schedule(onTimeout.get, msec)
- }
-
- // It is possible that !onTimeout.isEmpty, but TIMEOUT is not yet in mailbox
- // See SI-4759
- waitingFor = f
- received = None
- isSuspended = true
- scheduler.managedBlock(blocker)
- drainSendBuffer(mailbox)
- // keep going
- () => {}
- }
- }
- todo()
- } else {
- synchronized {
- if (!onTimeout.isEmpty) {
- onTimeout.get.cancel()
- onTimeout = None
- }
- }
- received = Some(qel.msg)
- senders = qel.session :: senders
- done = true
- }
- }
-
- val result = f(received.get)
- received = None
- senders = senders.tail
- result
- }
-
- /** See the companion object's `react` method. */
- override def react(handler: PartialFunction[Any, Unit]): Nothing = {
- synchronized {
- if (shouldExit) exit()
- }
- super.react(handler)
- }
-
- /** See the companion object's `reactWithin` method. */
- override def reactWithin(msec: Long)(handler: PartialFunction[Any, Unit]): Nothing = {
- synchronized {
- if (shouldExit) exit()
- }
- super.reactWithin(msec)(handler)
- }
-
- /** Receives the next message from the mailbox */
- def ? : Any = receive {
- case x => x
- }
-
- // guarded by lock of this
- // never throws SuspendActorControl
- private[actors] override def scheduleActor(f: PartialFunction[Any, Any], msg: Any) =
- if (f eq null) {
- // do nothing (timeout is handled instead)
- }
- else {
- val task = new ActorTask(this, null, f, msg)
- scheduler executeFromActor task
- }
-
- /* Used for notifying scheduler when blocking inside receive/receiveWithin. */
- private object blocker extends scala.concurrent.ManagedBlocker {
- def block() = {
- Actor.this.suspendActor()
- true
- }
- def isReleasable =
- !Actor.this.isSuspended
- }
-
- private def suspendActor() = synchronized {
- while (isSuspended) {
- try {
- wait()
- } catch {
- case _: InterruptedException =>
- }
- }
- // links: check if we should exit
- if (shouldExit) exit()
- }
-
- private def resumeActor() {
- isSuspended = false
- notify()
- }
-
- private[actors] override def exiting = synchronized {
- _state == Actor.State.Terminated
- }
-
- // guarded by this
- private[actors] override def dostart() {
- // Reset various flags.
- //
- // Note that we do *not* reset `trapExit`. The reason is that
- // users should be able to set the field in the constructor
- // and before `act` is called.
- exitReason = 'normal
- shouldExit = false
-
- super.dostart()
- }
+trait Actor extends InternalActor with ReplyReactor {
override def start(): Actor = synchronized {
super.start()
this
}
- /** State of this actor */
- override def getState: Actor.State.Value = synchronized {
- if (isSuspended) {
- if (onTimeout.isEmpty)
- Actor.State.Blocked
- else
- Actor.State.TimedBlocked
- } else
- super.getState
- }
-
- // guarded by this
- private[actors] var links: List[AbstractActor] = Nil
-
- /**
- * Links `self` to actor `to`.
- *
- * @param to the actor to link to
- * @return the parameter actor
- */
- def link(to: AbstractActor): AbstractActor = {
- assert(Actor.self(scheduler) == this, "link called on actor different from self")
- this linkTo to
- to linkTo this
- to
- }
-
- /**
- * Links `self` to the actor defined by `body`.
- *
- * @param body the body of the actor to link to
- * @return the parameter actor
- */
- def link(body: => Unit): Actor = {
- assert(Actor.self(scheduler) == this, "link called on actor different from self")
- val a = new Actor {
- def act() = body
- override final val scheduler: IScheduler = Actor.this.scheduler
- }
- link(a)
- a.start()
- a
- }
-
- private[actors] def linkTo(to: AbstractActor) = synchronized {
- links = to :: links
- }
-
- /**
- * Unlinks `self` from actor `from`.
- */
- def unlink(from: AbstractActor) {
- assert(Actor.self(scheduler) == this, "unlink called on actor different from self")
- this unlinkFrom from
- from unlinkFrom this
- }
-
- private[actors] def unlinkFrom(from: AbstractActor) = synchronized {
- links = links.filterNot(from.==)
- }
-
- @volatile
- var trapExit = false
- // guarded by this
- private var exitReason: AnyRef = 'normal
- // guarded by this
- private[actors] var shouldExit = false
-
- /**
- * Terminates execution of `self` with the following effect on
- * linked actors:
- *
- * For each linked actor `a` with `trapExit` set to `'''true'''`,
- * send message `Exit(self, reason)` to `a`.
- *
- * For each linked actor `a` with `trapExit` set to `'''false'''`
- * (default), call `a.exit(reason)` if `reason != 'normal`.
- */
- protected[actors] def exit(reason: AnyRef): Nothing = {
- synchronized {
- exitReason = reason
- }
- exit()
- }
-
- /**
- * Terminates with exit reason `'normal`.
- */
- protected[actors] override def exit(): Nothing = {
- val todo = synchronized {
- if (!links.isEmpty)
- exitLinked()
- else
- () => {}
- }
- todo()
- super.exit()
- }
-
- // Assume !links.isEmpty
- // guarded by this
- private[actors] def exitLinked(): () => Unit = {
- _state = Actor.State.Terminated
- // reset waitingFor, otherwise getState returns Suspended
- waitingFor = Reactor.waitingForNone
- // remove this from links
- val mylinks = links.filterNot(this.==)
- // unlink actors
- mylinks.foreach(unlinkFrom(_))
- // return closure that locks linked actors
- () => {
- mylinks.foreach((linked: AbstractActor) => {
- linked.synchronized {
- if (!linked.exiting) {
- linked.unlinkFrom(this)
- linked.exit(this, exitReason)
- }
- }
- })
- }
- }
-
- // Assume !links.isEmpty
- // guarded by this
- private[actors] def exitLinked(reason: AnyRef): () => Unit = {
- exitReason = reason
- exitLinked()
- }
-
- // Assume !this.exiting
- private[actors] def exit(from: AbstractActor, reason: AnyRef) {
- if (trapExit) {
- this ! Exit(from, reason)
- }
- else if (reason != 'normal)
- synchronized {
- shouldExit = true
- exitReason = reason
- // resume this Actor in a way that
- // causes it to exit
- // (because shouldExit == true)
- if (isSuspended)
- resumeActor()
- else if (waitingFor ne Reactor.waitingForNone) {
- waitingFor = Reactor.waitingForNone
- // it doesn't matter what partial function we are passing here
- scheduleActor(waitingFor, null)
- /* Here we should not throw a SuspendActorControl,
- since the current method is called from an actor that
- is in the process of exiting.
-
- Therefore, the contract for scheduleActor is that
- it never throws a SuspendActorControl.
- */
- }
- }
- }
-
- /** Requires qualified private, because `RemoteActor` must
- * register a termination handler.
- */
- private[actors] def onTerminate(f: => Unit) {
- scheduler.onTerminate(this) { f }
- }
}
-
-/**
- * Used as the timeout pattern in
- * <a href="Actor.html#receiveWithin(Long)" target="contentFrame">
- * <code>receiveWithin</code></a> and
- * <a href="Actor.html#reactWithin(Long)" target="contentFrame">
- * <code>reactWithin</code></a>.
- *
- * @example {{{
- * receiveWithin(500) {
- * case (x, y) => ...
- * case TIMEOUT => ...
- * }
- * }}}
- *
- * @author Philipp Haller
- */
-case object TIMEOUT
-
-
-/** Sent to an actor with `trapExit` set to `'''true'''` whenever one of its
- * linked actors terminates.
- *
- * @param from the actor that terminated
- * @param reason the reason that caused the actor to terminate
- */
-case class Exit(from: AbstractActor, reason: AnyRef)
-
-/** Manages control flow of actor executions.
- *
- * @author Philipp Haller
- */
-private[actors] class SuspendActorControl extends ControlThrowable
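
The net effect of the Actor.scala hunk above is purely structural: the blocking receive machinery, linking, and exit handling move into the new InternalActor trait, while trait Actor keeps its public surface. Ordinary actor code therefore continues to compile and behave as before; a minimal sketch of that unchanged usage (illustrative only):

    import scala.actors.Actor._

    // An echo actor: the public loop/react/reply API is untouched by the refactoring.
    val echo = actor {
      loop {
        react {
          case msg => reply(msg)   // answer the sender of the current message
        }
      }
    }

    echo ! "ping"   // asynchronous send; the reply is delivered to the sender's mailbox
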
diff --git a/src/actors/scala/actors/ActorCanReply.scala b/src/actors/scala/actors/ActorCanReply.scala
index b307aafa57..d92fb183c0 100644
--- a/src/actors/scala/actors/ActorCanReply.scala
+++ b/src/actors/scala/actors/ActorCanReply.scala
@@ -18,7 +18,7 @@ import scala.concurrent.SyncVar
* @author Philipp Haller
*/
private[actors] trait ActorCanReply extends ReactorCanReply {
- this: AbstractActor with ReplyReactor =>
+ this: AbstractActor with InternalReplyReactor =>
override def !?(msg: Any): Any = {
val replyCh = new Channel[Any](Actor.self(scheduler))
diff --git a/src/actors/scala/actors/ActorTask.scala b/src/actors/scala/actors/ActorTask.scala
index 090d0448f0..bb04302238 100644
--- a/src/actors/scala/actors/ActorTask.scala
+++ b/src/actors/scala/actors/ActorTask.scala
@@ -17,7 +17,7 @@ package scala.actors
* changes to the underlying var invisible.) I can't figure out what's supposed
* to happen, so I renamed the constructor parameter to at least be less confusing.
*/
-private[actors] class ActorTask(actor: Actor,
+private[actors] class ActorTask(actor: InternalActor,
fun: () => Unit,
handler: PartialFunction[Any, Any],
initialMsg: Any)
@@ -32,7 +32,7 @@ private[actors] class ActorTask(actor: Actor,
}
protected override def terminateExecution(e: Throwable) {
- val senderInfo = try { Some(actor.sender) } catch {
+ val senderInfo = try { Some(actor.internalSender) } catch {
case _: Exception => None
}
// !!! If this is supposed to be setting the current contents of the
@@ -45,13 +45,16 @@ private[actors] class ActorTask(actor: Actor,
e)
val todo = actor.synchronized {
- if (!actor.links.isEmpty)
+ val res = if (!actor.links.isEmpty)
actor.exitLinked(uncaught)
else {
super.terminateExecution(e)
() => {}
}
+ actor.internalPostStop
+ res
}
+
todo()
}
diff --git a/src/actors/scala/actors/Channel.scala b/src/actors/scala/actors/Channel.scala
index 62331239e8..36cee66b42 100644
--- a/src/actors/scala/actors/Channel.scala
+++ b/src/actors/scala/actors/Channel.scala
@@ -34,7 +34,7 @@ case class ! [a](ch: Channel[a], msg: a)
* @define actor channel
* @define channel channel
*/
-class Channel[Msg](val receiver: Actor) extends InputChannel[Msg] with OutputChannel[Msg] with CanReply[Msg, Any] {
+class Channel[Msg](val receiver: InternalActor) extends InputChannel[Msg] with OutputChannel[Msg] with CanReply[Msg, Any] {
type Future[+P] = scala.actors.Future[P]
diff --git a/src/actors/scala/actors/Combinators.scala b/src/actors/scala/actors/Combinators.scala
index 5276c7843e..c1a9095614 100644
--- a/src/actors/scala/actors/Combinators.scala
+++ b/src/actors/scala/actors/Combinators.scala
@@ -16,7 +16,7 @@ private[actors] trait Combinators {
* Enables the composition of suspendable closures using `andThen`,
* `loop`, `loopWhile`, etc.
*/
- implicit def mkBody[a](body: => a): Actor.Body[a]
+ implicit def mkBody[a](body: => a): InternalActor.Body[a]
/**
* Repeatedly executes `body`.
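
The Combinators change above only retargets the implicit mkBody at the new InternalActor.Body; the composition style it enables for suspendable blocks is unchanged. A sketch of that style, assuming the usual scala.actors idiom (illustrative only):

    import scala.actors.Actor._

    actor {
      // mkBody lets two react-based blocks be sequenced with andThen
      { react { case "first" => println("got first") } } andThen {
        react { case "second" => println("got second") }
      }
    }
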
diff --git a/src/actors/scala/actors/InternalActor.scala b/src/actors/scala/actors/InternalActor.scala
new file mode 100644
index 0000000000..c94da5b9fd
--- /dev/null
+++ b/src/actors/scala/actors/InternalActor.scala
@@ -0,0 +1,509 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2005-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+package scala.actors
+import java.util.TimerTask
+import scala.util.control.ControlThrowable
+
+private[actors] object InternalActor {
+ private[actors] trait Body[a] {
+ def andThen[b](other: => b): Unit
+ }
+}
+
+private[actors] trait InternalActor extends AbstractActor with InternalReplyReactor with ActorCanReply with InputChannel[Any] with Serializable {
+
+ /* The following two fields are only used when the actor
+ * suspends by blocking its underlying thread, for example,
+ * when waiting in a receive or synchronous send.
+ */
+ @volatile
+ private[actors] var isSuspended = false
+
+ /* This field is used to communicate the received message from
+ * the invocation of send to the place where the thread of
+ * the receiving actor resumes inside receive/receiveWithin.
+ */
+ @volatile
+ private var received: Option[Any] = None
+
+ protected[actors] override def scheduler: IScheduler = Scheduler
+
+ private[actors] override def startSearch(msg: Any, replyTo: OutputChannel[Any], handler: PartialFunction[Any, Any]) =
+ if (isSuspended) {
+ () =>
+ synchronized {
+ mailbox.append(msg, replyTo)
+ resumeActor()
+ }
+ } else super.startSearch(msg, replyTo, handler)
+
+ // we override this method to check `shouldExit` before suspending
+ private[actors] override def searchMailbox(startMbox: MQueue[Any],
+ handler: PartialFunction[Any, Any],
+ resumeOnSameThread: Boolean) {
+ var tmpMbox = startMbox
+ var done = false
+ while (!done) {
+ val qel = tmpMbox.extractFirst((msg: Any, replyTo: OutputChannel[Any]) => {
+ senders = List(replyTo)
+ handler.isDefinedAt(msg)
+ })
+ if (tmpMbox ne mailbox)
+ tmpMbox.foreach((m, s) => mailbox.append(m, s))
+ if (null eq qel) {
+ synchronized {
+ // in mean time new stuff might have arrived
+ if (!sendBuffer.isEmpty) {
+ tmpMbox = new MQueue[Any]("Temp")
+ drainSendBuffer(tmpMbox)
+ // keep going
+ } else {
+ // very important to check for `shouldExit` at this point
+ // since linked actors might have set it after we checked
+ // last time (e.g., at the beginning of `react`)
+ if (shouldExit) exit()
+ waitingFor = handler
+ // see Reactor.searchMailbox
+ throw Actor.suspendException
+ }
+ }
+ } else {
+ resumeReceiver((qel.msg, qel.session), handler, resumeOnSameThread)
+ done = true
+ }
+ }
+ }
+
+ private[actors] override def makeReaction(fun: () => Unit, handler: PartialFunction[Any, Any], msg: Any): Runnable =
+ new ActorTask(this, fun, handler, msg)
+
+ /** See the companion object's `receive` method. */
+ def receive[R](f: PartialFunction[Any, R]): R = {
+ assert(Actor.self(scheduler) == this, "receive from channel belonging to other actor")
+
+ synchronized {
+ if (shouldExit) exit() // links
+ drainSendBuffer(mailbox)
+ }
+
+ var done = false
+ while (!done) {
+ val qel = mailbox.extractFirst((m: Any, replyTo: OutputChannel[Any]) => {
+ senders = replyTo :: senders
+ val matches = f.isDefinedAt(m)
+ senders = senders.tail
+ matches
+ })
+ if (null eq qel) {
+ synchronized {
+ // in mean time new stuff might have arrived
+ if (!sendBuffer.isEmpty) {
+ drainSendBuffer(mailbox)
+ // keep going
+ } else {
+ waitingFor = f
+ isSuspended = true
+ scheduler.managedBlock(blocker)
+ drainSendBuffer(mailbox)
+ // keep going
+ }
+ }
+ } else {
+ received = Some(qel.msg)
+ senders = qel.session :: senders
+ done = true
+ }
+ }
+
+ val result = f(received.get)
+ received = None
+ senders = senders.tail
+ result
+ }
+
+ /** See the companion object's `receiveWithin` method. */
+ def receiveWithin[R](msec: Long)(f: PartialFunction[Any, R]): R = {
+ assert(Actor.self(scheduler) == this, "receive from channel belonging to other actor")
+
+ synchronized {
+ if (shouldExit) exit() // links
+ drainSendBuffer(mailbox)
+ }
+
+ // first, remove spurious TIMEOUT message from mailbox if any
+ mailbox.extractFirst((m: Any, replyTo: OutputChannel[Any]) => m == TIMEOUT)
+
+ val receiveTimeout = () => {
+ if (f.isDefinedAt(TIMEOUT)) {
+ received = Some(TIMEOUT)
+ senders = this :: senders
+ } else
+ sys.error("unhandled timeout")
+ }
+
+ var done = false
+ while (!done) {
+ val qel = mailbox.extractFirst((m: Any, replyTo: OutputChannel[Any]) => {
+ senders = replyTo :: senders
+ val matches = f.isDefinedAt(m)
+ senders = senders.tail
+ matches
+ })
+ if (null eq qel) {
+ val todo = synchronized {
+ // in mean time new stuff might have arrived
+ if (!sendBuffer.isEmpty) {
+ drainSendBuffer(mailbox)
+ // keep going
+ () => {}
+ } else if (msec == 0L) {
+ done = true
+ receiveTimeout
+ } else {
+ if (onTimeout.isEmpty) {
+ if (!f.isDefinedAt(TIMEOUT))
+ sys.error("unhandled timeout")
+
+ val thisActor = this
+ onTimeout = Some(new TimerTask {
+ def run() {
+ thisActor.send(TIMEOUT, thisActor)
+ }
+ })
+ Actor.timer.schedule(onTimeout.get, msec)
+ }
+
+ // It is possible that !onTimeout.isEmpty, but TIMEOUT is not yet in mailbox
+ // See SI-4759
+ waitingFor = f
+ received = None
+ isSuspended = true
+ scheduler.managedBlock(blocker)
+ drainSendBuffer(mailbox)
+ // keep going
+ () => {}
+ }
+ }
+ todo()
+ } else {
+ synchronized {
+ if (!onTimeout.isEmpty) {
+ onTimeout.get.cancel()
+ onTimeout = None
+ }
+ }
+ received = Some(qel.msg)
+ senders = qel.session :: senders
+ done = true
+ }
+ }
+
+ val result = f(received.get)
+ received = None
+ senders = senders.tail
+ result
+ }
+
+ /** See the companion object's `react` method. */
+ override def react(handler: PartialFunction[Any, Unit]): Nothing = {
+ synchronized {
+ if (shouldExit) exit()
+ }
+ super.react(handler)
+ }
+
+ /** See the companion object's `reactWithin` method. */
+ override def reactWithin(msec: Long)(handler: PartialFunction[Any, Unit]): Nothing = {
+ synchronized {
+ if (shouldExit) exit()
+ }
+ super.reactWithin(msec)(handler)
+ }
+
+ /** Receives the next message from the mailbox */
+ def ? : Any = receive {
+ case x => x
+ }
+
+ // guarded by lock of this
+ // never throws SuspendActorControl
+ private[actors] override def scheduleActor(f: PartialFunction[Any, Any], msg: Any) =
+ if (f eq null) {
+ // do nothing (timeout is handled instead)
+ } else {
+ val task = new ActorTask(this, null, f, msg)
+ scheduler executeFromActor task
+ }
+
+ /* Used for notifying scheduler when blocking inside receive/receiveWithin. */
+ private object blocker extends scala.concurrent.ManagedBlocker {
+ def block() = {
+ InternalActor.this.suspendActor()
+ true
+ }
+ def isReleasable =
+ !InternalActor.this.isSuspended
+ }
+
+ private def suspendActor() = synchronized {
+ while (isSuspended) {
+ try {
+ wait()
+ } catch {
+ case _: InterruptedException =>
+ }
+ }
+ // links: check if we should exit
+ if (shouldExit) exit()
+ }
+
+ private def resumeActor() {
+ isSuspended = false
+ notify()
+ }
+
+ private[actors] override def exiting = synchronized {
+ _state == Actor.State.Terminated
+ }
+
+ // guarded by this
+ private[actors] override def dostart() {
+ // Reset various flags.
+ //
+ // Note that we do *not* reset `trapExit`. The reason is that
+ // users should be able to set the field in the constructor
+ // and before `act` is called.
+ exitReason = 'normal
+ shouldExit = false
+
+ super.dostart()
+ }
+
+ override def start(): InternalActor = synchronized {
+ super.start()
+ this
+ }
+
+ /** State of this actor */
+ override def getState: Actor.State.Value = synchronized {
+ if (isSuspended) {
+ if (onTimeout.isEmpty)
+ Actor.State.Blocked
+ else
+ Actor.State.TimedBlocked
+ } else
+ super.getState
+ }
+
+ // guarded by this
+ private[actors] var links: List[AbstractActor] = Nil
+
+ /**
+ * Links <code>self</code> to actor <code>to</code>.
+ *
+ * @param to the actor to link to
+ * @return the parameter actor
+ */
+ def link(to: AbstractActor): AbstractActor = {
+ assert(Actor.self(scheduler) == this, "link called on actor different from self")
+ this linkTo to
+ to linkTo this
+ to
+ }
+
+ /**
+ * Links <code>self</code> to the actor defined by <code>body</code>.
+ *
+ * @param body the body of the actor to link to
+ * @return the parameter actor
+ */
+ def link(body: => Unit): Actor = {
+ assert(Actor.self(scheduler) == this, "link called on actor different from self")
+ val a = new Actor {
+ def act() = body
+ override final val scheduler: IScheduler = InternalActor.this.scheduler
+ }
+ link(a)
+ a.start()
+ a
+ }
+
+ private[actors] def linkTo(to: AbstractActor) = synchronized {
+ links = to :: links
+ }
+
+ /**
+ * Unlinks <code>self</code> from actor <code>from</code>.
+ */
+ def unlink(from: AbstractActor) {
+ assert(Actor.self(scheduler) == this, "unlink called on actor different from self")
+ this unlinkFrom from
+ from unlinkFrom this
+ }
+
+ private[actors] def unlinkFrom(from: AbstractActor) = synchronized {
+ links = links.filterNot(from.==)
+ }
+
+ @volatile
+ private[actors] var _trapExit = false
+
+ def trapExit = _trapExit
+
+ def trapExit_=(value: Boolean) = _trapExit = value
+
+ // guarded by this
+ private var exitReason: AnyRef = 'normal
+ // guarded by this
+ private[actors] var shouldExit = false
+
+ /**
+ * <p>
+ * Terminates execution of <code>self</code> with the following
+ * effect on linked actors:
+ * </p>
+ * <p>
+ * For each linked actor <code>a</code> with
+ * <code>trapExit</code> set to <code>true</code>, send message
+ * <code>Exit(self, reason)</code> to <code>a</code>.
+ * </p>
+ * <p>
+ * For each linked actor <code>a</code> with
+ * <code>trapExit</code> set to <code>false</code> (default),
+ * call <code>a.exit(reason)</code> if
+ * <code>reason != 'normal</code>.
+ * </p>
+ */
+ protected[actors] def exit(reason: AnyRef): Nothing = {
+ synchronized {
+ exitReason = reason
+ }
+ exit()
+ }
+
+ /**
+ * Terminates with exit reason <code>'normal</code>.
+ */
+ protected[actors] override def exit(): Nothing = {
+ val todo = synchronized {
+ if (!links.isEmpty)
+ exitLinked()
+ else
+ () => {}
+ }
+ todo()
+ super.exit()
+ }
+
+ // Assume !links.isEmpty
+ // guarded by this
+ private[actors] def exitLinked(): () => Unit = {
+ _state = Actor.State.Terminated
+ // reset waitingFor, otherwise getState returns Suspended
+ waitingFor = Reactor.waitingForNone
+ // remove this from links
+ val mylinks = links.filterNot(this.==)
+ // unlink actors
+ mylinks.foreach(unlinkFrom(_))
+ // return closure that locks linked actors
+ () => {
+ mylinks.foreach((linked: AbstractActor) => {
+ linked.synchronized {
+ if (!linked.exiting) {
+ linked.unlinkFrom(this)
+ linked.exit(this, exitReason)
+ }
+ }
+ })
+ }
+ }
+
+ // Assume !links.isEmpty
+ // guarded by this
+ private[actors] def exitLinked(reason: AnyRef): () => Unit = {
+ exitReason = reason
+ exitLinked()
+ }
+
+ // Assume !this.exiting
+ private[actors] def exit(from: AbstractActor, reason: AnyRef) {
+ if (trapExit) {
+ this ! Exit(from, reason)
+ } else if (reason != 'normal)
+ stop(reason)
+ }
+
+ /* Requires qualified private, because <code>RemoteActor</code> must
+ * register a termination handler.
+ */
+ private[actors] def onTerminate(f: => Unit) {
+ scheduler.onTerminate(this) { f }
+ }
+
+ private[actors] def internalPostStop() = {}
+
+ private[actors] def stop(reason: AnyRef): Unit = {
+ synchronized {
+ shouldExit = true
+ exitReason = reason
+ // resume this Actor in a way that
+ // causes it to exit
+ // (because shouldExit == true)
+ if (isSuspended)
+ resumeActor()
+ else if (waitingFor ne Reactor.waitingForNone) {
+ waitingFor = Reactor.waitingForNone
+ // it doesn't matter what partial function we are passing here
+ val task = new ActorTask(this, null, waitingFor, null)
+ scheduler execute task
+ /* Here we should not throw a SuspendActorControl,
+ since the current method is called from an actor that
+ is in the process of exiting.
+
+ Therefore, the contract for scheduleActor is that
+ it never throws a SuspendActorControl.
+ */
+ }
+ }
+ }
+}
+
+/**
+ * Used as the timeout pattern in
+ * <a href="Actor.html#receiveWithin(Long)" target="contentFrame">
+ * <code>receiveWithin</code></a> and
+ * <a href="Actor.html#reactWithin(Long)" target="contentFrame">
+ * <code>reactWithin</code></a>.
+ *
+ * @example {{{
+ * receiveWithin(500) {
+ * case (x, y) => ...
+ * case TIMEOUT => ...
+ * }
+ * }}}
+ *
+ * @author Philipp Haller
+ */
+case object TIMEOUT
+
+/**
+ * Sent to an actor
+ * with `trapExit` set to `true` whenever one of its linked actors
+ * terminates.
+ *
+ * @param from the actor that terminated
+ * @param reason the reason that caused the actor to terminate
+ */
+case class Exit(from: AbstractActor, reason: AnyRef)
+
+/**
+ * Manages control flow of actor executions.
+ *
+ * @author Philipp Haller
+ */
+private[actors] class SuspendActorControl extends ControlThrowable
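
InternalActor above now hosts the linking and termination machinery (link, trapExit, Exit, exitLinked) that previously lived in Actor.scala, with the abnormal-exit path factored into the new stop(reason) helper. A small sketch of the behaviour the doc comments describe, assuming the usual scala.actors semantics (illustrative only):

    import scala.actors.Actor._
    import scala.actors.Exit

    actor {
      self.trapExit = true                           // receive Exit(from, reason) instead of being terminated
      link { throw new RuntimeException("boom") }    // start and link an actor whose body fails
      receive {
        case Exit(from, reason) =>
          println("linked actor terminated: " + reason)
      }
    }
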
diff --git a/src/actors/scala/actors/InternalReplyReactor.scala b/src/actors/scala/actors/InternalReplyReactor.scala
new file mode 100644
index 0000000000..38295138d4
--- /dev/null
+++ b/src/actors/scala/actors/InternalReplyReactor.scala
@@ -0,0 +1,161 @@
+package scala.actors
+
+import java.util.{TimerTask}
+
+/**
+ * Extends the [[scala.actors.Reactor]]
+ * trait with methods to reply to the sender of a message.
+ * Sending a message to a <code>ReplyReactor</code> implicitly
+ * passes a reference to the sender together with the message.
+ *
+ * @author Philipp Haller
+ *
+ * @define actor `ReplyReactor`
+ */
+trait InternalReplyReactor extends Reactor[Any] with ReactorCanReply {
+
+ /* A list of the current senders. The head of the list is
+ * the sender of the message that was received last.
+ */
+ @volatile
+ private[actors] var senders: List[OutputChannel[Any]] = List()
+
+ /* This option holds a TimerTask when the actor waits in a
+ * reactWithin. The TimerTask is cancelled when the actor
+ * resumes.
+ *
+ * guarded by this
+ */
+ private[actors] var onTimeout: Option[TimerTask] = None
+
+ /**
+ * Returns the $actor which sent the last received message.
+ */
+ protected[actors] def internalSender: OutputChannel[Any] = senders.head
+
+ /**
+ * Replies with <code>msg</code> to the sender.
+ */
+ protected[actors] def reply(msg: Any) {
+ internalSender ! msg
+ }
+
+ override def !(msg: Any) {
+ send(msg, Actor.rawSelf(scheduler))
+ }
+
+ override def forward(msg: Any) {
+ send(msg, Actor.sender)
+ }
+
+ private[actors] override def resumeReceiver(item: (Any, OutputChannel[Any]), handler: PartialFunction[Any, Any], onSameThread: Boolean) {
+ synchronized {
+ if (!onTimeout.isEmpty) {
+ onTimeout.get.cancel()
+ onTimeout = None
+ }
+ }
+ senders = List(item._2)
+ super.resumeReceiver(item, handler, onSameThread)
+ }
+
+ private[actors] override def searchMailbox(startMbox: MQueue[Any],
+ handler: PartialFunction[Any, Any],
+ resumeOnSameThread: Boolean) {
+ var tmpMbox = startMbox
+ var done = false
+ while (!done) {
+ val qel = tmpMbox.extractFirst((msg: Any, replyTo: OutputChannel[Any]) => {
+ senders = List(replyTo)
+ handler.isDefinedAt(msg)
+ })
+ if (tmpMbox ne mailbox)
+ tmpMbox.foreach((m, s) => mailbox.append(m, s))
+ if (null eq qel) {
+ synchronized {
+ // in the meantime, new stuff might have arrived
+ if (!sendBuffer.isEmpty) {
+ tmpMbox = new MQueue[Any]("Temp")
+ drainSendBuffer(tmpMbox)
+ // keep going
+ } else {
+ waitingFor = handler
+ // see Reactor.searchMailbox
+ throw Actor.suspendException
+ }
+ }
+ } else {
+ resumeReceiver((qel.msg, qel.session), handler, resumeOnSameThread)
+ done = true
+ }
+ }
+ }
+
+ private[actors] override def makeReaction(fun: () => Unit, handler: PartialFunction[Any, Any], msg: Any): Runnable =
+ new ReplyReactorTask(this, fun, handler, msg)
+
+ protected[actors] override def react(handler: PartialFunction[Any, Unit]): Nothing = {
+ assert(Actor.rawSelf(scheduler) == this, "react on channel belonging to other actor")
+ super.react(handler)
+ }
+
+
+ /**
+ * Receives a message from this $actor's mailbox within a certain
+ * time span.
+ *
+ * This method never returns. Therefore, the rest of the computation
+ * has to be contained in the actions of the partial function.
+ *
+ * @param msec the time span before timeout
+ * @param handler a partial function with message patterns and actions
+ */
+ protected[actors] def reactWithin(msec: Long)(handler: PartialFunction[Any, Unit]): Nothing = {
+ assert(Actor.rawSelf(scheduler) == this, "react on channel belonging to other actor")
+
+ synchronized { drainSendBuffer(mailbox) }
+
+ // first, remove spurious TIMEOUT message from mailbox if any
+ mailbox.extractFirst((m: Any, replyTo: OutputChannel[Any]) => m == TIMEOUT)
+
+ while (true) {
+ val qel = mailbox.extractFirst((m: Any, replyTo: OutputChannel[Any]) => {
+ senders = List(replyTo)
+ handler isDefinedAt m
+ })
+ if (null eq qel) {
+ synchronized {
+ // in the meantime, new messages might have arrived
+ if (!sendBuffer.isEmpty) {
+ drainSendBuffer(mailbox)
+ // keep going
+ } else if (msec == 0L) {
+ // throws Actor.suspendException
+ resumeReceiver((TIMEOUT, this), handler, false)
+ } else {
+ waitingFor = handler
+ val thisActor = this
+ onTimeout = Some(new TimerTask {
+ def run() { thisActor.send(TIMEOUT, thisActor) }
+ })
+ Actor.timer.schedule(onTimeout.get, msec)
+ throw Actor.suspendException
+ }
+ }
+ } else
+ resumeReceiver((qel.msg, qel.session), handler, false)
+ }
+ throw Actor.suspendException
+ }
+
+ override def getState: Actor.State.Value = synchronized {
+ if (waitingFor ne Reactor.waitingForNone) {
+ if (onTimeout.isEmpty)
+ Actor.State.Suspended
+ else
+ Actor.State.TimedSuspended
+ } else
+ _state
+ }
+
+}
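
A minimal usage sketch of the reactWithin/TIMEOUT protocol implemented above, written against the public scala.actors API (illustrative only, not taken from this commit):

import scala.actors.Actor._
import scala.actors.TIMEOUT

// Poll the mailbox, falling back to the TIMEOUT case after 500 ms of silence.
val poller = actor {
  loop {
    reactWithin(500) {
      case TIMEOUT => println("no message within 500 ms")
      case msg     => println("got: " + msg)
    }
  }
}
poller ! "hello"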
diff --git a/src/actors/scala/actors/OutputChannel.scala b/src/actors/scala/actors/OutputChannel.scala
index 089b3d0981..1fba684975 100644
--- a/src/actors/scala/actors/OutputChannel.scala
+++ b/src/actors/scala/actors/OutputChannel.scala
@@ -43,5 +43,5 @@ trait OutputChannel[-Msg] {
/**
* Returns the `Actor` that is receiving from this $actor.
*/
- def receiver: Actor
+ def receiver: InternalActor
}
diff --git a/src/actors/scala/actors/ReactChannel.scala b/src/actors/scala/actors/ReactChannel.scala
index fccde34272..81a166c1a4 100644
--- a/src/actors/scala/actors/ReactChannel.scala
+++ b/src/actors/scala/actors/ReactChannel.scala
@@ -12,7 +12,7 @@ package scala.actors
/**
* @author Philipp Haller
*/
-private[actors] class ReactChannel[Msg](receiver: ReplyReactor) extends InputChannel[Msg] {
+private[actors] class ReactChannel[Msg](receiver: InternalReplyReactor) extends InputChannel[Msg] {
private case class SendToReactor(channel: ReactChannel[Msg], msg: Msg)
diff --git a/src/actors/scala/actors/Reactor.scala b/src/actors/scala/actors/Reactor.scala
index 7d21e9f91e..8fc7578344 100644
--- a/src/actors/scala/actors/Reactor.scala
+++ b/src/actors/scala/actors/Reactor.scala
@@ -253,7 +253,7 @@ trait Reactor[Msg >: Null] extends OutputChannel[Msg] with Combinators {
_state
}
- implicit def mkBody[A](body: => A) = new Actor.Body[A] {
+ implicit def mkBody[A](body: => A) = new InternalActor.Body[A] {
def andThen[B](other: => B): Unit = Reactor.this.seq(body, other)
}
diff --git a/src/actors/scala/actors/ReactorCanReply.scala b/src/actors/scala/actors/ReactorCanReply.scala
index 68f9999776..dabd0832f0 100644
--- a/src/actors/scala/actors/ReactorCanReply.scala
+++ b/src/actors/scala/actors/ReactorCanReply.scala
@@ -16,7 +16,7 @@ package scala.actors
* @author Philipp Haller
*/
private[actors] trait ReactorCanReply extends CanReply[Any, Any] {
- _: ReplyReactor =>
+ _: InternalReplyReactor =>
type Future[+P] = scala.actors.Future[P]
diff --git a/src/actors/scala/actors/ReplyReactor.scala b/src/actors/scala/actors/ReplyReactor.scala
index 0e5ce00c91..0ffbbd3cce 100644
--- a/src/actors/scala/actors/ReplyReactor.scala
+++ b/src/actors/scala/actors/ReplyReactor.scala
@@ -5,165 +5,12 @@
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
package scala.actors
-import java.util.{Timer, TimerTask}
-
-/**
- * Extends the [[scala.actors.Reactor]] trait with methods to reply to the
- * sender of a message.
- *
- * Sending a message to a `ReplyReactor` implicitly passes a reference to
- * the sender together with the message.
- *
- * @author Philipp Haller
- *
- * @define actor `ReplyReactor`
- */
-trait ReplyReactor extends Reactor[Any] with ReactorCanReply {
-
- /* A list of the current senders. The head of the list is
- * the sender of the message that was received last.
- */
- @volatile
- private[actors] var senders: List[OutputChannel[Any]] = List()
-
- /* This option holds a TimerTask when the actor waits in a
- * reactWithin. The TimerTask is cancelled when the actor
- * resumes.
- *
- * guarded by this
- */
- private[actors] var onTimeout: Option[TimerTask] = None
-
- /**
- * Returns the $actor which sent the last received message.
- */
- protected[actors] def sender: OutputChannel[Any] = senders.head
-
- /**
- * Replies with `msg` to the sender.
- */
- protected[actors] def reply(msg: Any) {
- sender ! msg
- }
-
- override def !(msg: Any) {
- send(msg, Actor.rawSelf(scheduler))
- }
-
- override def forward(msg: Any) {
- send(msg, Actor.sender)
- }
-
- private[actors] override def resumeReceiver(item: (Any, OutputChannel[Any]), handler: PartialFunction[Any, Any], onSameThread: Boolean) {
- synchronized {
- if (!onTimeout.isEmpty) {
- onTimeout.get.cancel()
- onTimeout = None
- }
- }
- senders = List(item._2)
- super.resumeReceiver(item, handler, onSameThread)
- }
-
- private[actors] override def searchMailbox(startMbox: MQueue[Any],
- handler: PartialFunction[Any, Any],
- resumeOnSameThread: Boolean) {
- var tmpMbox = startMbox
- var done = false
- while (!done) {
- val qel = tmpMbox.extractFirst((msg: Any, replyTo: OutputChannel[Any]) => {
- senders = List(replyTo)
- handler.isDefinedAt(msg)
- })
- if (tmpMbox ne mailbox)
- tmpMbox.foreach((m, s) => mailbox.append(m, s))
- if (null eq qel) {
- synchronized {
- // in mean time new stuff might have arrived
- if (!sendBuffer.isEmpty) {
- tmpMbox = new MQueue[Any]("Temp")
- drainSendBuffer(tmpMbox)
- // keep going
- } else {
- waitingFor = handler
- // see Reactor.searchMailbox
- throw Actor.suspendException
- }
- }
- } else {
- resumeReceiver((qel.msg, qel.session), handler, resumeOnSameThread)
- done = true
- }
- }
- }
-
- private[actors] override def makeReaction(fun: () => Unit, handler: PartialFunction[Any, Any], msg: Any): Runnable =
- new ReplyReactorTask(this, fun, handler, msg)
-
- protected[actors] override def react(handler: PartialFunction[Any, Unit]): Nothing = {
- assert(Actor.rawSelf(scheduler) == this, "react on channel belonging to other actor")
- super.react(handler)
- }
-
- /**
- * Receives a message from this $actor's mailbox within a certain
- * time span.
- *
- * This method never returns. Therefore, the rest of the computation
- * has to be contained in the actions of the partial function.
- *
- * @param msec the time span before timeout
- * @param handler a partial function with message patterns and actions
- */
- protected[actors] def reactWithin(msec: Long)(handler: PartialFunction[Any, Unit]): Nothing = {
- assert(Actor.rawSelf(scheduler) == this, "react on channel belonging to other actor")
-
- synchronized { drainSendBuffer(mailbox) }
-
- // first, remove spurious TIMEOUT message from mailbox if any
- mailbox.extractFirst((m: Any, replyTo: OutputChannel[Any]) => m == TIMEOUT)
-
- while (true) {
- val qel = mailbox.extractFirst((m: Any, replyTo: OutputChannel[Any]) => {
- senders = List(replyTo)
- handler isDefinedAt m
- })
- if (null eq qel) {
- synchronized {
- // in mean time new messages might have arrived
- if (!sendBuffer.isEmpty) {
- drainSendBuffer(mailbox)
- // keep going
- } else if (msec == 0L) {
- // throws Actor.suspendException
- resumeReceiver((TIMEOUT, this), handler, false)
- } else {
- waitingFor = handler
- val thisActor = this
- onTimeout = Some(new TimerTask {
- def run() { thisActor.send(TIMEOUT, thisActor) }
- })
- Actor.timer.schedule(onTimeout.get, msec)
- throw Actor.suspendException
- }
- }
- } else
- resumeReceiver((qel.msg, qel.session), handler, false)
- }
- throw Actor.suspendException
- }
-
- override def getState: Actor.State.Value = synchronized {
- if (waitingFor ne Reactor.waitingForNone) {
- if (onTimeout.isEmpty)
- Actor.State.Suspended
- else
- Actor.State.TimedSuspended
- } else
- _state
- }
-
+@deprecated("Scala Actors are beeing removed from the standard library. Please refer to the migration guide.", "2.10")
+trait ReplyReactor extends InternalReplyReactor {
+
+ protected[actors] def sender: OutputChannel[Any] = super.internalSender
+
}
+
diff --git a/src/actors/scala/actors/ReplyReactorTask.scala b/src/actors/scala/actors/ReplyReactorTask.scala
index cb63d7e000..d38eb50381 100644
--- a/src/actors/scala/actors/ReplyReactorTask.scala
+++ b/src/actors/scala/actors/ReplyReactorTask.scala
@@ -17,13 +17,13 @@ package scala.actors
* changes to the underlying var invisible.) I can't figure out what's supposed
* to happen, so I renamed the constructor parameter to at least be less confusing.
*/
-private[actors] class ReplyReactorTask(replyReactor: ReplyReactor,
+private[actors] class ReplyReactorTask(replyReactor: InternalReplyReactor,
fun: () => Unit,
handler: PartialFunction[Any, Any],
msg: Any)
extends ReactorTask(replyReactor, fun, handler, msg) {
- var saved: ReplyReactor = _
+ var saved: InternalReplyReactor = _
protected override def beginExecution() {
saved = Actor.tl.get
diff --git a/src/actors/scala/actors/UncaughtException.scala b/src/actors/scala/actors/UncaughtException.scala
index 3e6efe3b7c..a3e7f795f1 100644
--- a/src/actors/scala/actors/UncaughtException.scala
+++ b/src/actors/scala/actors/UncaughtException.scala
@@ -20,7 +20,7 @@ package scala.actors
* @author Philipp Haller
* @author Erik Engbrecht
*/
-case class UncaughtException(actor: Actor,
+case class UncaughtException(actor: InternalActor,
message: Option[Any],
sender: Option[OutputChannel[Any]],
thread: Thread,
diff --git a/src/build/maven/continuations-plugin-pom.xml b/src/build/maven/continuations-plugin-pom.xml
index aca519b87e..9abb0a36f0 100644
--- a/src/build/maven/continuations-plugin-pom.xml
+++ b/src/build/maven/continuations-plugin-pom.xml
@@ -23,13 +23,12 @@
</license>
</licenses>
<scm>
- <connection>scm:svn:http://lampsvn.epfl.ch/svn-repos/scala/scala/trunk</connection>
- <url>https://lampsvn.epfl.ch/trac/scala/browser/scala/trunk</url>
+ <connection>scm:git:git://github.com/scala/scala.git</connection>
+ <url>https://github.com/scala/scala.git</url>
</scm>
<issueManagement>
- <system>trac</system>
- <url>http://lampsvn.epfl.ch/trac/scala
- </url>
+ <system>JIRA</system>
+ <url>https://issues.scala-lang.org/</url>
</issueManagement>
<dependencies>
diff --git a/src/build/maven/jline-pom.xml b/src/build/maven/jline-pom.xml
index 4752deb5e0..0d6e801551 100644
--- a/src/build/maven/jline-pom.xml
+++ b/src/build/maven/jline-pom.xml
@@ -28,13 +28,12 @@
</license>
</licenses>
<scm>
- <connection>scm:svn:http://lampsvn.epfl.ch/svn-repos/scala/scala/trunk</connection>
- <url>https://lampsvn.epfl.ch/trac/scala/browser/scala/trunk</url>
+ <connection>scm:git:git://github.com/scala/scala.git</connection>
+ <url>https://github.com/scala/scala.git</url>
</scm>
<issueManagement>
- <system>trac</system>
- <url>http://lampsvn.epfl.ch/trac/scala
- </url>
+ <system>JIRA</system>
+ <url>https://issues.scala-lang.org/</url>
</issueManagement>
<dependencies>
diff --git a/src/build/maven/maven-deploy.xml b/src/build/maven/maven-deploy.xml
index e0f31a5db2..7f8343a84e 100644
--- a/src/build/maven/maven-deploy.xml
+++ b/src/build/maven/maven-deploy.xml
@@ -111,11 +111,12 @@
<deploy-local name="scala-library" version="@{version}" repository="@{repository}" />
<deploy-local name="scala-compiler" version="@{version}" repository="@{repository}" />
<deploy-local-plugin name="continuations" version="@{version}" repository="@{repository}"/>
+ <deploy-local name="scala-actors" version="@{version}" repository="@{repository}" />
<deploy-local name="scala-dbc" version="@{version}" repository="@{repository}" />
<deploy-local name="scala-swing" version="@{version}" repository="@{repository}"/>
- <deploy-local name="scalap" version="@{version}" repository="@{repository}"/>
- <deploy-local name="scala-partest" version="@{version}" repository="@{repository}"/>
- <deploy-local name="jline" version="@{version}" repository="@{repository}"/>
+ <deploy-local name="scalap" version="@{version}" repository="@{repository}"/>
+ <deploy-local name="scala-partest" version="@{version}" repository="@{repository}"/>
+ <deploy-local name="jline" version="@{version}" repository="@{repository}"/>
</sequential>
</macrodef>
</target>
@@ -168,12 +169,13 @@
<artifact:attach type="jar" file="scala-library/scala-library-docs.jar" classifier="javadoc" />
</extra-attachments>
</deploy-remote>
- <deploy-remote name="jline" version="@{version}" repository="@{repository}"/>
+ <deploy-remote name="jline" version="@{version}" repository="@{repository}"/>
<deploy-remote name="scala-compiler" version="@{version}" repository="@{repository}" />
<deploy-remote name="scala-dbc" version="@{version}" repository="@{repository}" />
<deploy-remote name="scala-swing" version="@{version}" repository="@{repository}"/>
- <deploy-remote name="scalap" version="@{version}" repository="@{repository}"/>
- <deploy-remote name="scala-partest" version="@{version}" repository="@{repository}"/>
+ <deploy-remote name="scala-actors" version="@{version}" repository="@{repository}"/>
+ <deploy-remote name="scalap" version="@{version}" repository="@{repository}"/>
+ <deploy-remote name="scala-partest" version="@{version}" repository="@{repository}"/>
<deploy-remote-plugin name="continuations" version="@{version}" repository="@{repository}"/>
</sequential>
</macrodef>
@@ -234,13 +236,14 @@
<attribute name="version" />
<sequential>
<deploy-remote-plugin-signed name="continuations" version="@{version}" repository="@{repository}"/>
- <deploy-remote-signed name="scala-library" version="@{version}" repository="@{repository}"/>
- <deploy-remote-signed name="jline" version="@{version}" repository="@{repository}"/>
+ <deploy-remote-signed name="scala-library" version="@{version}" repository="@{repository}"/>
+ <deploy-remote-signed name="jline" version="@{version}" repository="@{repository}"/>
<deploy-remote-signed name="scala-compiler" version="@{version}" repository="@{repository}" />
<deploy-remote-signed name="scala-dbc" version="@{version}" repository="@{repository}" />
<deploy-remote-signed name="scala-swing" version="@{version}" repository="@{repository}"/>
- <deploy-remote-signed name="scalap" version="@{version}" repository="@{repository}"/>
- <deploy-remote-signed name="scala-partest" version="@{version}" repository="@{repository}"/>
+ <deploy-remote-signed name="scala-actors" version="@{version}" repository="@{repository}"/>
+ <deploy-remote-signed name="scalap" version="@{version}" repository="@{repository}"/>
+ <deploy-remote-signed name="scala-partest" version="@{version}" repository="@{repository}"/>
</sequential>
</macrodef>
</target>
diff --git a/src/build/maven/scala-actors-pom.xml b/src/build/maven/scala-actors-pom.xml
new file mode 100644
index 0000000000..e8c6649721
--- /dev/null
+++ b/src/build/maven/scala-actors-pom.xml
@@ -0,0 +1,61 @@
+<project
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>org.scala-lang</groupId>
+ <artifactId>scala-actors</artifactId>
+ <packaging>jar</packaging>
+ <version>@VERSION@</version>
+ <name>Scala Actors library</name>
+ <description>Deprecated Actors Library for Scala</description>
+ <url>http://www.scala-lang.org/</url>
+ <inceptionYear>2006</inceptionYear>
+ <organization>
+ <name>LAMP/EPFL</name>
+ <url>http://lamp.epfl.ch/</url>
+ </organization>
+ <licenses>
+ <license>
+ <name>BSD-like</name>
+ <url>http://www.scala-lang.org/downloads/license.html
+ </url>
+ <distribution>repo</distribution>
+ </license>
+ </licenses>
+ <scm>
+ <connection>scm:git:git://github.com/scala/scala.git</connection>
+ <url>https://github.com/scala/scala.git</url>
+ </scm>
+ <issueManagement>
+ <system>JIRA</system>
+ <url>https://issues.scala-lang.org/</url>
+ </issueManagement>
+ <dependencies>
+ <dependency>
+ <groupId>org.scala-lang</groupId>
+ <artifactId>scala-library</artifactId>
+ <version>@VERSION@</version>
+ </dependency>
+ </dependencies>
+ <distributionManagement>
+ <repository>
+ <id>scala-tools.org</id>
+ <url>@RELEASE_REPOSITORY@</url>
+ </repository>
+ <snapshotRepository>
+ <id>scala-tools.org</id>
+ <url>@SNAPSHOT_REPOSITORY@</url>
+ <uniqueVersion>false</uniqueVersion>
+ </snapshotRepository>
+ </distributionManagement>
+ <developers>
+ <developer>
+ <id>lamp</id>
+ <name>EPFL LAMP</name>
+ </developer>
+ <developer>
+ <id>Typesafe</id>
+ <name>Typesafe, Inc.</name>
+ </developer>
+ </developers>
+</project>
diff --git a/src/build/maven/scala-compiler-pom.xml b/src/build/maven/scala-compiler-pom.xml
index f9bcb6719d..82d0ed7c91 100644
--- a/src/build/maven/scala-compiler-pom.xml
+++ b/src/build/maven/scala-compiler-pom.xml
@@ -23,13 +23,12 @@
</license>
</licenses>
<scm>
- <connection>scm:svn:http://lampsvn.epfl.ch/svn-repos/scala/scala/trunk</connection>
- <url>https://lampsvn.epfl.ch/trac/scala/browser/scala/trunk</url>
+ <connection>scm:git:git://github.com/scala/scala.git</connection>
+ <url>https://github.com/scala/scala.git</url>
</scm>
<issueManagement>
- <system>trac</system>
- <url>http://lampsvn.epfl.ch/trac/scala
- </url>
+ <system>JIRA</system>
+ <url>https://issues.scala-lang.org/</url>
</issueManagement>
<dependencies>
diff --git a/src/build/maven/scala-dbc-pom.xml b/src/build/maven/scala-dbc-pom.xml
index 23092d10ad..aa3d050c1e 100644
--- a/src/build/maven/scala-dbc-pom.xml
+++ b/src/build/maven/scala-dbc-pom.xml
@@ -23,13 +23,12 @@
</license>
</licenses>
<scm>
- <connection>scm:svn:http://lampsvn.epfl.ch/svn-repos/scala/scala/trunk</connection>
- <url>https://lampsvn.epfl.ch/trac/scala/browser/scala/trunk</url>
+ <connection>scm:git:git://github.com/scala/scala.git</connection>
+ <url>https://github.com/scala/scala.git</url>
</scm>
<issueManagement>
- <system>trac</system>
- <url>http://lampsvn.epfl.ch/trac/scala
- </url>
+ <system>JIRA</system>
+ <url>https://issues.scala-lang.org/</url>
</issueManagement>
<dependencies>
<dependency>
diff --git a/src/build/maven/scala-dotnet-library-pom.xml b/src/build/maven/scala-dotnet-library-pom.xml
index 339460924a..007e8be173 100644
--- a/src/build/maven/scala-dotnet-library-pom.xml
+++ b/src/build/maven/scala-dotnet-library-pom.xml
@@ -24,13 +24,12 @@
</license>
</licenses>
<scm>
- <connection>scm:svn:http://lampsvn.epfl.ch/svn-repos/scala/scala/trunk</connection>
- <url>https://lampsvn.epfl.ch/trac/scala/browser/scala/trunk</url>
+ <connection>scm:git:git://github.com/scala/scala.git</connection>
+ <url>https://github.com/scala/scala.git</url>
</scm>
<issueManagement>
- <system>trac</system>
- <url>http://lampsvn.epfl.ch/trac/scala
- </url>
+ <system>JIRA</system>
+ <url>https://issues.scala-lang.org/</url>
</issueManagement>
<distributionManagement>
<repository>
diff --git a/src/build/maven/scala-library-pom.xml b/src/build/maven/scala-library-pom.xml
index 8e0abd4937..673c3dfada 100644
--- a/src/build/maven/scala-library-pom.xml
+++ b/src/build/maven/scala-library-pom.xml
@@ -23,13 +23,12 @@
</license>
</licenses>
<scm>
- <connection>scm:svn:http://lampsvn.epfl.ch/svn-repos/scala/scala/trunk</connection>
- <url>https://lampsvn.epfl.ch/trac/scala/browser/scala/trunk</url>
+ <connection>scm:git:git://github.com/scala/scala.git</connection>
+ <url>https://github.com/scala/scala.git</url>
</scm>
<issueManagement>
- <system>trac</system>
- <url>http://lampsvn.epfl.ch/trac/scala
- </url>
+ <system>JIRA</system>
+ <url>https://issues.scala-lang.org/</url>
</issueManagement>
<distributionManagement>
<repository>
diff --git a/src/build/maven/scala-partest-pom.xml b/src/build/maven/scala-partest-pom.xml
index f18ca46c50..ac05f242d5 100644
--- a/src/build/maven/scala-partest-pom.xml
+++ b/src/build/maven/scala-partest-pom.xml
@@ -23,13 +23,12 @@
</license>
</licenses>
<scm>
- <connection>scm:svn:http://lampsvn.epfl.ch/svn-repos/scala/scala/trunk</connection>
- <url>https://lampsvn.epfl.ch/trac/scala/browser/scala/trunk</url>
+ <connection>scm:git:git://github.com/scala/scala.git</connection>
+ <url>https://github.com/scala/scala.git</url>
</scm>
<issueManagement>
- <system>trac</system>
- <url>http://lampsvn.epfl.ch/trac/scala
- </url>
+ <system>JIRA</system>
+ <url>https://issues.scala-lang.org/</url>
</issueManagement>
<dependencies>
diff --git a/src/build/maven/scala-swing-pom.xml b/src/build/maven/scala-swing-pom.xml
index a03bc07ab0..3df5db5b21 100644
--- a/src/build/maven/scala-swing-pom.xml
+++ b/src/build/maven/scala-swing-pom.xml
@@ -23,13 +23,12 @@
</license>
</licenses>
<scm>
- <connection>scm:svn:http://lampsvn.epfl.ch/svn-repos/scala/scala/trunk</connection>
- <url>https://lampsvn.epfl.ch/trac/scala/browser/scala/trunk</url>
+ <connection>scm:git:git://github.com/scala/scala.git</connection>
+ <url>https://github.com/scala/scala.git</url>
</scm>
<issueManagement>
- <system>trac</system>
- <url>http://lampsvn.epfl.ch/trac/scala
- </url>
+ <system>JIRA</system>
+ <url>https://issues.scala-lang.org/</url>
</issueManagement>
<dependencies>
<dependency>
diff --git a/src/build/maven/scalap-pom.xml b/src/build/maven/scalap-pom.xml
index d7f867d4a1..50c08e899c 100644
--- a/src/build/maven/scalap-pom.xml
+++ b/src/build/maven/scalap-pom.xml
@@ -23,13 +23,12 @@
</license>
</licenses>
<scm>
- <connection>scm:svn:http://lampsvn.epfl.ch/svn-repos/scala/scala/trunk</connection>
- <url>https://lampsvn.epfl.ch/trac/scala/browser/scala/trunk</url>
+ <connection>scm:git:git://github.com/scala/scala.git</connection>
+ <url>https://github.com/scala/scala.git</url>
</scm>
<issueManagement>
- <system>trac</system>
- <url>http://lampsvn.epfl.ch/trac/scala
- </url>
+ <system>JIRA</system>
+ <url>https://issues.scala-lang.org/</url>
</issueManagement>
<dependencies>
diff --git a/src/build/pack.xml b/src/build/pack.xml
index e79895e3a8..723fd1d25d 100644
--- a/src/build/pack.xml
+++ b/src/build/pack.xml
@@ -78,6 +78,7 @@ MAIN DISTRIBUTION PACKAGING
<exclude name="logs/**"/>
<exclude name="sandbox/**"/>
<exclude name="test/partest"/>
+ <exclude name=".git"/>
</tarfileset>
<tarfileset dir="${basedir}" prefix="${dist.name}-sources" filemode="755">
<include name="test/partest"/>
@@ -95,8 +96,22 @@ MAIN DISTRIBUTION PACKAGING
</fileset>
</checksum>
</target>
+
+ <target name="pack-archives.latest.unix" depends="pack-archives.src" unless="os.win">
+ <symlink link="${dists.dir}/archives/scala-latest-sources.tgz"
+ resource="${dists.dir}/archives/scala-${version.number}-sources.tgz"
+ overwrite="yes"/>
+ </target>
+
+ <target name="pack-archives.latest.win" depends="pack-archives.src" if="os.win">
+ <copy tofile="${dists.dir}/archives/scala-latest-sources.tgz">
+ <fileset dir="${dists.dir}/archives">
+ <include name="scala-${version.number}-sources.tgz"/>
+ </fileset>
+ </copy>
+ </target>
- <target name="pack-archives.done" depends="pack-archives.src"/>
+ <target name="pack-archives.done" depends="pack-archives.src, pack-archives.latest.win, pack-archives.latest.unix"/>
<!-- ===========================================================================
MAIN DISTRIBUTION SBAZ
@@ -114,8 +129,8 @@ MAIN DISTRIBUTION SBAZ
version="${version.number}"
desc="The Scala library. This is the minimal requirement to run any Scala program."
link="${sbaz.universe}/scala-library-${version.number}.sbp">
- <libset dir="${dist.dir}/lib" includes="scala-library.jar,scala-dbc.jar,scala-swing.jar"/>
- <srcset dir="${dist.dir}/src" includes="scala-library-src.jar,scala-dbc-src.jar,scala-swing-src.jar"/>
+ <libset dir="${dist.dir}/lib" includes="scala-library.jar,scala-dbc.jar,scala-swing.jar,scala-actors.jar"/>
+ <srcset dir="${dist.dir}/src" includes="scala-library-src.jar,scala-dbc-src.jar,scala-swing-src.jar,scala-actors-src.jar"/>
<looseset destination="doc">
<fileset dir="${dist.dir}/doc" includes="LICENSE,README"/>
</looseset>
@@ -228,6 +243,7 @@ MAIN DISTRIBUTION SBAZ
<mvn-copy-lib mvn.artifact.name="scala-compiler"/>
<mvn-copy-lib mvn.artifact.name="scala-dbc"/>
<mvn-copy-lib mvn.artifact.name="scala-swing"/>
+ <mvn-copy-lib mvn.artifact.name="scala-actors"/>
<mvn-copy-lib mvn.artifact.name="scala-partest"/>
<mvn-copy-lib mvn.artifact.name="scalap"/>
</target>
@@ -290,11 +306,13 @@ MAIN DISTRIBUTION SBAZ
basedir="${build-docs.dir}/continuations-plugin">
<include name="**/*"/>
</jar>
- <!-- TODO - Scala swing, dbc should maybe have thier own jar, but creating it is SLOW. -->
+ <!-- TODO - Scala swing, dbc and actors should maybe have thier own jar, but creating it is SLOW. -->
<copy tofile="${dists.dir}/maven/${version.number}/scala-swing/scala-swing-docs.jar"
file="${dists.dir}/maven/${version.number}/scala-library/scala-library-docs.jar"/>
<copy tofile="${dists.dir}/maven/${version.number}/scala-dbc/scala-dbc-docs.jar"
file="${dists.dir}/maven/${version.number}/scala-library/scala-library-docs.jar"/>
+ <copy tofile="${dists.dir}/maven/${version.number}/scala-actors/scala-actors-docs.jar"
+ file="${dists.dir}/maven/${version.number}/scala-library/scala-library-docs.jar"/>
</target>
<target name="pack-maven.latest.unix" depends="pack-maven.docs" unless="os.win">
diff --git a/src/compiler/scala/reflect/internal/Definitions.scala b/src/compiler/scala/reflect/internal/Definitions.scala
index bd823c3128..a2dd6fc4c3 100644
--- a/src/compiler/scala/reflect/internal/Definitions.scala
+++ b/src/compiler/scala/reflect/internal/Definitions.scala
@@ -764,9 +764,25 @@ trait Definitions extends reflect.api.StandardDefinitions {
else
removeRedundantObjects(parents)
}
+
+ def typeStringNoPackage(tp: Type) =
+ "" + tp stripPrefix tp.typeSymbol.enclosingPackage.fullName + "."
+
+ def briefParentsString(parents: List[Type]) =
+ normalizedParents(parents) map typeStringNoPackage mkString " with "
+
def parentsString(parents: List[Type]) =
normalizedParents(parents) mkString " with "
+ def typeParamsString(tp: Type) = tp match {
+ case PolyType(tparams, _) => tparams map (_.defString) mkString ("[", ",", "]")
+ case _ => ""
+ }
+ def valueParamsString(tp: Type) = tp match {
+ case MethodType(params, _) => params map (_.defString) mkString ("(", ",", ")")
+ case _ => ""
+ }
+
// members of class java.lang.{ Object, String }
lazy val Object_## = enterNewMethod(ObjectClass, nme.HASHHASH, Nil, inttype, FINAL)
lazy val Object_== = enterNewMethod(ObjectClass, nme.EQ, anyrefparam, booltype, FINAL)
@@ -970,7 +986,7 @@ trait Definitions extends reflect.api.StandardDefinitions {
case (_, restpe) => NullaryMethodType(restpe)
}
- msym setInfoAndEnter polyType(tparams, mtpe)
+ msym setInfoAndEnter genPolyType(tparams, mtpe)
}
/** T1 means one type parameter.
diff --git a/src/compiler/scala/reflect/internal/Flags.scala b/src/compiler/scala/reflect/internal/Flags.scala
index 8aae80eed4..3110d73461 100644
--- a/src/compiler/scala/reflect/internal/Flags.scala
+++ b/src/compiler/scala/reflect/internal/Flags.scala
@@ -107,7 +107,7 @@ class ModifierFlags {
// pre: PRIVATE or PROTECTED are also set
final val JAVA = 0x00100000 // symbol was defined by a Java class
final val STATIC = 0x00800000 // static field, method or class
- final val CASEACCESSOR = 0x01000000 // symbol is a case parameter (or its accessor)
+ final val CASEACCESSOR = 0x01000000 // symbol is a case parameter (or its accessor, or a GADT skolem)
final val TRAIT = 0x02000000 // symbol is a trait
final val DEFAULTPARAM = 0x02000000 // the parameter has a default value
final val PARAMACCESSOR = 0x20000000 // for field definitions generated for primary constructor
diff --git a/src/compiler/scala/reflect/internal/Importers.scala b/src/compiler/scala/reflect/internal/Importers.scala
index 1003fa804f..04381937d1 100644
--- a/src/compiler/scala/reflect/internal/Importers.scala
+++ b/src/compiler/scala/reflect/internal/Importers.scala
@@ -351,6 +351,8 @@ trait Importers { self: SymbolTable =>
new ApplyToImplicitArgs(importTree(fun), args map importTree)
case _: from.ApplyImplicitView =>
new ApplyImplicitView(importTree(fun), args map importTree)
+ case _: from.ApplyConstructor =>
+ new ApplyConstructor(importTree(fun), args map importTree)
case _ =>
new Apply(importTree(fun), args map importTree)
}
diff --git a/src/compiler/scala/reflect/internal/Kinds.scala b/src/compiler/scala/reflect/internal/Kinds.scala
index 23bff950b8..eca63c7c15 100644
--- a/src/compiler/scala/reflect/internal/Kinds.scala
+++ b/src/compiler/scala/reflect/internal/Kinds.scala
@@ -49,9 +49,15 @@ trait Kinds {
private def kindMessage(a: Symbol, p: Symbol)(f: (String, String) => String): String =
f(a+qualify(a,p), p+qualify(p,a))
+ // Normally it's nicer to print nothing rather than '>: Nothing <: Any' all over
+ // the place, but here we need it for the message to make sense.
private def strictnessMessage(a: Symbol, p: Symbol) =
- kindMessage(a, p)("%s's bounds %s are stricter than %s's declared bounds %s".format(
- _, a.info, _, p.info))
+ kindMessage(a, p)("%s's bounds%s are stricter than %s's declared bounds%s".format(
+ _, a.info, _, p.info match {
+ case tb @ TypeBounds(_, _) if tb.isEmptyBounds => " >: Nothing <: Any"
+ case tb => "" + tb
+ })
+ )
private def varianceMessage(a: Symbol, p: Symbol) =
kindMessage(a, p)("%s is %s, but %s is declared %s".format(_, varStr(a), _, varStr(p)))
@@ -62,11 +68,16 @@ trait Kinds {
_, countAsString(p.typeParams.length))
)
+ private def buildMessage(xs: List[SymPair], f: (Symbol, Symbol) => String) = (
+ if (xs.isEmpty) ""
+ else xs map f.tupled mkString ("\n", ", ", "")
+ )
+
def errorMessage(targ: Type, tparam: Symbol): String = (
- (targ+"'s type parameters do not match "+tparam+"'s expected parameters: ")
- + (arity map { case (a, p) => arityMessage(a, p) } mkString ", ")
- + (variance map { case (a, p) => varianceMessage(a, p) } mkString ", ")
- + (strictness map { case (a, p) => strictnessMessage(a, p) } mkString ", ")
+ (targ+"'s type parameters do not match "+tparam+"'s expected parameters:")
+ + buildMessage(arity, arityMessage)
+ + buildMessage(variance, varianceMessage)
+ + buildMessage(strictness, strictnessMessage)
)
}
val NoKindErrors = KindErrors(Nil, Nil, Nil)
diff --git a/src/compiler/scala/reflect/internal/StdNames.scala b/src/compiler/scala/reflect/internal/StdNames.scala
index 84007425ed..9fb7b5b747 100644
--- a/src/compiler/scala/reflect/internal/StdNames.scala
+++ b/src/compiler/scala/reflect/internal/StdNames.scala
@@ -405,6 +405,8 @@ trait StdNames extends NameManglers { self: SymbolTable =>
val withFilter: NameType = "withFilter"
val zip: NameType = "zip"
+ val synthSwitch: NameType = "$synthSwitch"
+
// unencoded operators
object raw {
final val AMP : NameType = "&"
diff --git a/src/compiler/scala/reflect/internal/Symbols.scala b/src/compiler/scala/reflect/internal/Symbols.scala
index 9678d2b8cd..f4039cf6d3 100644
--- a/src/compiler/scala/reflect/internal/Symbols.scala
+++ b/src/compiler/scala/reflect/internal/Symbols.scala
@@ -269,11 +269,17 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
/** Create a new existential type skolem with this symbol its owner,
* based on the given symbol and origin.
*/
- def newExistentialSkolem(basis: Symbol, origin: AnyRef, name: TypeName = null, info: Type = null): TypeSkolem = {
- val skolem = newTypeSkolemSymbol(if (name eq null) basis.name.toTypeName else name, origin, basis.pos, (basis.flags | EXISTENTIAL) & ~PARAM)
- skolem setInfo (if (info eq null) basis.info cloneInfo skolem else info)
+ def newExistentialSkolem(basis: Symbol, origin: AnyRef): TypeSkolem = {
+ val skolem = newTypeSkolemSymbol(basis.name.toTypeName, origin, basis.pos, (basis.flags | EXISTENTIAL) & ~PARAM)
+ skolem setInfo (basis.info cloneInfo skolem)
}
+ // flags set up to maintain TypeSkolem's invariant: origin.isInstanceOf[Symbol] == !hasFlag(EXISTENTIAL)
+ // CASEACCESSOR | SYNTHETIC used to single this symbol out in deskolemizeGADT
+ def newGADTSkolem(name: TypeName, origin: Symbol, info: Type): TypeSkolem =
+ newTypeSkolemSymbol(name, origin, origin.pos, origin.flags & ~(EXISTENTIAL | PARAM) | CASEACCESSOR | SYNTHETIC) setInfo info
+
+
final def newExistential(name: TypeName, pos: Position = NoPosition, newFlags: Long = 0L): Symbol =
newAbstractType(name, pos, EXISTENTIAL | newFlags)
@@ -495,6 +501,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
// List[T] forSome { type T }
final def isExistentialSkolem = isExistentiallyBound && isSkolem
final def isExistentialQuantified = isExistentiallyBound && !isSkolem
+ final def isGADTSkolem = isSkolem && hasFlag(CASEACCESSOR | SYNTHETIC)
// class C extends D( { class E { ... } ... } ). Here, E is a class local to a constructor
final def isClassLocalToConstructor = isClass && hasFlag(INCONSTRUCTOR)
@@ -1865,7 +1872,8 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
def unpackLocation: AnyRef = null
/** Remove private modifier from symbol `sym`s definition. If `sym` is a
- * term symbol rename it by expanding its name to avoid name clashes
+ * term symbol that is neither a constructor nor a static module, rename it by expanding its name to avoid name clashes
+ * @param base the class whose fully qualified name is appended if name expansion is needed
*/
final def makeNotPrivate(base: Symbol) {
if (this.isPrivate) {
@@ -1985,6 +1993,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
else if (isTrait) ("trait", "trait", "TRT")
else if (isClass) ("class", "class", "CLS")
else if (isType) ("type", "type", "TPE")
+ else if (isClassConstructor && isPrimaryConstructor) ("primary constructor", "constructor", "PCTOR")
else if (isClassConstructor) ("constructor", "constructor", "CTOR")
else if (isSourceMethod) ("method", "method", "METH")
else if (isTerm) ("value", "value", "VAL")
@@ -2070,47 +2079,32 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
case s => " in " + s
}
def fullLocationString: String = toString + locationString
- def signatureString: String = if (hasRawInfo) infoString(rawInfo) else "<_>"
+ def signatureString: String = if (hasRawInfo) infoString(rawInfo) else "<_>"
/** String representation of symbol's definition following its name */
final def infoString(tp: Type): String = {
- def typeParamsString: String = tp match {
- case PolyType(tparams, _) if tparams.nonEmpty =>
- (tparams map (_.defString)).mkString("[", ",", "]")
- case _ =>
- ""
- }
- if (isClass)
- typeParamsString + " extends " + tp.resultType
- else if (isAliasType)
- typeParamsString + " = " + tp.resultType
- else if (isAbstractType)
- typeParamsString + {
- tp.resultType match {
- case TypeBounds(lo, hi) =>
- (if (lo.typeSymbol == NothingClass) "" else " >: " + lo) +
- (if (hi.typeSymbol == AnyClass) "" else " <: " + hi)
- case rtp =>
- "<: " + rtp
- }
- }
- else if (isModule)
- moduleClass.infoString(tp)
- else
- tp match {
- case PolyType(tparams, res) =>
- typeParamsString + infoString(res)
- case NullaryMethodType(res) =>
- infoString(res)
- case MethodType(params, res) =>
- params.map(_.defString).mkString("(", ",", ")") + infoString(res)
- case _ =>
- ": " + tp
+ def parents = (
+ if (settings.debug.value) parentsString(tp.parents)
+ else briefParentsString(tp.parents)
+ )
+ if (isType) typeParamsString(tp) + (
+ if (isClass) " extends " + parents
+ else if (isAliasType) " = " + tp.resultType
+ else tp.resultType match {
+ case rt @ TypeBounds(_, _) => "" + rt
+ case rt => " <: " + rt
}
+ )
+ else if (isModule) moduleClass.infoString(tp)
+ else tp match {
+ case PolyType(tparams, res) => typeParamsString(tp) + infoString(res)
+ case NullaryMethodType(res) => infoString(res)
+ case MethodType(params, res) => valueParamsString(tp) + infoString(res)
+ case _ => ": " + tp
+ }
}
- def infosString = infos.toString()
-
+ def infosString = infos.toString
def debugLocationString = fullLocationString + " " + debugFlagString
def debugFlagString = hasFlagsToString(-1L)
def hasFlagsToString(mask: Long): String = flagsToString(
@@ -2129,12 +2123,17 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
else if (owner.isRefinementClass) ExplicitFlags & ~OVERRIDE
else ExplicitFlags
+ // make the error message more googlable
+ def flagsExplanationString =
+ if (isGADTSkolem) " (this is a GADT skolem)"
+ else ""
+
def accessString = hasFlagsToString(PRIVATE | PROTECTED | LOCAL)
def defaultFlagString = hasFlagsToString(defaultFlagMask)
private def defStringCompose(infoString: String) = compose(
defaultFlagString,
keyString,
- varianceString + nameString + infoString
+ varianceString + nameString + infoString + flagsExplanationString
)
/** String representation of symbol's definition. It uses the
* symbol's raw info to avoid forcing types.
@@ -2477,7 +2476,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
* where the skolem was introduced (this is important for knowing when to pack it
* again into ab Existential). origin is `null` only in skolemizeExistentials called
* from <:< or isAsSpecific, because here its value does not matter.
- * I elieve the following invariant holds:
+ * I believe the following invariant holds:
*
* origin.isInstanceOf[Symbol] == !hasFlag(EXISTENTIAL)
*/
@@ -2671,7 +2670,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
override def rawInfo: Type = NoType
protected def doCookJavaRawInfo() {}
override def accessBoundary(base: Symbol): Symbol = RootClass
- def cloneSymbolImpl(owner: Symbol, newFlags: Long): Symbol = abort()
+ def cloneSymbolImpl(owner: Symbol, newFlags: Long): Symbol = abort("NoSymbol.clone()")
override def originalEnclosingMethod = this
override def owner: Symbol =
diff --git a/src/compiler/scala/reflect/internal/TreeInfo.scala b/src/compiler/scala/reflect/internal/TreeInfo.scala
index 769d7a9ed1..ce3de94335 100644
--- a/src/compiler/scala/reflect/internal/TreeInfo.scala
+++ b/src/compiler/scala/reflect/internal/TreeInfo.scala
@@ -17,7 +17,7 @@ abstract class TreeInfo {
val global: SymbolTable
import global._
- import definitions.{ isVarArgsList, isCastSymbol, ThrowableClass }
+ import definitions.{ isVarArgsList, isCastSymbol, ThrowableClass, TupleClass }
/* Does not seem to be used. Not sure what it does anyway.
def isOwnerDefinition(tree: Tree): Boolean = tree match {
@@ -312,6 +312,24 @@ abstract class TreeInfo {
case _ => false
}
+ /** Is this tree comprised of nothing but identifiers,
+ * but possibly in bindings or tuples? For instance
+ *
+ * foo @ (bar, (baz, quux))
+ *
+ * is a variable pattern; if the structure matches,
+ * then the remainder is inevitable.
+ */
+ def isVariablePattern(tree: Tree): Boolean = tree match {
+ case Bind(name, pat) => isVariablePattern(pat)
+ case Ident(name) => true
+ case Apply(sel, args) =>
+ ( isReferenceToScalaMember(sel, TupleClass(args.size).name.toTermName)
+ && (args forall isVariablePattern)
+ )
+ case _ => false
+ }
+
/** Is this argument node of the form <expr> : _* ?
*/
def isWildcardStarArg(tree: Tree): Boolean = tree match {
diff --git a/src/compiler/scala/reflect/internal/TreePrinters.scala b/src/compiler/scala/reflect/internal/TreePrinters.scala
index f823110440..8ed0ee6357 100644
--- a/src/compiler/scala/reflect/internal/TreePrinters.scala
+++ b/src/compiler/scala/reflect/internal/TreePrinters.scala
@@ -433,7 +433,7 @@ trait TreePrinters extends api.TreePrinters { self: SymbolTable =>
/** Hook for extensions */
def xprintTree(treePrinter: TreePrinter, tree: Tree) =
- treePrinter.print(tree.productPrefix+tree.productIterator.mkString("(", ", ", ")"))
+ treePrinter.print(tree.printingPrefix+tree.productIterator.mkString("(", ", ", ")"))
def newTreePrinter(writer: PrintWriter): TreePrinter = new TreePrinter(writer)
def newTreePrinter(stream: OutputStream): TreePrinter = newTreePrinter(new PrintWriter(stream))
diff --git a/src/compiler/scala/reflect/internal/Trees.scala b/src/compiler/scala/reflect/internal/Trees.scala
index 9b1712b790..1a40e0105c 100644
--- a/src/compiler/scala/reflect/internal/Trees.scala
+++ b/src/compiler/scala/reflect/internal/Trees.scala
@@ -350,8 +350,9 @@ trait Trees extends api.Trees { self: SymbolTable =>
"subst[%s, %s](%s)".format(fromStr, toStr, (from, to).zipped map (_ + " -> " + _) mkString ", ")
}
- // NOTE: if symbols in `from` occur multiple times in the `tree` passed to `transform`,
- // the resulting Tree will be a graph, not a tree... this breaks all sorts of stuff,
+ // NOTE: calls shallowDuplicate on trees in `to` to avoid problems when symbols in `from`
+ // occur multiple times in the `tree` passed to `transform`,
+ // otherwise, the resulting Tree would be a graph, not a tree... this breaks all sorts of stuff,
// notably concerning the mutable aspects of Trees (such as setting their .tpe)
class TreeSubstituter(from: List[Symbol], to: List[Tree]) extends Transformer {
override def transform(tree: Tree): Tree = tree match {
diff --git a/src/compiler/scala/reflect/internal/Types.scala b/src/compiler/scala/reflect/internal/Types.scala
index 2382413a9a..bb9549deba 100644
--- a/src/compiler/scala/reflect/internal/Types.scala
+++ b/src/compiler/scala/reflect/internal/Types.scala
@@ -97,7 +97,7 @@ trait Types extends api.Types { self: SymbolTable =>
*/
private final val propagateParameterBoundsToTypeVars = sys.props contains "scalac.debug.prop-constraints"
- protected val enableTypeVarExperimentals = settings.Xexperimental.value
+ protected val enableTypeVarExperimentals = settings.Xexperimental.value || settings.YvirtPatmat.value
/** Empty immutable maps to avoid allocations. */
private val emptySymMap = immutable.Map[Symbol, Symbol]()
@@ -448,7 +448,7 @@ trait Types extends api.Types { self: SymbolTable =>
def resultType(actuals: List[Type]) = this
/** Only used for dependent method types. */
- def resultApprox: Type = ApproximateDependentMap(resultType) // if (!settings.YdepMethTpes.value) resultType else
+ def resultApprox: Type = ApproximateDependentMap(resultType)
/** If this is a TypeRef `clazz`[`T`], return the argument `T`
* otherwise return this type
@@ -1041,8 +1041,8 @@ trait Types extends api.Types { self: SymbolTable =>
baseClasses.head.newOverloaded(this, members.toList)
}
}
- /** The existential skolems and existentially quantified variables which are free in this type */
- def existentialSkolems: List[Symbol] = {
+ /** The (existential or otherwise) skolems and existentially quantified variables which are free in this type */
+ def skolemsExceptMethodTypeParams: List[Symbol] = {
var boundSyms: List[Symbol] = List()
var skolems: List[Symbol] = List()
for (t <- this) {
@@ -1050,7 +1050,8 @@ trait Types extends api.Types { self: SymbolTable =>
case ExistentialType(quantified, qtpe) =>
boundSyms = boundSyms ::: quantified
case TypeRef(_, sym, _) =>
- if ((sym hasFlag EXISTENTIAL) && !(boundSyms contains sym) && !(skolems contains sym))
+ if ((sym.isExistentialSkolem || sym.isGADTSkolem) && // treat GADT skolems like existential skolems
+ !((boundSyms contains sym) || (skolems contains sym)))
skolems = sym :: skolems
case _ =>
}
@@ -1336,9 +1337,14 @@ trait Types extends api.Types { self: SymbolTable =>
case TypeBounds(_, _) => that <:< this
case _ => lo <:< that && that <:< hi
}
- def isEmptyBounds = (lo.typeSymbolDirect eq NothingClass) && (hi.typeSymbolDirect eq AnyClass)
+ private def lowerString = if (emptyLowerBound) "" else " >: " + lo
+ private def upperString = if (emptyUpperBound) "" else " <: " + hi
+ private def emptyLowerBound = lo.typeSymbolDirect eq NothingClass
+ private def emptyUpperBound = hi.typeSymbolDirect eq AnyClass
+ def isEmptyBounds = emptyLowerBound && emptyUpperBound
+
// override def isNullable: Boolean = NullClass.tpe <:< lo;
- override def safeToString = ">: " + lo + " <: " + hi
+ override def safeToString = lowerString + upperString
override def kind = "TypeBoundsType"
}
@@ -2262,7 +2268,7 @@ trait Types extends api.Types { self: SymbolTable =>
override def isTrivial: Boolean = isTrivial0 && (resultType eq resultType.withoutAnnotations)
private lazy val isTrivial0 =
resultType.isTrivial && params.forall{p => p.tpe.isTrivial && (
- /*!settings.YdepMethTpes.value ||*/ !(params.exists(_.tpe.contains(p)) || resultType.contains(p)))
+ !(params.exists(_.tpe.contains(p)) || resultType.contains(p)))
}
def isImplicit = params.nonEmpty && params.head.isImplicit
@@ -2286,8 +2292,9 @@ trait Types extends api.Types { self: SymbolTable =>
}
else existentialAbstraction(params, resultType)
- // implicit args can only be depended on in result type: TODO this may be generalised so that the only constraint is dependencies are acyclic
- def approximate: MethodType = MethodType(params, resultApprox) // if (!settings.YdepMethTpes.value) this else
+ // implicit args can only be depended on in result type:
+ //TODO this may be generalised so that the only constraint is dependencies are acyclic
+ def approximate: MethodType = MethodType(params, resultApprox)
override def finalResultType: Type = resultType.finalResultType
@@ -2616,15 +2623,6 @@ trait Types extends api.Types { self: SymbolTable =>
}
}
- // TODO: I don't really know why this happens -- maybe because
- // the owner hierarchy changes? the other workaround (besides
- // repackExistential) is to explicitly pass expectedTp as the type
- // argument for the call to guard, but repacking the existential
- // somehow feels more robust
- //
- // TODO: check if optimization makes a difference, try something else
- // if necessary (cache?)
-
/** Repack existential types, otherwise they sometimes get unpacked in the
* wrong location (type inference comes up with an unexpected skolem)
*/
@@ -3329,16 +3327,18 @@ trait Types extends api.Types { self: SymbolTable =>
* may or may not be poly? (It filched the standard "canonical creator" name.)
*/
object GenPolyType {
- def apply(tparams: List[Symbol], tpe: Type): Type =
- if (tparams nonEmpty) typeFun(tparams, tpe)
- else tpe // it's okay to be forgiving here
+ def apply(tparams: List[Symbol], tpe: Type): Type = (
+ if (tparams nonEmpty) typeFun(tparams, tpe)
+ else tpe // it's okay to be forgiving here
+ )
def unapply(tpe: Type): Option[(List[Symbol], Type)] = tpe match {
- case PolyType(tparams, restpe) => Some(tparams, restpe)
- case _ => Some(List(), tpe)
+ case PolyType(tparams, restpe) => Some((tparams, restpe))
+ case _ => Some((Nil, tpe))
}
}
+ def genPolyType(params: List[Symbol], tpe: Type): Type = GenPolyType(params, tpe)
- @deprecated("use GenPolyType(...) instead")
+ @deprecated("use genPolyType(...) instead", "2.10.0")
def polyType(params: List[Symbol], tpe: Type): Type = GenPolyType(params, tpe)
/** A creator for anonymous type functions, where the symbol for the type function still needs to be created.
diff --git a/src/compiler/scala/reflect/internal/settings/MutableSettings.scala b/src/compiler/scala/reflect/internal/settings/MutableSettings.scala
index 0092f73fe3..b556c33aba 100644
--- a/src/compiler/scala/reflect/internal/settings/MutableSettings.scala
+++ b/src/compiler/scala/reflect/internal/settings/MutableSettings.scala
@@ -43,4 +43,5 @@ abstract class MutableSettings extends AbsSettings {
def Yrecursion: IntSetting
def maxClassfileName: IntSetting
def Xexperimental: BooleanSetting
+ def YvirtPatmat: BooleanSetting
}
\ No newline at end of file
diff --git a/src/compiler/scala/reflect/runtime/Settings.scala b/src/compiler/scala/reflect/runtime/Settings.scala
index b4f0123114..27e90c94bd 100644
--- a/src/compiler/scala/reflect/runtime/Settings.scala
+++ b/src/compiler/scala/reflect/runtime/Settings.scala
@@ -23,7 +23,6 @@ class Settings extends internal.settings.MutableSettings {
val overrideObjects = new BooleanSetting(false)
val debug = new BooleanSetting(false)
- // val YdepMethTpes = new BooleanSetting(true)
val Ynotnull = new BooleanSetting(false)
val explaintypes = new BooleanSetting(false)
val verbose = new BooleanSetting(false)
@@ -34,4 +33,6 @@ class Settings extends internal.settings.MutableSettings {
val Yrecursion = new IntSetting(0)
val maxClassfileName = new IntSetting(255)
val Xexperimental = new BooleanSetting(false)
+ val deepCloning = new BooleanSetting (false)
+ val YvirtPatmat = new BooleanSetting(false)
}
diff --git a/src/compiler/scala/reflect/runtime/ToolBoxes.scala b/src/compiler/scala/reflect/runtime/ToolBoxes.scala
index 8cc4d5f788..28f12b378f 100644
--- a/src/compiler/scala/reflect/runtime/ToolBoxes.scala
+++ b/src/compiler/scala/reflect/runtime/ToolBoxes.scala
@@ -1,6 +1,7 @@
package scala.reflect
package runtime
+import scala.tools.nsc
import scala.tools.nsc.reporters.Reporter
import scala.tools.nsc.reporters.StoreReporter
import scala.tools.nsc.reporters.AbstractReporter
@@ -21,8 +22,7 @@ trait ToolBoxes extends { self: Universe =>
class ToolBox(val reporter: Reporter = new StoreReporter, val options: String = "") {
- class ToolBoxGlobal(settings: scala.tools.nsc.Settings, reporter: scala.tools.nsc.reporters.Reporter)
- extends ReflectGlobal(settings, reporter) {
+ class ToolBoxGlobal(settings0: nsc.Settings, reporter0: nsc.reporters.Reporter) extends ReflectGlobal(settings0, reporter0) {
import definitions._
private val trace = scala.tools.nsc.util.trace when settings.debug.value
diff --git a/src/compiler/scala/reflect/runtime/Universe.scala b/src/compiler/scala/reflect/runtime/Universe.scala
index 324fee87ab..7a5dda3d8e 100644
--- a/src/compiler/scala/reflect/runtime/Universe.scala
+++ b/src/compiler/scala/reflect/runtime/Universe.scala
@@ -39,6 +39,11 @@ class Universe extends SymbolTable {
definitions.AnyValClass // force it.
+ type TreeAnnotation = Position
+ val NoTreeAnnotation: TreeAnnotation = NoPosition
+ def positionToAnnotation(pos: Position): TreeAnnotation = pos // TODO
+ def annotationToPosition(annot: TreeAnnotation): Position = annot //TODO
+
// establish root association to avoid cyclic dependency errors later
classToScala(classOf[java.lang.Object]).initialize
diff --git a/src/compiler/scala/tools/nsc/ast/DocComments.scala b/src/compiler/scala/tools/nsc/ast/DocComments.scala
index 678f7b3028..d3f4688d4b 100755
--- a/src/compiler/scala/tools/nsc/ast/DocComments.scala
+++ b/src/compiler/scala/tools/nsc/ast/DocComments.scala
@@ -229,31 +229,36 @@ trait DocComments { self: Global =>
* 1. It takes longer to run compared to merge
* 2. The inheritdoc annotation should not be used very often, as building the comment from pieces severely
* impacts performance
+ *
+ * @param parent The source (or parent) comment
+ * @param child The child (overriding member or usecase) comment
+ * @param sym The child symbol
+ * @return The child comment with the inheritdoc sections expanded
*/
- def expandInheritdoc(src: String, dst: String, sym: Symbol): String =
- if (dst.indexOf("@inheritdoc") == -1)
- dst
+ def expandInheritdoc(parent: String, child: String, sym: Symbol): String =
+ if (child.indexOf("@inheritdoc") == -1)
+ child
else {
- val srcSections = tagIndex(src)
- val dstSections = tagIndex(dst)
- val srcTagMap = sectionTagMap(src, srcSections)
- val srcNamedParams = Map() +
- ("@param" -> paramDocs(src, "@param", srcSections)) +
- ("@tparam" -> paramDocs(src, "@tparam", srcSections)) +
- ("@throws" -> paramDocs(src, "@throws", srcSections))
+ val parentSections = tagIndex(parent)
+ val childSections = tagIndex(child)
+ val parentTagMap = sectionTagMap(parent, parentSections)
+ val parentNamedParams = Map() +
+ ("@param" -> paramDocs(parent, "@param", parentSections)) +
+ ("@tparam" -> paramDocs(parent, "@tparam", parentSections)) +
+ ("@throws" -> paramDocs(parent, "@throws", parentSections))
val out = new StringBuilder
- def replaceInheritdoc(src: String, dst: String) =
- if (dst.indexOf("@inheritdoc") == -1)
- dst
- else
- dst.replaceAllLiterally("@inheritdoc", src)
+ def replaceInheritdoc(childSection: String, parentSection: => String) =
+ if (childSection.indexOf("@inheritdoc") == -1)
+ childSection
+ else
+ childSection.replaceAllLiterally("@inheritdoc", parentSection)
- def getSourceSection(section: (Int, Int)): String = {
+ def getParentSection(section: (Int, Int)): String = {
- def getSectionHeader = extractSectionTag(dst, section) match {
- case param@("@param"|"@tparam"|"@throws") => param + " " + extractSectionParam(dst, section)
+ def getSectionHeader = extractSectionTag(child, section) match {
+ case param@("@param"|"@tparam"|"@throws") => param + " " + extractSectionParam(child, section)
case other => other
}
@@ -261,17 +266,19 @@ trait DocComments { self: Global =>
paramMap.get(param) match {
case Some(section) =>
// Cleanup the section tag and parameter
- val sectionTextBounds = extractSectionText(src, section)
- cleanupSectionText(src.substring(sectionTextBounds._1, sectionTextBounds._2))
+ val sectionTextBounds = extractSectionText(parent, section)
+ cleanupSectionText(parent.substring(sectionTextBounds._1, sectionTextBounds._2))
case None =>
reporter.info(sym.pos, "The \"" + getSectionHeader + "\" annotation of the " + sym +
" comment contains @inheritdoc, but the corresponding section in the parent is not defined.", true)
"<invalid inheritdoc annotation>"
}
- dst.substring(section._1, section._1 + 7) match {
- case param@("@param "|"@tparam"|"@throws") => sectionString(extractSectionParam(dst, section), srcNamedParams(param.trim))
- case _ => sectionString(extractSectionTag(dst, section), srcTagMap)
+ child.substring(section._1, section._1 + 7) match {
+ case param@("@param "|"@tparam"|"@throws") =>
+ sectionString(extractSectionParam(child, section), parentNamedParams(param.trim))
+ case _ =>
+ sectionString(extractSectionTag(child, section), parentTagMap)
}
}
@@ -283,11 +290,11 @@ trait DocComments { self: Global =>
// Append main comment
out.append("/**")
- out.append(replaceInheritdoc(mainComment(src, srcSections), mainComment(dst, dstSections)))
+ out.append(replaceInheritdoc(mainComment(child, childSections), mainComment(parent, parentSections)))
// Append sections
- for (section <- dstSections)
- out.append(replaceInheritdoc(getSourceSection(section), dst.substring(section._1, section._2)))
+ for (section <- childSections)
+ out.append(replaceInheritdoc(child.substring(section._1, section._2), getParentSection(section)))
out.append("*/")
out.toString
diff --git a/src/compiler/scala/tools/nsc/ast/NodePrinters.scala b/src/compiler/scala/tools/nsc/ast/NodePrinters.scala
index 9466d1c1f2..acbdcd501f 100644
--- a/src/compiler/scala/tools/nsc/ast/NodePrinters.scala
+++ b/src/compiler/scala/tools/nsc/ast/NodePrinters.scala
@@ -9,14 +9,13 @@ package ast
import compat.Platform.EOL
import symtab.Flags._
-/** The object <code>nodePrinter</code> converts the internal tree
- * representation to a string formatted as a Scala expression.
+/** The object `nodePrinter` converts the internal tree
+ * representation to a string.
*
* @author Stephane Micheloud
- * @version 1.0
+ * @author Paul Phillips
*/
abstract class NodePrinters {
-
val global: Global
import global._
@@ -25,262 +24,332 @@ abstract class NodePrinters {
}
var infolevel = InfoLevel.Quiet
- object nodeToString extends Function1[Tree, String] {
+ def nodeToString: Tree => String =
+ if (sys.props contains "scala.colors") nodeToColorizedString
+ else nodeToRegularString
+
+ object nodeToRegularString extends DefaultPrintAST with (Tree => String) {
+ def apply(tree: Tree) = stringify(tree)
+ }
+
+ object nodeToColorizedString extends ColorPrintAST with (Tree => String) {
+ def apply(tree: Tree) = stringify(tree)
+ }
+
+ trait ColorPrintAST extends DefaultPrintAST {
+ import scala.tools.util.color._
+
+ def keywordColor = Cyan
+ def typeColor = Yellow
+ def termColor = Blue
+ def flagColor = Red
+ def literalColor = Green
+
+ override def showFlags(tree: MemberDef) =
+ super.showFlags(tree) in flagColor.bright
+
+ override def showDefTreeName(tree: DefTree) =
+ if (tree.name.isTermName) tree.name.decode in termColor.bright
+ else tree.name.decode in typeColor.bright
+
+ override def showName(name: Name) =
+ if (name == nme.EMPTY || name == tpnme.EMPTY) "<empty>" in keywordColor
+ else if (name.isTermName) name.decode in termColor
+ else name.decode in typeColor
+
+ override def showLiteral(lit: Literal) =
+ super.showLiteral(lit) in literalColor.bright
+ }
+
+ trait DefaultPrintAST extends PrintAST {
+ def showDefTreeName(tree: DefTree) = showName(tree.name)
+ def showFlags(tree: MemberDef) = flagsToString(tree.symbol.flags | tree.mods.flags)
+ def showLiteral(lit: Literal) = lit.value.escapedStringValue
+ def showTypeTree(tt: TypeTree) = "<tpt>" + emptyOrComment(showType(tt))
+ def showName(name: Name) = name match {
+ case nme.EMPTY | tpnme.EMPTY => "<empty>"
+ case name => "\"" + name + "\""
+ }
+
+ def showSymbol(tree: Tree): String = {
+ val sym = tree.symbol
+ if (sym == null || sym == NoSymbol) ""
+ else sym.defString + sym.locationString
+ }
+ def showType(tree: Tree): String = {
+ val tpe = tree.tpe
+ if (tpe == null || tpe == NoType) ""
+ else "tree.tpe=" + tpe
+ }
+
+ def showAttributes(tree: Tree): String = {
+ if (infolevel == InfoLevel.Quiet) ""
+ else {
+ try { List(showSymbol(tree), showType(tree)) filterNot (_ == "") mkString ", " trim }
+ catch { case ex: Throwable => "sym= <error> " + ex.getMessage }
+ }
+ }
+ }
+
+ trait PrintAST {
private val buf = new StringBuilder
+ private var level = 0
- def apply(tree: Tree): String = {
- def traverse(tree: Tree, level: Int, comma: Boolean) {
- def println(s: String) {
- for (i <- 0 until level) buf.append(" ")
- buf.append(s)
- buf.append(EOL)
- }
- def printcln(s: String) {
- for (i <- 0 until level) buf.append(" ")
- buf.append(s)
- if (comma) buf.append(",")
- buf.append(EOL)
- }
- def annotationInfoToString(annot: AnnotationInfo): String = {
- val str = new StringBuilder
- str.append(annot.atp.toString())
- if (!annot.args.isEmpty)
- str.append(annot.args.mkString("(", ",", ")"))
- if (!annot.assocs.isEmpty)
- for (((name, value), index) <- annot.assocs.zipWithIndex) {
- if (index > 0)
- str.append(", ")
- str.append(name).append(" = ").append(value)
- }
- str.toString
- }
- def symflags(tree: Tree): String = {
- val buf = new StringBuffer
- val sym = tree.symbol
- buf append flagsToString(sym.flags)
+ def showName(name: Name): String
+ def showDefTreeName(defTree: DefTree): String
+ def showFlags(tree: MemberDef): String
+ def showLiteral(lit: Literal): String
+ def showTypeTree(tt: TypeTree): String
+ def showAttributes(tree: Tree): String // symbol and type
+
+ def showRefTreeName(tree: Tree): String = tree match {
+ case SelectFromTypeTree(qual, name) => showRefTreeName(qual) + "#" + showName(name)
+ case Select(qual, name) => showRefTreeName(qual) + "." + showName(name)
+ case Ident(name) => showName(name)
+ case _ => "" + tree
+ }
+ def showRefTree(tree: RefTree): String = {
+ def prefix0 = showRefTreeName(tree.qualifier)
+ def prefix = if (prefix0 == "") "" else (tree match {
+ case SelectFromTypeTree(_, _) => prefix0 + "#"
+ case Select(_, _) => prefix0 + "."
+ case _ => ""
+ })
+ prefix + showName(tree.name) + emptyOrComment(showAttributes(tree))
+ }
- val annots = ", annots=" + (
- if (!sym.annotations.isEmpty)
- sym.annotations.map(annotationInfoToString).mkString("[", ",", "]")
- else
- tree.asInstanceOf[MemberDef].mods.annotations)
- (if (buf.length() > 2) buf.substring(3)
- else "0") + ", // flags=" + flagsToString(sym.flags) + annots
- }
+ def emptyOrComment(s: String) = if (s == "") "" else " // " + s
- def nodeinfo(tree: Tree): String =
- if (infolevel == InfoLevel.Quiet) ""
- else {
- try {
- val buf = new StringBuilder(" // sym=" + tree.symbol)
- if (tree.hasSymbol) {
- if (tree.symbol.isPrimaryConstructor)
- buf.append(", isPrimaryConstructor")
- else if (tree.symbol.isConstructor)
- buf.append(", isConstructor")
- if (tree.symbol != NoSymbol)
- buf.append(", sym.owner=" + tree.symbol.owner)
- buf.append(", sym.tpe=" + tree.symbol.tpe)
- }
- buf.append(", tpe=" + tree.tpe)
- if (tree.tpe != null) {
- var sym = tree.tpe.termSymbol
- if (sym == NoSymbol) sym = tree.tpe.typeSymbol
- buf.append(", tpe.sym=" + sym)
- if (sym != NoSymbol) {
- buf.append(", tpe.sym.owner=" + sym.owner)
- if ((infolevel > InfoLevel.Normal) &&
- !(sym.owner eq definitions.ScalaPackageClass) &&
- !sym.isModuleClass && !sym.isPackageClass &&
- !sym.isJavaDefined) {
- val members = for (m <- tree.tpe.decls)
- yield m.toString() + ": " + m.tpe + ", "
- buf.append(", tpe.decls=" + members)
- }
- }
- }
- buf.toString
- } catch {
- case ex: Throwable =>
- return " // sym= <error> " + ex.getMessage
- }
- }
- def nodeinfo2(tree: Tree): String =
- (if (comma) "," else "") + nodeinfo(tree)
+ def stringify(tree: Tree): String = {
+ buf.clear()
+ level = 0
+ traverse(tree)
+ buf.toString
+ }
+ def traverseAny(x: Any) {
+ x match {
+ case t: Tree => traverse(t)
+ case xs: List[_] => printMultiline("List", "")(xs foreach traverseAny)
+ case _ => println("" + x)
+ }
+ }
+ def println(s: String) = printLine(s, "")
+
+ def printLine(value: String, comment: String) {
+ buf append " " * level
+ buf append value
+ if (comment != "") {
+ if (value != "")
+ buf append " "
- def applyCommon(name: String, tree: Tree, fun: Tree, args: List[Tree]) {
- println(name + "(" + nodeinfo(tree))
- traverse(fun, level + 1, true)
- if (args.isEmpty)
- println(" Nil // no argument")
- else {
- val n = args.length
- println(" List( // " + n + " arguments(s)")
- for (i <- 0 until n)
- traverse(args(i), level + 2, i < n-1)
- println(" )")
- }
- printcln(")")
+ buf append "// "
+ buf append comment
+ }
+ buf append EOL
+ }
+
+ def annotationInfoToString(annot: AnnotationInfo): String = {
+ val str = new StringBuilder
+ str.append(annot.atp.toString())
+ if (!annot.args.isEmpty)
+ str.append(annot.args.mkString("(", ",", ")"))
+ if (!annot.assocs.isEmpty)
+ for (((name, value), index) <- annot.assocs.zipWithIndex) {
+ if (index > 0)
+ str.append(", ")
+ str.append(name).append(" = ").append(value)
}
+ str.toString
+ }
+ def printModifiers(tree: MemberDef) {
+ val annots0 = tree.symbol.annotations match {
+ case Nil => tree.mods.annotations
+ case xs => xs map annotationInfoToString
+ }
+ val annots = annots0 match {
+ case Nil => ""
+ case xs => " " + xs.mkString("@{ ", ", ", " }")
+ }
+ val flagString = showFlags(tree) match {
+ case "" => "0"
+ case s => s
+ }
+ println(flagString + annots)
+ }
+
+ def applyCommon(tree: Tree, fun: Tree, args: List[Tree]) {
+ printMultiline(tree) {
+ traverse(fun)
+ traverseList("Nil", "argument")(args)
+ }
+ }
+
+ def printMultiline(tree: Tree)(body: => Unit) {
+ printMultiline(tree.printingPrefix, showAttributes(tree))(body)
+ }
+ def printMultiline(prefix: String, comment: String)(body: => Unit) {
+ printLine(prefix + "(", comment)
+ indent(body)
+ println(")")
+ }
- tree match {
- case AppliedTypeTree(tpt, args) => applyCommon("AppliedTypeTree", tree, tpt, args)
- case Apply(fun, args) => applyCommon("Apply", tree, fun, args)
- case ApplyDynamic(fun, args) => applyCommon("ApplyDynamic", tree, fun, args)
+ @inline private def indent[T](body: => T): T = {
+ level += 1
+ try body
+ finally level -= 1
+ }
- case Block(stats, expr) =>
- println("Block(" + nodeinfo(tree))
- if (stats.isEmpty)
- println(" List(), // no statement")
- else {
- val n = stats.length
- println(" List( // " + n + " statement(s)")
- for (i <- 0 until n)
- traverse(stats(i), level + 2, i < n-1)
- println(" ),")
- }
- traverse(expr, level + 1, false)
- printcln(")")
- case ClassDef(mods, name, tparams, impl) =>
- println("ClassDef(" + nodeinfo(tree))
- println(" " + symflags(tree))
- println(" \"" + name + "\",")
- if (tparams.isEmpty)
- println(" List(), // no type parameter")
- else {
- val n = tparams.length
- println(" List( // " + n + " type parameter(s)")
- for (i <- 0 until n)
- traverse(tparams(i), level + 2, i < n-1)
- println(" ),")
- }
- traverse(impl, level + 1, false)
- printcln(")")
- case DefDef(mods, name, tparams, vparamss, tpt, rhs) =>
- println("DefDef(" + nodeinfo(tree))
- println(" " + symflags(tree))
- println(" \"" + name + "\",")
- if (tparams.isEmpty)
- println(" List(), // no type parameter")
- else {
- val n = tparams.length
- println(" List( // " + n + " type parameter(s)")
- for (i <- 0 until n)
- traverse(tparams(i), level + 2, i < n-1)
- println(" ),")
+ def traverseList(ifEmpty: String, what: String)(trees: List[Tree]) {
+ if (trees.isEmpty)
+ println(ifEmpty)
+ else if (trees.tail.isEmpty)
+ traverse(trees.head)
+ else {
+ printLine("", trees.length + " " + what + "s")
+ trees foreach traverse
+ }
+ }
+
+ def printSingle(tree: Tree, name: Name) {
+ println(tree.printingPrefix + "(" + showName(name) + ")" + showAttributes(tree))
+ }
+
+ def traverse(tree: Tree) {
+ tree match {
+ case AppliedTypeTree(tpt, args) => applyCommon(tree, tpt, args)
+ case ApplyDynamic(fun, args) => applyCommon(tree, fun, args)
+ case Apply(fun, args) => applyCommon(tree, fun, args)
+
+ case Throw(Ident(name)) =>
+ printSingle(tree, name)
+
+ case Function(vparams, body) =>
+ printMultiline(tree) {
+ traverseList("()", "parameter")(vparams)
+ traverse(body)
+ }
+ case Try(block, catches, finalizer) =>
+ printMultiline(tree) {
+ traverse(block)
+ traverseList("{}", "case")(catches)
+ if (finalizer ne EmptyTree)
+ traverse(finalizer)
+ }
+
+ case Match(selector, cases) =>
+ printMultiline(tree) {
+ traverse(selector)
+ traverseList("", "case")(cases)
+ }
+ case CaseDef(pat, guard, body) =>
+ printMultiline(tree) {
+ traverse(pat)
+ if (guard ne EmptyTree)
+ traverse(guard)
+ traverse(body)
+ }
+ case Block(stats, expr) =>
+ printMultiline(tree) {
+ traverseList("{}", "statement")(stats)
+ traverse(expr)
+ }
+ case cd @ ClassDef(mods, name, tparams, impl) =>
+ printMultiline(tree) {
+ printModifiers(cd)
+ println(showDefTreeName(cd))
+ traverseList("[]", "type parameter")(tparams)
+ traverse(impl)
+ }
+ case md @ ModuleDef(mods, name, impl) =>
+ printMultiline(tree) {
+ printModifiers(md)
+ println(showDefTreeName(md))
+ traverse(impl)
+ }
+ case dd @ DefDef(mods, name, tparams, vparamss, tpt, rhs) =>
+ printMultiline(tree) {
+ printModifiers(dd)
+ println(showDefTreeName(dd))
+ traverseList("[]", "type parameter")(tparams)
+ vparamss match {
+ case Nil => println("Nil")
+ case Nil :: Nil => println("List(Nil)")
+ case ps :: Nil =>
+ printLine("", "1 parameter list")
+ ps foreach traverse
+ case pss =>
+ printLine("", pss.length + " parameter lists")
+ pss foreach (ps => traverseList("()", "parameter")(ps))
}
- val n = vparamss.length
- if (n == 1 && vparamss(0).isEmpty)
- println(" List(List()), // no parameter")
- else {
- println(" List(")
- for (i <- 0 until n) {
- val m = vparamss(i).length
- println(" List( // " + m + " parameter(s)")
- for (j <- 0 until m)
- traverse(vparamss(i)(j), level + 3, j < m-1)
- println(" )")
+ traverse(tpt)
+ traverse(rhs)
+ }
+ case EmptyTree =>
+ println(showName(nme.EMPTY))
+ case lit @ Literal(value) =>
+ println(showLiteral(lit))
+ case New(tpt) =>
+ printMultiline(tree)(traverse(tpt))
+ case Super(This(qual), mix) =>
+ println("Super(This(" + showName(qual) + "), " + showName(mix) + ")")
+ case Super(qual, mix) =>
+ printMultiline(tree) {
+ traverse(qual)
+ showName(mix)
+ }
+ case Template(parents, self, body) =>
+ printMultiline(tree) {
+ val ps0 = parents map { p =>
+ if (p.tpe eq null) p match {
+ case x: RefTree => showRefTree(x)
+ case x => "" + x
}
- println(" ),")
- }
- println(" " + tpt + ",")
- traverse(rhs, level + 1, false)
- printcln(")")
- case EmptyTree =>
- printcln("EmptyTree")
- case Ident(name) =>
- printcln("Ident(\"" + name + "\")" + nodeinfo2(tree))
- case Literal(value) =>
- printcln("Literal(" + value + ")")
- case New(tpt) =>
- println("New(" + nodeinfo(tree))
- traverse(tpt, level + 1, false)
- printcln(")")
- case Select(qualifier, selector) =>
- println("Select(" + nodeinfo(tree))
- traverse(qualifier, level + 1, true)
- printcln(" \"" + selector + "\")")
- case Super(qual, mix) =>
- println("Super(\"" + mix + "\")" + nodeinfo(tree))
- traverse(qual, level + 1, true)
- case Template(parents, self, body) =>
- println("Template(" + nodeinfo(tree))
- println(" " + parents.map(p =>
- if (p.tpe ne null) p.tpe.typeSymbol else "null-" + p
- ) + ", // parents")
- traverse(self, level + 1, true)
- if (body.isEmpty)
- println(" List() // no body")
- else {
- val n = body.length
- println(" List( // body")
- for (i <- 0 until n)
- traverse(body(i), level + 2, i < n-1)
- println(" )")
- }
- printcln(")")
- case This(qual) =>
- println("This(\"" + qual + "\")" + nodeinfo2(tree))
- case TypeApply(fun, args) =>
- println("TypeApply(" + nodeinfo(tree))
- traverse(fun, level + 1, true)
- if (args.isEmpty)
- println(" List() // no argument")
- else {
- val n = args.length
- println(" List(")
- for (i <- 0 until n)
- traverse(args(i), level + 1, i < n-1)
- println(" )")
+ else showName(newTypeName(p.tpe.typeSymbol.fullName))
}
- printcln(")")
- case TypeTree() =>
- printcln("TypeTree()" + nodeinfo2(tree))
- case Typed(expr, tpt) =>
- println("Typed(" + nodeinfo(tree))
- traverse(expr, level + 1, true)
- traverse(tpt, level + 1, false)
- printcln(")")
- case ValDef(mods, name, tpt, rhs) =>
- println("ValDef(" + nodeinfo(tree))
- println(" " + symflags(tree))
- println(" \"" + name + "\",")
- traverse(tpt, level + 1, true)
- traverse(rhs, level + 1, false)
- printcln(")")
- case PackageDef(pid, stats) =>
- println("PackageDef(")
- traverse(pid, level + 1, false)
- println(",\n")
- for (stat <- stats)
- traverse(stat, level + 1, false)
- printcln(")")
- case _ =>
- tree match {
- case p: Product =>
- if (p.productArity != 0) {
- println(p.productPrefix+"(")
- for (elem <- (0 until p.productArity) map p.productElement) {
- def printElem(elem: Any, level: Int): Unit = elem match {
- case t: Tree =>
- traverse(t, level, false)
- case xs: List[_] =>
- print("List(")
- for (x <- xs) printElem(x, level+1)
- printcln(")")
- case _ =>
- println(elem.toString)
- }
- printElem(elem, level+1)
- }
- printcln(")")
- } else printcln(p.productPrefix)
- }
- }
+ printLine(ps0 mkString ", ", "parents")
+ traverse(self)
+ traverseList("{}", "statement")(body)
+ }
+ case This(qual) =>
+ printSingle(tree, qual)
+ case TypeApply(fun, args) =>
+ printMultiline(tree) {
+ traverse(fun)
+ traverseList("[]", "type argument")(args)
+ }
+ case tt @ TypeTree() =>
+ println(showTypeTree(tt))
+
+ case Typed(expr, tpt) =>
+ printMultiline(tree) {
+ traverse(expr)
+ traverse(tpt)
+ }
+ case vd @ ValDef(mods, name, tpt, rhs) =>
+ printMultiline(tree) {
+ printModifiers(vd)
+ println(showDefTreeName(vd))
+ traverse(tpt)
+ traverse(rhs)
+ }
+ case td @ TypeDef(mods, name, tparams, rhs) =>
+ printMultiline(tree) {
+ printModifiers(td)
+ println(showDefTreeName(td))
+ traverseList("[]", "type parameter")(tparams)
+ traverse(rhs)
+ }
+
+ case PackageDef(pid, stats) =>
+ printMultiline("PackageDef", "")(pid :: stats foreach traverse)
+
+ case _ =>
+ tree match {
+ case t: RefTree => println(showRefTree(t))
+ case t if t.productArity == 0 => println(tree.printingPrefix)
+ case t => printMultiline(tree)(tree.productIterator foreach traverseAny)
+ }
}
- buf setLength 0
- traverse(tree, 0, false)
- buf.toString
}
}
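
The rewritten printer keeps one mutable indentation level and restores it with try/finally around every nested block, instead of threading a level parameter through every call. A self-contained sketch of that printing pattern (toy names, not the NodePrinters API):

    object IndentPrinterSketch extends App {
      private val buf   = new StringBuilder
      private var level = 0

      private def indent[T](body: => T): T = {
        level += 1
        try body
        finally level -= 1       // restored even if body throws
      }
      private def printLine(s: String) {
        buf append ("  " * level)
        buf append s
        buf append "\n"
      }
      // Print a prefix, an indented body, then a closing paren, like printMultiline above.
      def printMultiline(prefix: String)(body: => Unit) {
        printLine(prefix + "(")
        indent(body)
        printLine(")")
      }

      printMultiline("Apply") {
        printLine("Ident(\"f\")")
        printMultiline("List") { printLine("Literal(1)") }
      }
      print(buf)
    }
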
diff --git a/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala b/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala
index c1d6c1a4d4..3302c11127 100644
--- a/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala
+++ b/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala
@@ -352,144 +352,17 @@ abstract class TreeBrowsers {
* Tree.
*/
object TreeInfo {
-
/** Return the case class name and the Name, if the node defines one */
- def treeName(t: Tree): (String, Name) = t match {
- case ProgramTree(units) =>
- ("Program", EMPTY)
-
- case UnitTree(unit) =>
- ("CompilationUnit", newTermName("" + unit))
-
- case DocDef(comment, definition) =>
- ("DocDef", EMPTY)
-
- case ClassDef(mods, name, tparams, impl) =>
- ("ClassDef", name)
-
- case PackageDef(packaged, impl) =>
- ("PackageDef", EMPTY)
-
- case ModuleDef(mods, name, impl) =>
- ("ModuleDef", name)
-
- case ValDef(mods, name, tpe, rhs) =>
- ("ValDef", name)
-
- case DefDef(mods, name, tparams, vparams, tpe, rhs) =>
- ("DefDef", name)
-
- case TypeDef(mods, name, tparams, rhs) =>
- ("TypeDef", name)
-
- case Import(expr, selectors) =>
- ("Import", EMPTY)
-
- case CaseDef(pat, guard, body) =>
- ("CaseDef", EMPTY)
-
- case Template(parents, self, body) =>
- ("Template", EMPTY)
-
- case LabelDef(name, params, rhs) =>
- ("LabelDef", name)
-
- case Block(stats, expr) =>
- ("Block", EMPTY)
-
- case Alternative(trees) =>
- ("Alternative", EMPTY)
-
- case Bind(name, rhs) =>
- ("Bind", name)
-
- case UnApply(fun, args) =>
- ("UnApply", EMPTY)
-
- case Match(selector, cases) =>
- ("Visitor", EMPTY)
-
- case Function(vparams, body) =>
- ("Function", EMPTY)
-
- case Assign(lhs, rhs) =>
- ("Assign", EMPTY)
-
- case If(cond, thenp, elsep) =>
- ("If", EMPTY)
-
- case Return(expr) =>
- ("Return", EMPTY)
-
- case Throw(expr) =>
- ("Throw", EMPTY)
-
- case New(init) =>
- ("New", EMPTY)
-
- case Typed(expr, tpe) =>
- ("Typed", EMPTY)
-
- case TypeApply(fun, args) =>
- ("TypeApply", EMPTY)
-
- case Apply(fun, args) =>
- ("Apply", EMPTY)
-
- case ApplyDynamic(qual, args) =>
- ("Apply", EMPTY)
-
- case Super(qualif, mix) =>
- ("Super", newTermName("mix: " + mix))
-
- case This(qualifier) =>
- ("This", qualifier)
-
- case Select(qualifier, selector) =>
- ("Select", selector)
-
- case Ident(name) =>
- ("Ident", name)
-
- case Literal(value) =>
- ("Literal", EMPTY)
-
- case TypeTree() =>
- ("TypeTree", EMPTY)
-
- case Annotated(annot, arg) =>
- ("Annotated", EMPTY)
-
- case SingletonTypeTree(ref) =>
- ("SingletonType", EMPTY)
-
- case SelectFromTypeTree(qualifier, selector) =>
- ("SelectFromType", selector)
-
- case CompoundTypeTree(template) =>
- ("CompoundType", EMPTY)
-
- case AppliedTypeTree(tpe, args) =>
- ("AppliedType", EMPTY)
-
- case TypeBoundsTree(lo, hi) =>
- ("TypeBoundsTree", EMPTY)
-
- case ExistentialTypeTree(tpt, whereClauses) =>
- ("ExistentialTypeTree", EMPTY)
-
- case Try(block, catcher, finalizer) =>
- ("Try", EMPTY)
-
- case EmptyTree =>
- ("Empty", EMPTY)
-
- case ArrayValue(elemtpt, trees) =>
- ("ArrayValue", EMPTY)
-
- case Star(t) =>
- ("Star", EMPTY)
- }
+ def treeName(t: Tree): (String, Name) = ((t.printingPrefix, t match {
+ case UnitTree(unit) => newTermName("" + unit)
+ case Super(_, mix) => newTermName("mix: " + mix)
+ case This(qual) => qual
+ case Select(_, selector) => selector
+ case Ident(name) => name
+ case SelectFromTypeTree(_, selector) => selector
+ case x: DefTree => x.name
+ case _ => EMPTY
+ }))
/** Return a list of children for the given tree node */
def children(t: Tree): List[Tree] = t match {
diff --git a/src/compiler/scala/tools/nsc/ast/TreeDSL.scala b/src/compiler/scala/tools/nsc/ast/TreeDSL.scala
index 0d19b781e2..8703de4e18 100644
--- a/src/compiler/scala/tools/nsc/ast/TreeDSL.scala
+++ b/src/compiler/scala/tools/nsc/ast/TreeDSL.scala
@@ -44,6 +44,11 @@ trait TreeDSL {
def NULL = LIT(null)
def UNIT = LIT(())
+ // for those preferring boring, predictable lives, without the thrills of tree-sharing
+ // (but with the perk of typed trees)
+ def TRUE_typed = LIT(true) setType ConstantType(Constant(true))
+ def FALSE_typed = LIT(false) setType ConstantType(Constant(false))
+
object WILD {
def empty = Ident(nme.WILDCARD)
def apply(tpe: Type) = Ident(nme.WILDCARD) setType tpe
diff --git a/src/compiler/scala/tools/nsc/ast/TreeGen.scala b/src/compiler/scala/tools/nsc/ast/TreeGen.scala
index 6d95b6ffdd..ad26ccad5e 100644
--- a/src/compiler/scala/tools/nsc/ast/TreeGen.scala
+++ b/src/compiler/scala/tools/nsc/ast/TreeGen.scala
@@ -65,6 +65,12 @@ abstract class TreeGen extends reflect.internal.TreeGen with TreeDSL {
case _ => tree
}
+ def mkSynthSwitchSelector(expr: Tree): Tree = atPos(expr.pos) {
+ // This can't be "Annotated(New(SwitchClass), expr)" because annotations
+ // are very picky about things and it crashes the compiler with "unexpected new".
+ Annotated(Ident(nme.synthSwitch), expr)
+ }
+
// must be kept in synch with the codegen in PatMatVirtualiser
object VirtualCaseDef {
def unapply(b: Block): Option[(Assign, Tree, Tree)] = b match {
@@ -73,6 +79,8 @@ abstract class TreeGen extends reflect.internal.TreeGen with TreeDSL {
}
}
+ def hasSynthCaseSymbol(t: Tree) = (t.symbol ne null) && (t.symbol hasFlag (CASE | SYNTHETIC))
+
// TODO: would be so much nicer if we would know during match-translation (i.e., type checking)
// whether we should emit missingCase-style apply (and isDefinedAt), instead of transforming trees post-factum
class MatchMatcher {
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
index ccebcfa54d..45325b4694 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
@@ -1394,13 +1394,7 @@ self =>
}
}
} else if (in.token == MATCH) {
- t = atPos(t.pos.startOrPoint, in.skipToken()) {
- /** For debugging pattern matcher transition issues */
- if (settings.Ypmatnaive.value)
- makeSequencedMatch(stripParens(t), inBracesOrNil(caseClauses()))
- else
- Match(stripParens(t), inBracesOrNil(caseClauses()))
- }
+ t = atPos(t.pos.startOrPoint, in.skipToken())(Match(stripParens(t), inBracesOrNil(caseClauses())))
}
// in order to allow anonymous functions as statements (as opposed to expressions) inside
// templates, we have to disambiguate them from self type declarations - bug #1565
diff --git a/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala b/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala
index 0d2fbc5372..3a6e26d3b5 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala
@@ -262,29 +262,25 @@ abstract class TreeBuilder {
else if (stats.length == 1) stats.head
else Block(stats.init, stats.last)
+ def makeFilter(tree: Tree, condition: Tree, scrutineeName: String): Tree = {
+ val cases = List(
+ CaseDef(condition, EmptyTree, Literal(Constant(true))),
+ CaseDef(Ident(nme.WILDCARD), EmptyTree, Literal(Constant(false)))
+ )
+ val matchTree = makeVisitor(cases, false, scrutineeName)
+
+ atPos(tree.pos)(Apply(Select(tree, nme.withFilter), matchTree :: Nil))
+ }
+
/** Create tree for for-comprehension generator <val pat0 <- rhs0> */
def makeGenerator(pos: Position, pat: Tree, valeq: Boolean, rhs: Tree): Enumerator = {
val pat1 = patvarTransformer.transform(pat)
val rhs1 =
- if (valeq) rhs
- else matchVarPattern(pat1) match {
- case Some(_) =>
- rhs
- case None =>
- atPos(rhs.pos) {
- Apply(
- Select(rhs, nme.filter),
- List(
- makeVisitor(
- List(
- CaseDef(pat1.duplicate, EmptyTree, Literal(Constant(true))),
- CaseDef(Ident(nme.WILDCARD), EmptyTree, Literal(Constant(false)))),
- false,
- nme.CHECK_IF_REFUTABLE_STRING
- )))
- }
- }
- if (valeq) ValEq(pos, pat1, rhs1) else ValFrom(pos, pat1, rhs1)
+ if (valeq || treeInfo.isVariablePattern(pat)) rhs
+ else makeFilter(rhs, pat1.duplicate, nme.CHECK_IF_REFUTABLE_STRING)
+
+ if (valeq) ValEq(pos, pat1, rhs1)
+ else ValFrom(pos, pat1, rhs1)
}
def makeParam(pname: TermName, tpe: Tree) =
@@ -509,37 +505,6 @@ abstract class TreeBuilder {
def makePatDef(pat: Tree, rhs: Tree): List[Tree] =
makePatDef(Modifiers(0), pat, rhs)
- /** For debugging only. Desugar a match statement like so:
- * val x = scrutinee
- * x match {
- * case case1 => ...
- * case _ => x match {
- * case case2 => ...
- * case _ => x match ...
- * }
- * }
- *
- * This way there are never transitions between nontrivial casedefs.
- * Of course many things break: exhaustiveness and unreachable checking
- * do not work, no switches will be generated, etc.
- */
- def makeSequencedMatch(selector: Tree, cases: List[CaseDef]): Tree = {
- require(cases.nonEmpty)
-
- val selectorName = freshTermName()
- val valdef = atPos(selector.pos)(ValDef(Modifiers(PrivateLocal | SYNTHETIC), selectorName, TypeTree(), selector))
- val nselector = Ident(selectorName)
-
- def loop(cds: List[CaseDef]): Match = {
- def mkNext = CaseDef(Ident(nme.WILDCARD), EmptyTree, loop(cds.tail))
-
- if (cds.size == 1) Match(nselector, cds)
- else Match(selector, List(cds.head, mkNext))
- }
-
- Block(List(valdef), loop(cases))
- }
-
/** Create tree for pattern definition <mods val pat0 = rhs> */
def makePatDef(mods: Modifiers, pat: Tree, rhs: Tree): List[Tree] = matchVarPattern(pat) match {
case Some((name, tpt)) =>
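
With `makeFilter`, a refutable pattern in a for-comprehension generator is guarded by a `withFilter` call whose argument is the two-case visitor built above, while a plain variable pattern skips the filter entirely. Roughly what that desugaring amounts to at the source level (a hand-written equivalent, not the trees the parser builds):

    object GeneratorFilterSketch extends App {
      val xs: List[Option[Int]] = List(Some(1), None, Some(3))

      // for (Some(x) <- xs) yield x
      val viaFor = for (Some(x) <- xs) yield x

      // ...behaves like an explicit withFilter followed by map:
      val viaFilter = xs
        .withFilter { case Some(_) => true; case _ => false }
        .map { case Some(x) => x }

      println(viaFor)      // List(1, 3)
      println(viaFilter)   // List(1, 3)
    }
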
diff --git a/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala b/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala
index 68c4ac03f6..4f3b0bf951 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala
@@ -386,10 +386,16 @@ trait BasicBlocks {
def close() {
assert(!closed || ignore, this)
assert(instructionList.nonEmpty, "Empty block: " + this)
- closed = true
- setFlag(DIRTYSUCCS)
- instructionList = instructionList.reverse
- instrs = instructionList.toArray
+ if (ignore && closed) { // redundant `ignore &&` for clarity -- we should never be in state `!ignore && closed`
+ // not doing anything to this block is important...
+ // because the else branch reverses innocent blocks, which is wrong when they're in ignore mode (and closed)
+ // reversing the instructions when (closed && ignore) wreaks havoc for nested label jumps (see comments in genLoad)
+ } else {
+ closed = true
+ setFlag(DIRTYSUCCS)
+ instructionList = instructionList.reverse
+ instrs = instructionList.toArray
+ }
}
def open() {
diff --git a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala
index 9e801e3ea8..8e568eca79 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala
@@ -868,6 +868,13 @@ abstract class GenICode extends SubComponent {
abort("Unknown label target: " + sym + " at: " + (fun.pos) + ": ctx: " + ctx)
}
})
+ // note: when one of the args to genLoadLabelArguments is a jump to a label,
+ // it will call back into genLoad and arrive at this case, which will then set ctx1.bb.ignore to true,
+ // this is okay, since we're jumping unconditionally, so the loads and jumps emitted by the outer
+ // call to genLoad (by calling genLoadLabelArguments and emitOnly) can safely be ignored,
+ // however, as emitOnly will close the block, which reverses its instructions (when it's still open),
+ // we better not reverse when the block has already been closed but is in ignore mode
+ // (if it's not in ignore mode, double-closing is an error)
val ctx1 = genLoadLabelArguments(args, label, ctx)
ctx1.bb.emitOnly(if (label.anchored) JUMP(label.block) else PJUMP(label))
ctx1.bb.enterIgnoreMode
@@ -937,11 +944,10 @@ abstract class GenICode extends SubComponent {
"Trying to access the this of another class: " +
"tree.symbol = " + tree.symbol + ", ctx.clazz.symbol = " + ctx.clazz.symbol + " compilation unit:"+unit)
if (tree.symbol.isModuleClass && tree.symbol != ctx.clazz.symbol) {
- debuglog("LOAD_MODULE from 'This': " + tree.symbol);
- assert(!tree.symbol.isPackageClass, "Cannot use package as value: " + tree)
- genLoadModule(ctx, tree.symbol, tree.pos)
+ genLoadModule(ctx, tree)
generatedType = REFERENCE(tree.symbol)
- } else {
+ }
+ else {
ctx.bb.emit(THIS(ctx.clazz.symbol), tree.pos)
generatedType = REFERENCE(
if (tree.symbol == ArrayClass) ObjectClass else ctx.clazz.symbol
@@ -954,11 +960,7 @@ abstract class GenICode extends SubComponent {
"Selection of non-module from empty package: " + tree +
" sym: " + tree.symbol + " at: " + (tree.pos)
)
- debuglog("LOAD_MODULE from Select(<emptypackage>): " + tree.symbol)
-
- assert(!tree.symbol.isPackageClass, "Cannot use package as value: " + tree)
- genLoadModule(ctx, tree.symbol, tree.pos)
- ctx
+ genLoadModule(ctx, tree)
case Select(qualifier, selector) =>
val sym = tree.symbol
@@ -966,14 +968,13 @@ abstract class GenICode extends SubComponent {
val hostClass = qualifier.tpe.typeSymbol.orElse(sym.owner)
if (sym.isModule) {
- debuglog("LOAD_MODULE from Select(qualifier, selector): " + sym)
- assert(!tree.symbol.isPackageClass, "Cannot use package as value: " + tree)
- genLoadModule(ctx, sym, tree.pos)
- ctx
- } else if (sym.isStaticMember) {
+ genLoadModule(ctx, tree)
+ }
+ else if (sym.isStaticMember) {
ctx.bb.emit(LOAD_FIELD(sym, true) setHostClass hostClass, tree.pos)
ctx
- } else {
+ }
+ else {
val ctx1 = genLoadQualifier(tree, ctx)
ctx1.bb.emit(LOAD_FIELD(sym, false) setHostClass hostClass, tree.pos)
ctx1
@@ -983,11 +984,10 @@ abstract class GenICode extends SubComponent {
val sym = tree.symbol
if (!sym.isPackage) {
if (sym.isModule) {
- debuglog("LOAD_MODULE from Ident(name): " + sym)
- assert(!sym.isPackageClass, "Cannot use package as value: " + tree)
- genLoadModule(ctx, sym, tree.pos)
+ genLoadModule(ctx, tree)
generatedType = toTypeKind(sym.info)
- } else {
+ }
+ else {
try {
val Some(l) = ctx.method.lookupLocal(sym)
ctx.bb.emit(LOAD_LOCAL(l), tree.pos)
@@ -1200,8 +1200,19 @@ abstract class GenICode extends SubComponent {
genLoad(arg, res, toTypeKind(tpe))
}
- private def genLoadModule(ctx: Context, sym: Symbol, pos: Position) {
- ctx.bb.emit(LOAD_MODULE(sym), pos)
+ private def genLoadModule(ctx: Context, tree: Tree): Context = {
+ // Working around SI-5604. Rather than failing the compile when we see
+ // a package here, check if there's a package object.
+ val sym = (
+ if (!tree.symbol.isPackageClass) tree.symbol
+ else tree.symbol.info.member(nme.PACKAGE) match {
+ case NoSymbol => assert(false, "Cannot use package as value: " + tree) ; NoSymbol
+ case s => Console.err.println("Bug: found package class where package object expected. Converting.") ; s.moduleClass
+ }
+ )
+ debuglog("LOAD_MODULE from %s: %s".format(tree.shortClass, sym))
+ ctx.bb.emit(LOAD_MODULE(sym), tree.pos)
+ ctx
}
def genConversion(from: TypeKind, to: TypeKind, ctx: Context, cast: Boolean) = {
@@ -1560,9 +1571,10 @@ abstract class GenICode extends SubComponent {
val ctx1 = genLoad(l, ctx, ObjectReference)
val ctx2 = genLoad(r, ctx1, ObjectReference)
- ctx2.bb.emit(CALL_METHOD(equalsMethod, if (settings.optimise.value) Dynamic else Static(false)))
- ctx2.bb.emit(CZJUMP(thenCtx.bb, elseCtx.bb, NE, BOOL))
- ctx2.bb.close
+ ctx2.bb.emitOnly(
+ CALL_METHOD(equalsMethod, if (settings.optimise.value) Dynamic else Static(false)),
+ CZJUMP(thenCtx.bb, elseCtx.bb, NE, BOOL)
+ )
}
else {
if (isNull(l))
diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala
index 877c51ebc1..d31eafff48 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala
@@ -645,7 +645,7 @@ abstract class TypeFlowAnalysis {
} while(!done)
lastInstruction.clear()
- for(b <- isOnPerimeter; val lastIns = b.toList.reverse find isOnWatchlist) {
+ for (b <- isOnPerimeter; lastIns = b.toList.reverse find isOnWatchlist) {
lastInstruction += (b -> lastIns.get.asInstanceOf[opcodes.CALL_METHOD])
}
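
The fix drops the deprecated `val` keyword from the value definition inside the for-comprehension. A small sketch of the surviving syntax (the deprecated spelling appears only in the comment):

    object ForValSketch extends App {
      val blocks = List(List(1, 2), List(), List(3))

      // Pre-2.10 sources often wrote:  for (b <- blocks; val xs = b.reverse; if xs.nonEmpty) ...
      // The `val` keyword there is deprecated; the definition is now written without it:
      val firsts = for (b <- blocks; xs = b.reverse; if xs.nonEmpty) yield xs.head
      println(firsts)   // List(2, 3)
    }
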
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala
index 4c77cb7082..8e326c202a 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala
@@ -739,7 +739,7 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
clasz.cunit.warning(sym.pos,
"""|compiler bug: created invalid generic signature for %s in %s
|signature: %s
- |if this is reproducible, please report bug at http://lampsvn.epfl.ch/trac/scala
+ |if this is reproducible, please report bug at https://issues.scala-lang.org/
""".trim.stripMargin.format(sym, sym.owner.skipPackageObject.fullName, sig))
return
}
@@ -753,7 +753,7 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
|original type: %s
|normalized type: %s
|erasure type: %s
- |if this is reproducible, please report bug at http://lampsvn.epfl.ch/trac/scala
+ |if this is reproducible, please report bug at https://issues.scala-lang.org/
""".trim.stripMargin.format(sym, sym.owner.skipPackageObject.fullName, sig, memberTpe, normalizedTpe, bytecodeTpe))
return
}
diff --git a/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala b/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala
index a734b2b92b..dfe9081ee5 100644
--- a/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala
+++ b/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala
@@ -262,7 +262,7 @@ abstract class Inliners extends SubComponent {
def inlineWithoutTFA(inputBlocks: Traversable[BasicBlock], callsites: Function1[BasicBlock, List[opcodes.CALL_METHOD]]): Int = {
var inlineCount = 0
import scala.util.control.Breaks._
- for(x <- inputBlocks; val easyCake = callsites(x); if easyCake.nonEmpty) {
+ for(x <- inputBlocks; easyCake = callsites(x); if easyCake.nonEmpty) {
breakable {
for(ocm <- easyCake) {
assert(ocm.method.isEffectivelyFinal && ocm.method.owner.isEffectivelyFinal)
diff --git a/src/compiler/scala/tools/nsc/doc/DocFactory.scala b/src/compiler/scala/tools/nsc/doc/DocFactory.scala
index 9a025b0d14..f32564f097 100644
--- a/src/compiler/scala/tools/nsc/doc/DocFactory.scala
+++ b/src/compiler/scala/tools/nsc/doc/DocFactory.scala
@@ -8,7 +8,7 @@ package doc
import scala.util.control.ControlThrowable
import reporters.Reporter
-import util.NoPosition
+import util.{ NoPosition, BatchSourceFile}
import io.{ File, Directory }
import DocParser.Parsed
@@ -46,13 +46,19 @@ class DocFactory(val reporter: Reporter, val settings: doc.Settings) { processor
override def forScaladoc = true
}
- /** Creates a scaladoc site for all symbols defined in this call's `files`,
- * as well as those defined in `files` of previous calls to the same processor.
+ /** Creates a scaladoc site for all symbols defined in this call's `source`,
+ * as well as those defined in `sources` of previous calls to the same processor.
* @param files The list of paths (relative to the compiler's source path,
* or absolute) of files to document. */
- def makeUniverse(files: List[String]): Option[Universe] = {
+ def makeUniverse(source: Either[List[String], String]): Option[Universe] = {
assert(settings.docformat.value == "html")
- new compiler.Run() compile files
+ source match {
+ case Left(files) =>
+ new compiler.Run() compile files
+ case Right(sourceCode) =>
+ new compiler.Run() compileSources List(new BatchSourceFile("newSource", sourceCode))
+ }
+
if (reporter.hasErrors)
return None
@@ -111,7 +117,7 @@ class DocFactory(val reporter: Reporter, val settings: doc.Settings) { processor
docletInstance match {
case universer: Universer =>
- val universe = makeUniverse(files) getOrElse { throw NoCompilerRunException }
+ val universe = makeUniverse(Left(files)) getOrElse { throw NoCompilerRunException }
universer setUniverse universe
docletInstance match {
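
makeUniverse now accepts either a list of file paths or a raw source string, so scaladoc can document in-memory code. A tiny standalone sketch of dispatching on such an Either (purely illustrative; only `makeUniverse(Left(files))` above is the real call site):

    object EitherSourceSketch extends App {
      // Accept either file paths or an in-memory source string.
      def describe(source: Either[List[String], String]): String = source match {
        case Left(files)       => "compiling " + files.size + " file(s)"
        case Right(sourceCode) => "compiling " + sourceCode.length + " chars of in-memory source"
      }

      println(describe(Left(List("A.scala", "B.scala"))))
      println(describe(Right("class C")))
    }
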
diff --git a/src/compiler/scala/tools/nsc/doc/Settings.scala b/src/compiler/scala/tools/nsc/doc/Settings.scala
index 6c74951156..45a2ad78b4 100644
--- a/src/compiler/scala/tools/nsc/doc/Settings.scala
+++ b/src/compiler/scala/tools/nsc/doc/Settings.scala
@@ -97,4 +97,6 @@ class Settings(error: String => Unit) extends scala.tools.nsc.Settings(error) {
docformat, doctitle, docfooter, docversion, docUncompilable, docsourceurl, docgenerator
)
val isScaladocSpecific: String => Boolean = scaladocSpecific map (_.name)
+
+ override def isScaladoc = true
}
diff --git a/src/compiler/scala/tools/nsc/doc/html/HtmlFactory.scala b/src/compiler/scala/tools/nsc/doc/html/HtmlFactory.scala
index 4f05678d85..0116e02e0e 100644
--- a/src/compiler/scala/tools/nsc/doc/html/HtmlFactory.scala
+++ b/src/compiler/scala/tools/nsc/doc/html/HtmlFactory.scala
@@ -119,7 +119,7 @@ class HtmlFactory(val universe: doc.Universe, index: doc.Index) {
def writeTemplate(tpl: DocTemplateEntity) {
if (!(written contains tpl)) {
- writeForThis(new page.Template(tpl))
+ writeForThis(new page.Template(universe, tpl))
written += tpl
tpl.templates map writeTemplate
}
diff --git a/src/compiler/scala/tools/nsc/doc/html/HtmlPage.scala b/src/compiler/scala/tools/nsc/doc/html/HtmlPage.scala
index b58c71eaa9..1544dafc69 100644
--- a/src/compiler/scala/tools/nsc/doc/html/HtmlPage.scala
+++ b/src/compiler/scala/tools/nsc/doc/html/HtmlPage.scala
@@ -22,6 +22,16 @@ abstract class HtmlPage extends Page { thisPage =>
/** The title of this page. */
protected def title: String
+ /** The page description */
+ protected def description: String =
+ // unless overwritten, will display the title in a spaced format, keeping - and .
+ title.replaceAll("[^a-zA-Z0-9\\.\\-]+", " ").replaceAll("\\-+", " - ").replaceAll(" +", " ")
+
+ /** The page keywords */
+ protected def keywords: String =
+ // unless overwritten, same as description, minus the " - "
+ description.replaceAll(" - ", " ")
+
/** Additional header elements (links, scripts, meta tags, etc.) required for this page. */
protected def headers: NodeSeq
@@ -35,6 +45,8 @@ abstract class HtmlPage extends Page { thisPage =>
<html>
<head>
<title>{ title }</title>
+ <meta name="description" content={ description }/>
+ <meta name="keywords" content={ keywords }/>
<meta http-equiv="content-type" content={ "text/html; charset=" + site.encoding }/>
{ headers }
</head>
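
The default description is derived from the page title by the regex chain above, and the keywords simply drop the remaining " - " separators. A standalone sketch applying the same expressions to a made-up title:

    object MetaTagsSketch extends App {
      val title = "List - Scala Library 2.9.2 - scala.collection.immutable.List"

      // Same transformations as HtmlPage.description / keywords above.
      val description = title
        .replaceAll("[^a-zA-Z0-9\\.\\-]+", " ")   // keep letters, digits, '.', '-'
        .replaceAll("\\-+", " - ")                // re-space runs of dashes
        .replaceAll(" +", " ")                    // collapse whitespace
      val keywords = description.replaceAll(" - ", " ")

      println(description)
      println(keywords)
    }
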
diff --git a/src/compiler/scala/tools/nsc/doc/html/SyntaxHigh.scala b/src/compiler/scala/tools/nsc/doc/html/SyntaxHigh.scala
index f19f449d2c..f67abc58da 100644
--- a/src/compiler/scala/tools/nsc/doc/html/SyntaxHigh.scala
+++ b/src/compiler/scala/tools/nsc/doc/html/SyntaxHigh.scala
@@ -219,24 +219,24 @@ private[html] object SyntaxHigh {
parse(" ", i+1)
case '&' =>
parse("&amp;", i+1)
- case '<' =>
+ case '<' if i+1 < buf.length =>
val ch = buf(i+1).toChar
if (ch == '-' || ch == ':' || ch == '%')
parse("<span class=\"kw\">&lt;"+ch+"</span>", i+2)
else
parse("&lt;", i+1)
case '>' =>
- if (buf(i+1) == ':')
+ if (i+1 < buf.length && buf(i+1) == ':')
parse("<span class=\"kw\">&gt;:</span>", i+2)
else
parse("&gt;", i+1)
case '=' =>
- if (buf(i+1) == '>')
+ if (i+1 < buf.length && buf(i+1) == '>')
parse("<span class=\"kw\">=&gt;</span>", i+2)
else
parse(buf(i).toChar.toString, i+1)
case '/' =>
- if (buf(i+1) == '/' || buf(i+1) == '*') {
+ if (i+1 < buf.length && (buf(i+1) == '/' || buf(i+1) == '*')) {
val c = comment(i+1)
parse("<span class=\"cmt\">"+c+"</span>", i+c.length)
} else
@@ -257,9 +257,9 @@ private[html] object SyntaxHigh {
else
parse(buf(i).toChar.toString, i+1)
case _ =>
- if (i == 0 || !Character.isJavaIdentifierPart(buf(i-1).toChar)) {
+ if (i == 0 || (i >= 1 && !Character.isJavaIdentifierPart(buf(i-1).toChar))) {
if (Character.isDigit(buf(i)) ||
- (buf(i) == '.' && Character.isDigit(buf(i+1)))) {
+ (buf(i) == '.' && i + 1 < buf.length && Character.isDigit(buf(i+1)))) {
val s = numlit(i)
parse("<span class=\"num\">"+s+"</span>", i+s.length)
} else {
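
Each fix above guards a one-character lookahead `buf(i+1)` so the highlighter no longer throws when '<', '>', '=' or '/' is the last character of the source. The guard is simply a bounds check before indexing; a minimal sketch of the idea (toy names):

    object LookaheadSketch extends App {
      val buf: Array[Byte] = "x =".getBytes   // input ends right after '='

      // Safe one-character lookahead: None instead of ArrayIndexOutOfBoundsException.
      def peek(i: Int): Option[Char] =
        if (i + 1 < buf.length) Some(buf(i + 1).toChar) else None

      val i = buf.length - 1                  // position of the trailing '='
      peek(i) match {
        case Some('>') => println("=> token")
        case _         => println("plain '='")
      }
    }
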
diff --git a/src/compiler/scala/tools/nsc/doc/html/page/Template.scala b/src/compiler/scala/tools/nsc/doc/html/page/Template.scala
index e35286b281..f059b5c0cb 100644
--- a/src/compiler/scala/tools/nsc/doc/html/page/Template.scala
+++ b/src/compiler/scala/tools/nsc/doc/html/page/Template.scala
@@ -11,13 +11,19 @@ package page
import model._
import scala.xml.{ NodeSeq, Text, UnprefixedAttribute }
-class Template(tpl: DocTemplateEntity) extends HtmlPage {
+class Template(universe: doc.Universe, tpl: DocTemplateEntity) extends HtmlPage {
val path =
templateToPath(tpl)
- val title =
- tpl.qualifiedName
+ def title = {
+ val s = universe.settings
+
+ tpl.name +
+ ( if (!s.doctitle.isDefault) " - " + s.doctitle.value else "" ) +
+ ( if (!s.docversion.isDefault) (" " + s.docversion.value) else "" ) +
+ " - " + tpl.qualifiedName
+ }
val headers =
<xml:group>
@@ -29,7 +35,7 @@ class Template(tpl: DocTemplateEntity) extends HtmlPage {
</xml:group>
val valueMembers =
- tpl.methods.filterNot(_.isBridge) ++ tpl.values ++ tpl.templates.filter(x => x.isObject || x.isPackage) sorted
+ tpl.methods ++ tpl.values ++ tpl.templates.filter(x => x.isObject || x.isPackage) sorted
val (absValueMembers, nonAbsValueMembers) =
valueMembers partition (_.isAbstract)
@@ -339,6 +345,17 @@ class Template(tpl: DocTemplateEntity) extends HtmlPage {
}
}
+ val fullSignature: Seq[scala.xml.Node] = {
+ mbr match {
+ case nte: NonTemplateMemberEntity if nte.isUseCase =>
+ <div class="full-signature-block toggleContainer">
+ <span class="toggle">Full Signature</span>
+ <div class="hiddenContent full-signature-usecase">{ signature(nte.useCaseOf.get,true) }</div>
+ </div>
+ case _ => NodeSeq.Empty
+ }
+ }
+
val selfType: Seq[scala.xml.Node] = mbr match {
case dtpl: DocTemplateEntity if (isSelf && !dtpl.selfType.isEmpty && !isReduced) =>
<dt>Self Type</dt>
@@ -372,7 +389,7 @@ class Template(tpl: DocTemplateEntity) extends HtmlPage {
case dtpl: DocTemplateEntity if (isSelf && dtpl.sourceUrl.isDefined && dtpl.inSource.isDefined && !isReduced) =>
val (absFile, line) = dtpl.inSource.get
<dt>Source</dt>
- <dd>{ <a href={ dtpl.sourceUrl.get.toString }>{ Text(absFile.file.getName) }</a> }</dd>
+ <dd>{ <a href={ dtpl.sourceUrl.get.toString } target="_blank">{ Text(absFile.file.getName) }</a> }</dd>
case _ => NodeSeq.Empty
}
@@ -460,7 +477,7 @@ class Template(tpl: DocTemplateEntity) extends HtmlPage {
}
// end attributes block vals ---
- val attributesInfo = attributes ++ definitionClasses ++ selfType ++ annotations ++ deprecation ++ migration ++ sourceLink ++ mainComment
+ val attributesInfo = attributes ++ definitionClasses ++ fullSignature ++ selfType ++ annotations ++ deprecation ++ migration ++ sourceLink ++ mainComment
val attributesBlock =
if (attributesInfo.isEmpty)
NodeSeq.Empty
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/template.css b/src/compiler/scala/tools/nsc/doc/html/resource/lib/template.css
index 4f552b7169..6fb83c133e 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/template.css
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/template.css
@@ -340,6 +340,30 @@ div.members > ol > li:last-child {
color: darkgreen;
}
+.full-signature-usecase h4 span {
+ font-size: 10pt;
+}
+
+.full-signature-usecase > #signature {
+ padding-top: 0px;
+}
+
+#template .full-signature-usecase > .signature.closed {
+ background: none;
+}
+
+#template .full-signature-usecase > .signature.opened {
+ background: none;
+}
+
+.full-signature-block {
+ padding: 5px 0 0;
+ border-top: 1px solid #EBEBEB;
+ margin-top: 5px;
+ margin-bottom: 5px;
+}
+
+
/* Comments text formating */
.cmt {}
diff --git a/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala b/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala
index 496d004fd8..dd1c75c322 100644
--- a/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala
+++ b/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala
@@ -247,7 +247,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
protected lazy val memberSyms =
// Only this class's constructors are part of its members, inherited constructors are not.
- sym.info.members.filter(s => localShouldDocument(s) && (!s.isConstructor || s.owner == sym))
+ sym.info.members.filter(s => localShouldDocument(s) && (!s.isConstructor || s.owner == sym) && !isPureBridge(sym) )
val members = memberSyms flatMap (makeMember(_, this))
val templates = members collect { case c: DocTemplateEntity => c }
@@ -705,4 +705,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
def localShouldDocument(aSym: Symbol): Boolean = {
!aSym.isPrivate && (aSym.isProtected || aSym.privateWithin == NoSymbol) && !aSym.isSynthetic
}
+
+ /** Filter '@bridge' methods only if *they don't override non-bridge methods*. See SI-5373 for details */
+ def isPureBridge(sym: Symbol) = sym.isBridge && sym.allOverriddenSymbols.forall(_.isBridge)
}
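
isPureBridge keeps a @bridge method out of the docs only when every symbol it overrides is itself a bridge (SI-5373). A standalone sketch of that predicate on a toy model (Method and its fields are made up; the real code works on compiler Symbols):

    object PureBridgeSketch extends App {
      case class Method(name: String, isBridge: Boolean, overridden: List[Method])

      // Filtered out only if it is a bridge and overrides nothing but bridges.
      def isPureBridge(m: Method) = m.isBridge && m.overridden.forall(_.isBridge)

      val real   = Method("compare", isBridge = false, Nil)
      val bridge = Method("compare", isBridge = true, List(real))
      println(isPureBridge(bridge))                            // false: it overrides a real method
      println(isPureBridge(Method("synthetic$1", true, Nil)))  // true: no non-bridge above it
    }
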
diff --git a/src/compiler/scala/tools/nsc/doc/model/comment/CommentFactory.scala b/src/compiler/scala/tools/nsc/doc/model/comment/CommentFactory.scala
index b088c643cb..bc5cd4a958 100644
--- a/src/compiler/scala/tools/nsc/doc/model/comment/CommentFactory.scala
+++ b/src/compiler/scala/tools/nsc/doc/model/comment/CommentFactory.scala
@@ -491,7 +491,7 @@ trait CommentFactory { thisFactory: ModelFactory with CommentFactory =>
else
jump("}}}")
blockEnded("code block")
- Code(getRead)
+ Code(normalizeIndentation(getRead))
}
/** {{{ title ::= ('=' inline '=' | "==" inline "==" | ...) '\n' }}} */
@@ -732,6 +732,64 @@ trait CommentFactory { thisFactory: ModelFactory with CommentFactory =>
nextChar()
}
+ /**
+ * Eliminates the (common) leading spaces in all lines, based on the first line
+ * For indented pieces of code, it reduces the indent to the least whitespace prefix:
+ * {{{
+ * indented example
+ * another indented line
+ * if (condition)
+ * then do something;
+ * ^ this is the least whitespace prefix
+ * }}}
+ */
+ def normalizeIndentation(_code: String): String = {
+
+ var code = _code.trim
+ var maxSkip = Integer.MAX_VALUE
+ var crtSkip = 0
+ var wsArea = true
+ var index = 0
+ var firstLine = true
+ var emptyLine = true
+
+ while (index < code.length) {
+ code(index) match {
+ case ' ' =>
+ if (wsArea)
+ crtSkip += 1
+ case c =>
+ wsArea = (c == '\n')
+ maxSkip = if (firstLine || emptyLine) maxSkip else if (maxSkip <= crtSkip) maxSkip else crtSkip
+ crtSkip = if (c == '\n') 0 else crtSkip
+ firstLine = if (c == '\n') false else firstLine
+ emptyLine = if (c == '\n') true else false
+ }
+ index += 1
+ }
+
+ if (maxSkip == 0)
+ code
+ else {
+ index = 0
+ val builder = new StringBuilder
+ while (index < code.length) {
+ builder.append(code(index))
+ if (code(index) == '\n') {
+ // we want to skip as many spaces as are available; if there are fewer spaces (as on empty lines, do not
+ // over-consume them)
+ index += 1
+ val limit = index + maxSkip
+ while ((index < code.length) && (code(index) == ' ') && index < limit)
+ index += 1
+ }
+ else
+ index += 1
+ }
+ builder.toString
+ }
+ }
+
def checkParaEnded(): Boolean = {
(char == endOfText) ||
((char == endOfLine) && {
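
normalizeIndentation strips the common leading-space prefix of a code block, measured over the non-empty lines after the first, without eating more spaces than a line actually has. A much shorter standalone sketch with the same effect (not the single-pass implementation above):

    object NormalizeIndentSketch extends App {
      def leadingSpaces(s: String) = s.takeWhile(_ == ' ').length

      def normalize(code: String): String = {
        val lines   = code.trim.split("\n", -1).toList
        val indents = lines.tail.filter(_.trim.nonEmpty).map(leadingSpaces)
        val skip    = if (indents.isEmpty) 0 else indents.min
        // never consume more spaces than a line actually has (empty lines stay empty)
        (lines.head :: lines.tail.map(l => l.drop(skip min leadingSpaces(l)))).mkString("\n")
      }

      println(normalize("if (condition)\n      then do something;\n    else do something else"))
    }
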
diff --git a/src/compiler/scala/tools/nsc/interactive/RangePositions.scala b/src/compiler/scala/tools/nsc/interactive/RangePositions.scala
index d08a363a9d..88e3827403 100644
--- a/src/compiler/scala/tools/nsc/interactive/RangePositions.scala
+++ b/src/compiler/scala/tools/nsc/interactive/RangePositions.scala
@@ -189,7 +189,7 @@ self: scala.tools.nsc.Global =>
override def validatePositions(tree: Tree) {
def reportTree(prefix : String, tree : Tree) {
val source = if (tree.pos.isDefined) tree.pos.source else ""
- inform("== "+prefix+" tree ["+tree.id+"] of type "+tree.productPrefix+" at "+tree.pos.show+source)
+ inform("== "+prefix+" tree ["+tree.id+"] of type "+tree.printingPrefix+" at "+tree.pos.show+source)
inform("")
inform(treeStatus(tree))
inform("")
diff --git a/src/compiler/scala/tools/nsc/interpreter/Naming.scala b/src/compiler/scala/tools/nsc/interpreter/Naming.scala
index 8e215cf63b..19266442cb 100644
--- a/src/compiler/scala/tools/nsc/interpreter/Naming.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/Naming.scala
@@ -11,16 +11,18 @@ package interpreter
*/
trait Naming {
def unmangle(str: String): String = {
+ val ESC = '\u001b'
val cleaned = removeIWPackages(removeLineWrapper(str))
- var ctrlChars = 0
- cleaned map { ch =>
- if (ch.isControl && !ch.isWhitespace) {
- ctrlChars += 1
- if (ctrlChars > 5) return "[line elided for control chars: possibly a scala signature]"
- else '?'
- }
- else ch
- }
+ // Looking to exclude binary data which hoses the terminal, but
+ // let through the subset of it we need, like whitespace and also
+ // <ESC> for ansi codes.
+ val binaryChars = cleaned count (ch => ch < 32 && !ch.isWhitespace && ch != ESC)
+ // Lots of binary chars - translate all supposed whitespace into spaces
+ if (binaryChars > 5)
+ cleaned map (ch => if (ch.isWhitespace) ' ' else if (ch < 32) '?' else ch)
+ // Not lots - preserve whitespace and ESC
+ else
+ cleaned map (ch => if (ch.isWhitespace || ch == ESC) ch else if (ch < 32) '?' else ch)
}
// The two name forms this is catching are the two sides of this assignment:
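
unmangle now counts genuinely binary characters (below 32, not whitespace, not ESC) and only degrades the whole string when there are many of them, so ANSI colour sequences survive. A standalone sketch of that classification (removeLineWrapper/removeIWPackages are omitted):

    object BinaryFilterSketch extends App {
      val ESC = '\u001b'

      def scrub(cleaned: String): String = {
        val binaryChars = cleaned count (ch => ch < 32 && !ch.isWhitespace && ch != ESC)
        if (binaryChars > 5)      // lots of binary junk: flatten whitespace as well
          cleaned map (ch => if (ch.isWhitespace) ' ' else if (ch < 32) '?' else ch)
        else                      // mostly text: keep whitespace and ANSI escapes
          cleaned map (ch => if (ch.isWhitespace || ch == ESC) ch else if (ch < 32) '?' else ch)
      }

      println(scrub("plain text\twith a tab and " + ESC + "[31mcolour" + ESC + "[0m"))
      println(scrub("\u0001\u0002\u0003\u0004\u0005\u0006 signature bytes"))
    }
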
diff --git a/src/compiler/scala/tools/nsc/matching/ParallelMatching.scala b/src/compiler/scala/tools/nsc/matching/ParallelMatching.scala
index 1285e29d4a..be5a9907b8 100644
--- a/src/compiler/scala/tools/nsc/matching/ParallelMatching.scala
+++ b/src/compiler/scala/tools/nsc/matching/ParallelMatching.scala
@@ -309,7 +309,7 @@ trait ParallelMatching extends ast.TreeDSL
}
lazy val cases =
- for ((tag, indices) <- literalMap.toList) yield {
+ for ((tag, indices) <- literalMap.toList.sortBy(_._1)) yield {
val newRows = indices map (i => addDefaultVars(i)(rest rows i))
val r = remake(newRows ++ defaultRows, includeScrut = false)
val r2 = make(r.tvars, r.rows map (x => x rebind bindVars(tag, x.subst)))
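
Sorting the literal map by tag makes the order of the generated cases deterministic instead of depending on Map iteration order. A trivial standalone sketch:

    object DeterministicCasesSketch extends App {
      val literalMap = Map(3 -> List("c"), 1 -> List("a"), 2 -> List("b"))
      // Sorting by tag gives the generated switch a stable, reproducible case order.
      val cases = for ((tag, rows) <- literalMap.toList.sortBy(_._1)) yield (tag, rows)
      println(cases)   // List((1,List(a)), (2,List(b)), (3,List(c)))
    }
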
diff --git a/src/compiler/scala/tools/nsc/reporters/ConsoleReporter.scala b/src/compiler/scala/tools/nsc/reporters/ConsoleReporter.scala
index c76a04c6ba..f5335fb0f5 100644
--- a/src/compiler/scala/tools/nsc/reporters/ConsoleReporter.scala
+++ b/src/compiler/scala/tools/nsc/reporters/ConsoleReporter.scala
@@ -8,7 +8,7 @@ package reporters
import java.io.{ BufferedReader, IOException, PrintWriter }
import util._
-import scala.tools.util.StringOps.countElementsAsString
+import scala.tools.util.StringOps
/**
* This class implements a Reporter that displays messages on a text
@@ -40,7 +40,7 @@ class ConsoleReporter(val settings: Settings, reader: BufferedReader, writer: Pr
* @return ...
*/
private def getCountString(severity: Severity): String =
- countElementsAsString((severity).count, label(severity))
+ StringOps.countElementsAsString((severity).count, label(severity))
/** Prints the message. */
def printMessage(msg: String) { writer.print(msg + "\n"); writer.flush() }
diff --git a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
index 5f3c7ec32c..fdde8f9990 100644
--- a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
+++ b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
@@ -30,6 +30,22 @@ trait ScalaSettings extends AbsScalaSettings
*/
protected def defaultClasspath = sys.env.getOrElse("CLASSPATH", ".")
+ /** Enabled under -Xexperimental. */
+ protected def experimentalSettings = List[BooleanSetting](YmethodInfer, overrideObjects)
+
+ /** Enabled under -Xfuture. */
+ protected def futureSettings = List[BooleanSetting]()
+
+ /** Enabled under -optimise. */
+ protected def optimiseSettings = List[BooleanSetting](inline, inlineHandlers, Xcloselim, Xdce)
+
+ /** Internal use - syntax enhancements. */
+ private class EnableSettings[T <: Setting](val s: T) {
+ def enabling(toEnable: List[BooleanSetting]): s.type = s withPostSetHook (_ => toEnable foreach (_.value = true))
+ def andThen(f: s.T => Unit): s.type = s withPostSetHook (setting => f(setting.value))
+ }
+ private implicit def installEnableSettings[T <: Setting](s: T) = new EnableSettings(s)
+
/** Disable a setting */
def disable(s: Setting) = allSettings -= s
@@ -43,12 +59,8 @@ trait ScalaSettings extends AbsScalaSettings
*/
// argfiles is only for the help message
val argfiles = BooleanSetting ("@<file>", "A text file containing compiler arguments (options and source files)")
- val classpath = PathSetting ("-classpath", "Specify where to find user class files.", defaultClasspath) .
- withAbbreviation ("-cp")
+ val classpath = PathSetting ("-classpath", "Specify where to find user class files.", defaultClasspath) withAbbreviation "-cp"
val d = OutputSetting (outputDirs, ".")
- val optimise = BooleanSetting ("-optimise", "Generates faster bytecode by applying optimisations to the program") .
- withAbbreviation("-optimize") .
- withPostSetHook(set => List(inline, inlineHandlers, Xcloselim, Xdce) foreach (_.value = set.value))
val nospecialization = BooleanSetting ("-no-specialization", "Ignore @specialize annotations.")
/**
@@ -64,7 +76,6 @@ trait ScalaSettings extends AbsScalaSettings
val elidebelow = IntSetting ("-Xelide-below", "Calls to @elidable methods are omitted if method priority is lower than argument",
elidable.MINIMUM, None, elidable.byName get _)
val noForwarders = BooleanSetting ("-Xno-forwarders", "Do not generate static forwarders in mirror classes.")
- val future = BooleanSetting ("-Xfuture", "Turn on future language features.")
val genPhaseGraph = StringSetting ("-Xgenerate-phase-graph", "file", "Generate the phase graphs (outputs .dot files) to fileX.dot.", "")
val XlogImplicits = BooleanSetting ("-Xlog-implicits", "Show more detail on why some implicits are not applicable.")
val logImplicitConv = BooleanSetting ("-Xlog-implicit-conversions", "Print a message whenever an implicit conversion is inserted.")
@@ -92,12 +103,6 @@ trait ScalaSettings extends AbsScalaSettings
val showPhases = BooleanSetting ("-Xshow-phases", "Print a synopsis of compiler phases.")
val sourceReader = StringSetting ("-Xsource-reader", "classname", "Specify a custom method for reading source files.", "")
- // Experimental Extensions
- val Xexperimental = BooleanSetting ("-Xexperimental", "Enable experimental extensions.") .
- withPostSetHook(set => List(YmethodInfer, overrideObjects) foreach (_.value = set.value))
- // YdepMethTpes, YvirtClasses,
- val Xmacros = BooleanSetting ("-Xmacros", "Enable macros.")
-
/** Compatibility stubs for options whose value name did
* not previously match the option name.
*/
@@ -145,8 +150,7 @@ trait ScalaSettings extends AbsScalaSettings
val Ygenjavap = StringSetting ("-Ygen-javap", "dir", "Generate a parallel output directory of .javap files.", "")
val Ydumpclasses = StringSetting ("-Ydump-classes", "dir", "Dump the generated bytecode to .class files (useful for reflective compilation that utilizes in-memory classloaders).", "")
val Ynosqueeze = BooleanSetting ("-Yno-squeeze", "Disable creation of compact code in matching.")
- val Ystatistics = BooleanSetting ("-Ystatistics", "Print compiler statistics.") .
- withPostSetHook(set => util.Statistics.enabled = set.value)
+ val Ystatistics = BooleanSetting ("-Ystatistics", "Print compiler statistics.") andThen (util.Statistics.enabled = _)
val stopAfter = PhasesSetting ("-Ystop-after", "Stop after") withAbbreviation ("-stop") // backward compat
val stopBefore = PhasesSetting ("-Ystop-before", "Stop before")
val refinementMethodDispatch =
@@ -157,26 +161,11 @@ trait ScalaSettings extends AbsScalaSettings
val YrichExes = BooleanSetting ("-Yrich-exceptions",
"Fancier exceptions. Set source search path with -D" +
sys.SystemProperties.traceSourcePath.key)
- val Yidedebug = BooleanSetting ("-Yide-debug", "Generate, validate and output trees using the interactive compiler.")
- val Ybuilderdebug = ChoiceSetting ("-Ybuilder-debug", "manager", "Compile using the specified build manager.", List("none", "refined", "simple"), "none")
- val Ybuildmanagerdebug =
- BooleanSetting ("-Ybuild-manager-debug", "Generate debug information for the Refined Build Manager compiler.")
- val Ytyperdebug = BooleanSetting ("-Ytyper-debug", "Trace all type assignments.")
- val Yposdebug = BooleanSetting ("-Ypos-debug", "Trace position validation.")
- val Yinferdebug = BooleanSetting ("-Yinfer-debug", "Trace type inference and implicit search.")
- val Ypmatdebug = BooleanSetting ("-Ypmat-debug", "Trace all pattern matcher activity.")
+ val Ybuilderdebug = ChoiceSetting("-Ybuilder-debug", "manager", "Compile using the specified build manager.", List("none", "refined", "simple"), "none")
val Yreifycopypaste =
BooleanSetting ("-Yreify-copypaste", "Dump the reified trees in copypasteable representation.")
- val Yreifydebug = BooleanSetting ("-Yreify-debug", "Trace reification.")
- val Ymacrodebug = BooleanSetting ("-Ymacro-debug", "Trace macro-related activities: generation of synthetics, expansion, exceptions.")
val Yreplsync = BooleanSetting ("-Yrepl-sync", "Do not use asynchronous code for repl startup")
- val Yrepldebug = BooleanSetting ("-Yrepl-debug", "Trace all repl activity.") .
- withPostSetHook(_ => interpreter.replProps.debug setValue true)
- val Ycompletion = BooleanSetting ("-Ycompletion-debug", "Trace all tab completion activity.")
- val Ydocdebug = BooleanSetting ("-Ydoc-debug", "Trace all scaladoc activity.")
- val Ypmatnaive = BooleanSetting ("-Ypmat-naive", "Desugar matches as naively as possible.")
val Ynotnull = BooleanSetting ("-Ynotnull", "Enable (experimental and incomplete) scala.NotNull.")
- // val YdepMethTpes = BooleanSetting ("-Ydependent-method-types", "Allow dependent method types.")
val YmethodInfer = BooleanSetting ("-Yinfer-argument-types", "Infer types for arguments of overriden methods.")
val etaExpandKeepsStar = BooleanSetting("-Yeta-expand-keeps-star", "Eta-expand varargs methods to T* rather than Seq[T]. This is a temporary option to ease transition.")
val noSelfCheck = BooleanSetting ("-Yno-self-type-checks", "Suppress check for self-type conformance among inherited members.")
@@ -188,6 +177,29 @@ trait ScalaSettings extends AbsScalaSettings
def stop = stopAfter
+ /** Area-specific debug output.
+ */
+ val Ybuildmanagerdebug = BooleanSetting("-Ybuild-manager-debug", "Generate debug information for the Refined Build Manager compiler.")
+ val Ycompletion = BooleanSetting("-Ycompletion-debug", "Trace all tab completion activity.")
+ val Ydocdebug = BooleanSetting("-Ydoc-debug", "Trace all scaladoc activity.")
+ val Yidedebug = BooleanSetting("-Yide-debug", "Generate, validate and output trees using the interactive compiler.")
+ val Yinferdebug = BooleanSetting("-Yinfer-debug", "Trace type inference and implicit search.")
+ val Ymacrodebug = BooleanSetting("-Ymacro-debug", "Trace macro-related activities: generation of synthetics, expansion, exceptions.")
+ val Ypmatdebug = BooleanSetting("-Ypmat-debug", "Trace all pattern matcher activity.")
+ val Yposdebug = BooleanSetting("-Ypos-debug", "Trace position validation.")
+ val Yreifydebug = BooleanSetting("-Yreify-debug", "Trace reification.")
+ val Yrepldebug = BooleanSetting("-Yrepl-debug", "Trace all repl activity.") andThen (interpreter.replProps.debug setValue _)
+ val Ytyperdebug = BooleanSetting("-Ytyper-debug", "Trace all type assignments.")
+
+ /** Groups of Settings.
+ */
+ val future = BooleanSetting ("-Xfuture", "Turn on future language features.") enabling futureSettings
+ val optimise = BooleanSetting ("-optimise", "Generates faster bytecode by applying optimisations to the program") withAbbreviation "-optimize" enabling optimiseSettings
+ val Xexperimental = BooleanSetting ("-Xexperimental", "Enable experimental extensions.") enabling experimentalSettings
+
+ // Feature extensions
+ val Xmacros = BooleanSetting ("-Xmacros", "Enable macros.")
+
/**
* IDE-specific settings
*/
@@ -204,4 +216,7 @@ trait ScalaSettings extends AbsScalaSettings
*/
val pluginOptions = MultiStringSetting("-P", "plugin:opt", "Pass an option to a plugin") .
withHelpSyntax("-P:<plugin>:<opt>")
+
+ /** Test whether this is scaladoc we're looking at */
+ def isScaladoc = false
}
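
The `andThen` / `withPostSetHook` idiom used above for -Ystatistics and -Yrepl-debug attaches a callback that runs whenever the setting is assigned. Below is a minimal, self-contained sketch of that idiom; the names (SimpleBooleanSetting, postSetHook) are illustrative and not the actual nsc Settings API.

object SettingHookSketch {
  // Minimal stand-in for a boolean compiler setting with a post-set hook.
  final class SimpleBooleanSetting(val name: String, val help: String) {
    private var v: Boolean = false
    private var postSetHook: Boolean => Unit = _ => ()

    def value: Boolean = v
    def value_=(b: Boolean): Unit = { v = b; postSetHook(b) }

    /** Register a side effect to run each time the setting is assigned. */
    def andThen(hook: Boolean => Unit): this.type = { postSetHook = hook; this }
  }

  val Ystatistics =
    new SimpleBooleanSetting("-Ystatistics", "Print compiler statistics.")
      .andThen(enabled => println("statistics enabled = " + enabled))

  def main(args: Array[String]): Unit = {
    Ystatistics.value = true   // prints: statistics enabled = true
  }
}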
diff --git a/src/compiler/scala/tools/nsc/symtab/Positions.scala b/src/compiler/scala/tools/nsc/symtab/Positions.scala
index c96c709fb0..680b06f8ce 100644
--- a/src/compiler/scala/tools/nsc/symtab/Positions.scala
+++ b/src/compiler/scala/tools/nsc/symtab/Positions.scala
@@ -14,6 +14,15 @@ self: scala.tools.nsc.symtab.SymbolTable =>
type Position = scala.tools.nsc.util.Position
val NoPosition = scala.tools.nsc.util.NoPosition
+ type TreeAnnotation = scala.tools.nsc.util.TreeAnnotation
+ val NoTreeAnnotation: TreeAnnotation = NoPosition
+ def positionToAnnotation(pos: Position): TreeAnnotation = pos
+ def annotationToPosition(annot: TreeAnnotation): Position = annot.pos
+ override def _checkSetAnnotation(tree: Tree, annot: TreeAnnotation): Unit = {
+ if (tree.pos != NoPosition && tree.pos != annot.pos) debugwarn("Overwriting annotation "+ tree.annotation +" of tree "+ tree +" with annotation "+ annot)
+ // if ((tree.annotation.isInstanceOf[scala.tools.nsc.util.Position] || !annot.isInstanceOf[scala.tools.nsc.util.Position]) && tree.isInstanceOf[Block])
+ // println("Updating block from "+ tree.annotation +" to "+ annot)
+ }
def focusPos(pos: Position): Position = pos.focus
def isRangePos(pos: Position): Boolean = pos.isRange
def showPos(pos: Position): String = pos.show
diff --git a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala
index 942ec1fa86..f9ff147e82 100644
--- a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala
+++ b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala
@@ -126,9 +126,13 @@ abstract class SymbolLoaders {
ok = false
if (settings.debug.value) ex.printStackTrace()
val msg = ex.getMessage()
- globalError(
- if (msg eq null) "i/o error while loading " + root.name
- else "error while loading " + root.name + ", " + msg);
+ // SI-5593 Scaladoc's current strategy is to visit all packages in search of user code that can be documented
+ // therefore, it will rummage through the classpath triggering errors whenever it encounters package objects
+ // that are not in their correct place (see bug for details)
+ if (!settings.isScaladoc)
+ globalError(
+ if (msg eq null) "i/o error while loading " + root.name
+ else "error while loading " + root.name + ", " + msg);
}
try {
val start = currentTime
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
index 1cd4ab21ea..61668b1a8a 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
@@ -446,7 +446,11 @@ abstract class ClassfileParser {
def classNameToSymbol(name: Name): Symbol = {
def loadClassSymbol(name: Name): Symbol = {
val file = global.classPath findSourceFile ("" +name) getOrElse {
- warning("Class " + name + " not found - continuing with a stub.")
+ // SI-5593 Scaladoc's current strategy is to visit all packages in search of user code that can be documented
+ // therefore, it will rummage through the classpath triggering errors whenever it encounters package objects
+ // that are not in their correct place (see bug for details)
+ if (!settings.isScaladoc)
+ warning("Class " + name + " not found - continuing with a stub.")
return NoSymbol.newClass(name.toTypeName)
}
val completer = new global.loaders.ClassfileLoader(file)
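
Both guards above rely on `settings.isScaladoc`, which the new ScalaSettings member defaults to false; presumably the Scaladoc-specific Settings subclass overrides it to true (that override is not part of this excerpt). A sketch of the arrangement, with illustrative names:

trait CompilerSettingsSketch {
  /** Defaults to false for the regular compiler. */
  def isScaladoc = false
}

class ScaladocSettingsSketch extends CompilerSettingsSketch {
  /** Scaladoc flips the flag so classpath-walking errors are suppressed (SI-5593). */
  override def isScaladoc = true
}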
diff --git a/src/compiler/scala/tools/nsc/symtab/clr/TypeParser.scala b/src/compiler/scala/tools/nsc/symtab/clr/TypeParser.scala
index e11a5a4ad9..4b847fa94a 100644
--- a/src/compiler/scala/tools/nsc/symtab/clr/TypeParser.scala
+++ b/src/compiler/scala/tools/nsc/symtab/clr/TypeParser.scala
@@ -108,7 +108,7 @@ abstract class TypeParser {
val method = new ConstructorInfo(declType, attrs, Array[MSILType]())
val flags = Flags.JAVA
val owner = clazz
- val methodSym = owner.newMethod(NoPosition, nme.CONSTRUCTOR).setFlag(flags)
+ val methodSym = owner.newMethod(nme.CONSTRUCTOR, NoPosition, flags)
val rettype = clazz.tpe
val mtype = methodType(Array[MSILType](), rettype);
val mInfo = mtype(methodSym)
@@ -224,14 +224,14 @@ abstract class TypeParser {
if (canBeTakenAddressOf) {
clazzBoxed.setInfo( if (ownTypeParams.isEmpty) classInfoAsInMetadata
- else polyType(ownTypeParams, classInfoAsInMetadata) )
+ else genPolyType(ownTypeParams, classInfoAsInMetadata) )
clazzBoxed.setFlag(flags)
val rawValueInfoType = ClassInfoType(definitions.anyvalparam, instanceDefs, clazz)
clazz.setInfo( if (ownTypeParams.isEmpty) rawValueInfoType
- else polyType(ownTypeParams, rawValueInfoType) )
+ else genPolyType(ownTypeParams, rawValueInfoType) )
} else {
clazz.setInfo( if (ownTypeParams.isEmpty) classInfoAsInMetadata
- else polyType(ownTypeParams, classInfoAsInMetadata) )
+ else genPolyType(ownTypeParams, classInfoAsInMetadata) )
}
// TODO I don't remember if statics.setInfo and staticModule.setInfo should also know about type params
@@ -284,7 +284,7 @@ abstract class TypeParser {
else
getCLRType(field.FieldType)
val owner = if (field.IsStatic()) statics else clazz;
- val sym = owner.newValue(NoPosition, name).setFlag(flags).setInfo(fieldType);
+ val sym = owner.newValue(name, NoPosition, flags).setInfo(fieldType);
// TODO: set private within!!! -> look at typechecker/Namers.scala
(if (field.IsStatic()) staticDefs else instanceDefs).enter(sym);
clrTypes.fields(sym) = field;
@@ -313,7 +313,7 @@ abstract class TypeParser {
val name: Name = if (gparamsLength == 0) prop.Name else nme.apply;
val flags = translateAttributes(getter);
val owner: Symbol = if (getter.IsStatic) statics else clazz;
- val methodSym = owner.newMethod(NoPosition, name).setFlag(flags)
+ val methodSym = owner.newMethod(name, NoPosition, flags)
val mtype: Type = if (gparamsLength == 0) NullaryMethodType(propType) // .NET properties can't be polymorphic
else methodType(getter, getter.ReturnType)(methodSym)
methodSym.setInfo(mtype);
@@ -337,7 +337,7 @@ abstract class TypeParser {
val flags = translateAttributes(setter);
val mtype = methodType(setter, definitions.UnitClass.tpe);
val owner: Symbol = if (setter.IsStatic) statics else clazz;
- val methodSym = owner.newMethod(NoPosition, name).setFlag(flags)
+ val methodSym = owner.newMethod(name, NoPosition, flags)
methodSym.setInfo(mtype(methodSym))
methodSym.setFlag(Flags.ACCESSOR);
(if (setter.IsStatic) staticDefs else instanceDefs).enter(methodSym);
@@ -424,14 +424,14 @@ abstract class TypeParser {
val flags = Flags.JAVA | Flags.FINAL
for (cmpName <- ENUM_CMP_NAMES) {
- val enumCmp = clazz.newMethod(NoPosition, cmpName)
+ val enumCmp = clazz.newMethod(cmpName)
val enumCmpType = JavaMethodType(enumCmp.newSyntheticValueParams(List(clazz.tpe)), definitions.BooleanClass.tpe)
enumCmp.setFlag(flags).setInfo(enumCmpType)
instanceDefs.enter(enumCmp)
}
for (bitLogName <- ENUM_BIT_LOG_NAMES) {
- val enumBitLog = clazz.newMethod(NoPosition, bitLogName)
+ val enumBitLog = clazz.newMethod(bitLogName)
val enumBitLogType = JavaMethodType(enumBitLog.newSyntheticValueParams(List(clazz.tpe)), clazz.tpe /* was classInfo, infinite typer */)
enumBitLog.setFlag(flags).setInfo(enumBitLogType)
instanceDefs.enter(enumBitLog)
@@ -469,7 +469,7 @@ abstract class TypeParser {
val flags = translateAttributes(method);
val owner = if (method.IsStatic()) statics else clazz;
- val methodSym = owner.newMethod(NoPosition, getName(method)).setFlag(flags)
+ val methodSym = owner.newMethod(getName(method), NoPosition, flags)
/* START CLR generics (snippet 3) */
val newMethodTParams = populateMethodTParams(method, methodSym)
/* END CLR generics (snippet 3) */
@@ -480,7 +480,7 @@ abstract class TypeParser {
val mtype = methodType(method, rettype);
if (mtype == null) return;
/* START CLR generics (snippet 4) */
- val mInfo = if (method.IsGeneric) polyType(newMethodTParams, mtype(methodSym))
+ val mInfo = if (method.IsGeneric) genPolyType(newMethodTParams, mtype(methodSym))
else mtype(methodSym)
/* END CLR generics (snippet 4) */
/* START CLR non-generics (snippet 4)
@@ -500,7 +500,7 @@ abstract class TypeParser {
}
private def createMethod(name: Name, flags: Long, mtype: Symbol => Type, method: MethodInfo, statik: Boolean): Symbol = {
- val methodSym: Symbol = (if (statik) statics else clazz).newMethod(NoPosition, name)
+ val methodSym: Symbol = (if (statik) statics else clazz).newMethod(name)
methodSym.setFlag(flags).setInfo(mtype(methodSym))
(if (statik) staticDefs else instanceDefs).enter(methodSym)
if (method != null)
diff --git a/src/compiler/scala/tools/nsc/transform/Erasure.scala b/src/compiler/scala/tools/nsc/transform/Erasure.scala
index a98cd5c6b1..0c867b9e7e 100644
--- a/src/compiler/scala/tools/nsc/transform/Erasure.scala
+++ b/src/compiler/scala/tools/nsc/transform/Erasure.scala
@@ -407,6 +407,103 @@ abstract class Erasure extends AddInterfaces
}
}
+ class ComputeBridges(owner: Symbol) {
+ assert(phase == currentRun.erasurePhase, phase)
+
+ var toBeRemoved = immutable.Set[Symbol]()
+ val site = owner.thisType
+ val bridgesScope = newScope
+ val bridgeTarget = mutable.HashMap[Symbol, Symbol]()
+ var bridges = List[Tree]()
+
+ val opc = beforeExplicitOuter {
+ new overridingPairs.Cursor(owner) {
+ override def parents = List(owner.info.firstParent)
+ override def exclude(sym: Symbol) = !sym.isMethod || sym.isPrivate || super.exclude(sym)
+ }
+ }
+
+ def compute(): (List[Tree], immutable.Set[Symbol]) = {
+ while (opc.hasNext) {
+ val member = opc.overriding
+ val other = opc.overridden
+ //println("bridge? " + member + ":" + member.tpe + member.locationString + " to " + other + ":" + other.tpe + other.locationString)//DEBUG
+ if (beforeExplicitOuter(!member.isDeferred))
+ checkPair(member, other)
+
+ opc.next
+ }
+ (bridges, toBeRemoved)
+ }
+
+ def checkPair(member: Symbol, other: Symbol) {
+ val otpe = erasure(owner)(other.tpe)
+ val bridgeNeeded = afterErasure (
+ !(other.tpe =:= member.tpe) &&
+ !(deconstMap(other.tpe) =:= deconstMap(member.tpe)) &&
+ { var e = bridgesScope.lookupEntry(member.name)
+ while ((e ne null) && !((e.sym.tpe =:= otpe) && (bridgeTarget(e.sym) == member)))
+ e = bridgesScope.lookupNextEntry(e)
+ (e eq null)
+ }
+ )
+ if (!bridgeNeeded)
+ return
+
+ val newFlags = (member.flags | BRIDGE) & ~(ACCESSOR | DEFERRED | LAZY | lateDEFERRED)
+ val bridge = other.cloneSymbolImpl(owner, newFlags) setPos owner.pos
+
+ debuglog("generating bridge from %s (%s): %s to %s: %s".format(
+ other, flagsToString(newFlags),
+ otpe + other.locationString, member,
+ erasure(owner)(member.tpe) + member.locationString)
+ )
+
+ // the parameter symbols need to have the new owner
+ bridge setInfo (otpe cloneInfo bridge)
+ bridgeTarget(bridge) = member
+ afterErasure(owner.info.decls enter bridge)
+ if (other.owner == owner) {
+ afterErasure(owner.info.decls.unlink(other))
+ toBeRemoved += other
+ }
+ bridgesScope enter bridge
+ bridges ::= makeBridgeDefDef(bridge, member, other)
+ }
+
+ def makeBridgeDefDef(bridge: Symbol, member: Symbol, other: Symbol) = afterErasure {
+ // type checking ensures we can safely call `other`, but unless `member.tpe <:< other.tpe`,
+ // calling `member` is not guaranteed to succeed. In general there's nothing we can do
+ // about this, except for an unapply: when the subtype test fails, return None without
+ // calling `member`.
+ //
+ // TODO: should we do this for user-defined unapplies as well?
+ // (We can't be sure their first argument list has exactly one argument.)
+ def maybeWrap(bridgingCall: Tree): Tree = {
+ val canReturnNone = afterErasure(
+ member.isSynthetic
+ && (member.name == nme.unapply || member.name == nme.unapplySeq)
+ && !(member.tpe <:< other.tpe) // no static guarantees (TODO: is the subtype test ever true?)
+ )
+ if (canReturnNone) {
+ import CODE._
+ val typeTest = gen.mkIsInstanceOf(REF(bridge.firstParam), member.tpe.params.head.tpe)
+ IF (typeTest) THEN bridgingCall ELSE REF(NoneModule)
+ }
+ else bridgingCall
+ }
+ val rhs = member.tpe match {
+ case MethodType(Nil, ConstantType(c)) => Literal(c)
+ case _ =>
+ val sel: Tree = Select(This(owner), member)
+ val bridgingCall = (sel /: bridge.paramss)((fun, vparams) => Apply(fun, vparams map Ident))
+
+ maybeWrap(bridgingCall)
+ }
+ atPos(bridge.pos)(DefDef(bridge, rhs))
+ }
+ }
+
/** The modifier typer which retypes with erased types. */
class Eraser(_context: Context) extends Typer(_context) {
@@ -790,93 +887,10 @@ abstract class Erasure extends AddInterfaces
* type of `m1` in the template.
*/
private def bridgeDefs(owner: Symbol): (List[Tree], immutable.Set[Symbol]) = {
- var toBeRemoved: immutable.Set[Symbol] = immutable.Set()
- debuglog("computing bridges for " + owner)//DEBUG
assert(phase == currentRun.erasurePhase, phase)
- val site = owner.thisType
- val bridgesScope = newScope
- val bridgeTarget = new mutable.HashMap[Symbol, Symbol]
- var bridges: List[Tree] = List()
- val opc = beforeExplicitOuter {
- new overridingPairs.Cursor(owner) {
- override def parents: List[Type] = List(owner.info.firstParent)
- override def exclude(sym: Symbol): Boolean =
- !sym.isMethod || sym.isPrivate || super.exclude(sym)
- }
- }
- while (opc.hasNext) {
- val member = opc.overriding
- val other = opc.overridden
- //println("bridge? " + member + ":" + member.tpe + member.locationString + " to " + other + ":" + other.tpe + other.locationString)//DEBUG
- if (beforeExplicitOuter(!member.isDeferred)) {
- val otpe = erasure(owner)(other.tpe)
- val bridgeNeeded = afterErasure (
- !(other.tpe =:= member.tpe) &&
- !(deconstMap(other.tpe) =:= deconstMap(member.tpe)) &&
- { var e = bridgesScope.lookupEntry(member.name)
- while ((e ne null) && !((e.sym.tpe =:= otpe) && (bridgeTarget(e.sym) == member)))
- e = bridgesScope.lookupNextEntry(e)
- (e eq null)
- }
- );
- if (bridgeNeeded) {
- val newFlags = (member.flags | BRIDGE) & ~(ACCESSOR | DEFERRED | LAZY | lateDEFERRED)
- val bridge = other.cloneSymbolImpl(owner, newFlags) setPos owner.pos
- // the parameter symbols need to have the new owner
- bridge.setInfo(otpe.cloneInfo(bridge))
- bridgeTarget(bridge) = member
- afterErasure { owner.info.decls.enter(bridge) }
- if (other.owner == owner) {
- //println("bridge to same: "+other+other.locationString)//DEBUG
- afterErasure { owner.info.decls.unlink(other) }
- toBeRemoved += other
- }
- bridgesScope enter bridge
- bridges =
- afterErasure {
- atPos(bridge.pos) {
- val bridgeDef =
- DefDef(bridge,
- member.tpe match {
- case MethodType(List(), ConstantType(c)) => Literal(c)
- case _ =>
- val bridgingCall = (((Select(This(owner), member): Tree) /: bridge.paramss)
- ((fun, vparams) => Apply(fun, vparams map Ident)))
- // type checking ensures we can safely call `other`, but unless `member.tpe <:< other.tpe`, calling `member` is not guaranteed to succeed
- // in general, there's nothing we can do about this, except for an unapply: when this subtype test fails, return None without calling `member`
- if ( member.isSynthetic // TODO: should we do this for user-defined unapplies as well?
- && ((member.name == nme.unapply) || (member.name == nme.unapplySeq))
- // && (bridge.paramss.nonEmpty && bridge.paramss.head.nonEmpty && bridge.paramss.head.tail.isEmpty) // does the first argument list has exactly one argument -- for user-defined unapplies we can't be sure
- && !(afterErasure(member.tpe <:< other.tpe))) { // no static guarantees (TODO: is the subtype test ever true?)
- import CODE._
- val typeTest = gen.mkIsInstanceOf(REF(bridge.firstParam), member.tpe.params.head.tpe, any = true, wrapInApply = true) // any = true since we're before erasure (?), wrapInapply is true since we're after uncurry
- // println("unapp type test: "+ typeTest)
- IF (typeTest) THEN bridgingCall ELSE REF(NoneModule)
- } else bridgingCall
- });
- debuglog("generating bridge from " + other + "(" + Flags.flagsToString(bridge.flags) + ")" + ":" + otpe + other.locationString + " to " + member + ":" + erasure(owner)(member.tpe) + member.locationString + " =\n " + bridgeDef);
- bridgeDef
- }
- } :: bridges
- }
- }
- opc.next
- }
- (bridges, toBeRemoved)
+ debuglog("computing bridges for " + owner)
+ new ComputeBridges(owner) compute()
}
-/*
- for (bc <- site.baseClasses.tail; other <- bc.info.decls.toList) {
- if (other.isMethod && !other.isConstructor) {
- for (member <- site.nonPrivateMember(other.name).alternatives) {
- if (member != other &&
- !(member hasFlag DEFERRED) &&
- (site.memberType(member) matches site.memberType(other)) &&
- !(site.parents exists (p =>
- (p.symbol isSubClass member.owner) && (p.symbol isSubClass other.owner)))) {
-...
- }
- }
-*/
def addBridges(stats: List[Tree], base: Symbol): List[Tree] =
if (base.isTrait) stats
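
For context, a sketch (user code invented for illustration, not from the patch) of the signature clash that ComputeBridges resolves: after erasure the overriding method no longer matches the erased signature of the inherited one, so a forwarding bridge is synthesized.

// Illustrative only; the generated bridge is shown as a comment.
trait Box[A] { def get: A }

class StringBox(s: String) extends Box[String] {
  def get: String = s
  // After erasure, Box#get erases to ()Object while this method is ()String,
  // so the compiler emits a bridge roughly like
  //   def get(): Object = this.get(): String
  // to keep dynamic dispatch through Box working.
}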
diff --git a/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala b/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala
index 4c3972519a..5104518dd9 100644
--- a/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala
+++ b/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala
@@ -28,6 +28,9 @@ abstract class ExtensionMethods extends Transform with TypingTransformers {
/** the following two members override abstract members in Transform */
val phaseName: String = "extmethods"
+ /** The following flags may be set by this phase: */
+ override def phaseNewFlags: Long = notPRIVATE
+
def newTransformer(unit: CompilationUnit): Transformer =
new Extender(unit)
@@ -101,6 +104,7 @@ abstract class ExtensionMethods extends Transform with TypingTransformers {
case Template(_, _, _) =>
if (currentOwner.isDerivedValueClass) {
extensionDefs(currentOwner.companionModule) = new mutable.ListBuffer[Tree]
+ currentOwner.primaryConstructor.makeNotPrivate(NoSymbol)
super.transform(tree)
} else if (currentOwner.isStaticOwner) {
super.transform(tree)
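
A sketch of why the phase now declares notPRIVATE among its new flags and makes the value class's primary constructor non-private: methods on a derived value class are rewritten into extension methods that live outside the class itself and may need to construct fresh instances. The access explanation is an assumption here, and the class below is illustrative, not from the patch.

// Illustrative only.
class Meters private (val value: Double) extends AnyVal {
  // extmethods rewrites this into something like Meters.plus$extension(this, other),
  // defined on the companion module class, which must still be able to call
  // the (originally private) constructor to build the result.
  def plus(other: Meters): Meters = new Meters(value + other.value)
}

object Meters {
  def apply(d: Double): Meters = new Meters(d)
}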
diff --git a/src/compiler/scala/tools/nsc/transform/Mixin.scala b/src/compiler/scala/tools/nsc/transform/Mixin.scala
index 639e060812..adbb7d43d0 100644
--- a/src/compiler/scala/tools/nsc/transform/Mixin.scala
+++ b/src/compiler/scala/tools/nsc/transform/Mixin.scala
@@ -24,7 +24,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
/** This map contains a binding (class -> info) if
* the class with this info at phase mixinPhase has been treated for mixin composition
*/
- private val treatedClassInfos = perRunCaches.newMap[Symbol, Type]()
+ private val treatedClassInfos = perRunCaches.newMap[Symbol, Type]() withDefaultValue NoType
/** Map a lazy, mixed-in field accessor to its trait member accessor */
private val initializer = perRunCaches.newMap[Symbol, Symbol]
@@ -104,7 +104,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
*/
private val toInterfaceMap = new TypeMap {
def apply(tp: Type): Type = mapOver( tp match {
- case TypeRef(pre, sym, args) if (sym.isImplClass) =>
+ case TypeRef(pre, sym, args) if sym.isImplClass =>
typeRef(pre, beforeMixin(sym.toInterface), args)
case _ => tp
})
@@ -113,10 +113,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
/** The implementation class corresponding to a currently compiled interface.
* todo: try to use Symbol.implClass instead?
*/
- private def implClass(iface: Symbol): Symbol = {
- val impl = iface.implClass
- if (impl != NoSymbol) impl else erasure.implClass(iface)
- }
+ private def implClass(iface: Symbol) = iface.implClass orElse (erasure implClass iface)
/** Returns the symbol that is accessed by a super-accessor in a mixin composition.
*
@@ -168,6 +165,29 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
debuglog("new member of " + clazz + ":" + member.defString)
clazz.info.decls enter member setFlag MIXEDIN
}
+ def cloneAndAddMember(mixinClass: Symbol, mixinMember: Symbol, clazz: Symbol): Symbol =
+ addMember(clazz, cloneBeforeErasure(mixinClass, mixinMember, clazz))
+
+ def cloneBeforeErasure(mixinClass: Symbol, mixinMember: Symbol, clazz: Symbol): Symbol = {
+ val newSym = beforeErasure {
+ // since we used `mixinMember` from the interface that represents the trait that's
+ // being mixed in, we have to instantiate the interface type params (that may occur in mixinMember's
+ // info) as they are seen from the class. We can't use the member that we get from the
+ // implementation class, as it's a clone that was made after erasure, and thus it does not
+ // know its info at the beginning of erasure anymore.
+ // Optimize: no need if mixinClass has no typeparams.
+ mixinMember cloneSymbol clazz modifyInfo (info =>
+ if (mixinClass.typeParams.isEmpty) info
+ else (clazz.thisType baseType mixinClass) memberInfo mixinMember
+ )
+ }
+ // we cloned before erasure, because erasure got rid of type info we'll need to generate a javaSig;
+ // now we have the type info at (the beginning of) erasure in our history,
+ // and newSym has the info that's been transformed to fit this period
+ // (no need for asSeenFrom as phase.erasedTypes)
+ // TODO: verify we need the updateInfo and document why
+ newSym updateInfo (mixinMember.info cloneInfo newSym)
+ }
def needsExpandedSetterName(field: Symbol) = !field.isLazy && (
if (field.isMethod) field.hasStableFlag
@@ -181,9 +201,12 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
* - lazy fields don't get a setter.
*/
def addLateInterfaceMembers(clazz: Symbol) {
- if ((treatedClassInfos get clazz) != Some(clazz.info)) {
+ def makeConcrete(member: Symbol) =
+ member setPos clazz.pos resetFlag (DEFERRED | lateDEFERRED)
+
+ if (treatedClassInfos(clazz) != clazz.info) {
treatedClassInfos(clazz) = clazz.info
- assert(phase == currentRun.mixinPhase)
+ assert(phase == currentRun.mixinPhase, phase)
/** Create a new getter. Getters are never private or local. They are
* always accessors and deferred. */
@@ -210,8 +233,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
}
clazz.info // make sure info is up to date, so that implClass is set.
- val impl = implClass(clazz)
- assert(impl != NoSymbol)
+ val impl = implClass(clazz) orElse abort("No impl class for " + clazz)
for (member <- impl.info.decls) {
if (!member.isMethod && !member.isModule && !member.isModuleVar) {
@@ -242,139 +264,107 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
* - for every super accessor in T, add an implementation of that accessor
* - for every module in T, add a module
*/
- def addMixedinMembers(clazz: Symbol, unit : CompilationUnit) {
- def cloneBeforeErasure(iface: Symbol, clazz: Symbol, imember: Symbol): Symbol = {
- val newSym = beforeErasure {
- val res = imember.cloneSymbol(clazz)
- // since we used the member (imember) from the interface that represents the trait that's being mixed in,
- // have to instantiate the interface type params (that may occur in imember's info) as they are seen from the class
- // we can't use the member that we get from the implementation class, as it's a clone that was made after erasure,
- // and thus it does not know its info at the beginning of erasure anymore
- // optimize: no need if iface has no typeparams
- if(iface.typeParams nonEmpty) res.setInfo(clazz.thisType.baseType(iface).memberInfo(imember))
- res
- } // clone before erasure got rid of type info we'll need to generate a javaSig
- // now we'll have the type info at (the beginning of) erasure in our history,
- newSym.updateInfo(imember.info.cloneInfo(newSym)) // and now newSym has the info that's been transformed to fit this period (no need for asSeenFrom as phase.erasedTypes)
- newSym // TODO: verify we need the updateInfo and document why
- }
-
- if (!(clazz hasFlag JAVA) && (treatedClassInfos get clazz) != Some(clazz.info)) {
- treatedClassInfos(clazz) = clazz.info
-
- assert(!clazz.isTrait, clazz)
- assert(clazz.info.parents.nonEmpty, clazz)
-
- // first complete the superclass with mixed in members
- addMixedinMembers(clazz.superClass, unit)
-
- //Console.println("adding members of " + clazz.info.baseClasses.tail.takeWhile(superclazz !=) + " to " + clazz);//DEBUG
-
- /** Mix in members of implementation class mixinClass into class clazz */
- def mixinImplClassMembers(impl: Symbol, iface: Symbol) {
- assert(
- // XXX this should be impl.isImplClass, except that we get impl classes
- // coming through under -optimise which do not agree that they are (because
- // the IMPLCLASS flag is unset, I believe.) See ticket #4285.
- nme.isImplClassName(impl.name) || impl.isImplClass,
- "%s (%s) is not a an implementation class, it cannot mix in %s".format(
- impl, impl.defaultFlagString, iface)
- )
- if (!impl.isImplClass) {
- debugwarn("!!! " + impl + " has an impl class name, but !isImplClass: " + impl.defaultFlagString + ", mixing in " + iface)
- }
+ def addMixedinMembers(clazz: Symbol, unit: CompilationUnit) {
+ def cloneAndAddMixinMember(mixinClass: Symbol, mixinMember: Symbol): Symbol = (
+ cloneAndAddMember(mixinClass, mixinMember, clazz)
+ setPos clazz.pos
+ resetFlag DEFERRED | lateDEFERRED
+ )
- for (member <- impl.info.decls) {
- if (isForwarded(member)) {
- val imember = member.overriddenSymbol(iface)
- // atPhase(currentRun.erasurePhase){
- // println(""+(clazz, iface, clazz.typeParams, iface.typeParams, imember, clazz.thisType.baseType(iface), clazz.thisType.baseType(iface).memberInfo(imember), imember.info substSym(iface.typeParams, clazz.typeParams) ))
- // }
- // Console.println("mixin member "+member+":"+member.tpe+member.locationString+" "+imember+" "+imember.overridingSymbol(clazz)+" to "+clazz+" with scope "+clazz.info.decls)//DEBUG
- if (imember.overridingSymbol(clazz) == NoSymbol &&
- clazz.info.findMember(member.name, 0, lateDEFERRED, false).alternatives.contains(imember)) {
- val member1 = addMember(
- clazz,
- cloneBeforeErasure(iface, clazz, imember) setPos clazz.pos resetFlag (DEFERRED | lateDEFERRED))
- member1.asInstanceOf[TermSymbol] setAlias member;
- }
- }
+ /** Mix in members of implementation class mixinClass into class clazz */
+ def mixinImplClassMembers(mixinClass: Symbol, mixinInterface: Symbol) {
+ assert(mixinClass.isImplClass, "Not an impl class:" +
+ ((mixinClass.debugLocationString, mixinInterface.debugLocationString)))
+
+ for (member <- mixinClass.info.decls ; if isForwarded(member)) {
+ val imember = member overriddenSymbol mixinInterface
+ imember overridingSymbol clazz match {
+ case NoSymbol =>
+ if (clazz.info.findMember(member.name, 0, lateDEFERRED, false).alternatives contains imember)
+ cloneAndAddMixinMember(mixinInterface, imember).asInstanceOf[TermSymbol] setAlias member
+ case _ =>
}
}
+ }
- /** Mix in members of trait mixinClass into class clazz. Also,
- * for each lazy field in mixinClass, add a link from its mixed in member to its
- * initializer method inside the implclass.
- */
- def mixinTraitMembers(mixinClass: Symbol) {
- // For all members of a trait's interface do:
- for (member <- mixinClass.info.decls) {
- if (isConcreteAccessor(member)) {
- if (isOverriddenAccessor(member, clazz.info.baseClasses)) {
- debugwarn("!!! is overridden val: "+member.fullLocationString)
+ /** Mix in members of trait mixinClass into class clazz. Also,
+ * for each lazy field in mixinClass, add a link from its mixed in member to its
+ * initializer method inside the implclass.
+ */
+ def mixinTraitMembers(mixinClass: Symbol) {
+ // For all members of a trait's interface do:
+ for (mixinMember <- mixinClass.info.decls) {
+ if (isConcreteAccessor(mixinMember)) {
+ if (isOverriddenAccessor(mixinMember, clazz.info.baseClasses))
+ debugwarn("!!! is overridden val: "+mixinMember.fullLocationString)
+ else {
+ // mixin field accessors
+ val mixedInAccessor = cloneAndAddMixinMember(mixinClass, mixinMember)
+ if (mixinMember.isLazy) {
+ initializer(mixedInAccessor) = (
+ implClass(mixinClass).info.decl(mixinMember.name)
+ orElse abort("Could not find initializer for " + mixinMember.name)
+ )
}
- else {
- // mixin field accessors
- val member1 = addMember(
- clazz,
- cloneBeforeErasure(mixinClass, clazz, member) //member.cloneSymbol(clazz)
- setPos clazz.pos
- resetFlag (DEFERRED | lateDEFERRED))
- // println("mixing in: "+ (member, member.info, member1.info))
- // atPhase(currentRun.erasurePhase){
- // println("before erasure: "+ (member.info, member1.info))
- // }
- if (member.isLazy) {
- var init = implClass(mixinClass).info.decl(member.name)
- assert(init != NoSymbol, "Could not find initializer for " + member.name)
- initializer(member1) = init
+ if (!mixinMember.isSetter)
+ mixinMember.tpe match {
+ case MethodType(Nil, ConstantType(_)) =>
+ // mixinMember is a constant; only getter is needed
+ ;
+ case MethodType(Nil, TypeRef(_, UnitClass, _)) =>
+ // mixinMember is a value of type unit. No field needed
+ ;
+ case _ => // otherwise mixin a field as well
+ // atPhase: the private field is moved to the implementation class by erasure,
+ // so it can no longer be found in the mixinMember's owner (the trait)
+ val accessed = beforePickler(mixinMember.accessed)
+ // #3857, need to retain info before erasure when cloning (since cloning only
+ // carries over the current entry in the type history)
+ val sym = beforeErasure {
+ // so we have a type history entry before erasure
+ clazz.newValue(nme.getterToLocal(mixinMember.name), mixinMember.pos).setInfo(mixinMember.tpe.resultType)
+ }
+ sym updateInfo mixinMember.tpe.resultType // info at current phase
+
+ val newFlags = (
+ ( PrivateLocal )
+ | ( mixinMember getFlag MUTABLE | LAZY)
+ | ( if (mixinMember.hasStableFlag) 0 else MUTABLE )
+ )
+
+ addMember(clazz, sym setFlag newFlags setAnnotations accessed.annotations)
}
- if (!member.isSetter)
- member.tpe match {
- case MethodType(Nil, ConstantType(_)) =>
- // member is a constant; only getter is needed
- ;
- case MethodType(Nil, TypeRef(_, UnitClass, _)) =>
- // member is a value of type unit. No field needed
- ;
- case _ => // otherwise mixin a field as well
- // atPhase: the private field is moved to the implementation class by erasure,
- // so it can no longer be found in the member's owner (the trait)
- val accessed = beforePickler(member.accessed)
- val sym = beforeErasure { // #3857, need to retain info before erasure when cloning (since cloning only carries over the current entry in the type history)
- clazz.newValue(nme.getterToLocal(member.name), member.pos).setInfo(member.tpe.resultType) // so we have a type history entry before erasure
- }
- sym.updateInfo(member.tpe.resultType) // info at current phase
- addMember(clazz,
- sym
- setFlag (PrivateLocal | member.getFlag(MUTABLE | LAZY))
- setFlag (if (!member.hasStableFlag) MUTABLE else 0)
- setAnnotations accessed.annotations)
- }
- }
- }
- else if (member.isSuperAccessor) { // mixin super accessors
- val member1 = addMember(clazz, member.cloneSymbol(clazz)) setPos clazz.pos
- assert(member1.alias != NoSymbol, member1)
- val alias1 = rebindSuper(clazz, member.alias, mixinClass)
- member1.asInstanceOf[TermSymbol] setAlias alias1
-
- }
- else if (member.isMethod && member.isModule && member.hasNoFlags(LIFTED | BRIDGE)) {
- // mixin objects: todo what happens with abstract objects?
- addMember(clazz, member.cloneSymbol(clazz, member.flags & ~(DEFERRED | lateDEFERRED)) setPos clazz.pos)
}
}
+ else if (mixinMember.isSuperAccessor) { // mixin super accessors
+ val superAccessor = addMember(clazz, mixinMember.cloneSymbol(clazz)) setPos clazz.pos
+ assert(superAccessor.alias != NoSymbol, superAccessor)
+ val alias1 = rebindSuper(clazz, mixinMember.alias, mixinClass)
+ superAccessor.asInstanceOf[TermSymbol] setAlias alias1
+ }
+ else if (mixinMember.isMethod && mixinMember.isModule && mixinMember.hasNoFlags(LIFTED | BRIDGE)) {
+ // mixin objects: todo what happens with abstract objects?
+ addMember(clazz, mixinMember.cloneSymbol(clazz, mixinMember.flags & ~(DEFERRED | lateDEFERRED)) setPos clazz.pos)
+ }
}
+ }
- for (mc <- clazz.mixinClasses)
- if (mc hasFlag lateINTERFACE) {
- // @SEAN: adding trait tracking so we don't have to recompile transitive closures
- unit.depends += mc
- addLateInterfaceMembers(mc)
- mixinTraitMembers(mc)
- mixinImplClassMembers(implClass(mc), mc)
- }
+ if (clazz.isJavaDefined || treatedClassInfos(clazz) == clazz.info)
+ return
+
+ treatedClassInfos(clazz) = clazz.info
+ assert(!clazz.isTrait && clazz.info.parents.nonEmpty, clazz)
+
+ // first complete the superclass with mixed in members
+ addMixedinMembers(clazz.superClass, unit)
+
+ //Console.println("adding members of " + clazz.info.baseClasses.tail.takeWhile(superclazz !=) + " to " + clazz);//DEBUG
+ for (mc <- clazz.mixinClasses ; if mc hasFlag lateINTERFACE) {
+ // @SEAN: adding trait tracking so we don't have to recompile transitive closures
+ unit.depends += mc
+ addLateInterfaceMembers(mc)
+ mixinTraitMembers(mc)
+ mixinImplClassMembers(implClass(mc), mc)
}
}
@@ -436,9 +426,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
* Such fields will be nulled after the initializer has memoized the lazy value.
*/
def singleUseFields(templ: Template): collection.Map[Symbol, List[Symbol]] = {
- val usedIn = new mutable.HashMap[Symbol, List[Symbol]] {
- override def default(key: Symbol) = Nil
- }
+ val usedIn = mutable.HashMap[Symbol, List[Symbol]]() withDefaultValue Nil
object SingleUseTraverser extends Traverser {
override def traverse(tree: Tree) {
@@ -477,7 +465,6 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
new MixinTransformer(unit)
class MixinTransformer(unit : CompilationUnit) extends Transformer {
-
/** Within a static implementation method: the parameter referring to the
* current object. Undefined everywhere else.
*/
@@ -1129,18 +1116,20 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
* - refer to fields in some implementation class via an abstract method in the interface.
*/
private def postTransform(tree: Tree): Tree = {
+ def siteWithinImplClass = currentOwner.enclClass.isImplClass
val sym = tree.symbol
+
// change every node type that refers to an implementation class to its
// corresponding interface, unless the node's symbol is an implementation class.
if (tree.tpe.typeSymbol.isImplClass && ((sym eq null) || !sym.isImplClass))
tree.tpe = toInterface(tree.tpe)
tree match {
- case Template(parents, self, body) =>
+ case templ @ Template(parents, self, body) =>
// change parents of templates to conform to parents in the symbol info
val parents1 = currentOwner.info.parents map (t => TypeTree(t) setPos tree.pos)
// mark fields which can be nulled afterward
- lazyValNullables = nullableFields(tree.asInstanceOf[Template]) withDefaultValue Set()
+ lazyValNullables = nullableFields(templ) withDefaultValue Set()
// add all new definitions to current class or interface
treeCopy.Template(tree, parents1, self, addNewDefs(currentOwner, body))
@@ -1179,18 +1168,23 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
// - if `m` refers to a trait, insert a static call to the corresponding static
// implementation
// - otherwise return tree unchanged
- if (mix == tpnme.EMPTY && currentOwner.enclClass.isImplClass)
- assert(false, "illegal super in trait: " + currentOwner.enclClass + " " + tree);
+ assert(
+ !(mix == tpnme.EMPTY && siteWithinImplClass),
+ "illegal super in trait: " + currentOwner.enclClass + " " + tree
+ )
+
if (sym.owner hasFlag lateINTERFACE) {
if (sym.hasAccessorFlag) {
assert(args.isEmpty, args)
val sym1 = sym.overridingSymbol(currentOwner.enclClass)
typedPos(tree.pos)((transformSuper(qual) DOT sym1)())
- } else {
+ }
+ else {
staticCall(beforePrevPhase(sym.overridingSymbol(implClass(sym.owner))))
}
- } else {
- assert(!currentOwner.enclClass.isImplClass, currentOwner.enclClass)
+ }
+ else {
+ assert(!siteWithinImplClass, currentOwner.enclClass)
tree
}
case _ =>
@@ -1208,8 +1202,8 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
// refer to fields in some implementation class via an abstract
// getter in the interface.
val iface = toInterface(sym.owner.tpe).typeSymbol
- val getter = sym.getter(iface)
- assert(getter != NoSymbol, sym)
+ val getter = sym getter iface orElse abort("No getter for " + sym + " in " + iface)
+
typedPos(tree.pos)((qual DOT getter)())
case Assign(Apply(lhs @ Select(qual, _), List()), rhs) =>
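
A sketch of what addMixedinMembers/mixinTraitMembers synthesize for a concrete trait member (class names invented; the members listed in comments are conceptual, not literal compiler output):

trait Counter {
  val start = 1              // concrete accessor, backed by the impl class
  def next(n: Int) = n + 1   // concrete method, forwarded to the impl class
}

class Clock extends Counter
// After the mixin phase, Clock conceptually contains:
//   - a private local field holding `start` (see the PrivateLocal flags above)
//   - a cloned concrete accessor for `start`, flagged MIXEDIN
//   - a forwarder def next(n: Int) = Counter$class.next(this, n) into the impl class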
diff --git a/src/compiler/scala/tools/nsc/transform/TailCalls.scala b/src/compiler/scala/tools/nsc/transform/TailCalls.scala
index fdb5c7e52e..9915f7e9fc 100644
--- a/src/compiler/scala/tools/nsc/transform/TailCalls.scala
+++ b/src/compiler/scala/tools/nsc/transform/TailCalls.scala
@@ -36,6 +36,8 @@ abstract class TailCalls extends Transform {
}
}
+ import gen.hasSynthCaseSymbol
+
/**
* A Tail Call Transformer
*
@@ -87,10 +89,22 @@ abstract class TailCalls extends Transform {
class TailCallElimination(unit: CompilationUnit) extends Transformer {
private val defaultReason = "it contains a recursive call not in tail position"
+ /** Has the label been accessed? Then its symbol is in this set. */
+ private val accessed = new collection.mutable.HashSet[Symbol]()
+ // `accessed` used to be a boolean stored in the current context -- that is no longer tenable:
+ // with jumps to labels in tail position now themselves considered to be in tail position,
+ // a downstream context may access the label, and the upstream one will be none the wiser.
+ // This is necessary because tail calls may now occur in places where syntactically they seem
+ // impossible (since we consider jumps to labels that are in tail position, such as matchEnd(x) {x}).
+
+
class Context() {
/** The current method */
var method: Symbol = NoSymbol
+ // symbols of label defs in this method that are in tail position
+ var tailLabels: Set[Symbol] = Set()
+
/** The current tail-call label */
var label: Symbol = NoSymbol
@@ -104,24 +118,20 @@ abstract class TailCalls extends Transform {
var failReason = defaultReason
var failPos = method.pos
- /** Has the label been accessed? */
- var accessed = false
-
def this(that: Context) = {
this()
this.method = that.method
this.tparams = that.tparams
this.tailPos = that.tailPos
- this.accessed = that.accessed
this.failPos = that.failPos
this.label = that.label
+ this.tailLabels = that.tailLabels
}
def this(dd: DefDef) {
this()
this.method = dd.symbol
this.tparams = dd.tparams map (_.symbol)
this.tailPos = true
- this.accessed = false
this.failPos = dd.pos
/** Create a new method symbol for the current method and store it in
@@ -141,14 +151,14 @@ abstract class TailCalls extends Transform {
def isEligible = method.isEffectivelyFinal
// @tailrec annotation indicates mandatory transformation
def isMandatory = method.hasAnnotation(TailrecClass) && !forMSIL
- def isTransformed = isEligible && accessed
+ def isTransformed = isEligible && accessed(label)
def tailrecFailure() = unit.error(failPos, "could not optimize @tailrec annotated " + method + ": " + failReason)
def newThis(pos: Position) = method.newValue(nme.THIS, pos, SYNTHETIC) setInfo currentClass.typeOfThis
override def toString(): String = (
"" + method.name + " tparams: " + tparams + " tailPos: " + tailPos +
- " accessed: " + accessed + "\nLabel: " + label + "\nLabel type: " + label.info
+ " Label: " + label + " Label type: " + label.info
)
}
@@ -206,7 +216,7 @@ abstract class TailCalls extends Transform {
def rewriteTailCall(recv: Tree): Tree = {
debuglog("Rewriting tail recursive call: " + fun.pos.lineContent.trim)
- ctx.accessed = true
+ accessed += ctx.label
typedPos(fun.pos)(Apply(Ident(ctx.label), recv :: transformArgs))
}
@@ -242,10 +252,16 @@ abstract class TailCalls extends Transform {
unit.error(tree.pos, "@tailrec annotated method contains no recursive calls")
}
}
- debuglog("Considering " + dd.name + " for tailcalls")
+
+ // labels are local to a method, so only traverse the rhs of a defdef
+ val collectTailPosLabels = new TailPosLabelsTraverser
+ collectTailPosLabels traverse rhs0
+ newCtx.tailLabels = collectTailPosLabels.tailLabels.toSet
+
+ debuglog("Considering " + dd.name + " for tailcalls, with labels in tailpos: "+ newCtx.tailLabels)
val newRHS = transform(rhs0, newCtx)
- deriveDefDef(tree)(rhs =>
+ deriveDefDef(tree){rhs =>
if (newCtx.isTransformed) {
/** We have rewritten the tree, but there may be nested recursive calls remaining.
* If @tailrec is given we need to fail those now.
@@ -270,8 +286,22 @@ abstract class TailCalls extends Transform {
newRHS
}
+ }
+
+ // a translated match
+ case Block(stats, expr) if stats forall hasSynthCaseSymbol =>
+ // the assumption is that once we encounter a case, the remainder of the block will consist of cases
+ // the prologue may be empty, usually it is the valdef that stores the scrut
+ val (prologue, cases) = stats span (s => !s.isInstanceOf[LabelDef])
+ treeCopy.Block(tree,
+ noTailTransforms(prologue) ++ transformTrees(cases),
+ transform(expr)
)
+ // a translated casedef
+ case LabelDef(_, _, body) if hasSynthCaseSymbol(tree) =>
+ deriveLabelDef(tree)(transform)
+
case Block(stats, expr) =>
treeCopy.Block(tree,
noTailTransforms(stats),
@@ -308,8 +338,23 @@ abstract class TailCalls extends Transform {
case Apply(fun, args) =>
if (fun.symbol == Boolean_or || fun.symbol == Boolean_and)
treeCopy.Apply(tree, fun, transformTrees(args))
- else
- rewriteApply(fun, fun, Nil, args)
+ else if (fun.symbol.isLabel && args.nonEmpty && args.tail.isEmpty && ctx.tailLabels(fun.symbol)) {
+ // this is to detect tailcalls in translated matches
+ // it's a one-argument call to a label that is in a tailposition and that looks like label(x) {x}
+ // thus, the argument to the call is in tailposition
+ val saved = ctx.tailPos
+ ctx.tailPos = true
+ debuglog("in tailpos label: "+ args.head)
+ val res = transform(args.head)
+ ctx.tailPos = saved
+ if (res ne args.head) {
+ // we tail-called -- TODO: shield from false-positives where we rewrite but don't tail-call
+ // must leave the jump to the original tailpos-label (fun)!
+ // there might be *a* tailcall *in* res, but it doesn't mean res *always* tailcalls
+ treeCopy.Apply(tree, fun, List(res))
+ }
+ else rewriteApply(fun, fun, Nil, args)
+ } else rewriteApply(fun, fun, Nil, args)
case Alternative(_) | Star(_) | Bind(_, _) =>
sys.error("We should've never gotten inside a pattern")
@@ -320,4 +365,66 @@ abstract class TailCalls extends Transform {
}
}
}
+
+ // collect the LabelDefs (generated by the pattern matcher) in a DefDef that are in tail position
+ // the labels all look like: matchEnd(x) {x}
+ // then, in a forward jump `matchEnd(expr)`, `expr` is considered in tail position (and the matchEnd jump is replaced by the jump generated by expr)
+ class TailPosLabelsTraverser extends Traverser {
+ val tailLabels = new collection.mutable.ListBuffer[Symbol]()
+
+ private var maybeTail: Boolean = true // since we start in the rhs of a DefDef
+
+ def traverse(tree: Tree, maybeTailNew: Boolean): Unit = {
+ val saved = maybeTail
+ maybeTail = maybeTailNew
+ try traverse(tree)
+ finally maybeTail = saved
+ }
+
+ def traverseNoTail(tree: Tree) = traverse(tree, false)
+ def traverseTreesNoTail(trees: List[Tree]) = trees foreach traverseNoTail
+
+ override def traverse(tree: Tree) = tree match {
+ case LabelDef(_, List(arg), body@Ident(_)) if arg.symbol == body.symbol => // we're looking for label(x){x} in tail position, since that means `a` is in tail position in a call `label(a)`
+ if (maybeTail) tailLabels += tree.symbol
+
+ // a translated casedef
+ case LabelDef(_, _, body) if hasSynthCaseSymbol(tree) =>
+ traverse(body)
+
+ // a translated match
+ case Block(stats, expr) if stats forall hasSynthCaseSymbol =>
+ // the assumption is that once we encounter a case, the remainder of the block will consist of cases
+ // the prologue may be empty, usually it is the valdef that stores the scrut
+ val (prologue, cases) = stats span (s => !s.isInstanceOf[LabelDef])
+ traverseTreesNoTail(prologue) // selector (may be absent)
+ traverseTrees(cases)
+ traverse(expr)
+
+ case CaseDef(pat, guard, body) =>
+ traverse(body)
+
+ case Match(selector, cases) =>
+ traverseNoTail(selector)
+ traverseTrees(cases)
+
+ case dd @ DefDef(_, _, _, _, _, _) => // we are run per-method
+
+ case Block(stats, expr) =>
+ traverseTreesNoTail(stats)
+ traverse(expr)
+
+ case If(cond, thenp, elsep) =>
+ traverse(thenp)
+ traverse(elsep)
+
+ case Try(block, catches, finalizer) =>
+ traverseNoTail(block)
+ traverseTreesNoTail(catches)
+ traverseNoTail(finalizer)
+
+ case EmptyTree | Super(_, _) | This(_) | Select(_, _) | Ident(_) | Literal(_) | Function(_, _) | TypeTree() =>
+ case _ => super.traverse(tree)
+ }
+ }
}
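
The practical effect of tracking tail-position labels of the matchEnd(x) {x} shape is that a recursive call sitting in the tail position of a translated match can still be rewritten into a jump. A small illustrative example (not part of the patch):

import scala.annotation.tailrec

object TailRecMatchSketch {
  // The match is compiled into pattern-matcher labels; with tail-position labels
  // tracked, the recursive call below is still recognized as a tail call.
  @tailrec
  def length[A](xs: List[A], acc: Int = 0): Int = xs match {
    case Nil     => acc
    case _ :: tl => length(tl, acc + 1)
  }
}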
diff --git a/src/compiler/scala/tools/nsc/transform/UnCurry.scala b/src/compiler/scala/tools/nsc/transform/UnCurry.scala
index ee565530b7..e54e0289bb 100644
--- a/src/compiler/scala/tools/nsc/transform/UnCurry.scala
+++ b/src/compiler/scala/tools/nsc/transform/UnCurry.scala
@@ -168,7 +168,7 @@ abstract class UnCurry extends InfoTransform
private def nonLocalReturnTry(body: Tree, key: Symbol, meth: Symbol) = {
localTyper typed {
val extpe = nonLocalReturnExceptionType(meth.tpe.finalResultType)
- val ex = meth.newValue(body.pos, nme.ex) setInfo extpe
+ val ex = meth.newValue(nme.ex, body.pos) setInfo extpe
val pat = gen.mkBindForCase(ex, NonLocalReturnControlClass, List(meth.tpe.finalResultType))
val rhs = (
IF ((ex DOT nme.key)() OBJ_EQ Ident(key))
@@ -237,8 +237,10 @@ abstract class UnCurry extends InfoTransform
def targs = fun.tpe.typeArgs
def isPartial = fun.tpe.typeSymbol == PartialFunctionClass
+ // if the function was eta-expanded, it's not a match without a selector
if (fun1 ne fun) fun1
else {
+ assert(!(opt.virtPatmat && isPartial)) // empty-selector matches have already been translated into instantiations of anonymous (partial) functions
val (formals, restpe) = (targs.init, targs.last)
val anonClass = owner.newAnonymousFunctionClass(fun.pos, inConstructorFlag)
def parents =
@@ -286,52 +288,54 @@ abstract class UnCurry extends InfoTransform
def defaultCase = CaseDef(Ident(nme.WILDCARD), EmptyTree, Literal(Constant(false)))
- val casesNoSynthCatchAll = dropSyntheticCatchAll(cases)
+// val casesNoSynthCatchAll = dropSyntheticCatchAll(cases)
gen.mkUncheckedMatch(
- if (casesNoSynthCatchAll exists treeInfo.isDefaultCase) Literal(Constant(true))
- else substTree(wrap(Match(selector, (casesNoSynthCatchAll map transformCase) :+ defaultCase)).duplicate)
+ if (cases exists treeInfo.isDefaultCase) Literal(Constant(true))
+ else substTree(wrap(Match(selector, (cases map transformCase) :+ defaultCase)).duplicate)
)
}
- override def caseVirtualizedMatch(orig: Tree, _match: Tree, targs: List[Tree], scrut: Tree, matcher: Tree): Tree = {
- object noOne extends Transformer {
- override val treeCopy = newStrictTreeCopier // must duplicate everything
- val one = _match.tpe member newTermName("one")
- override def transform(tree: Tree): Tree = tree match {
- case Apply(fun, List(a)) if fun.symbol == one =>
- // blow one's argument away since all we want to know is whether the match succeeds or not
- // (the alternative, making `one` CBN, would entail moving away from Option)
- Apply(fun.duplicate, List(gen.mkZeroContravariantAfterTyper(a.tpe)))
- case _ =>
- super.transform(tree)
- }
- }
- substTree(Apply(Apply(TypeApply(Select(_match.duplicate, _match.tpe.member(newTermName("isSuccess"))), targs map (_.duplicate)), List(scrut.duplicate)), List(noOne.transform(matcher))))
- }
-
- override def caseVirtualizedMatchOpt(orig: Tree, zero: ValDef, x: ValDef, matchRes: ValDef, keepGoing: ValDef, stats: List[Tree], epilogue: Tree, wrap: Tree => Tree) = {
- object dropMatchResAssign extends Transformer {
- // override val treeCopy = newStrictTreeCopier // will duplicate below
- override def transform(tree: Tree): Tree = tree match {
- // don't compute the result of the match -- remove the block for the RHS (emitted by pmgen.one), except for the assignment to keepGoing
- case gen.VirtualCaseDef(assignKeepGoing, matchRes, zero) if assignKeepGoing.lhs.symbol eq keepGoing.symbol =>
- Block(List(assignKeepGoing), zero)
- case _ =>
- super.transform(tree)
- }
- }
- val statsNoMatchRes: List[Tree] = stats map (dropMatchResAssign.transform) toList
- val idaBlock = wrap(Block(
- zero ::
- x ::
- /* drop matchRes def */
- keepGoing ::
- statsNoMatchRes,
- NOT(REF(keepGoing.symbol)) // replace `if (keepGoing) throw new MatchError(...) else matchRes` epilogue by `!keepGoing`
- ))
- substTree(idaBlock.duplicate) // duplicate on block as a whole to ensure valdefs are properly cloned and substed
- }
+ override def caseVirtualizedMatch(orig: Tree, _match: Tree, targs: List[Tree], scrut: Tree, matcher: Tree): Tree = {assert(false); orig}
+ // {
+ // object noOne extends Transformer {
+ // override val treeCopy = newStrictTreeCopier // must duplicate everything
+ // val one = _match.tpe member newTermName("one")
+ // override def transform(tree: Tree): Tree = tree match {
+ // case Apply(fun, List(a)) if fun.symbol == one =>
+ // // blow one's argument away since all we want to know is whether the match succeeds or not
+ // // (the alternative, making `one` CBN, would entail moving away from Option)
+ // Apply(fun.duplicate, List(gen.mkZeroContravariantAfterTyper(a.tpe)))
+ // case _ =>
+ // super.transform(tree)
+ // }
+ // }
+ // substTree(Apply(Apply(TypeApply(Select(_match.duplicate, _match.tpe.member(newTermName("isSuccess"))), targs map (_.duplicate)), List(scrut.duplicate)), List(noOne.transform(matcher))))
+ // }
+
+ override def caseVirtualizedMatchOpt(orig: Tree, zero: ValDef, x: ValDef, matchRes: ValDef, keepGoing: ValDef, stats: List[Tree], epilogue: Tree, wrap: Tree => Tree) = {assert(false); orig}
+ // {
+ // object dropMatchResAssign extends Transformer {
+ // // override val treeCopy = newStrictTreeCopier // will duplicate below
+ // override def transform(tree: Tree): Tree = tree match {
+ // // don't compute the result of the match -- remove the block for the RHS (emitted by pmgen.one), except for the assignment to keepGoing
+ // case gen.VirtualCaseDef(assignKeepGoing, matchRes, zero) if assignKeepGoing.lhs.symbol eq keepGoing.symbol =>
+ // Block(List(assignKeepGoing), zero)
+ // case _ =>
+ // super.transform(tree)
+ // }
+ // }
+ // val statsNoMatchRes: List[Tree] = stats map (dropMatchResAssign.transform) toList
+ // val idaBlock = wrap(Block(
+ // zero ::
+ // x ::
+ // /* drop matchRes def */
+ // keepGoing ::
+ // statsNoMatchRes,
+ // NOT(REF(keepGoing.symbol)) // replace `if (keepGoing) throw new MatchError(...) else matchRes` epilogue by `!keepGoing`
+ // ))
+ // substTree(idaBlock.duplicate) // duplicate on block as a whole to ensure valdefs are properly cloned and substed
+ // }
}
DefDef(m, isDefinedAtTransformer(fun.body))
diff --git a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
index ed9fee986f..764823d786 100644
--- a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
@@ -376,21 +376,16 @@ trait ContextErrors {
setError(tree)
}
- def MissingParameterTypeError(fun: Tree, vparam: ValDef, pt: Type) = {
- def anonMessage = (
- "\nThe argument types of an anonymous function must be fully known. (SLS 8.5)" +
- "\nExpected type was: " + pt.toLongString
- )
-
- val suffix =
- if (!vparam.mods.isSynthetic) ""
- else " for expanded function" + (fun match {
- case Function(_, Match(_, _)) => anonMessage
- case _ => " " + fun
- })
+ def MissingParameterTypeError(fun: Tree, vparam: ValDef, pt: Type) =
+ if (vparam.mods.isSynthetic) fun match {
+ case Function(_, Match(_, _)) => MissingParameterTypeAnonMatchError(vparam, pt)
+ case _ => issueNormalTypeError(vparam, "missing parameter type for expanded function " + fun)
+ } else issueNormalTypeError(vparam, "missing parameter type")
- issueNormalTypeError(vparam, "missing parameter type" + suffix)
- }
+ def MissingParameterTypeAnonMatchError(vparam: Tree, pt: Type) =
+ issueNormalTypeError(vparam, "missing parameter type for expanded function\n"+
+ "The argument types of an anonymous function must be fully known. (SLS 8.5)\n"+
+ "Expected type was: " + pt.toLongString)
def ConstructorsOrderError(tree: Tree) = {
issueNormalTypeError(tree, "called constructor's definition must precede calling constructor's definition")
@@ -684,7 +679,7 @@ trait ContextErrors {
def errMsg = {
val location = if (sym.isClassConstructor) owner0 else pre.widen
- underlying(sym).fullLocationString + " cannot be accessed in " +
+ underlyingSymbol(sym).fullLocationString + " cannot be accessed in " +
location + explanation
}
NormalTypeError(tree, errMsg, ErrorKinds.Access)
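
For reference, a hypothetical snippet (not from the patch) showing when MissingParameterTypeAnonMatchError fires: without an expected function type, the parameter of the expanded `x => x match { ... }` cannot be inferred and the SLS 8.5 message above is reported.

object MissingParamTypeSketch {
  // Does not compile without an expected type:
  //   val f = { case (a, b) => a }
  //   error: missing parameter type for expanded function
  //   The argument types of an anonymous function must be fully known. (SLS 8.5)

  // Supplying the expected type fixes it:
  val g: ((Int, Int)) => Int = { case (a, b) => a }
}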
diff --git a/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala b/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala
index 29831c8469..eb0d489901 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala
@@ -256,8 +256,16 @@ abstract class Duplicators extends Analyzer {
case ldef @ LabelDef(name, params, rhs) =>
// log("label def: " + ldef)
ldef.tpe = null
- val params1 = params map (p => Ident(updateSym(p.symbol)))
- super.typed(treeCopy.LabelDef(tree, name, params1, rhs), mode, pt)
+ // since typer does not create the symbols for a LabelDef's params,
+ // we do that manually here -- we should really refactor LabelDef to be a subclass of DefDef
+ def newParam(p: Tree): Ident = {
+ val newsym = p.symbol.cloneSymbol //(context.owner) // TODO owner?
+ Ident(newsym.setInfo(fixType(p.symbol.info)))
+ }
+ val params1 = params map newParam
+ val rhs1 = (new TreeSubstituter(params map (_.symbol), params1) transform rhs) // TODO: duplicate?
+ rhs1.tpe = null
+ super.typed(treeCopy.LabelDef(tree, name, params1, rhs1), mode, pt)
case Bind(name, _) =>
// log("bind: " + tree)
diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala
index 8b3bc253fd..a1ca4904f4 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala
@@ -196,10 +196,12 @@ trait Infer {
/* -- Error Messages --------------------------------------------------- */
def setError[T <: Tree](tree: T): T = {
- if (settings.debug.value) { // DEBUG
- println("set error: "+tree);
- throw new Error()
- }
+ debuglog("set error: "+ tree)
+ // this breaks -Ydebug pretty radically
+ // if (settings.debug.value) { // DEBUG
+ // println("set error: "+tree);
+ // throw new Error()
+ // }
def name = newTermName("<error: " + tree.symbol + ">")
def errorClass = if (context.reportErrors) context.owner.newErrorClass(name.toTypeName) else stdErrorClass
def errorValue = if (context.reportErrors) context.owner.newErrorValue(name) else stdErrorValue
@@ -1099,7 +1101,7 @@ trait Infer {
// since instantiateTypeVar wants to modify the skolem that corresponds to the method's type parameter,
// and it uses the TypeVar's origin to locate it, deskolemize the existential skolem to the method tparam skolem
// (the existential skolem was created by adaptConstrPattern to introduce the type slack necessary to soundly deal with variant type parameters)
- case skolem if skolem.isExistentialSkolem => freshVar(skolem.deSkolemize.asInstanceOf[TypeSymbol])
+ case skolem if skolem.isGADTSkolem => freshVar(skolem.deSkolemize.asInstanceOf[TypeSymbol])
case p => freshVar(p)
}
diff --git a/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala b/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala
index f32ad9293c..b57f139074 100644
--- a/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala
@@ -169,6 +169,16 @@ trait MethodSynthesis {
self: Namer =>
import NamerErrorGen._
+
+ /** TODO - synthesize method.
+ */
+ def enterImplicitClass(tree: ClassDef) {
+ /** e.g.
+ val ClassDef(mods, name, tparams, impl) = tree
+ val converter = ImplicitClassConverter(tree).createAndEnterSymbol()
+ ...
+ */
+ }
def enterGetterSetter(tree: ValDef) {
val ValDef(mods, name, _, _) = tree
@@ -195,17 +205,13 @@ trait MethodSynthesis {
enterBeans(tree)
}
def finishGetterSetter(typer: Typer, stat: Tree): List[Tree] = stat match {
- case vd @ ValDef(mods, name, tpt, rhs) if !noFinishGetterSetter(vd) =>
+ case vd @ ValDef(mods, name, tpt, rhs) if !noFinishGetterSetter(vd) && !vd.symbol.isLazy =>
// If we don't save the annotations, they seem to wander off.
val annotations = stat.symbol.initialize.annotations
- val trees = (
- allValDefDerived(vd)
- map (acc => atPos(vd.pos.focus)(acc derive annotations))
- filterNot (_ eq EmptyTree)
+ ( allValDefDerived(vd)
+ map (acc => atPos(vd.pos.focus)(acc derive annotations))
+ filterNot (_ eq EmptyTree)
)
- // log(trees.mkString("Accessor trees:\n ", "\n ", "\n"))
- if (vd.symbol.isLazy) List(stat)
- else trees
case _ =>
List(stat)
}
@@ -230,14 +236,33 @@ trait MethodSynthesis {
}
trait Derived {
+ /** The tree from which we are deriving a synthetic member. */
+ def tree: Tree
def name: TermName
def flagsMask: Long
def flagsExtra: Long
+
+ /** The tree, symbol, and type completer for the synthetic member. */
def completer(sym: Symbol): Type
+ def derivedSym: Symbol
+ def derivedTree: Tree
}
- trait DerivedFromValDef extends Derived {
- /** The declaration from which we are deriving.
- */
+
+ trait DerivedFromMemberDef extends Derived {
+ def tree: MemberDef
+
+ // Final methods to make the rest easier to reason about.
+ final def mods = tree.mods
+ final def basisSym = tree.symbol
+ final def enclClass = basisSym.enclClass
+ final def derivedFlags: Long = basisSym.flags & flagsMask | flagsExtra
+ }
+
+ trait DerivedFromClassDef extends DerivedFromMemberDef {
+ def tree: ClassDef
+ }
+
+ trait DerivedFromValDef extends DerivedFromMemberDef {
def tree: ValDef
/** Which meta-annotation is associated with this kind of entity.
@@ -245,14 +270,8 @@ trait MethodSynthesis {
*/
def category: Symbol
- // Final methods to make the rest easier to reason about.
- final def mods = tree.mods
- final def basisSym = tree.symbol
- final def enclClass = basisSym.enclClass
-
final def completer(sym: Symbol) = namerOf(sym).accessorTypeCompleter(tree, isSetter)
final def fieldSelection = Select(This(enclClass), basisSym)
- final def derivedFlags: Long = basisSym.flags & flagsMask | flagsExtra
final def derivedMods: Modifiers = mods & flagsMask | flagsExtra mapAnnotations (_ => Nil)
def derivedSym: Symbol = tree.symbol
@@ -312,6 +331,19 @@ trait MethodSynthesis {
private def setterDef = DefDef(derivedSym, setterRhs)
override def derivedTree: Tree = if (setterParam == NoSymbol) EmptyTree else setterDef
}
+
+ /** A synthetic method which performs the implicit conversion implied by
+ * the declaration of an implicit class. Yet to be written.
+ */
+ case class ImplicitClassConverter(tree: ClassDef) extends DerivedFromClassDef {
+ def completer(sym: Symbol): Type = ???
+ def derivedSym: Symbol = ???
+ def derivedTree: DefDef = ???
+ def flagsExtra: Long = ???
+ def flagsMask: Long = ???
+ def name: TermName = ???
+ }
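      // Sketch of the desugaring this converter is intended to synthesize one day (illustration
      // only; names are hypothetical):
      //   object Example {
      //     implicit class RichInt(val self: Int) { def squared = self * self }
      //     // conceptually also gets:  implicit def RichInt(self: Int): RichInt = new RichInt(self)
      //     // so that `3.squared` elaborates to `RichInt(3).squared`
      //   }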
+
case class Getter(tree: ValDef) extends DerivedGetter {
def name = tree.name
def category = GetterTargetClass
diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala
index c5fb13a5a9..6b27c27652 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala
@@ -665,6 +665,10 @@ trait Namers extends MethodSynthesis {
"If possible, define " + tree.symbol + " in " + owner.skipPackageObject + " instead."
)
}
+
+ // Suggested location only.
+ if (mods.isImplicit)
+ enterImplicitClass(tree)
}
// this logic is needed in case typer was interrupted half
diff --git a/src/compiler/scala/tools/nsc/typechecker/PatMatVirtualiser.scala b/src/compiler/scala/tools/nsc/typechecker/PatMatVirtualiser.scala
index b060fd7121..aff8368f75 100644
--- a/src/compiler/scala/tools/nsc/typechecker/PatMatVirtualiser.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/PatMatVirtualiser.scala
@@ -32,6 +32,8 @@ trait PatMatVirtualiser extends ast.TreeDSL { self: Analyzer =>
import global._
import definitions._
+ val SYNTH_CASE = Flags.CASE | SYNTHETIC
+
object vpmName {
val one = newTermName("one")
val drop = newTermName("drop")
@@ -43,13 +45,13 @@ trait PatMatVirtualiser extends ast.TreeDSL { self: Analyzer =>
val outer = newTermName("<outer>")
val runOrElse = newTermName("runOrElse")
val zero = newTermName("zero")
- val _match = newTermName("__match") // don't call it __match, since that will trigger virtual pattern matching...
+ val _match = newTermName("__match") // don't call it __match, since that will trigger virtual pattern matching...
def counted(str: String, i: Int) = newTermName(str+i)
}
object MatchTranslator {
- def apply(typer: Typer): MatchTranslation = {
+ def apply(typer: Typer): MatchTranslation with CodegenCore = {
import typer._
// typing `_match` to decide which MatchTranslator to create adds 4% to quick.comp.timer
newTyper(context.makeImplicit(reportAmbiguousErrors = false)).silent(_.typed(Ident(vpmName._match), EXPRmode, WildcardType), reportAmbiguousErrors = false) match {
@@ -116,10 +118,6 @@ trait PatMatVirtualiser extends ast.TreeDSL { self: Analyzer =>
trait MatchTranslation extends MatchMonadInterface { self: TreeMakers with CodegenCore =>
import typer.{typed, context, silent, reallyExists}
- private def repeatedToSeq(tp: Type): Type = (tp baseType RepeatedParamClass) match {
- case TypeRef(_, RepeatedParamClass, args) => appliedType(SeqClass.typeConstructor, args)
- case _ => tp
- }
/** Implement a pattern match by turning its cases (including the implicit failure case)
* into the corresponding (monadic) extractors, and combining them with the `orElse` combinator.
@@ -131,18 +129,15 @@ trait PatMatVirtualiser extends ast.TreeDSL { self: Analyzer =>
* thus, you must typecheck the result (and that will in turn translate nested matches)
       * this could probably be optimized... (but note that the matchStrategy must be solved for each nested pattern match)
*/
- def translateMatch(scrut: Tree, cases: List[CaseDef], pt: Type): Tree = {
+ def translateMatch(scrut: Tree, cases: List[CaseDef], pt: Type, scrutType: Type, matchFailGenOverride: Option[Tree => Tree] = None): Tree = {
// we don't transform after typers
// (that would require much more sophistication when generating trees,
// and the only place that emits Matches after typers is for exception handling anyway)
assert(phase.id <= currentRun.typerPhase.id, phase)
- val scrutType = repeatedToSeq(elimAnonymousClass(scrut.tpe.widen))
-
- val scrutSym = freshSym(scrut.pos, pureType(scrutType))
- val okPt = repeatedToSeq(pt)
+ val scrutSym = freshSym(scrut.pos, pureType(scrutType)) setFlag SYNTH_CASE
// pt = Any* occurs when compiling test/files/pos/annotDepMethType.scala with -Xexperimental
- combineCases(scrut, scrutSym, cases map translateCase(scrutSym, okPt), okPt, matchOwner)
+ combineCases(scrut, scrutSym, cases map translateCase(scrutSym, pt), pt, matchOwner, matchFailGenOverride)
}
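      // Rough sketch of the scheme described in the comment above (pure/virtualized codegen;
      // shapes are approximate and for illustration only):
      //   x match { case Some(y) => y }
      // is translated, before typing, into something like
      //   __match.runOrElse(x)(x1 => Some.unapply(x1).flatMap(y => __match.one(y)))
      // with a synthetic catch-all (throwing a MatchError by default) when no case is a default.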
// return list of typed CaseDefs that are supported by the backend (typed/bind/wildcard)
@@ -154,21 +149,22 @@ trait PatMatVirtualiser extends ast.TreeDSL { self: Analyzer =>
// if they're already simple enough to be handled by the back-end, we're done
if (caseDefs forall treeInfo.isCatchCase) caseDefs
else {
- val okPt = repeatedToSeq(pt)
- val switch = {
+ val swatches = { // switch-catches
val bindersAndCases = caseDefs map { caseDef =>
// generate a fresh symbol for each case, hoping we'll end up emitting a type-switch (we don't have a global scrut there)
// if we fail to emit a fine-grained switch, have to do translateCase again with a single scrutSym (TODO: uniformize substitution on treemakers so we can avoid this)
val caseScrutSym = freshSym(pos, pureType(ThrowableClass.tpe))
- (caseScrutSym, propagateSubstitution(translateCase(caseScrutSym, okPt)(caseDef), EmptySubstitution))
+ (caseScrutSym, propagateSubstitution(translateCase(caseScrutSym, pt)(caseDef), EmptySubstitution))
}
- (emitTypeSwitch(bindersAndCases, pt) map (_.map(fixerUpper(matchOwner, pos).apply(_).asInstanceOf[CaseDef])))
+ for(cases <- emitTypeSwitch(bindersAndCases, pt) toList;
+ if cases forall treeInfo.isCatchCase; // must check again, since it's not guaranteed -- TODO: can we eliminate this? e.g., a type test could test for a trait or a non-trivial prefix, which are not handled by the back-end
+ cse <- cases) yield fixerUpper(matchOwner, pos)(cse).asInstanceOf[CaseDef]
}
- val catches = switch getOrElse {
+ val catches = if (swatches nonEmpty) swatches else {
val scrutSym = freshSym(pos, pureType(ThrowableClass.tpe))
- val casesNoSubstOnly = caseDefs map { caseDef => (propagateSubstitution(translateCase(scrutSym, okPt)(caseDef), EmptySubstitution))}
+ val casesNoSubstOnly = caseDefs map { caseDef => (propagateSubstitution(translateCase(scrutSym, pt)(caseDef), EmptySubstitution))}
val exSym = freshSym(pos, pureType(ThrowableClass.tpe), "ex")
@@ -177,7 +173,7 @@ trait PatMatVirtualiser extends ast.TreeDSL { self: Analyzer =>
CaseDef(
Bind(exSym, Ident(nme.WILDCARD)), // TODO: does this need fixing upping?
EmptyTree,
- combineCasesNoSubstOnly(CODE.REF(exSym), scrutSym, casesNoSubstOnly, pt, matchOwner, scrut => Throw(CODE.REF(exSym)))
+ combineCasesNoSubstOnly(CODE.REF(exSym), scrutSym, casesNoSubstOnly, pt, matchOwner, Some(scrut => Throw(CODE.REF(exSym))))
)
})
}
@@ -262,9 +258,9 @@ trait PatMatVirtualiser extends ast.TreeDSL { self: Analyzer =>
* @arg patBinder symbol used to refer to the result of the previous pattern's extractor (will later be replaced by the outer tree with the correct tree to refer to that patterns result)
*/
def unapply(tree: Tree): Option[(Symbol, Type)] = tree match {
- case Bound(subpatBinder, typed@Typed(expr, tpt)) => Some((subpatBinder, typed.tpe))
- case Bind(_, typed@Typed(expr, tpt)) => Some((patBinder, typed.tpe))
- case Typed(expr, tpt) => Some((patBinder, tree.tpe))
+ case Bound(subpatBinder, typed@Typed(expr, tpt)) if typed.tpe ne null => Some((subpatBinder, typed.tpe))
+ case Bind(_, typed@Typed(expr, tpt)) if typed.tpe ne null => Some((patBinder, typed.tpe))
+ case Typed(expr, tpt) if tree.tpe ne null => Some((patBinder, tree.tpe))
case _ => None
}
}
@@ -706,10 +702,10 @@ class Foo(x: Other) { x._1 } // no error in this order
def optimizeCases(prevBinder: Symbol, cases: List[List[TreeMaker]], pt: Type): (List[List[TreeMaker]], List[Tree]) =
(cases, Nil)
- def emitSwitch(scrut: Tree, scrutSym: Symbol, cases: List[List[TreeMaker]], pt: Type): Option[Tree] =
+ def emitSwitch(scrut: Tree, scrutSym: Symbol, cases: List[List[TreeMaker]], pt: Type, matchFailGenOverride: Option[Tree => Tree]): Option[Tree] =
None
- // for catch
+ // for catch (no need to customize match failure)
def emitTypeSwitch(bindersAndCases: List[(Symbol, List[TreeMaker])], pt: Type): Option[List[CaseDef]] =
None
@@ -733,23 +729,25 @@ class Foo(x: Other) { x._1 } // no error in this order
private[this] var currSub: Substitution = null
// build Tree that chains `next` after the current extractor
- def chainBefore(next: Tree, pt: Type): Tree
+ def chainBefore(next: Tree)(casegen: Casegen): Tree
+ }
+
+ trait NoNewBinders extends TreeMaker {
+ protected val localSubstitution: Substitution = EmptySubstitution
}
- case class TrivialTreeMaker(tree: Tree) extends TreeMaker {
- val localSubstitution: Substitution = EmptySubstitution
- def chainBefore(next: Tree, pt: Type): Tree = tree
+ case class TrivialTreeMaker(tree: Tree) extends TreeMaker with NoNewBinders {
+ def chainBefore(next: Tree)(casegen: Casegen): Tree = tree
}
- case class BodyTreeMaker(body: Tree, matchPt: Type) extends TreeMaker {
- val localSubstitution: Substitution = EmptySubstitution
- def chainBefore(next: Tree, pt: Type): Tree = // assert(next eq EmptyTree)
- atPos(body.pos)(substitution(codegen.one(body, body.tpe, matchPt))) // since SubstOnly treemakers are dropped, need to do it here
+ case class BodyTreeMaker(body: Tree, matchPt: Type) extends TreeMaker with NoNewBinders {
+ def chainBefore(next: Tree)(casegen: Casegen): Tree = // assert(next eq EmptyTree)
+ atPos(body.pos)(casegen.one(substitution(body))) // since SubstOnly treemakers are dropped, need to do it here
}
case class SubstOnlyTreeMaker(prevBinder: Symbol, nextBinder: Symbol) extends TreeMaker {
val localSubstitution = Substitution(prevBinder, CODE.REF(nextBinder))
- def chainBefore(next: Tree, pt: Type): Tree = substitution(next)
+ def chainBefore(next: Tree)(casegen: Casegen): Tree = substitution(next)
}
abstract class FunTreeMaker extends TreeMaker {
@@ -766,8 +764,8 @@ class Foo(x: Other) { x._1 } // no error in this order
lazy val nextBinder = freshSym(pos, nextBinderTp)
lazy val localSubstitution = Substitution(List(prevBinder), List(CODE.REF(nextBinder)))
- def chainBefore(next: Tree, pt: Type): Tree =
- atPos(pos)(codegen.flatMapCond(cond, res, nextBinder, nextBinderTp, substitution(next)))
+ def chainBefore(next: Tree)(casegen: Casegen): Tree =
+ atPos(pos)(casegen.flatMapCond(cond, res, nextBinder, substitution(next)))
}
/**
@@ -778,11 +776,11 @@ class Foo(x: Other) { x._1 } // no error in this order
* in this function's body, and all the subsequent ones, references to the symbols in `from` will be replaced by the corresponding tree in `to`
*/
case class ExtractorTreeMaker(extractor: Tree, extraCond: Option[Tree], nextBinder: Symbol, localSubstitution: Substitution)(extractorReturnsBoolean: Boolean) extends FunTreeMaker {
- def chainBefore(next: Tree, pt: Type): Tree = {
- val condAndNext = extraCond map (codegen.ifThenElseZero(_, next)) getOrElse next
+ def chainBefore(next: Tree)(casegen: Casegen): Tree = {
+ val condAndNext = extraCond map (casegen.ifThenElseZero(_, next)) getOrElse next
atPos(extractor.pos)(
- if (extractorReturnsBoolean) codegen.flatMapCond(extractor, CODE.UNIT, nextBinder, nextBinder.info.widen, substitution(condAndNext))
- else codegen.flatMap(extractor, nextBinder, substitution(condAndNext))
+ if (extractorReturnsBoolean) casegen.flatMapCond(extractor, CODE.UNIT, nextBinder, substitution(condAndNext))
+ else casegen.flatMap(extractor, nextBinder, substitution(condAndNext))
)
}
@@ -791,10 +789,10 @@ class Foo(x: Other) { x._1 } // no error in this order
// TODO: allow user-defined unapplyProduct
case class ProductExtractorTreeMaker(prevBinder: Symbol, extraCond: Option[Tree], localSubstitution: Substitution) extends TreeMaker { import CODE._
- def chainBefore(next: Tree, pt: Type): Tree = {
+ def chainBefore(next: Tree)(casegen: Casegen): Tree = {
val nullCheck = REF(prevBinder) OBJ_NE NULL
val cond = extraCond map (nullCheck AND _) getOrElse nullCheck
- codegen.ifThenElseZero(cond, substitution(next))
+ casegen.ifThenElseZero(cond, substitution(next))
}
override def toString = "P"+(prevBinder, extraCond getOrElse "", localSubstitution)
@@ -907,61 +905,39 @@ class Foo(x: Other) { x._1 } // no error in this order
override def toString = "ET"+(prevBinder, patTree)
}
- case class AlternativesTreeMaker(prevBinder: Symbol, var altss: List[List[TreeMaker]], pos: Position) extends TreeMaker {
+ case class AlternativesTreeMaker(prevBinder: Symbol, var altss: List[List[TreeMaker]], pos: Position) extends TreeMaker with NoNewBinders {
// don't substitute prevBinder to nextBinder, a set of alternatives does not need to introduce a new binder, simply reuse the previous one
- val localSubstitution: Substitution = EmptySubstitution
override private[TreeMakers] def incorporateOuterSubstitution(outerSubst: Substitution): Unit = {
super.incorporateOuterSubstitution(outerSubst)
altss = altss map (alts => propagateSubstitution(alts, substitution))
}
- def chainBefore(next: Tree, pt: Type): Tree = { import CODE._
- // next does not contain deftrees, is pretty short
- val canDuplicate = {
- var okToInline = true
- var sizeBudget = 100 / (altss.length max 1) // yep, totally arbitrary!
- object travOkToInline extends Traverser { override def traverse(tree: Tree): Unit = if (sizeBudget >= 0) { sizeBudget -= 1; tree match {
- case TypeApply(_, _) | Apply(_, _) | Select(_, _)
- | Block(_, _) | Assign(_, _) | If(_, _, _) | Typed(_, _) => super.traverse(tree) // these are allowed if their subtrees are
- case EmptyTree | This(_) | New(_) | Literal(_) | Ident(_) => // these are always ok
- case _ if tree.isType => // these are always ok
- case _ => okToInline = false //; println("not inlining: "+ (tree, tree.getClass))
- }}}
- travOkToInline.traverse(next)
- // println("(okToInline, sizeBudget): "+ (okToInline, sizeBudget))
- okToInline && sizeBudget > 0 // must be strict comparison
- }
+ def chainBefore(next: Tree)(codegenAlt: Casegen): Tree = { import CODE._
+ atPos(pos){
+ // one alternative may still generate multiple trees (e.g., an extractor call + equality test)
+ // (for now,) alternatives may not bind variables (except wildcards), so we don't care about the final substitution built internally by makeTreeMakers
+ val combinedAlts = altss map (altTreeMakers =>
+ ((casegen: Casegen) => combineExtractors(altTreeMakers :+ TrivialTreeMaker(casegen.one(TRUE_typed)))(casegen))
+ )
- atPos(pos)(
- if (canDuplicate) {
- altss map {altTreeMakers =>
- combineExtractors(altTreeMakers :+ TrivialTreeMaker(substitution(next).duplicate), pt)
- } reduceLeft codegen.typedOrElse(pt)
- } else {
- val rest = freshSym(pos, functionType(List(), inMatchMonad(pt)), "rest")
- // rest.info.member(nme.apply).withAnnotation(AnnotationInfo(ScalaInlineClass.tpe, Nil, Nil))
-
- // one alternative may still generate multiple trees (e.g., an extractor call + equality test)
- // (for now,) alternatives may not bind variables (except wildcards), so we don't care about the final substitution built internally by makeTreeMakers
- val combinedAlts = altss map (altTreeMakers =>
- combineExtractors(altTreeMakers :+ TrivialTreeMaker(REF(rest) APPLY ()), pt)
- )
- BLOCK(
- VAL(rest) === Function(Nil, substitution(next)),
- combinedAlts reduceLeft codegen.typedOrElse(pt)
- )
- }
- )
+ val findAltMatcher = codegenAlt.matcher(EmptyTree, NoSymbol, BooleanClass.tpe)(combinedAlts, Some(x => FALSE_typed))
+ codegenAlt.ifThenElseZero(findAltMatcher, substitution(next))
+ }
}
}
- case class GuardTreeMaker(guardTree: Tree) extends TreeMaker {
- val localSubstitution: Substitution = EmptySubstitution
- def chainBefore(next: Tree, pt: Type): Tree = codegen.flatMapGuard(substitution(guardTree), next)
+ case class GuardTreeMaker(guardTree: Tree) extends TreeMaker with NoNewBinders {
+ def chainBefore(next: Tree)(casegen: Casegen): Tree = casegen.flatMapGuard(substitution(guardTree), next)
override def toString = "G("+ guardTree +")"
}
+ // combineExtractors changes the current substitution's of the tree makers in `treeMakers`
+ // requires propagateSubstitution(treeMakers) has been called
+ def combineExtractors(treeMakers: List[TreeMaker])(casegen: Casegen): Tree =
+ treeMakers.foldRight(EmptyTree: Tree)((a, b) => a.chainBefore(b)(casegen))
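      // e.g. with treeMakers = List(tm1, tm2, body), the foldRight above expands to
      //   tm1.chainBefore(tm2.chainBefore(body.chainBefore(EmptyTree)(casegen))(casegen))(casegen)
      // i.e. each TreeMaker wraps the translation of everything that follows it in the case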
+
+
def removeSubstOnly(makers: List[TreeMaker]) = makers filterNot (_.isInstanceOf[SubstOnlyTreeMaker])
// a foldLeft to accumulate the localSubstitution left-to-right
@@ -976,51 +952,42 @@ class Foo(x: Other) { x._1 } // no error in this order
}
// calls propagateSubstitution on the treemakers
- def combineCases(scrut: Tree, scrutSym: Symbol, casesRaw: List[List[TreeMaker]], pt: Type, owner: Symbol): Tree = {
- val casesNoSubstOnly = casesRaw map (propagateSubstitution(_, EmptySubstitution)) // drops SubstOnlyTreeMakers, since their effect is now contained in the TreeMakers that follow them
- combineCasesNoSubstOnly(scrut, scrutSym, casesNoSubstOnly, pt, owner, CODE.MATCHERROR(_))
+ def combineCases(scrut: Tree, scrutSym: Symbol, casesRaw: List[List[TreeMaker]], pt: Type, owner: Symbol, matchFailGenOverride: Option[Tree => Tree]): Tree = {
+ // drops SubstOnlyTreeMakers, since their effect is now contained in the TreeMakers that follow them
+ val casesNoSubstOnly = casesRaw map (propagateSubstitution(_, EmptySubstitution))
+ combineCasesNoSubstOnly(scrut, scrutSym, casesNoSubstOnly, pt, owner, matchFailGenOverride)
}
- def combineCasesNoSubstOnly(scrut: Tree, scrutSym: Symbol, casesNoSubstOnly: List[List[TreeMaker]], pt: Type, owner: Symbol, matchFail: Tree => Tree): Tree = fixerUpper(owner, scrut.pos){
- emitSwitch(scrut, scrutSym, casesNoSubstOnly, pt).getOrElse{
- val (matcher, hasDefault, toHoist) =
- if (casesNoSubstOnly nonEmpty) {
- // when specified, need to propagate pt explicitly (type inferencer can't handle it)
- val optPt =
- if (isFullyDefined(pt)) inMatchMonad(pt)
- else NoType
+ def combineCasesNoSubstOnly(scrut: Tree, scrutSym: Symbol, casesNoSubstOnly: List[List[TreeMaker]], pt: Type, owner: Symbol, matchFailGenOverride: Option[Tree => Tree]): Tree =
+ fixerUpper(owner, scrut.pos){
+ val ptDefined = if (isFullyDefined(pt)) pt else NoType
+ def matchFailGen = (matchFailGenOverride orElse Some(CODE.MATCHERROR(_: Tree)))
- // do this check on casesNoSubstOnly, since DCE will eliminate trivial cases like `case _ =>`, even if they're the last one
+ emitSwitch(scrut, scrutSym, casesNoSubstOnly, pt, matchFailGenOverride).getOrElse{
+ if (casesNoSubstOnly nonEmpty) {
+ // before optimizing, check casesNoSubstOnly for presence of a default case,
+ // since DCE will eliminate trivial cases like `case _ =>`, even if they're the last one
// exhaustivity and reachability must be checked before optimization as well
- // TODO: improve, a trivial type test before the body still makes for a default case
- // ("trivial" depends on whether we're emitting a straight match or an exception, or more generally, any supertype of scrutSym.tpe is a no-op)
- val hasDefault = casesNoSubstOnly.nonEmpty && {
- val nonTrivLast = casesNoSubstOnly.last
- nonTrivLast.nonEmpty && nonTrivLast.head.isInstanceOf[BodyTreeMaker]
- }
+ // TODO: improve notion of trivial/irrefutable -- a trivial type test before the body still makes for a default case
+ // ("trivial" depends on whether we're emitting a straight match or an exception, or more generally, any supertype of scrutSym.tpe is a no-op)
+ // irrefutability checking should use the approximation framework also used for CSE, unreachability and exhaustivity checking
+ val synthCatchAll =
+ if (casesNoSubstOnly.nonEmpty && {
+ val nonTrivLast = casesNoSubstOnly.last
+ nonTrivLast.nonEmpty && nonTrivLast.head.isInstanceOf[BodyTreeMaker]
+ }) None
+ else matchFailGen
val (cases, toHoist) = optimizeCases(scrutSym, casesNoSubstOnly, pt)
- val combinedCases =
- cases.map(combineExtractors(_, pt)).reduceLeft(codegen.typedOrElse(optPt))
-
- (combinedCases, hasDefault, toHoist)
- } else (codegen.zero, false, Nil)
+ val matchRes = codegen.matcher(scrut, scrutSym, pt)(cases map combineExtractors, synthCatchAll)
- // catch-all
- val catchAll =
- if (hasDefault) None // no need for a catch-all when there's already a default
- else Some(matchFail)
- val expr = codegen.runOrElse(scrut, scrutSym, matcher, if (isFullyDefined(pt)) pt else NoType, catchAll)
- if (toHoist isEmpty) expr
- else Block(toHoist, expr)
+ if (toHoist isEmpty) matchRes else Block(toHoist, matchRes)
+ } else {
+ codegen.matcher(scrut, scrutSym, pt)(Nil, matchFailGen)
+ }
+ }
}
- }
-
- // combineExtractors changes the current substitution's of the tree makers in `treeMakers`
- // requires propagateSubstitution(treeMakers) has been called
- def combineExtractors(treeMakers: List[TreeMaker], pt: Type): Tree =
- treeMakers.foldRight (EmptyTree: Tree) (_.chainBefore(_, pt))
// TODO: do this during tree construction, but that will require tracking the current owner in treemakers
// TODO: assign more fine-grained positions
@@ -1070,34 +1037,36 @@ class Foo(x: Other) { x._1 } // no error in this order
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
trait CodegenCore extends MatchMonadInterface {
private var ctr = 0
- def freshSym(pos: Position, tp: Type = NoType, prefix: String = "x") = {ctr += 1;
- // assert(owner ne null)
- // assert(owner ne NoSymbol)
- NoSymbol.newTermSymbol(vpmName.counted(prefix, ctr), pos) setInfo repackExistential(tp)
- }
+ def freshName(prefix: String) = {ctr += 1; vpmName.counted(prefix, ctr)}
+
+ // assert(owner ne null); assert(owner ne NoSymbol)
+ def freshSym(pos: Position, tp: Type = NoType, prefix: String = "x") =
+ NoSymbol.newTermSymbol(freshName(prefix), pos) setInfo /*repackExistential*/(tp)
// codegen relevant to the structure of the translation (how extractors are combined)
trait AbsCodegen {
- def runOrElse(scrut: Tree, scrutSym: Symbol, matcher: Tree, resTp: Type, catchAll: Option[Tree => Tree]): Tree
- def one(res: Tree, bodyPt: Type, matchPt: Type): Tree
- def zero: Tree
- def flatMap(prev: Tree, b: Symbol, next: Tree): Tree
- def typedOrElse(pt: Type)(thisCase: Tree, elseCase: Tree): Tree
+ def matcher(scrut: Tree, scrutSym: Symbol, restpe: Type)(cases: List[Casegen => Tree], matchFailGen: Option[Tree => Tree]): Tree
- def flatMapCond(cond: Tree, res: Tree, nextBinder: Symbol, nextBinderTp: Type, next: Tree): Tree
- def flatMapGuard(cond: Tree, next: Tree): Tree
-
- def fun(arg: Symbol, body: Tree): Tree
- def ifThenElseZero(c: Tree, then: Tree): Tree
- def _equals(checker: Tree, binder: Symbol): Tree
+ // local / context-free
def _asInstanceOf(b: Symbol, tp: Type): Tree
+ def _equals(checker: Tree, binder: Symbol): Tree
+ def _isInstanceOf(b: Symbol, tp: Type): Tree
+ def and(a: Tree, b: Tree): Tree
+ def drop(tgt: Tree)(n: Int): Tree
+ def index(tgt: Tree)(i: Int): Tree
def mkZero(tp: Type): Tree
-
def tupleSel(binder: Symbol)(i: Int): Tree
- def index(tgt: Tree)(i: Int): Tree
- def drop(tgt: Tree)(n: Int): Tree
- def and(a: Tree, b: Tree): Tree
- def _isInstanceOf(b: Symbol, tp: Type): Tree
+ }
+
+ // structure
+ trait Casegen extends AbsCodegen { import CODE._
+ def one(res: Tree): Tree
+
+ def flatMap(prev: Tree, b: Symbol, next: Tree): Tree
+ def flatMapCond(cond: Tree, res: Tree, nextBinder: Symbol, next: Tree): Tree
+ def flatMapGuard(cond: Tree, next: Tree): Tree
+ def ifThenElseZero(c: Tree, then: Tree): Tree = IF (c) THEN then ELSE zero
+ protected def zero: Tree
}
def codegen: AbsCodegen
@@ -1112,21 +1081,20 @@ class Foo(x: Other) { x._1 } // no error in this order
def drop(tgt: Tree)(n: Int): Tree = (tgt DOT vpmName.drop) (LIT(n))
def _equals(checker: Tree, binder: Symbol): Tree = checker MEMBER_== REF(binder) // NOTE: checker must be the target of the ==, that's the patmat semantics for ya
def and(a: Tree, b: Tree): Tree = a AND b
- def ifThenElseZero(c: Tree, then: Tree): Tree = IF (c) THEN then ELSE zero
     // the force is needed mainly to deal with the GADT typing hack (we can't detect it otherwise, as neither tp nor pt need contain an abstract type; we're just casting wildly)
- def _asInstanceOf(t: Tree, tp: Type, force: Boolean = false): Tree = { val tpX = repackExistential(tp)
+ def _asInstanceOf(t: Tree, tp: Type, force: Boolean = false): Tree = { val tpX = /*repackExistential*/(tp)
if (!force && (t.tpe ne NoType) && t.isTyped && typesConform(t.tpe, tpX)) t //{ println("warning: emitted redundant asInstanceOf: "+(t, t.tpe, tp)); t } //.setType(tpX)
else gen.mkAsInstanceOf(t, tpX, true, false)
}
- def _isInstanceOf(b: Symbol, tp: Type): Tree = gen.mkIsInstanceOf(REF(b), repackExistential(tp), true, false)
- // { val tpX = repackExistential(tp)
+ def _isInstanceOf(b: Symbol, tp: Type): Tree = gen.mkIsInstanceOf(REF(b), /*repackExistential*/(tp), true, false)
+ // { val tpX = /*repackExistential*/(tp)
// if (typesConform(b.info, tpX)) { println("warning: emitted spurious isInstanceOf: "+(b, tp)); TRUE }
// else gen.mkIsInstanceOf(REF(b), tpX, true, false)
// }
- def _asInstanceOf(b: Symbol, tp: Type): Tree = { val tpX = repackExistential(tp)
+ def _asInstanceOf(b: Symbol, tp: Type): Tree = { val tpX = /*repackExistential*/(tp)
if (typesConform(b.info, tpX)) REF(b) //{ println("warning: emitted redundant asInstanceOf: "+(b, b.info, tp)); REF(b) } //.setType(tpX)
else gen.mkAsInstanceOf(REF(b), tpX, true, false)
}
@@ -1166,28 +1134,29 @@ class Foo(x: Other) { x._1 } // no error in this order
trait PureCodegen extends CodegenCore with PureMatchMonadInterface {
def codegen: AbsCodegen = pureCodegen
- object pureCodegen extends CommonCodegen { import CODE._
+ object pureCodegen extends CommonCodegen with Casegen { import CODE._
//// methods in MatchingStrategy (the monad companion) -- used directly in translation
// __match.runOrElse(`scrut`)(`scrutSym` => `matcher`)
// TODO: consider catchAll, or virtualized matching will break in exception handlers
- def runOrElse(scrut: Tree, scrutSym: Symbol, matcher: Tree, resTp: Type, catchAll: Option[Tree => Tree]): Tree
- = _match(vpmName.runOrElse) APPLY (scrut) APPLY (fun(scrutSym, matcher))
+ def matcher(scrut: Tree, scrutSym: Symbol, restpe: Type)(cases: List[Casegen => Tree], matchFailGen: Option[Tree => Tree]): Tree =
+ _match(vpmName.runOrElse) APPLY (scrut) APPLY (fun(scrutSym, cases map (f => f(this)) reduceLeft typedOrElse))
+
// __match.one(`res`)
- def one(res: Tree, bodyPt: Type, matchPt: Type): Tree = (_match(vpmName.one)) (res)
+ def one(res: Tree): Tree = (_match(vpmName.one)) (res)
// __match.zero
- def zero: Tree = _match(vpmName.zero)
+ protected def zero: Tree = _match(vpmName.zero)
// __match.guard(`c`, `then`)
- def guard(c: Tree, then: Tree, tp: Type): Tree = _match(vpmName.guard) APPLY (c, then)
+ def guard(c: Tree, then: Tree): Tree = _match(vpmName.guard) APPLY (c, then)
//// methods in the monad instance -- used directly in translation
// `prev`.flatMap(`b` => `next`)
def flatMap(prev: Tree, b: Symbol, next: Tree): Tree = (prev DOT vpmName.flatMap)(fun(b, next))
// `thisCase`.orElse(`elseCase`)
- def typedOrElse(pt: Type)(thisCase: Tree, elseCase: Tree): Tree = (thisCase DOT vpmName.orElse) APPLY (elseCase)
+ def typedOrElse(thisCase: Tree, elseCase: Tree): Tree = (thisCase DOT vpmName.orElse) APPLY (elseCase)
// __match.guard(`cond`, `res`).flatMap(`nextBinder` => `next`)
- def flatMapCond(cond: Tree, res: Tree, nextBinder: Symbol, nextBinderTp: Type, next: Tree): Tree = flatMap(guard(cond, res, nextBinderTp), nextBinder, next)
+ def flatMapCond(cond: Tree, res: Tree, nextBinder: Symbol, next: Tree): Tree = flatMap(guard(cond, res), nextBinder, next)
// __match.guard(`guardTree`, ()).flatMap((_: P[Unit]) => `next`)
- def flatMapGuard(guardTree: Tree, next: Tree): Tree = flatMapCond(guardTree, CODE.UNIT, freshSym(guardTree.pos, pureType(UnitClass.tpe)), pureType(UnitClass.tpe), next)
+ def flatMapGuard(guardTree: Tree, next: Tree): Tree = flatMapCond(guardTree, CODE.UNIT, freshSym(guardTree.pos, pureType(UnitClass.tpe)), next)
}
}
@@ -1456,8 +1425,8 @@ class Foo(x: Other) { x._1 } // no error in this order
}
// TODO: finer-grained duplication
- def chainBefore(next: Tree, pt: Type): Tree = // assert(codegen eq optimizedCodegen)
- atPos(pos)(optimizedCodegen.flatMapCondStored(cond, storedCond, res, nextBinder, substitution(next).duplicate))
+ def chainBefore(next: Tree)(casegen: Casegen): Tree = // assert(codegen eq optimizedCodegen)
+ atPos(pos)(casegen.asInstanceOf[optimizedCodegen.OptimizedCasegen].flatMapCondStored(cond, storedCond, res, nextBinder, substitution(next).duplicate))
}
case class ReusingCondTreeMaker(sharedPrefix: List[Test], toReused: TreeMaker => TreeMaker) extends TreeMaker { import CODE._
@@ -1474,12 +1443,11 @@ class Foo(x: Other) { x._1 } // no error in this order
oldSubs.foldLeft(Substitution(from, to))(_ >> _)
}
- def chainBefore(next: Tree, pt: Type): Tree = {
+ def chainBefore(next: Tree)(casegen: Casegen): Tree = {
val cond = REF(dropped_priors.reverse.collectFirst{case (_, Some(ctm: ReusedCondTreeMaker)) => ctm}.get.storedCond)
- IF (cond) THEN BLOCK(
- substitution(next).duplicate // TODO: finer-grained duplication -- MUST duplicate though, or we'll get VerifyErrors since sharing trees confuses lambdalift, and its confusion it emits illegal casts (diagnosed by Grzegorz: checkcast T ; invokevirtual S.m, where T not a subtype of S)
- ) ELSE codegen.zero
+        // TODO: finer-grained duplication -- MUST duplicate though, or we'll get VerifyErrors since sharing trees confuses lambdalift, and in its confusion it emits illegal casts (diagnosed by Grzegorz: checkcast T ; invokevirtual S.m, where T is not a subtype of S)
+ casegen.ifThenElseZero(cond, substitution(next).duplicate)
}
}
}
@@ -1551,7 +1519,7 @@ class Foo(x: Other) { x._1 } // no error in this order
}
}
- class RegularSwitchMaker(scrutSym: Symbol) extends SwitchMaker {
+ class RegularSwitchMaker(scrutSym: Symbol, matchFailGenOverride: Option[Tree => Tree]) extends SwitchMaker {
val switchableTpe = Set(ByteClass.tpe, ShortClass.tpe, IntClass.tpe, CharClass.tpe)
val alternativesSupported = true
@@ -1574,14 +1542,14 @@ class Foo(x: Other) { x._1 } // no error in this order
}
def defaultSym: Symbol = scrutSym
- def defaultBody: Tree = { import CODE._; MATCHERROR(REF(scrutSym)) }
+ def defaultBody: Tree = { import CODE._; matchFailGenOverride map (gen => gen(REF(scrutSym))) getOrElse MATCHERROR(REF(scrutSym)) }
def defaultCase(scrutSym: Symbol = defaultSym, body: Tree = defaultBody): CaseDef = { import CODE._; atPos(body.pos) {
DEFAULT ==> body
}}
}
- override def emitSwitch(scrut: Tree, scrutSym: Symbol, cases: List[List[TreeMaker]], pt: Type): Option[Tree] = { import CODE._
- val regularSwitchMaker = new RegularSwitchMaker(scrutSym)
+ override def emitSwitch(scrut: Tree, scrutSym: Symbol, cases: List[List[TreeMaker]], pt: Type, matchFailGenOverride: Option[Tree => Tree]): Option[Tree] = { import CODE._
+ val regularSwitchMaker = new RegularSwitchMaker(scrutSym, matchFailGenOverride)
// TODO: if patterns allow switch but the type of the scrutinee doesn't, cast (type-test) the scrutinee to the corresponding switchable type and switch on the result
if (regularSwitchMaker.switchableTpe(scrutSym.tpe)) {
val caseDefsWithDefault = regularSwitchMaker(cases map {c => (scrutSym, c)}, pt)
@@ -1589,11 +1557,11 @@ class Foo(x: Other) { x._1 } // no error in this order
else {
// match on scrutSym -- converted to an int if necessary -- not on scrut directly (to avoid duplicating scrut)
val scrutToInt: Tree =
- if(scrutSym.tpe =:= IntClass.tpe) REF(scrutSym)
+ if (scrutSym.tpe =:= IntClass.tpe) REF(scrutSym)
else (REF(scrutSym) DOT (nme.toInt))
Some(BLOCK(
VAL(scrutSym) === scrut,
- Match(scrutToInt, caseDefsWithDefault)
+ Match(gen.mkSynthSwitchSelector(scrutToInt), caseDefsWithDefault) // add switch annotation
))
}
} else None
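      // Sketch (approximate, illustration only) of the switch emitted here for
      //   (c: Char) match { case 'a' => 1; case 'b' => 2 }
      // roughly:
      //   { val x1 = c
      //     x1.toInt match {   // selector wrapped by mkSynthSwitchSelector, i.e. carrying the switch annotation
      //       case 97 => 1
      //       case 98 => 2
      //       case _  => throw new MatchError(x1)   // or matchFailGenOverride's result when supplied
      //     }
      //   }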
@@ -1657,8 +1625,7 @@ class Foo(x: Other) { x._1 } // no error in this order
// for example, `o.flatMap(f)` becomes `if(o == None) None else f(o.get)`, similarly for orElse and guard
// this is a special instance of the advanced inlining optimization that takes a method call on
// an object of a type that only has two concrete subclasses, and inlines both bodies, guarded by an if to distinguish the two cases
- object optimizedCodegen extends CommonCodegen /*with AbsOptimizedCodegen*/ { import CODE._
- lazy val zeroSym = freshSym(NoPosition, optionType(NothingClass.tpe), "zero")
+ object optimizedCodegen extends CommonCodegen { import CODE._
/** Inline runOrElse and get rid of Option allocations
*
@@ -1666,67 +1633,92 @@ class Foo(x: Other) { x._1 } // no error in this order
* the matcher's optional result is encoded as a flag, keepGoing, where keepGoing == true encodes result.isEmpty,
* if keepGoing is false, the result Some(x) of the naive translation is encoded as matchRes == x
*/
- @inline private def dontStore(tp: Type) = (tp.typeSymbol eq UnitClass) || (tp.typeSymbol eq NothingClass)
- lazy val keepGoing = freshSym(NoPosition, BooleanClass.tpe, "keepGoing") setFlag MUTABLE
- lazy val matchRes = freshSym(NoPosition, AnyClass.tpe, "matchRes") setFlag MUTABLE
- def runOrElse(scrut: Tree, scrutSym: Symbol, matcher: Tree, resTp: Type, catchAll: Option[Tree => Tree]) = {
- matchRes.info = if (resTp ne NoType) resTp.widen else AnyClass.tpe // we don't always know resTp, and it might be AnyVal, in which case we can't assign NULL
- if (dontStore(resTp)) matchRes resetFlag MUTABLE // don't assign to Unit-typed var's, in fact, make it a val -- conveniently also works around SI-5245
- BLOCK(
- VAL(zeroSym) === REF(NoneModule), // TODO: can we just get rid of explicitly emitted zero? don't know how to do that as a local rewrite...
- VAL(scrutSym) === scrut,
- VAL(matchRes) === mkZero(matchRes.info), // must cast to deal with GADT typing, hence the private mkZero above
- VAL(keepGoing) === TRUE,
- matcher,
- catchAll map { catchAllGen => (IF (REF(keepGoing)) THEN catchAllGen(REF(scrutSym)) ELSE REF(matchRes)) } getOrElse REF(matchRes)
- )
- }
+ def matcher(scrut: Tree, scrutSym: Symbol, restpe: Type)(cases: List[Casegen => Tree], matchFailGen: Option[Tree => Tree]): Tree = {
+ val matchEnd = NoSymbol.newLabel(freshName("matchEnd"), NoPosition) setFlag SYNTH_CASE
+ val matchRes = NoSymbol.newValueParameter(newTermName("x"), NoPosition, SYNTHETIC) setInfo restpe
+ matchEnd setInfo MethodType(List(matchRes), restpe)
+
+ def newCaseSym = NoSymbol.newLabel(freshName("case"), NoPosition) setInfo MethodType(Nil, restpe) setFlag SYNTH_CASE
+ var nextCase = newCaseSym
+ def caseDef(mkCase: Casegen => Tree): Tree = {
+ val currCase = nextCase
+ nextCase = newCaseSym
+ val casegen = new OptimizedCasegen(matchEnd, nextCase, restpe)
+ LabelDef(currCase, Nil, mkCase(casegen))
+ }
- // only used to wrap the RHS of a body
- def one(res: Tree, bodyPt: Type, matchPt: Type): Tree = {
- BLOCK(
- REF(keepGoing) === FALSE, // comes before assignment to matchRes, so the latter is in tail positions (can ignore the trailing zero -- will disappear when we flatten blocks, which is TODO)
- if (dontStore(matchPt)) res else (REF(matchRes) === res), // runOrElse hasn't been called yet, so matchRes.isMutable is irrelevant, also, tp may be a subtype of resTp used in runOrElse...
- zero // to have a nice lub for lubs -- otherwise we'll get a boxed unit here -- TODO: get rid of all those dangling else zero's
+ def catchAll = matchFailGen map { matchFailGen =>
+ val scrutRef = if(scrutSym ne NoSymbol) REF(scrutSym) else EmptyTree // for alternatives
+ LabelDef(nextCase, Nil, matchEnd APPLY (_asInstanceOf(matchFailGen(scrutRef), restpe))) // need to jump to matchEnd with result generated by matchFailGen (could be `FALSE` for isDefinedAt)
+ } toList
+ // catchAll.isEmpty iff no synthetic default case needed (the (last) user-defined case is a default)
+ // if the last user-defined case is a default, it will never jump to the next case; it will go immediately to matchEnd
+
+      // the generated block is taken apart in TailCalls under the following assumption:
+      // once we encounter a case, the remainder of the block will consist of cases
+ // the prologue may be empty, usually it is the valdef that stores the scrut
+ // val (prologue, cases) = stats span (s => !s.isInstanceOf[LabelDef])
+
+ // scrutSym == NoSymbol when generating an alternatives matcher
+ val scrutDef = if(scrutSym ne NoSymbol) List(VAL(scrutSym) === scrut) else Nil // for alternatives
+ Block(
+ scrutDef ++ (cases map caseDef) ++ catchAll,
+ LabelDef(matchEnd, List(matchRes), REF(matchRes))
)
}
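      // Illustrative shape of the Block assembled above (fresh names approximate):
      //   val x1 = scrut
      //   case1(){ if (cond1) matchEnd(body1) else case2() }
      //   case2(){ if (cond2) matchEnd(body2) else case3() }
      //   case3(){ matchEnd(onFailure(x1)) }   // the catchAll; present only when matchFailGen is defined
      //   matchEnd(x: T){ x }
      // every case is a nullary label: failing a case jumps to the next label, and each
      // successful body jumps to matchEnd with its result.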
- def zero: Tree = REF(zeroSym)
-
- def flatMap(prev: Tree, b: Symbol, next: Tree): Tree = {
- val tp = inMatchMonad(b.tpe)
- val prevSym = freshSym(prev.pos, tp, "o")
- val isEmpty = tp member vpmName.isEmpty
- val get = tp member vpmName.get
+ class OptimizedCasegen(matchEnd: Symbol, nextCase: Symbol, restpe: Type) extends CommonCodegen with Casegen {
+ def matcher(scrut: Tree, scrutSym: Symbol, restpe: Type)(cases: List[Casegen => Tree], matchFailGen: Option[Tree => Tree]): Tree =
+ optimizedCodegen.matcher(scrut, scrutSym, restpe)(cases, matchFailGen)
+
+ // only used to wrap the RHS of a body
+ // res: T
+ // returns MatchMonad[T]
+ def one(res: Tree): Tree = matchEnd APPLY (_asInstanceOf(res, restpe)) // need cast for GADT magic
+ protected def zero: Tree = nextCase APPLY ()
+
+ // prev: MatchMonad[T]
+ // b: T
+ // next: MatchMonad[U]
+ // returns MatchMonad[U]
+ def flatMap(prev: Tree, b: Symbol, next: Tree): Tree = {
+ val tp = inMatchMonad(b.tpe)
+ val prevSym = freshSym(prev.pos, tp, "o")
+ val isEmpty = tp member vpmName.isEmpty
+ val get = tp member vpmName.get
+
+ BLOCK(
+ VAL(prevSym) === prev,
+ // must be isEmpty and get as we don't control the target of the call (prev is an extractor call)
+ ifThenElseZero(NOT(prevSym DOT isEmpty), Substitution(b, prevSym DOT get)(next))
+ )
+ }
- BLOCK(
- VAL(prevSym) === prev,
- IF (prevSym DOT isEmpty) THEN zero ELSE Substitution(b, prevSym DOT get)(next) // must be isEmpty and get as we don't control the target of the call (could be the result of a user-defined extractor)
- )
- }
+ // cond: Boolean
+ // res: T
+ // nextBinder: T
+ // next == MatchMonad[U]
+ // returns MatchMonad[U]
+ def flatMapCond(cond: Tree, res: Tree, nextBinder: Symbol, next: Tree): Tree =
+ ifThenElseZero(cond, BLOCK(
+ VAL(nextBinder) === res,
+ next
+ ))
- def typedOrElse(pt: Type)(thisCase: Tree, elseCase: Tree): Tree = {
- BLOCK(
- thisCase,
- IF (REF(keepGoing)) THEN elseCase ELSE zero // leave trailing zero for now, otherwise typer adds () anyway
- )
+ // guardTree: Boolean
+ // next: MatchMonad[T]
+ // returns MatchMonad[T]
+ def flatMapGuard(guardTree: Tree, next: Tree): Tree =
+ ifThenElseZero(guardTree, next)
+
+ def flatMapCondStored(cond: Tree, condSym: Symbol, res: Tree, nextBinder: Symbol, next: Tree): Tree =
+ ifThenElseZero(cond, BLOCK(
+ condSym === TRUE_typed,
+ nextBinder === res,
+ next
+ ))
}
- def flatMapCond(cond: Tree, res: Tree, nextBinder: Symbol, nextBinderTp: Type, next: Tree): Tree =
- IF (cond) THEN BLOCK(
- VAL(nextBinder) === res,
- next
- ) ELSE zero
-
- def flatMapCondStored(cond: Tree, condSym: Symbol, res: Tree, nextBinder: Symbol, next: Tree): Tree =
- IF (cond) THEN BLOCK(
- condSym === TRUE,
- nextBinder === res,
- next
- ) ELSE zero
-
- def flatMapGuard(guardTree: Tree, next: Tree): Tree =
- IF (guardTree) THEN next ELSE zero
}
}
diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
index ec42d251ff..013a74da7e 100644
--- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
@@ -266,7 +266,7 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R
(if (showLocation)
sym1.locationString +
(if (sym1.isAliasType) ", which equals "+self.memberInfo(sym1)
- else if (sym1.isAbstractType) " with bounds "+self.memberInfo(sym1)
+ else if (sym1.isAbstractType) " with bounds"+self.memberInfo(sym1)
else if (sym1.isTerm) " of type "+self.memberInfo(sym1)
else "")
else "")
@@ -1445,7 +1445,7 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R
private def transformApply(tree: Apply): Tree = tree match {
case Apply(
- Select(qual, nme.filter),
+ Select(qual, nme.filter | nme.withFilter),
List(Function(
List(ValDef(_, pname, tpt, _)),
Match(_, CaseDef(pat1, _, _) :: _))))
@@ -1625,12 +1625,18 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R
case _ => tree
}
+ // skip refchecks in patterns....
result = result match {
case CaseDef(pat, guard, body) =>
inPattern = true
val pat1 = transform(pat)
inPattern = false
treeCopy.CaseDef(tree, pat1, transform(guard), transform(body))
+ case LabelDef(_, _, _) if gen.hasSynthCaseSymbol(result) =>
+ inPattern = true
+ val res = deriveLabelDef(result)(transform)
+ inPattern = false
+ res
case _ =>
super.transform(result)
}
diff --git a/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala b/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala
index 2f4eff30d2..da87d38ab0 100644
--- a/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala
@@ -136,7 +136,7 @@ trait SyntheticMethods extends ast.TreeDSL {
* where that is the given methods first parameter.
*/
def thatTest(eqmeth: Symbol): Tree =
- gen.mkIsInstanceOf(Ident(eqmeth.firstParam), typeCaseType(clazz), true, false)
+ gen.mkIsInstanceOf(Ident(eqmeth.firstParam), classExistentialType(clazz), true, false)
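      // Illustration (assumed example): for `case class Cell[T](x: T)` the instance test generated
      // here now checks the class existential type, i.e. `that.isInstanceOf[Cell[_]]`.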
/** (that.asInstanceOf[this.C])
      * where that is the given method's first parameter.
diff --git a/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala b/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala
index ed263cbbef..105c2c0b98 100644
--- a/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala
@@ -263,8 +263,8 @@ abstract class TreeCheckers extends Analyzer {
tree match {
case x: PackageDef =>
- if (sym.ownerChain contains currentOwner) ()
- else fail(sym + " owner chain does not contain currentOwner " + currentOwner)
+ if ((sym.ownerChain contains currentOwner) || currentOwner == definitions.EmptyPackageClass) ()
+ else fail(sym + " owner chain does not contain currentOwner " + currentOwner + sym.ownerChain)
case _ =>
def cond(s: Symbol) = !s.isTerm || s.isMethod || s == sym.owner
diff --git a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala
index 1434002121..6efa595d99 100644
--- a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala
@@ -157,7 +157,7 @@ trait TypeDiagnostics {
}
// todo: use also for other error messages
- def existentialContext(tp: Type) = tp.existentialSkolems match {
+ def existentialContext(tp: Type) = tp.skolemsExceptMethodTypeParams match {
case Nil => ""
case xs => " where " + (disambiguate(xs map (_.existentialToString)) mkString ", ")
}
@@ -252,12 +252,17 @@ trait TypeDiagnostics {
}
"" // no elaborable variance situation found
}
- def foundReqMsg(found: Type, req: Type): String = (
- withDisambiguation(Nil, found, req)(
+ // TODO - figure out how to avoid doing any work at all
+    // when the message will never be seen. I thought context.reportErrors
+ // being false would do that, but if I return "<suppressed>" under
+ // that condition, I see it.
+ def foundReqMsg(found: Type, req: Type): String = {
+ def baseMessage = (
";\n found : " + found.toLongString + existentialContext(found) + explainAlias(found) +
"\n required: " + req + existentialContext(req) + explainAlias(req)
- ) + explainVariance(found, req)
- )
+ )
+ withDisambiguation(Nil, found, req)(baseMessage) + explainVariance(found, req)
+ }
case class TypeDiag(tp: Type, sym: Symbol) extends Ordered[TypeDiag] {
// save the name because it will be mutated until it has been
@@ -307,16 +312,37 @@ trait TypeDiagnostics {
)
}
}
- private def typeDiags(locals: List[Symbol], types: Type*): List[TypeDiag] = {
- object SymExtractor {
- def unapply(x: Any) = x match {
- case t @ ConstantType(_) => Some(t -> t.underlying.typeSymbol)
- case t @ TypeRef(_, sym, _) => if (locals contains sym) None else Some(t -> sym)
- case _ => None
+ /** This is tricky stuff - we need to traverse types deeply to
+ * explain name ambiguities, which may occur anywhere. However
+ * when lub explosions come through it knocks us into an n^2
+ * disaster, see SI-5580. This is trying to perform the initial
+ * filtering of possibly ambiguous types in a sufficiently
+ * aggressive way that the state space won't explode.
+ */
+ private def typeDiags(locals: List[Symbol], types0: Type*): List[TypeDiag] = {
+ val types = types0.toList
+ // If two different type diag instances are seen for a given
+ // key (either the string representation of a type, or the simple
+ // name of a symbol) then keep them for disambiguation.
+ val strings = mutable.Map[String, Set[TypeDiag]]() withDefaultValue Set()
+ val names = mutable.Map[Name, Set[TypeDiag]]() withDefaultValue Set()
+
+ def record(t: Type, sym: Symbol) = {
+ val diag = TypeDiag(t, sym)
+
+ strings("" + t) += diag
+ names(sym.name) += diag
+ }
+ for (tpe <- types ; t <- tpe) {
+ t match {
+ case ConstantType(_) => record(t, t.underlying.typeSymbol)
+ case TypeRef(_, sym, _) => record(t, sym)
+ case _ => ()
}
}
- for (tp <- types.toList; SymExtractor(t, sym) <- tp) yield TypeDiag(t, sym)
+ val collisions = strings.values ++ names.values filter (_.size > 1)
+ collisions.flatten.toList
}
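    // Hypothetical example of why colliding diags are kept: two unrelated symbols that both
    // print as `Foo` must be shown fully qualified in the error message.
    //   class Foo
    //   object other { class Foo }
    //   def f(x: Foo) = x
    //   f(new other.Foo)   // found: other.Foo, required: Foo -- both render as "Foo" unqualified
    // Only TypeDiags whose printed type or simple name occurs more than once survive the filter.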
/** The distinct pairs from an ordered list. */
diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
index 25f3e7af5c..06a7311f79 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
@@ -909,8 +909,9 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
def apply(tp: Type) = mapOver(tp) match {
case TypeRef(NoPrefix, tpSym, Nil) if variance != 0 && tpSym.isTypeParameterOrSkolem && tpSym.owner.isTerm =>
val bounds = if (variance == 1) TypeBounds.upper(tpSym.tpe) else TypeBounds.lower(tpSym.tpe)
- val skolem = context.owner.newExistentialSkolem(tpSym, tpSym, unit.freshTypeName("?"+tpSym.name), bounds)
- // println("mapping "+ tpSym +" to "+ skolem + " : "+ bounds +" -- pt= "+ pt)
+ // origin must be the type param so we can deskolemize
+ val skolem = context.owner.newGADTSkolem(unit.freshTypeName("?"+tpSym.name), tpSym, bounds)
+ // println("mapping "+ tpSym +" to "+ skolem + " : "+ bounds +" -- pt= "+ pt +" in "+ context.owner +" at "+ context.tree )
skolems += skolem
skolem.tpe
case tp1 => tp1
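          // Illustration (sketch only) of the GADT-style type slack these skolems account for:
          //   object GadtExample {
          //     sealed trait Expr[T]
          //     case class IntLit(i: Int) extends Expr[Int]
          //     def eval[T](e: Expr[T]): T = e match {
          //       case IntLit(i) => i   // matching refines T to Int; the skolem's bounds record that
          //     }
          //   }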
@@ -928,9 +929,19 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
freeVars foreach ctorContext.scope.enter
newTyper(ctorContext).infer.inferConstructorInstance(tree1, clazz.typeParams, ptSafe)
- // tree1's type-slack skolems will be deskolemized (to the method type parameter skolems)
- // once the containing CaseDef has been type checked (see typedCase)
- tree1
+ // simplify types without losing safety,
+ // so that error messages don't unnecessarily refer to skolems
+ val extrapolate = new ExistentialExtrapolation(freeVars) extrapolate (_: Type)
+ val extrapolated = tree1.tpe match {
+ case MethodType(ctorArgs, res) => // ctorArgs are actually in a covariant position, since this is the type of the subpatterns of the pattern represented by this Apply node
+ ctorArgs foreach (p => p.info = extrapolate(p.info)) // no need to clone, this is OUR method type
+ copyMethodType(tree1.tpe, ctorArgs, extrapolate(res))
+ case tp => tp
+ }
+
+ // once the containing CaseDef has been type checked (see typedCase),
+ // tree1's remaining type-slack skolems will be deskolemized (to the method type parameter skolems)
+ tree1 setType extrapolated
} else {
tree
}
@@ -1095,7 +1106,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
val found = tree.tpe
val req = pt
if (!found.isErroneous && !req.isErroneous) {
- if (!context.reportErrors && isPastTyper && req.existentialSkolems.nonEmpty) {
+ if (!context.reportErrors && isPastTyper && req.skolemsExceptMethodTypeParams.nonEmpty) {
// Ignore type errors raised in later phases that are due to mismatching types with existential skolems
// We have lift crashing in 2.9 with an adapt failure in the pattern matcher.
      // Here's my hypothesis why this happens. The pattern matcher defines a variable of type
@@ -1112,7 +1123,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
//
// val x = expr
context.unit.warning(tree.pos, "recovering from existential Skolem type error in tree \n" + tree + "\nwith type " + tree.tpe + "\n expected type = " + pt + "\n context = " + context.tree)
- adapt(tree, mode, deriveTypeWithWildcards(pt.existentialSkolems)(pt))
+ adapt(tree, mode, deriveTypeWithWildcards(pt.skolemsExceptMethodTypeParams)(pt))
} else {
// create an actual error
AdaptTypeError(tree, found, req)
@@ -1147,6 +1158,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
val qtpe = qual.tpe.widen
( !isPastTyper
&& qual.isTerm
+ && !qual.isInstanceOf[Super]
&& ((qual.symbol eq null) || !qual.symbol.isTerm || qual.symbol.isValue)
&& !qtpe.isError
&& !qtpe.typeSymbol.isBottomClass
@@ -1162,12 +1174,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
)
}
- def adaptToMember(qual: Tree, searchTemplate: Type): Tree =
- adaptToMember(qual, searchTemplate, true, true)
- def adaptToMember(qual: Tree, searchTemplate: Type, reportAmbiguous: Boolean): Tree =
- adaptToMember(qual, searchTemplate, reportAmbiguous, true)
-
- def adaptToMember(qual: Tree, searchTemplate: Type, reportAmbiguous: Boolean, saveErrors: Boolean): Tree = {
+ def adaptToMember(qual: Tree, searchTemplate: Type, reportAmbiguous: Boolean = true, saveErrors: Boolean = true): Tree = {
if (isAdaptableWithView(qual)) {
qual.tpe.widen.normalize match {
case et: ExistentialType =>
@@ -1273,8 +1280,6 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
unit.error(clazz.pos, "value class may not be a "+
(if (clazz.owner.isTerm) "local class" else "member of another class"))
val constr = clazz.primaryConstructor
- if ((constr hasFlag (PRIVATE | PROTECTED)) || constr.privateWithin != NoSymbol)
- unit.error(constr.pos, "value class must have public primary constructor")
clazz.info.decls.toList.filter(acc => acc.isMethod && (acc hasFlag PARAMACCESSOR)) match {
case List(acc) =>
def isUnderlyingAcc(sym: Symbol) =
@@ -2112,26 +2117,151 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
// body1 = checkNoEscaping.locals(context.scope, pt, body1)
val treeWithSkolems = treeCopy.CaseDef(cdef, pat1, guard1, body1) setType body1.tpe
- // undo adaptConstrPattern's evil deeds, as they confuse the old pattern matcher
- // TODO: Paul, can we do the deskolemization lazily in the old pattern matcher
- object deskolemizeOnce extends TypeMap {
- def apply(tp: Type): Type = mapOver(tp) match {
- case TypeRef(pre, sym, args) if sym.isExistentialSkolem && sym.deSkolemize.isSkolem && sym.deSkolemize.owner.isTerm =>
- typeRef(NoPrefix, sym.deSkolemize, args)
- case tp1 => tp1
- }
- }
-
- new TypeMapTreeSubstituter(deskolemizeOnce).traverse(treeWithSkolems)
+ new TypeMapTreeSubstituter(deskolemizeGADTSkolems).traverse(treeWithSkolems)
treeWithSkolems // now without skolems, actually
}
+ // undo adaptConstrPattern's evil deeds, as they confuse the old pattern matcher
+ // the flags are used to avoid accidentally deskolemizing unrelated skolems of skolems
+ object deskolemizeGADTSkolems extends TypeMap {
+ def apply(tp: Type): Type = mapOver(tp) match {
+ case TypeRef(pre, sym, args) if sym.isGADTSkolem =>
+ typeRef(NoPrefix, sym.deSkolemize, args)
+ case tp1 => tp1
+ }
+ }
+
def typedCases(cases: List[CaseDef], pattp: Type, pt: Type): List[CaseDef] =
cases mapConserve { cdef =>
newTyper(context.makeNewScope(cdef, context.owner)).typedCase(cdef, pattp, pt)
}
+ def adaptCase(cdef: CaseDef, mode: Int, tpe: Type): CaseDef = deriveCaseDef(cdef)(adapt(_, mode, tpe))
+
+ def prepareTranslateMatch(selector0: Tree, cases: List[CaseDef], mode: Int, resTp: Type) = {
+ val (selector, doTranslation) = selector0 match {
+ case Annotated(Ident(nme.synthSwitch), selector) => (selector, false)
+ case s => (s, true)
+ }
+ val selector1 = checkDead(typed(selector, EXPRmode | BYVALmode, WildcardType))
+ val selectorTp = packCaptured(selector1.tpe.widen)
+
+ val casesTyped = typedCases(cases, selectorTp, resTp)
+ val caseTypes = casesTyped map (c => packedType(c, context.owner).deconst)
+ val (ownType, needAdapt) = if (isFullyDefined(resTp)) (resTp, false) else weakLub(caseTypes)
+
+ val casesAdapted = if (!needAdapt) casesTyped else casesTyped map (adaptCase(_, mode, ownType))
+
+ (selector1, selectorTp, casesAdapted, ownType, doTranslation)
+ }
+
+ def translateMatch(selector1: Tree, selectorTp: Type, casesAdapted: List[CaseDef], ownType: Type, doTranslation: Boolean, matchFailGen: Option[Tree => Tree] = None) = {
+ def repeatedToSeq(tp: Type): Type = (tp baseType RepeatedParamClass) match {
+ case TypeRef(_, RepeatedParamClass, args) => appliedType(SeqClass.typeConstructor, args)
+ case _ => tp
+ }
+
+ if (!doTranslation) { // a switch
+ Match(selector1, casesAdapted) setType ownType // setType of the Match to avoid recursing endlessly
+ } else {
+ val scrutType = repeatedToSeq(elimAnonymousClass(selectorTp))
+ // we've packed the type for each case in prepareTranslateMatch so that if all cases have the same existential case, we get a clean lub
+ // here, we should open up the existential again
+ // relevant test cases: pos/existentials-harmful.scala, pos/gadt-gilles.scala, pos/t2683.scala, pos/virtpatmat_exist4.scala
+ MatchTranslator(this).translateMatch(selector1, casesAdapted, repeatedToSeq(ownType.skolemizeExistential(context.owner, context.tree)), scrutType, matchFailGen)
+ }
+ }
+
+ def typedMatchAnonFun(tree: Tree, cases: List[CaseDef], mode: Int, pt0: Type, selOverride: Option[(List[ValDef], Tree)] = None) = {
+ val pt = deskolemizeGADTSkolems(pt0)
+ val targs = pt.normalize.typeArgs
+ val arity = if (isFunctionType(pt)) targs.length - 1 else 1 // TODO pt should always be a (Partial)Function, right?
+ val ptRes = if (targs.isEmpty) WildcardType else targs.last // may not be fully defined
+
+ val isPartial = pt.typeSymbol == PartialFunctionClass
+ val anonClass = context.owner.newAnonymousFunctionClass(tree.pos)
+ val funThis = This(anonClass)
+ val serialVersionUIDAnnotation = AnnotationInfo(SerialVersionUIDAttr.tpe, List(Literal(Constant(0))), List())
+
+ anonClass addAnnotation serialVersionUIDAnnotation
+
+ def mkParams(methodSym: Symbol) = {
+ selOverride match {
+ case None if targs.isEmpty => MissingParameterTypeAnonMatchError(tree, pt); (Nil, EmptyTree)
+ case None =>
+ val ps = methodSym newSyntheticValueParams targs.init // is there anything we can do if targs.isEmpty??
+ val ids = ps map (p => Ident(p.name))
+ val sel = atPos(tree.pos.focusStart) { if (arity == 1) ids.head else gen.mkTuple(ids) }
+ (ps, sel)
+ case Some((vparams, sel)) =>
+ val newParamSyms = vparams map {p =>
+ val tp = if(p.tpt.tpe == null) typedType(p.tpt).tpe else p.tpt.tpe
+ methodSym.newValueParameter(p.name, focusPos(p.pos), SYNTHETIC) setInfo tp
+ }
+
+ (newParamSyms, sel.duplicate)
+ }
+ }
+
+ import CODE._
+
+ // need to duplicate the cases before typing them to generate the apply method, or the symbols will be all messed up
+ val casesTrue = if (isPartial) cases map (c => deriveCaseDef(c)(x => TRUE_typed).duplicate) else Nil
+
+ val applyMethod = {
+ // rig the show so we can get started typing the method body -- later we'll correct the infos...
+ anonClass setInfo ClassInfoType(List(ObjectClass.tpe, pt, SerializableClass.tpe), newScope, anonClass)
+ val methodSym = anonClass.newMethod(nme.apply, tree.pos, FINAL)
+ val (paramSyms, selector) = mkParams(methodSym)
+
+ if (selector eq EmptyTree) EmptyTree
+ else {
+ methodSym setInfoAndEnter MethodType(paramSyms, AnyClass.tpe)
+
+ val methodBodyTyper = newTyper(context.makeNewScope(context.tree, methodSym)) // should use the DefDef for the context's tree, but it doesn't exist yet (we need the typer we're creating to create it)
+ paramSyms foreach (methodBodyTyper.context.scope enter _)
+
+ val (selector1, selectorTp, casesAdapted, resTp, doTranslation) = methodBodyTyper.prepareTranslateMatch(selector, cases, mode, ptRes)
+
+ val formalTypes = paramSyms map (_.tpe)
+ val parents =
+ if (isPartial) List(appliedType(AbstractPartialFunctionClass.typeConstructor, List(formalTypes.head, resTp)), SerializableClass.tpe)
+ else List(appliedType(AbstractFunctionClass(arity).typeConstructor, formalTypes :+ resTp), SerializableClass.tpe)
+
+ anonClass setInfo ClassInfoType(parents, newScope, anonClass)
+ methodSym setInfoAndEnter MethodType(paramSyms, resTp)
+
+ // use apply's parameter since the scrut's type has been widened
+ def missingCase(scrut_ignored: Tree) = (funThis DOT nme.missingCase) (REF(paramSyms.head))
+
+ val body = methodBodyTyper.translateMatch(selector1, selectorTp, casesAdapted, resTp, doTranslation, if (isPartial) Some(missingCase) else None)
+
+ DefDef(methodSym, body)
+ }
+ }
+
+ def isDefinedAtMethod = {
+ val methodSym = anonClass.newMethod(nme._isDefinedAt, tree.pos, FINAL)
+ val (paramSyms, selector) = mkParams(methodSym)
+ if (selector eq EmptyTree) EmptyTree
+ else {
+ val methodBodyTyper = newTyper(context.makeNewScope(context.tree, methodSym)) // should use the DefDef for the context's tree, but it doesn't exist yet (we need the typer we're creating to create it)
+ paramSyms foreach (methodBodyTyper.context.scope enter _)
+ methodSym setInfoAndEnter MethodType(paramSyms, BooleanClass.tpe)
+
+ val (selector1, selectorTp, casesAdapted, resTp, doTranslation) = methodBodyTyper.prepareTranslateMatch(selector, casesTrue, mode, BooleanClass.tpe)
+ val body = methodBodyTyper.translateMatch(selector1, selectorTp, casesAdapted, resTp, doTranslation, Some(scrutinee => FALSE_typed))
+
+ DefDef(methodSym, body)
+ }
+ }
+
+ val members = if (!isPartial) List(applyMethod) else List(applyMethod, isDefinedAtMethod)
+ if (members.head eq EmptyTree) setError(tree)
+ else typed(Block(List(ClassDef(anonClass, NoMods, List(List()), List(List()), members, tree.pos)), New(anonClass.tpe)), mode, pt)
+ }
+
/**
* @param fun ...
* @param mode ...
@@ -2144,14 +2274,10 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
return MaxFunctionArityError(fun)
def decompose(pt: Type): (Symbol, List[Type], Type) =
- if ((isFunctionType(pt)
- ||
- pt.typeSymbol == PartialFunctionClass &&
- numVparams == 1 && fun.body.isInstanceOf[Match])
- && // see bug901 for a reason why next conditions are needed
- (pt.normalize.typeArgs.length - 1 == numVparams
- ||
- fun.vparams.exists(_.tpt.isEmpty)))
+ if ((isFunctionType(pt) || (pt.typeSymbol == PartialFunctionClass && numVparams == 1 && fun.body.isInstanceOf[Match])) && // see bug901 for a reason why next conditions are needed
+ ( pt.normalize.typeArgs.length - 1 == numVparams
+ || fun.vparams.exists(_.tpt.isEmpty)
+ ))
(pt.typeSymbol, pt.normalize.typeArgs.init, pt.normalize.typeArgs.last)
else
(FunctionClass(numVparams), fun.vparams map (x => NoType), WildcardType)
@@ -2160,7 +2286,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
if (argpts.lengthCompare(numVparams) != 0)
WrongNumberOfParametersError(fun, argpts)
else {
- val vparamSyms = map2(fun.vparams, argpts) { (vparam, argpt) =>
+ foreach2(fun.vparams, argpts) { (vparam, argpt) =>
if (vparam.tpt.isEmpty) {
vparam.tpt.tpe =
if (isFullyDefined(argpt)) argpt
@@ -2184,22 +2310,32 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
}
if (!vparam.tpt.pos.isDefined) vparam.tpt setPos vparam.pos.focus
}
- enterSym(context, vparam)
- if (context.retyping) context.scope enter vparam.symbol
- vparam.symbol
}
- val vparams = fun.vparams mapConserve (typedValDef)
-// for (vparam <- vparams) {
-// checkNoEscaping.locals(context.scope, WildcardType, vparam.tpt); ()
-// }
- val body1 = typed(fun.body, respt)
- val formals = vparamSyms map (_.tpe)
- val restpe = packedType(body1, fun.symbol).deconst.resultType
- val funtpe = typeRef(clazz.tpe.prefix, clazz, formals :+ restpe)
-// body = checkNoEscaping.locals(context.scope, restpe, body)
- treeCopy.Function(fun, vparams, body1).setType(funtpe)
- }
+ fun.body match {
+ case Match(sel, cases) if opt.virtPatmat =>
+ // go to outer context -- must discard the context that was created for the Function since we're discarding the function
+ // thus, its symbol, which serves as the current context.owner, is not the right owner
+ // you won't know you're using the wrong owner until lambda lift crashes (unless you know better than to use the wrong owner)
+ newTyper(context.outer).typedMatchAnonFun(fun, cases, mode, pt, Some((fun.vparams, sel)))
+ case _ =>
+ val vparamSyms = fun.vparams map { vparam =>
+ enterSym(context, vparam)
+ if (context.retyping) context.scope enter vparam.symbol
+ vparam.symbol
+ }
+ val vparams = fun.vparams mapConserve (typedValDef)
+ // for (vparam <- vparams) {
+ // checkNoEscaping.locals(context.scope, WildcardType, vparam.tpt); ()
+ // }
+ val formals = vparamSyms map (_.tpe)
+ val body1 = typed(fun.body, respt)
+ val restpe = packedType(body1, fun.symbol).deconst.resultType
+ val funtpe = typeRef(clazz.tpe.prefix, clazz, formals :+ restpe)
+ // body = checkNoEscaping.locals(context.scope, restpe, body)
+ treeCopy.Function(fun, vparams, body1).setType(funtpe)
+ }
+ }
}
def typedRefinement(stats: List[Tree]) {
@@ -3410,7 +3546,18 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
} else {
var thenp1 = typed(thenp, pt)
var elsep1 = typed(elsep, pt)
- val (owntype, needAdapt) = ptOrLub(List(thenp1.tpe, elsep1.tpe))
+
+ lazy val thenTp = packedType(thenp1, context.owner)
+ lazy val elseTp = packedType(elsep1, context.owner)
+ val (owntype, needAdapt) =
+ // in principle we should pack the types of each branch before lubbing, but lub doesn't really work for existentials anyway
+ // in the special (though common) case where the types are equal, it pays to pack before comparing
+ // especially virtpatmat needs more aggressive unification of skolemized types
+ // this breaks src/library/scala/collection/immutable/TrieIterator.scala
+ if (opt.virtPatmat && !isPastTyper && thenTp =:= elseTp) (thenp1.tpe, false) // use unpacked type
+ // TODO: skolemize (lub of packed types) when that no longer crashes on files/pos/t4070b.scala
+ else ptOrLub(List(thenp1.tpe, elsep1.tpe))
+
if (needAdapt) { //isNumericValueType(owntype)) {
thenp1 = adapt(thenp1, mode, owntype)
elsep1 = adapt(elsep1, mode, owntype)
@@ -3420,7 +3567,12 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
}
def typedMatch(tree: Tree, selector: Tree, cases: List[CaseDef]): Tree = {
- if (selector == EmptyTree) {
+ if (opt.virtPatmat && !isPastTyper) {
+ if (selector ne EmptyTree) {
+ val (selector1, selectorTp, casesAdapted, ownType, doTranslation) = prepareTranslateMatch(selector, cases, mode, pt)
+ typed(translateMatch(selector1, selectorTp, casesAdapted, ownType, doTranslation), mode, pt)
+ } else typedMatchAnonFun(tree, cases, mode, pt)
+ } else if (selector == EmptyTree) {
val arity = if (isFunctionType(pt)) pt.normalize.typeArgs.length - 1 else 1
val params = for (i <- List.range(0, arity)) yield
atPos(tree.pos.focusStart) {
@@ -3434,32 +3586,11 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
} else {
val selector1 = checkDead(typed(selector, EXPRmode | BYVALmode, WildcardType))
var cases1 = typedCases(cases, packCaptured(selector1.tpe.widen), pt)
-
- if (isPastTyper || !opt.virtPatmat) {
- val (owntype, needAdapt) = ptOrLub(cases1 map (_.tpe))
- if (needAdapt) {
- cases1 = cases1 map (adaptCase(_, owntype))
- }
- treeCopy.Match(tree, selector1, cases1) setType owntype
- } else { // don't run translator after typers (see comments in PatMatVirtualiser)
- val (owntype0, needAdapt) = ptOrLub(cases1 map (x => repackExistential(x.tpe)))
- val owntype = elimAnonymousClass(owntype0)
- if (needAdapt) cases1 = cases1 map (adaptCase(_, owntype))
-
- (MatchTranslator(this)).translateMatch(selector1, cases1, owntype) match {
- case Block(vd :: Nil, tree@Match(selector, cases)) =>
- val selector1 = checkDead(typed(selector, EXPRmode | BYVALmode, WildcardType))
- var cases1 = typedCases(cases, packCaptured(selector1.tpe.widen), pt)
- val (owntype, needAdapt) = ptOrLub(cases1 map (_.tpe))
- if (needAdapt)
- cases1 = cases1 map (adaptCase(_, owntype))
- typed(Block(vd :: Nil, treeCopy.Match(tree, selector1, cases1) setType owntype))
- case translated =>
- // TODO: get rid of setType owntype -- it should all typecheck
- // must call typed, not typed1, or we overflow the stack when emitting switches
- typed(translated, mode, WildcardType) setType owntype
- }
+ val (owntype, needAdapt) = ptOrLub(cases1 map (_.tpe))
+ if (needAdapt) {
+ cases1 = cases1 map (adaptCase(_, mode, owntype))
}
+ treeCopy.Match(tree, selector1, cases1) setType owntype
}
}
@@ -4229,9 +4360,6 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
}
}
- def adaptCase(cdef: CaseDef, tpe: Type): CaseDef =
- deriveCaseDef(cdef)(adapt(_, mode, tpe))
-
// begin typed1
val sym: Symbol = tree.symbol
if ((sym ne null) && (sym ne NoSymbol)) sym.initialize
@@ -4340,7 +4468,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
val (owntype, needAdapt) = ptOrLub(block1.tpe :: (catches1 map (_.tpe)))
if (needAdapt) {
block1 = adapt(block1, mode, owntype)
- catches1 = catches1 map (adaptCase(_, owntype))
+ catches1 = catches1 map (adaptCase(_, mode, owntype))
}
if(!isPastTyper && opt.virtPatmat) {
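
The typedMatchAnonFun path above turns a bare `{ case ... }` block into an anonymous class: `apply` is typed from the real cases, and, when the expected type is a PartialFunction, `isDefinedAt` is typed from duplicated cases whose bodies are replaced by `true` (casesTrue), with a default yielding `false`. A minimal source-level sketch of that shape, using a plain PartialFunction instead of the AbstractPartialFunction/missingCase machinery the compiler actually wires up:

{{{
object MatchAnonFunDemo extends App {
  // what the programmer writes: a bare case block in PartialFunction position
  val pf: PartialFunction[Any, Int] = { case i: Int => i + 1 }

  // a hand-written approximation of the synthesized anonymous class
  // (illustrative only; the real expansion extends AbstractPartialFunction)
  val expanded: PartialFunction[Any, Int] = new PartialFunction[Any, Int] {
    def apply(x: Any): Int = x match { case i: Int => i + 1 }
    def isDefinedAt(x: Any): Boolean = x match {
      case i: Int => true   // each original case body replaced by `true`
      case _      => false  // plus a default yielding `false`
    }
  }

  assert(pf.isDefinedAt(41) && !pf.isDefinedAt("no"))
  assert(expanded(41) == pf(41))
}
}}}
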
diff --git a/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala b/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala
index cc272b7b8d..4f5b6868ae 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala
@@ -45,6 +45,11 @@ trait Unapplies extends ast.TreeDSL
case BooleanClass => Nil
case OptionClass | SomeClass =>
val prod = tp.typeArgs.head
+// the spec doesn't allow just any subtype of Product, it *must* be TupleN[...] -- see run/virtpatmat_extends_product.scala
+// this breaks plenty of stuff, though...
+// val targs =
+// if (isTupleType(prod)) getProductArgs(prod)
+// else List(prod)
val targs = getProductArgs(prod)
if (targs.isEmpty || targs.tail.isEmpty) List(prod) // special n == 0 || n == 1
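
The commented-out alternative records that, per the spec, only a TupleN result should contribute multiple extractor sub-patterns, while getProductArgs currently accepts any Product. A small sketch of how the arity rule shows up at the use site, with illustrative extractor names:

{{{
object UnapplyArityDemo extends App {
  // arity 2: the Option wraps a Tuple2, so two sub-patterns are expected
  object Pair {
    def unapply(s: String): Option[(String, Int)] = Some((s, s.length))
  }
  // arity 1: the Option wraps a non-tuple, so the match binds one value
  // (the "n == 0 || n == 1" special case in the code above)
  object Whole {
    def unapply(s: String): Option[String] = Some(s.reverse)
  }

  "scala" match { case Pair(text, len) => println(text + " has length " + len) }
  "scala" match { case Whole(rev)      => println(rev) }
}
}}}
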
diff --git a/src/compiler/scala/tools/nsc/util/Position.scala b/src/compiler/scala/tools/nsc/util/Position.scala
index 53c767be20..bc74717366 100644
--- a/src/compiler/scala/tools/nsc/util/Position.scala
+++ b/src/compiler/scala/tools/nsc/util/Position.scala
@@ -33,6 +33,16 @@ object Position {
}
}
+/**
+ * A tree does not directly store a Position. It stores a TreeAnnotation, which /typically/ is a Position.
+ *
+ * A TreeAnnotation may encompass more than just a Position, though, depending on the exact subclass of TreeAnnotation.
+ */
+trait TreeAnnotation {
+ def pos: Position
+}
+
+
/** The Position class and its subclasses represent positions of ASTs and symbols.
* Except for NoPosition and FakePos, every position refers to a SourceFile
* and to an offset in the sourcefile (its `point`). For batch compilation,
@@ -77,7 +87,8 @@ object Position {
* pos.makeTransparent converts an opaque range position into a transparent one.
* returns all other positions unchanged.
*/
-trait Position {
+trait Position extends TreeAnnotation {
+ def pos: Position = this
/** An optional value containing the source file referred to by this position, or
* None if not defined.
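
A standalone sketch of the TreeAnnotation/Position relationship introduced here: every Position is its own annotation via `pos = this`, while other TreeAnnotation subclasses can carry extra payload and delegate `pos`. The `Commented` subclass below is invented purely for illustration and is not part of this change:

{{{
trait TreeAnnotation { def pos: Position }
trait Position extends TreeAnnotation { def pos: Position = this }
case object NoPosition extends Position

// hypothetical annotation carrying more than a position
case class Commented(comment: String, underlying: Position) extends TreeAnnotation {
  def pos: Position = underlying
}

object TreeAnnotationDemo extends App {
  val annots: List[TreeAnnotation] = List(NoPosition, Commented("synthetic", NoPosition))
  annots foreach (a => println(a.pos))
}
}}}
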
diff --git a/src/compiler/scala/tools/util/color/CString.scala b/src/compiler/scala/tools/util/color/CString.scala
index d0785eaeff..fa57229f09 100644
--- a/src/compiler/scala/tools/util/color/CString.scala
+++ b/src/compiler/scala/tools/util/color/CString.scala
@@ -11,18 +11,27 @@ final class CString(val uncolorized: String, val colorized: String) {
def visibleLength = uncolorized.length
def colorizedLength = colorized.length
def show() = Console println colorized
- def bytes() = colorized map (ch => ch.toByte)
- def > = show()
+ def bytes() = colorized map (_.toByte)
+ def >() = show()
def append(x: CString): CString = new CString(uncolorized + x.uncolorized, colorized + x.colorized)
def +(other: CString): CString = this append other
+
override def toString = colorized
}
class CStringOps(str: String) {
- /** Enables for example
+ /** String-to-String colorizing operation, for example
* println("foo" in Red)
- * println("foo" in Magenta.bright)
+ * println("bar" in Magenta.bright)
+ */
+ def in(ansi: Ansi): String = ansi colorize str
+
+ /** Gave in to one bit of punctuation, because everyone adds
+ * strings with '+' and we need something with higher precedence
+ * for it to be at all satisfying.
+ *
+ * "foo" %> Red + "bar" %> Magenta.bright
*/
- def in(ansi: Ansi): CString = new CString(str, ansi colorize str)
+ def %>(ansi: Ansi): CString = new CString(str, in(ansi))
}
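
The `%>` choice leans on Scala's operator precedence: precedence is keyed off an operator's first character, and `%` binds tighter than `+`, so `"foo" %> Red + "bar" %> Magenta.bright` groups as `("foo" %> Red) + ("bar" %> Magenta.bright)` with no parentheses. A self-contained toy version of that argument, with `Color` and `Tagged` standing in for the real Ansi/CString types:

{{{
object PrecedenceDemo extends App {
  case class Color(name: String)
  case class Tagged(text: String) { def +(other: Tagged) = Tagged(text + other.text) }
  class ColorOps(s: String) {
    def %>(c: Color): Tagged = Tagged("<" + c.name + ">" + s + "</" + c.name + ">")
  }
  implicit def colorOps(s: String): ColorOps = new ColorOps(s)

  val Red = Color("red")
  val Magenta = Color("magenta")
  // parses as ("foo" %> Red) + ("bar" %> Magenta): %> binds tighter than +
  println("foo" %> Red + "bar" %> Magenta)
}
}}}
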
diff --git a/src/compiler/scala/tools/util/color/package.scala b/src/compiler/scala/tools/util/color/package.scala
index 7c7c7dab74..3b3e85751e 100644
--- a/src/compiler/scala/tools/util/color/package.scala
+++ b/src/compiler/scala/tools/util/color/package.scala
@@ -18,4 +18,5 @@ package object color {
case x: AnsiForeground => x.flip
}
implicit def implicitCStringOps(str: String): CStringOps = new CStringOps(str)
+ implicit def implicitCString(str: String): CString = new CString(str, str)
}
diff --git a/src/library/scala/Specializable.scala b/src/library/scala/Specializable.scala
index 67126b3069..d5e22195d2 100644
--- a/src/library/scala/Specializable.scala
+++ b/src/library/scala/Specializable.scala
@@ -20,10 +20,10 @@ object Specializable {
// Smuggle a list of types by way of a tuple upon which Group is parameterized.
class Group[T >: Null](value: T) extends SpecializedGroup { }
- final val Primitives = new Group(Byte, Short, Int, Long, Char, Float, Double, Boolean, Unit)
- final val Everything = new Group(Byte, Short, Int, Long, Char, Float, Double, Boolean, Unit, AnyRef)
- final val Bits32AndUp = new Group(Int, Long, Float, Double)
- final val Integral = new Group(Byte, Short, Int, Long, Char)
- final val AllNumeric = new Group(Byte, Short, Int, Long, Char, Float, Double)
- final val BestOfBreed = new Group(Int, Double, Boolean, Unit, AnyRef)
+ final val Primitives = new Group((Byte, Short, Int, Long, Char, Float, Double, Boolean, Unit))
+ final val Everything = new Group((Byte, Short, Int, Long, Char, Float, Double, Boolean, Unit, AnyRef))
+ final val Bits32AndUp = new Group((Int, Long, Float, Double))
+ final val Integral = new Group((Byte, Short, Int, Long, Char))
+ final val AllNumeric = new Group((Byte, Short, Int, Long, Char, Float, Double))
+ final val BestOfBreed = new Group((Int, Double, Boolean, Unit, AnyRef))
}
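
The groups now pass a single tuple literal, matching `Group[T >: Null](value: T)`: the interesting information is the tuple *type* inferred for `T`, which encodes the whole list of types in one type argument. A self-contained mini-version of the trick (names here are illustrative; the real groups are consumed by `@specialized`):

{{{
object GroupDemo extends App {
  class MyGroup[T >: Null](val value: T)
  // T is inferred as (Int, Long, Double): one tuple type standing in for a
  // list of element types, with no varargs of types involved
  final val SmallNumeric = new MyGroup((1, 1L, 1.0))
  println(SmallNumeric.value)
}
}}}
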
diff --git a/src/library/scala/collection/GenIterableLike.scala b/src/library/scala/collection/GenIterableLike.scala
index 7e68733afd..8fa5981969 100644
--- a/src/library/scala/collection/GenIterableLike.scala
+++ b/src/library/scala/collection/GenIterableLike.scala
@@ -40,17 +40,18 @@ trait GenIterableLike[+A, +Repr] extends Any with GenTraversableLike[A, Repr] {
/** Checks if the other iterable collection contains the same elements in the same order as this $coll.
*
- * $orderDependent
- * $willNotTerminateInf
- *
* @param that the collection to compare with.
* @tparam B the type of the elements of collection `that`.
* @return `true`, if both collections contain the same elements in the same order, `false` otherwise.
*
* @usecase def sameElements(that: GenIterable[A]): Boolean
+ * @inheritdoc
*
- * @param that the collection to compare with.
- * @return `true`, if both collections contain the same elements in the same order, `false` otherwise.
+ * $orderDependent
+ * $willNotTerminateInf
+ *
+ * @param that the collection to compare with.
+ * @return `true`, if both collections contain the same elements in the same order, `false` otherwise.
*/
def sameElements[A1 >: A](that: GenIterable[A1]): Boolean
@@ -58,8 +59,6 @@ trait GenIterableLike[+A, +Repr] extends Any with GenTraversableLike[A, Repr] {
* by combining corresponding elements in pairs.
* If one of the two collections is longer than the other, its remaining elements are ignored.
*
- * $orderDependent
- *
* @param that The iterable providing the second half of each result pair
* @tparam A1 the type of the first half of the returned pairs (this is always a supertype
* of the collection's element type `A`).
@@ -71,19 +70,20 @@ trait GenIterableLike[+A, +Repr] extends Any with GenTraversableLike[A, Repr] {
* of the returned collection is the minimum of the lengths of this $coll and `that`.
*
* @usecase def zip[B](that: GenIterable[B]): $Coll[(A, B)]
+ * @inheritdoc
*
- * @param that The iterable providing the second half of each result pair
- * @tparam B the type of the second half of the returned pairs
- * @return a new $coll containing pairs consisting of
- * corresponding elements of this $coll and `that`. The length
- * of the returned collection is the minimum of the lengths of this $coll and `that`.
+ * $orderDependent
+ *
+ * @param that The iterable providing the second half of each result pair
+ * @tparam B the type of the second half of the returned pairs
+ * @return a new $coll containing pairs consisting of
+ * corresponding elements of this $coll and `that`. The length
+ * of the returned collection is the minimum of the lengths of this $coll and `that`.
*/
def zip[A1 >: A, B, That](that: GenIterable[B])(implicit bf: CBF[Repr, (A1, B), That]): That
/** Zips this $coll with its indices.
*
- * $orderDependent
- *
* @tparam A1 the type of the first half of the returned pairs (this is always a supertype
* of the collection's element type `A`).
* @tparam That the class of the returned collection. Where possible, `That` is
@@ -98,11 +98,14 @@ trait GenIterableLike[+A, +Repr] extends Any with GenTraversableLike[A, Repr] {
* $coll paired with their index. Indices start at `0`.
*
* @usecase def zipWithIndex: $Coll[(A, Int)]
+ * @inheritdoc
*
- * @return A new $coll containing pairs consisting of all elements of this
- * $coll paired with their index. Indices start at `0`.
- * @example
- * `List("a", "b", "c").zipWithIndex = List(("a", 0), ("b", 1), ("c", 2))`
+ * $orderDependent
+ *
+ * @return A new $coll containing pairs consisting of all elements of this
+ * $coll paired with their index. Indices start at `0`.
+ * @example
+ * `List("a", "b", "c").zipWithIndex = List(("a", 0), ("b", 1), ("c", 2))`
*
*/
def zipWithIndex[A1 >: A, That](implicit bf: CBF[Repr, (A1, Int), That]): That
@@ -112,8 +115,6 @@ trait GenIterableLike[+A, +Repr] extends Any with GenTraversableLike[A, Repr] {
* If one of the two collections is shorter than the other,
* placeholder elements are used to extend the shorter collection to the length of the longer.
*
- * $orderDependent
- *
* @param that the iterable providing the second half of each result pair
* @param thisElem the element to be used to fill up the result if this $coll is shorter than `that`.
* @param thatElem the element to be used to fill up the result if `that` is shorter than this $coll.
@@ -124,16 +125,19 @@ trait GenIterableLike[+A, +Repr] extends Any with GenTraversableLike[A, Repr] {
* If `that` is shorter than this $coll, `thatElem` values are used to pad the result.
*
* @usecase def zipAll[B](that: Iterable[B], thisElem: A, thatElem: B): $Coll[(A, B)]
- *
- * @param that The iterable providing the second half of each result pair
- * @param thisElem the element to be used to fill up the result if this $coll is shorter than `that`.
- * @param thatElem the element to be used to fill up the result if `that` is shorter than this $coll.
- * @tparam B the type of the second half of the returned pairs
- * @return a new $coll containing pairs consisting of
- * corresponding elements of this $coll and `that`. The length
- * of the returned collection is the maximum of the lengths of this $coll and `that`.
- * If this $coll is shorter than `that`, `thisElem` values are used to pad the result.
- * If `that` is shorter than this $coll, `thatElem` values are used to pad the result.
+ * @inheritdoc
+ *
+ * $orderDependent
+ *
+ * @param that The iterable providing the second half of each result pair
+ * @param thisElem the element to be used to fill up the result if this $coll is shorter than `that`.
+ * @param thatElem the element to be used to fill up the result if `that` is shorter than this $coll.
+ * @tparam B the type of the second half of the returned pairs
+ * @return a new $coll containing pairs consisting of
+ * corresponding elements of this $coll and `that`. The length
+ * of the returned collection is the maximum of the lengths of this $coll and `that`.
+ * If this $coll is shorter than `that`, `thisElem` values are used to pad the result.
+ * If `that` is shorter than this $coll, `thatElem` values are used to pad the result.
*/
def zipAll[B, A1 >: A, That](that: GenIterable[B], thisElem: A1, thatElem: B)(implicit bf: CBF[Repr, (A1, B), That]): That
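
The length rules spelled out in the zip/zipAll/zipWithIndex use cases above, in one concrete run: zip truncates to the shorter operand, zipAll pads the shorter one with the supplied elements, and zipWithIndex counts from 0.

{{{
object ZipDemo extends App {
  val xs = List(1, 2, 3)
  val ys = List("a", "b")
  assert((xs zip ys) == List((1, "a"), (2, "b")))                      // min length
  assert(xs.zipAll(ys, 0, "?") == List((1, "a"), (2, "b"), (3, "?")))  // padded
  assert(xs.zipWithIndex == List((1, 0), (2, 1), (3, 2)))
  println("zip rules check out")
}
}}}
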
diff --git a/src/library/scala/collection/GenMapLike.scala b/src/library/scala/collection/GenMapLike.scala
index 12ecbcf140..114169c849 100644
--- a/src/library/scala/collection/GenMapLike.scala
+++ b/src/library/scala/collection/GenMapLike.scala
@@ -41,6 +41,8 @@ trait GenMapLike[A, +B, +Repr] extends GenIterableLike[(A, B), Repr] with Equals
* @return the value associated with `key` if it exists,
* otherwise the result of the `default` computation.
* @usecase def getOrElse(key: A, default: => B): B
+ * @inheritdoc
+ * @tparam B the result type of the default computation.
*/
def getOrElse[B1 >: B](key: A, default: => B1): B1
diff --git a/src/library/scala/collection/GenSeqLike.scala b/src/library/scala/collection/GenSeqLike.scala
index cb0e96fcbb..755abcd2bf 100644
--- a/src/library/scala/collection/GenSeqLike.scala
+++ b/src/library/scala/collection/GenSeqLike.scala
@@ -115,21 +115,21 @@ trait GenSeqLike[+A, +Repr] extends Any with GenIterableLike[A, Repr] with Equal
/** Finds index of first occurrence of some value in this $coll.
*
- * $mayNotTerminateInf
- *
* @param elem the element value to search for.
* @tparam B the type of the element `elem`.
* @return the index of the first element of this $coll that is equal (wrt `==`)
* to `elem`, or `-1`, if none exists.
*
* @usecase def indexOf(elem: A): Int
+ * @inheritdoc
+ *
+ * $mayNotTerminateInf
+ *
*/
def indexOf[B >: A](elem: B): Int = indexOf(elem, 0)
/** Finds index of first occurrence of some value in this $coll after or at some start index.
*
- * $mayNotTerminateInf
- *
* @param elem the element value to search for.
* @tparam B the type of the element `elem`.
* @param from the start index
@@ -137,19 +137,25 @@ trait GenSeqLike[+A, +Repr] extends Any with GenIterableLike[A, Repr] with Equal
* to `elem`, or `-1`, if none exists.
*
* @usecase def indexOf(elem: A, from: Int): Int
+ * @inheritdoc
+ *
+ * $mayNotTerminateInf
+ *
*/
def indexOf[B >: A](elem: B, from: Int): Int = indexWhere(elem ==, from)
/** Finds index of last occurrence of some value in this $coll.
*
- * $willNotTerminateInf
- *
* @param elem the element value to search for.
* @tparam B the type of the element `elem`.
* @return the index of the last element of this $coll that is equal (wrt `==`)
* to `elem`, or `-1`, if none exists.
*
* @usecase def lastIndexOf(elem: A): Int
+ * @inheritdoc
+ *
+ * $willNotTerminateInf
+ *
*/
def lastIndexOf[B >: A](elem: B): Int = lastIndexWhere(elem ==)
@@ -162,6 +168,7 @@ trait GenSeqLike[+A, +Repr] extends Any with GenIterableLike[A, Repr] with Equal
* to `elem`, or `-1`, if none exists.
*
* @usecase def lastIndexOf(elem: A, end: Int): Int
+ * @inheritdoc
*/
def lastIndexOf[B >: A](elem: B, end: Int): Int = lastIndexWhere(elem ==, end)
@@ -195,10 +202,6 @@ trait GenSeqLike[+A, +Repr] extends Any with GenIterableLike[A, Repr] with Equal
* Builds a new collection by applying a function to all elements of this $coll and
* collecting the results in reversed order.
*
- * $willNotTerminateInf
- *
- * Note: `xs.reverseMap(f)` is the same as `xs.reverse.map(f)` but might be more efficient.
- *
* @param f the function to apply to each element.
* @tparam B the element type of the returned collection.
* @tparam That $thatinfo
@@ -207,10 +210,14 @@ trait GenSeqLike[+A, +Repr] extends Any with GenIterableLike[A, Repr] with Equal
* `f` to each element of this $coll and collecting the results in reversed order.
*
* @usecase def reverseMap[B](f: A => B): $Coll[B]
+ * @inheritdoc
*
- * Note: `xs.reverseMap(f)` is the same as `xs.reverse.map(f)` but might be more efficient.
- * @return a new $coll resulting from applying the given function
- * `f` to each element of this $coll and collecting the results in reversed order.
+ * $willNotTerminateInf
+ *
+ * Note: `xs.reverseMap(f)` is the same as `xs.reverse.map(f)` but might be more efficient.
+ *
+ * @return a new $coll resulting from applying the given function
+ * `f` to each element of this $coll and collecting the results in reversed order.
*/
def reverseMap[B, That](f: A => B)(implicit bf: CanBuildFrom[Repr, B, That]): That
@@ -254,10 +261,13 @@ trait GenSeqLike[+A, +Repr] extends Any with GenIterableLike[A, Repr] with Equal
* @return a new $coll consisting of all elements of this $coll
* except that `replaced` elements starting from `from` are replaced
* by `patch`.
+ *
* @usecase def patch(from: Int, that: GenSeq[A], replaced: Int): $Coll[A]
- * @return a new $coll consisting of all elements of this $coll
- * except that `replaced` elements starting from `from` are replaced
- * by `patch`.
+ * @inheritdoc
+ *
+ * @return a new $coll consisting of all elements of this $coll
+ * except that `replaced` elements starting from `from` are replaced
+ * by `patch`.
*/
def patch[B >: A, That](from: Int, patch: GenSeq[B], replaced: Int)(implicit bf: CanBuildFrom[Repr, B, That]): That
@@ -268,20 +278,33 @@ trait GenSeqLike[+A, +Repr] extends Any with GenIterableLike[A, Repr] with Equal
* @tparam That $thatinfo
* @param bf $bfinfo
* @return a new $coll which is a copy of this $coll with the element at position `index` replaced by `elem`.
+ *
* @usecase def updated(index: Int, elem: A): $Coll[A]
- * @return a copy of this $coll with the element at position `index` replaced by `elem`.
+ * @inheritdoc
+ *
+ * @return a copy of this $coll with the element at position `index` replaced by `elem`.
*/
def updated[B >: A, That](index: Int, elem: B)(implicit bf: CanBuildFrom[Repr, B, That]): That
/** A copy of the $coll with an element prepended.
*
- * Note that :-ending operators are right associative (see example).
- * A mnemonic for `+:` vs. `:+` is: the COLon goes on the COLlection side.
+ * @param elem the prepended element
+ * @tparam B the element type of the returned $coll.
+ * @tparam That $thatinfo
+ * @param bf $bfinfo
+ * @return a new collection of type `That` consisting of `elem` followed
+ * by all elements of this $coll.
+ *
+ * @usecase def +:(elem: A): $Coll[A]
+ * @inheritdoc
+ *
+ * Note that :-ending operators are right associative (see example).
+ * A mnemonic for `+:` vs. `:+` is: the COLon goes on the COLlection side.
*
- * Also, the original $coll is not modified, so you will want to capture the result.
+ * Also, the original $coll is not modified, so you will want to capture the result.
*
- * Example:
- * {{{
+ * Example:
+ * {{{
* scala> val x = LinkedList(1)
* x: scala.collection.mutable.LinkedList[Int] = LinkedList(1)
*
@@ -290,17 +313,10 @@ trait GenSeqLike[+A, +Repr] extends Any with GenIterableLike[A, Repr] with Equal
*
* scala> println(x)
* LinkedList(1)
- * }}}
+ * }}}
*
- * @param elem the prepended element
- * @tparam B the element type of the returned $coll.
- * @tparam That $thatinfo
- * @param bf $bfinfo
- * @return a new collection of type `That` consisting of `elem` followed
- * by all elements of this $coll.
- * @usecase def +:(elem: A): $Coll[A]
- * @return a new $coll consisting of `elem` followed
- * by all elements of this $coll.
+ * @return a new $coll consisting of `elem` followed
+ * by all elements of this $coll.
*/
def +:[B >: A, That](elem: B)(implicit bf: CanBuildFrom[Repr, B, That]): That
@@ -308,18 +324,20 @@ trait GenSeqLike[+A, +Repr] extends Any with GenIterableLike[A, Repr] with Equal
*
* A mnemonic for `+:` vs. `:+` is: the COLon goes on the COLlection side.
*
- * $willNotTerminateInf
* @param elem the appended element
* @tparam B the element type of the returned $coll.
* @tparam That $thatinfo
* @param bf $bfinfo
* @return a new collection of type `That` consisting of
* all elements of this $coll followed by `elem`.
+ *
* @usecase def :+(elem: A): $Coll[A]
- * @return a new $coll consisting of
- * all elements of this $coll followed by `elem`.
- * @example
- * {{{
+ * @inheritdoc
+ *
+ * $willNotTerminateInf
+ *
+ * Example:
+ * {{{
* scala> import scala.collection.mutable.LinkedList
* import scala.collection.mutable.LinkedList
*
@@ -331,7 +349,10 @@ trait GenSeqLike[+A, +Repr] extends Any with GenIterableLike[A, Repr] with Equal
*
* scala> println(a)
* LinkedList(1)
- * }}}
+ * }}}
+ *
+ * @return a new $coll consisting of
+ * all elements of this $coll followed by `elem`.
*/
def :+[B >: A, That](elem: B)(implicit bf: CanBuildFrom[Repr, B, That]): That
@@ -346,9 +367,11 @@ trait GenSeqLike[+A, +Repr] extends Any with GenIterableLike[A, Repr] with Equal
* all elements of this $coll followed by the minimal number of occurrences of `elem` so
* that the resulting collection has a length of at least `len`.
* @usecase def padTo(len: Int, elem: A): $Coll[A]
- * @return a new $coll consisting of
- * all elements of this $coll followed by the minimal number of occurrences of `elem` so
- * that the resulting $coll has a length of at least `len`.
+ * @inheritdoc
+ *
+ * @return a new $coll consisting of
+ * all elements of this $coll followed by the minimal number of occurrences of `elem` so
+ * that the resulting $coll has a length of at least `len`.
*/
def padTo[B >: A, That](len: Int, elem: B)(implicit bf: CanBuildFrom[Repr, B, That]): That
@@ -368,13 +391,6 @@ trait GenSeqLike[+A, +Repr] extends Any with GenIterableLike[A, Repr] with Equal
/** Produces a new sequence which contains all elements of this $coll and also all elements of
* a given sequence. `xs union ys` is equivalent to `xs ++ ys`.
- * $willNotTerminateInf
- *
- * Another way to express this
- * is that `xs union ys` computes the order-presevring multi-set union of `xs` and `ys`.
- * `union` is hence a counter-part of `diff` and `intersect` which also work on multi-sets.
- *
- * $willNotTerminateInf
*
* @param that the sequence to add.
* @tparam B the element type of the returned $coll.
@@ -382,9 +398,18 @@ trait GenSeqLike[+A, +Repr] extends Any with GenIterableLike[A, Repr] with Equal
* @param bf $bfinfo
* @return a new collection of type `That` which contains all elements of this $coll
* followed by all elements of `that`.
+ *
* @usecase def union(that: GenSeq[A]): $Coll[A]
- * @return a new $coll which contains all elements of this $coll
- * followed by all elements of `that`.
+ * @inheritdoc
+ *
+ * Another way to express this
+ * is that `xs union ys` computes the order-preserving multi-set union of `xs` and `ys`.
+ * `union` is hence a counterpart of `diff` and `intersect`, which also work on multi-sets.
+ *
+ * $willNotTerminateInf
+ *
+ * @return a new $coll which contains all elements of this $coll
+ * followed by all elements of `that`.
*/
def union[B >: A, That](that: GenSeq[B])(implicit bf: CanBuildFrom[Repr, B, That]): That = this ++ that
@@ -393,7 +418,6 @@ trait GenSeqLike[+A, +Repr] extends Any with GenIterableLike[A, Repr] with Equal
union(that: GenSeq[B])(bf)
/** Computes the multiset difference between this $coll and another sequence.
- * $willNotTerminateInf
*
* @param that the sequence of elements to remove
* @tparam B the element type of the returned $coll.
@@ -404,17 +428,21 @@ trait GenSeqLike[+A, +Repr] extends Any with GenIterableLike[A, Repr] with Equal
* If an element value `x` appears
* ''n'' times in `that`, then the first ''n'' occurrences of `x` will not form
* part of the result, but any following occurrences will.
+ *
* @usecase def diff(that: GenSeq[A]): $Coll[A]
- * @return a new $coll which contains all elements of this $coll
- * except some of occurrences of elements that also appear in `that`.
- * If an element value `x` appears
- * ''n'' times in `that`, then the first ''n'' occurrences of `x` will not form
- * part of the result, but any following occurrences will.
+ * @inheritdoc
+ *
+ * $willNotTerminateInf
+ *
+ * @return a new $coll which contains all elements of this $coll
+ * except some of occurrences of elements that also appear in `that`.
+ * If an element value `x` appears
+ * ''n'' times in `that`, then the first ''n'' occurrences of `x` will not form
+ * part of the result, but any following occurrences will.
*/
def diff[B >: A](that: GenSeq[B]): Repr
/** Computes the multiset intersection between this $coll and another sequence.
- * $mayNotTerminateInf
*
* @param that the sequence of elements to intersect with.
* @tparam B the element type of the returned $coll.
@@ -425,12 +453,17 @@ trait GenSeqLike[+A, +Repr] extends Any with GenIterableLike[A, Repr] with Equal
* If an element value `x` appears
* ''n'' times in `that`, then the first ''n'' occurrences of `x` will be retained
* in the result, but any following occurrences will be omitted.
+ *
* @usecase def intersect(that: GenSeq[A]): $Coll[A]
- * @return a new $coll which contains all elements of this $coll
- * which also appear in `that`.
- * If an element value `x` appears
- * ''n'' times in `that`, then the first ''n'' occurrences of `x` will be retained
- * in the result, but any following occurrences will be omitted.
+ * @inheritdoc
+ *
+ * $mayNotTerminateInf
+ *
+ * @return a new $coll which contains all elements of this $coll
+ * which also appear in `that`.
+ * If an element value `x` appears
+ * ''n'' times in `that`, then the first ''n'' occurrences of `x` will be retained
+ * in the result, but any following occurrences will be omitted.
*/
def intersect[B >: A](that: GenSeq[B]): Repr
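
The multiset wording for union, diff and intersect is easiest to see on a concrete sequence with repeated elements: only the first ''n'' occurrences are removed or retained, and union is plain concatenation.

{{{
object MultisetDemo extends App {
  val xs = List(1, 1, 2, 3, 3, 3)
  val ys = List(1, 3, 3)
  assert((xs diff ys)      == List(1, 2, 3))   // drops one 1 and two 3s
  assert((xs intersect ys) == List(1, 3, 3))   // keeps one 1 and two 3s
  assert((xs union ys)     == (xs ++ ys))      // union is just concatenation
  println("multiset ops check out")
}
}}}
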
diff --git a/src/library/scala/collection/GenTraversableLike.scala b/src/library/scala/collection/GenTraversableLike.scala
index dd5f602c41..fd03e0f446 100644
--- a/src/library/scala/collection/GenTraversableLike.scala
+++ b/src/library/scala/collection/GenTraversableLike.scala
@@ -136,6 +136,7 @@ trait GenTraversableLike[+A, +Repr] extends Any with GenTraversableOnce[A] with
* but this is not necessary.
*
* @usecase def foreach(f: A => Unit): Unit
+ * @inheritdoc
*/
def foreach[U](f: A => U): Unit
@@ -149,17 +150,15 @@ trait GenTraversableLike[+A, +Repr] extends Any with GenTraversableOnce[A] with
* `f` to each element of this $coll and collecting the results.
*
* @usecase def map[B](f: A => B): $Coll[B]
- *
- * @return a new $coll resulting from applying the given function
- * `f` to each element of this $coll and collecting the results.
+ * @inheritdoc
+ * @return a new $coll resulting from applying the given function
+ * `f` to each element of this $coll and collecting the results.
*/
def map[B, That](f: A => B)(implicit bf: CanBuildFrom[Repr, B, That]): That
/** Builds a new collection by applying a partial function to all elements of this $coll
* on which the function is defined.
*
- * $collectExample
- *
* @param pf the partial function which filters and maps the $coll.
* @tparam B the element type of the returned collection.
* @tparam That $thatinfo
@@ -169,36 +168,18 @@ trait GenTraversableLike[+A, +Repr] extends Any with GenTraversableOnce[A] with
* The order of the elements is preserved.
*
* @usecase def collect[B](pf: PartialFunction[A, B]): $Coll[B]
+ * @inheritdoc
*
- * @return a new $coll resulting from applying the given partial function
- * `pf` to each element on which it is defined and collecting the results.
- * The order of the elements is preserved.
+ * $collectExample
+ *
+ * @return a new $coll resulting from applying the given partial function
+ * `pf` to each element on which it is defined and collecting the results.
+ * The order of the elements is preserved.
*/
def collect[B, That](pf: PartialFunction[A, B])(implicit bf: CanBuildFrom[Repr, B, That]): That
/** Builds a new collection by applying a function to all elements of this $coll
- * and using the elements of the resulting collections. For example:
- *
- * {{{
- * def getWords(lines: Seq[String]): Seq[String] = lines flatMap (line => line split "\\W+")
- * }}}
- *
- * The type of the resulting collection is guided by the static type of $coll. This might
- * cause unexpected results sometimes. For example:
- *
- * {{{
- * // lettersOf will return a Seq[Char] of likely repeated letters, instead of a Set
- * def lettersOf(words: Seq[String]) = words flatMap (word => word.toSet)
- *
- * // lettersOf will return a Set[Char], not a Seq
- * def lettersOf(words: Seq[String]) = words.toSet flatMap (word => word.toSeq)
- *
- * // xs will be a an Iterable[Int]
- * val xs = Map("a" -> List(11,111), "b" -> List(22,222)).flatMap(_._2)
- *
- * // ys will be a Map[Int, Int]
- * val ys = Map("a" -> List(1 -> 11,1 -> 111), "b" -> List(2 -> 22,2 -> 222)).flatMap(_._2)
- * }}}
+ * and using the elements of the resulting collections.
*
* @param f the function to apply to each element.
* @tparam B the element type of the returned collection.
@@ -208,33 +189,39 @@ trait GenTraversableLike[+A, +Repr] extends Any with GenTraversableOnce[A] with
* `f` to each element of this $coll and concatenating the results.
*
* @usecase def flatMap[B](f: A => GenTraversableOnce[B]): $Coll[B]
+ * @inheritdoc
*
- * @return a new $coll resulting from applying the given collection-valued function
- * `f` to each element of this $coll and concatenating the results.
- */
- def flatMap[B, That](f: A => GenTraversableOnce[B])(implicit bf: CanBuildFrom[Repr, B, That]): That
-
- /** Returns a new $coll containing the elements from the left hand operand followed by the elements from the
- * right hand operand. The element type of the $coll is the most specific superclass encompassing
- * the element types of the two operands (see example).
+ * For example:
*
- * Example:
- * {{{
- * scala> val a = LinkedList(1)
- * a: scala.collection.mutable.LinkedList[Int] = LinkedList(1)
+ * {{{
+ * def getWords(lines: Seq[String]): Seq[String] = lines flatMap (line => line split "\\W+")
+ * }}}
*
- * scala> val b = LinkedList(2)
- * b: scala.collection.mutable.LinkedList[Int] = LinkedList(2)
+ * The type of the resulting collection is guided by the static type of $coll. This might
+ * cause unexpected results sometimes. For example:
*
- * scala> val c = a ++ b
- * c: scala.collection.mutable.LinkedList[Int] = LinkedList(1, 2)
+ * {{{
+ * // lettersOf will return a Seq[Char] of likely repeated letters, instead of a Set
+ * def lettersOf(words: Seq[String]) = words flatMap (word => word.toSet)
*
- * scala> val d = LinkedList('a')
- * d: scala.collection.mutable.LinkedList[Char] = LinkedList(a)
+ * // lettersOf will return a Set[Char], not a Seq
+ * def lettersOf(words: Seq[String]) = words.toSet flatMap (word => word.toSeq)
*
- * scala> val e = c ++ d
- * e: scala.collection.mutable.LinkedList[AnyVal] = LinkedList(1, 2, a)
- * }}}
+ * // xs will be an Iterable[Int]
+ * val xs = Map("a" -> List(11,111), "b" -> List(22,222)).flatMap(_._2)
+ *
+ * // ys will be a Map[Int, Int]
+ * val ys = Map("a" -> List(1 -> 11,1 -> 111), "b" -> List(2 -> 22,2 -> 222)).flatMap(_._2)
+ * }}}
+ *
+ * @return a new $coll resulting from applying the given collection-valued function
+ * `f` to each element of this $coll and concatenating the results.
+ */
+ def flatMap[B, That](f: A => GenTraversableOnce[B])(implicit bf: CanBuildFrom[Repr, B, That]): That
+
+ /** Returns a new $coll containing the elements from the left hand operand followed by the elements from the
+ * right hand operand. The element type of the $coll is the most specific superclass encompassing
+ * the element types of the two operands.
*
* @param that the traversable to append.
* @tparam B the element type of the returned collection.
@@ -244,9 +231,28 @@ trait GenTraversableLike[+A, +Repr] extends Any with GenTraversableOnce[A] with
* of this $coll followed by all elements of `that`.
*
* @usecase def ++[B](that: GenTraversableOnce[B]): $Coll[B]
+ * @inheritdoc
+ *
+ * Example:
+ * {{{
+ * scala> val a = LinkedList(1)
+ * a: scala.collection.mutable.LinkedList[Int] = LinkedList(1)
+ *
+ * scala> val b = LinkedList(2)
+ * b: scala.collection.mutable.LinkedList[Int] = LinkedList(2)
+ *
+ * scala> val c = a ++ b
+ * c: scala.collection.mutable.LinkedList[Int] = LinkedList(1, 2)
+ *
+ * scala> val d = LinkedList('a')
+ * d: scala.collection.mutable.LinkedList[Char] = LinkedList(a)
+ *
+ * scala> val e = c ++ d
+ * e: scala.collection.mutable.LinkedList[AnyVal] = LinkedList(1, 2, a)
+ * }}}
*
- * @return a new $coll which contains all elements of this $coll
- * followed by all elements of `that`.
+ * @return a new $coll which contains all elements of this $coll
+ * followed by all elements of `that`.
*/
def ++[B >: A, That](that: GenTraversableOnce[B])(implicit bf: CanBuildFrom[Repr, B, That]): That
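
For the collect use case reorganized above, a short usage sketch: the partial function's domain does the filtering and its body does the mapping, with element order preserved.

{{{
object CollectDemo extends App {
  val mixed: List[Any] = List(1, "two", 3, "four")
  val doubled: List[Int] = mixed collect { case i: Int => i * 2 }
  assert(doubled == List(2, 6))
  println(doubled)
}
}}}
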
diff --git a/src/library/scala/collection/GenTraversableOnce.scala b/src/library/scala/collection/GenTraversableOnce.scala
index 67ea4cdb00..a7ec7618b7 100644
--- a/src/library/scala/collection/GenTraversableOnce.scala
+++ b/src/library/scala/collection/GenTraversableOnce.scala
@@ -124,7 +124,7 @@ trait GenTraversableOnce[+A] extends Any {
* scala> val b = (a /:\ 5)(_+_)
* b: Int = 15
* }}}*/
- @deprecated("use fold instead")
+ @deprecated("use fold instead", "2.10.0")
def /:\[A1 >: A](z: A1)(op: (A1, A1) => A1): A1 = fold(z)(op)
/** Applies a binary operator to a start value and all elements of this $coll,
@@ -156,7 +156,7 @@ trait GenTraversableOnce[+A] extends Any {
* @return the result of inserting `op` between consecutive elements of this $coll,
* going left to right with the start value `z` on the left:
* {{{
- * op(...op(op(z, x1), x2), ..., xn)
+ * op(...op(op(z, x_1), x_2), ..., x_n)
* }}}
* where `x,,1,,, ..., x,,n,,` are the elements of this $coll.
*/
@@ -191,7 +191,7 @@ trait GenTraversableOnce[+A] extends Any {
* @return the result of inserting `op` between consecutive elements of this $coll,
* going right to left with the start value `z` on the right:
* {{{
- * op(x1, op(x2, ... op(xn, z)...))
+ * op(x_1, op(x_2, ... op(x_n, z)...))
* }}}
* where `x,,1,,, ..., x,,n,,` are the elements of this $coll.
*/
@@ -209,7 +209,7 @@ trait GenTraversableOnce[+A] extends Any {
* @return the result of inserting `op` between consecutive elements of this $coll,
* going left to right with the start value `z` on the left:
* {{{
- * op(...op(z, x1), x2, ..., xn)
+ * op(...op(z, x_1), x_2, ..., x_n)
* }}}
* where `x,,1,,, ..., x,,n,,` are the elements of this $coll.
*/
@@ -226,7 +226,7 @@ trait GenTraversableOnce[+A] extends Any {
* @return the result of inserting `op` between consecutive elements of this $coll,
* going right to left with the start value `z` on the right:
* {{{
- * op(x1, op(x2, ... op(xn, z)...))
+ * op(x_1, op(x_2, ... op(x_n, z)...))
* }}}
* where `x,,1,,, ..., x,,n,,` are the elements of this $coll.
*/
@@ -271,7 +271,7 @@ trait GenTraversableOnce[+A] extends Any {
* @return the result of inserting `op` between consecutive elements of this $coll,
* going right to left:
* {{{
- * op(x,,1,,, op(x,,2,,, ..., op(x,,n-1,,, x,,n,,)...))
+ * op(x_1, op(x_2, ..., op(x_{n-1}, x_n)...))
* }}}
* where `x,,1,,, ..., x,,n,,` are the elements of this $coll.
* @throws `UnsupportedOperationException` if this $coll is empty.
@@ -316,11 +316,12 @@ trait GenTraversableOnce[+A] extends Any {
* @return the sum of all elements of this $coll with respect to the `+` operator in `num`.
*
* @usecase def sum: A
+ * @inheritdoc
*
- * @return the sum of all elements in this $coll of numbers of type `Int`.
- * Instead of `Int`, any other type `T` with an implicit `Numeric[T]` implementation
- * can be used as element type of the $coll and as result type of `sum`.
- * Examples of such types are: `Long`, `Float`, `Double`, `BigInt`.
+ * @return the sum of all elements in this $coll of numbers of type `Int`.
+ * Instead of `Int`, any other type `T` with an implicit `Numeric[T]` implementation
+ * can be used as element type of the $coll and as result type of `sum`.
+ * Examples of such types are: `Long`, `Float`, `Double`, `BigInt`.
*
*/
def sum[A1 >: A](implicit num: Numeric[A1]): A1
@@ -333,11 +334,12 @@ trait GenTraversableOnce[+A] extends Any {
* @return the product of all elements of this $coll with respect to the `*` operator in `num`.
*
* @usecase def product: A
+ * @inheritdoc
*
- * @return the product of all elements in this $coll of numbers of type `Int`.
- * Instead of `Int`, any other type `T` with an implicit `Numeric[T]` implementation
- * can be used as element type of the $coll and as result type of `product`.
- * Examples of such types are: `Long`, `Float`, `Double`, `BigInt`.
+ * @return the product of all elements in this $coll of numbers of type `Int`.
+ * Instead of `Int`, any other type `T` with an implicit `Numeric[T]` implementation
+ * can be used as element type of the $coll and as result type of `product`.
+ * Examples of such types are: `Long`, `Float`, `Double`, `BigInt`.
*/
def product[A1 >: A](implicit num: Numeric[A1]): A1
@@ -348,7 +350,9 @@ trait GenTraversableOnce[+A] extends Any {
* @return the smallest element of this $coll with respect to the ordering `cmp`.
*
* @usecase def min: A
- * @return the smallest element of this $coll
+ * @inheritdoc
+ *
+ * @return the smallest element of this $coll
*/
def min[A1 >: A](implicit ord: Ordering[A1]): A
@@ -359,7 +363,9 @@ trait GenTraversableOnce[+A] extends Any {
* @return the largest element of this $coll with respect to the ordering `cmp`.
*
* @usecase def max: A
- * @return the largest element of this $coll.
+ * @inheritdoc
+ *
+ * @return the largest element of this $coll.
*/
def max[A1 >: A](implicit ord: Ordering[A1]): A
@@ -387,12 +393,13 @@ trait GenTraversableOnce[+A] extends Any {
* Copying will stop once either the end of the current $coll is reached,
* or the end of the array is reached.
*
- * $willNotTerminateInf
- *
* @param xs the array to fill.
* @tparam B the type of the elements of the array.
*
* @usecase def copyToArray(xs: Array[A]): Unit
+ * @inheritdoc
+ *
+ * $willNotTerminateInf
*/
def copyToArray[B >: A](xs: Array[B]): Unit
@@ -401,13 +408,14 @@ trait GenTraversableOnce[+A] extends Any {
* Copying will stop once either the end of the current $coll is reached,
* or the end of the array is reached.
*
- * $willNotTerminateInf
- *
* @param xs the array to fill.
* @param start the starting index.
* @tparam B the type of the elements of the array.
*
* @usecase def copyToArray(xs: Array[A], start: Int): Unit
+ * @inheritdoc
+ *
+ * $willNotTerminateInf
*/
def copyToArray[B >: A](xs: Array[B], start: Int): Unit
@@ -450,15 +458,18 @@ trait GenTraversableOnce[+A] extends Any {
def mkString: String
/** Converts this $coll to an array.
- * $willNotTerminateInf
*
* @tparam B the type of the elements of the array. A `ClassManifest` for
* this type must be available.
* @return an array containing all elements of this $coll.
*
* @usecase def toArray: Array[A]
- * @return an array containing all elements of this $coll.
- * A `ClassManifest` must be available for the element type of this $coll.
+ * @inheritdoc
+ *
+ * $willNotTerminateInf
+ *
+ * @return an array containing all elements of this $coll.
+ * A `ClassManifest` must be available for the element type of this $coll.
*/
def toArray[A1 >: A: ClassManifest]: Array[A1]
@@ -530,11 +541,13 @@ trait GenTraversableOnce[+A] extends Any {
* pair in the map. Duplicate keys will be overwritten by later keys:
* if this is an unordered collection, which key is in the resulting map
* is undefined.
- * $willNotTerminateInf
* @return a map containing all elements of this $coll.
+ *
* @usecase def toMap[T, U]: Map[T, U]
- * @return a map of type `immutable.Map[T, U]`
- * containing all key/value pairs of type `(T, U)` of this $coll.
+ * @inheritdoc
+ * $willNotTerminateInf
+ * @return a map of type `immutable.Map[T, U]`
+ * containing all key/value pairs of type `(T, U)` of this $coll.
*/
def toMap[K, V](implicit ev: A <:< (K, V)): GenMap[K, V]
}
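
The corrected foldLeft/foldRight formulas, unrolled on a short list with a non-associative operator so the two bracketings give visibly different results:

{{{
object FoldDemo extends App {
  val xs = List(1, 2, 3)
  // foldLeft:  op(op(op(z, x_1), x_2), x_3) = ((10 - 1) - 2) - 3 = 4
  assert(xs.foldLeft(10)(_ - _) == 4)
  // foldRight: op(x_1, op(x_2, op(x_3, z))) = 1 - (2 - (3 - 10)) = -8
  assert(xs.foldRight(10)(_ - _) == -8)
  println("folds check out")
}
}}}
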
diff --git a/src/library/scala/collection/IterableLike.scala b/src/library/scala/collection/IterableLike.scala
index 73d4efe125..fb6d154952 100644
--- a/src/library/scala/collection/IterableLike.scala
+++ b/src/library/scala/collection/IterableLike.scala
@@ -67,6 +67,7 @@ self =>
* Subclasses should re-implement this method if a more efficient implementation exists.
*
* @usecase def foreach(f: A => Unit): Unit
+ * @inheritdoc
*/
def foreach[U](f: A => U): Unit =
iterator.foreach(f)
diff --git a/src/library/scala/collection/Iterator.scala b/src/library/scala/collection/Iterator.scala
index d46d215e0c..7b57a91c41 100644
--- a/src/library/scala/collection/Iterator.scala
+++ b/src/library/scala/collection/Iterator.scala
@@ -322,7 +322,9 @@ trait Iterator[+A] extends TraversableOnce[A] {
* @return a new iterator that first yields the values produced by this
* iterator followed by the values produced by iterator `that`.
* @note Reuse: $consumesTwoAndProducesOneIterator
+ *
* @usecase def ++(that: => Iterator[A]): Iterator[A]
+ * @inheritdoc
*/
def ++[B >: A](that: => GenTraversableOnce[B]): Iterator[B] = new AbstractIterator[B] {
// optimize a little bit to prevent n log n behavior.
@@ -609,7 +611,9 @@ trait Iterator[+A] extends TraversableOnce[A] {
* followed by the minimal number of occurrences of `elem` so
* that the number of produced values is at least `len`.
* @note Reuse: $consumesAndProducesIterator
+ *
* @usecase def padTo(len: Int, elem: A): Iterator[A]
+ * @inheritdoc
*/
def padTo[A1 >: A](len: Int, elem: A1): Iterator[A1] = new AbstractIterator[A1] {
private var count = 0
@@ -658,7 +662,9 @@ trait Iterator[+A] extends TraversableOnce[A] {
* If this iterator is shorter than `that`, `thisElem` values are used to pad the result.
* If `that` is shorter than this iterator, `thatElem` values are used to pad the result.
* @note Reuse: $consumesTwoAndProducesOneIterator
+ *
* @usecase def zipAll[B](that: Iterator[B], thisElem: A, thatElem: B): Iterator[(A, B)]
+ * @inheritdoc
*/
def zipAll[B, A1 >: A, B1 >: B](that: Iterator[B], thisElem: A1, thatElem: B1): Iterator[(A1, B1)] = new AbstractIterator[(A1, B1)] {
def hasNext = self.hasNext || that.hasNext
@@ -682,7 +688,9 @@ trait Iterator[+A] extends TraversableOnce[A] {
* but this is not necessary.
*
* @note Reuse: $consumesIterator
+ *
* @usecase def foreach(f: A => Unit): Unit
+ * @inheritdoc
*/
def foreach[U](f: A => U) { while (hasNext) f(next()) }
@@ -1057,15 +1065,17 @@ trait Iterator[+A] extends TraversableOnce[A] {
* Copying will stop once either the end of the current iterator is reached,
* or the end of the array is reached, or `len` elements have been copied.
*
- * $willNotTerminateInf
- *
* @param xs the array to fill.
* @param start the starting index.
* @param len the maximal number of elements to copy.
* @tparam B the type of the elements of the array.
*
* @note Reuse: $consumesIterator
+ *
* @usecase def copyToArray(xs: Array[A], start: Int, len: Int): Unit
+ * @inheritdoc
+ *
+ * $willNotTerminateInf
*/
def copyToArray[B >: A](xs: Array[B], start: Int, len: Int): Unit = {
var i = start
diff --git a/src/library/scala/collection/JavaConversions.scala b/src/library/scala/collection/JavaConversions.scala
index 50919e506a..75ab3f28f5 100644
--- a/src/library/scala/collection/JavaConversions.scala
+++ b/src/library/scala/collection/JavaConversions.scala
@@ -8,6 +8,9 @@
package scala.collection
+import java.{ lang => jl, util => ju }, java.util.{ concurrent => juc }
+import convert._
+
/** A collection of implicit conversions supporting interoperability between
* Scala and Java collections.
*
@@ -46,877 +49,81 @@ package scala.collection
* @author Martin Odersky
* @since 2.8
*/
-object JavaConversions {
+object JavaConversions extends WrapAsScala with WrapAsJava {
+ @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type ConcurrentMapWrapper[A, B] = Wrappers.ConcurrentMapWrapper[A, B]
+ @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type DictionaryWrapper[A, B] = Wrappers.DictionaryWrapper[A, B]
+ @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type IterableWrapper[A] = Wrappers.IterableWrapper[A]
+ @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type IteratorWrapper[A] = Wrappers.IteratorWrapper[A]
+ @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type JCollectionWrapper[A] = Wrappers.JCollectionWrapper[A]
+ @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type JConcurrentMapWrapper[A, B] = Wrappers.JConcurrentMapWrapper[A, B]
+ @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type JDictionaryWrapper[A, B] = Wrappers.JDictionaryWrapper[A, B]
+ @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type JEnumerationWrapper[A] = Wrappers.JEnumerationWrapper[A]
+ @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type JIterableWrapper[A] = Wrappers.JIterableWrapper[A]
+ @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type JIteratorWrapper[A] = Wrappers.JIteratorWrapper[A]
+ @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type JListWrapper[A] = Wrappers.JListWrapper[A]
+ @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type JMapWrapper[A, B] = Wrappers.JMapWrapper[A, B]
+ @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type JPropertiesWrapper = Wrappers.JPropertiesWrapper
+ @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type JSetWrapper[A] = Wrappers.JSetWrapper[A]
+ @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type MapWrapper[A, B] = Wrappers.MapWrapper[A, B]
+ @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type MutableBufferWrapper[A] = Wrappers.MutableBufferWrapper[A]
+ @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type MutableMapWrapper[A, B] = Wrappers.MutableMapWrapper[A, B]
+ @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type MutableSeqWrapper[A] = Wrappers.MutableSeqWrapper[A]
+ @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type MutableSetWrapper[A] = Wrappers.MutableSetWrapper[A]
+ @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type SeqWrapper[A] = Wrappers.SeqWrapper[A]
+ @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type SetWrapper[A] = Wrappers.SetWrapper[A]
+ @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type ToIteratorWrapper[A] = Wrappers.ToIteratorWrapper[A]
+ @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val DictionaryWrapper = Wrappers.DictionaryWrapper
+ @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val IterableWrapper = Wrappers.IterableWrapper
+ @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val IteratorWrapper = Wrappers.IteratorWrapper
+ @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val JCollectionWrapper = Wrappers.JCollectionWrapper
+ @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val JConcurrentMapWrapper = Wrappers.JConcurrentMapWrapper
+ @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val JDictionaryWrapper = Wrappers.JDictionaryWrapper
+ @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val JEnumerationWrapper = Wrappers.JEnumerationWrapper
+ @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val JIterableWrapper = Wrappers.JIterableWrapper
+ @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val JIteratorWrapper = Wrappers.JIteratorWrapper
+ @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val JListWrapper = Wrappers.JListWrapper
+ @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val JMapWrapper = Wrappers.JMapWrapper
+ @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val JPropertiesWrapper = Wrappers.JPropertiesWrapper
+ @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val JSetWrapper = Wrappers.JSetWrapper
+ @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val MutableBufferWrapper = Wrappers.MutableBufferWrapper
+ @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val MutableMapWrapper = Wrappers.MutableMapWrapper
+ @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val MutableSeqWrapper = Wrappers.MutableSeqWrapper
+ @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val MutableSetWrapper = Wrappers.MutableSetWrapper
+ @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val SeqWrapper = Wrappers.SeqWrapper
+
// Note to implementors: the cavalcade of deprecated methods herein should
// serve as a warning to any who follow: don't overload implicit methods.
- import java.{ lang => jl, util => ju }
- import java.util.{ concurrent => juc }
-
- // Scala => Java
-
- /**
- * Implicitly converts a Scala Iterator to a Java Iterator.
- * The returned Java Iterator is backed by the provided Scala
- * Iterator and any side-effects of using it via the Java interface will
- * be visible via the Scala interface and vice versa.
- *
- * If the Scala Iterator was previously obtained from an implicit or
- * explicit call of `asIterator(java.util.Iterator)` then the original
- * Java Iterator will be returned.
- *
- * @param i The Iterator to be converted.
- * @return A Java Iterator view of the argument.
- */
- implicit def asJavaIterator[A](it: Iterator[A]): ju.Iterator[A] = it match {
- case JIteratorWrapper(wrapped) => wrapped.asInstanceOf[ju.Iterator[A]]
- case _ => IteratorWrapper(it)
- }
-
- /**
- * Implicitly converts a Scala Iterator to a Java Enumeration.
- * The returned Java Enumeration is backed by the provided Scala
- * Iterator and any side-effects of using it via the Java interface will
- * be visible via the Scala interface and vice versa.
- *
- * If the Scala Iterator was previously obtained from an implicit or
- * explicit call of `asIterator(java.util.Enumeration)` then the
- * original Java Enumeration will be returned.
- *
- * @param i The Iterator to be converted.
- * @return A Java Enumeration view of the argument.
- */
- implicit def asJavaEnumeration[A](it: Iterator[A]): ju.Enumeration[A] = it match {
- case JEnumerationWrapper(wrapped) => wrapped.asInstanceOf[ju.Enumeration[A]]
- case _ => IteratorWrapper(it)
- }
-
- /**
- * Implicitly converts a Scala Iterable to a Java Iterable.
- * The returned Java Iterable is backed by the provided Scala
- * Iterable and any side-effects of using it via the Java interface will
- * be visible via the Scala interface and vice versa.
- *
- * If the Scala Iterable was previously obtained from an implicit or
- * explicit call of `asIterable(java.lang.Iterable)` then the original
- * Java Iterable will be returned.
- *
- * @param i The Iterable to be converted.
- * @return A Java Iterable view of the argument.
- */
- implicit def asJavaIterable[A](i: Iterable[A]): jl.Iterable[A] = i match {
- case JIterableWrapper(wrapped) => wrapped.asInstanceOf[jl.Iterable[A]]
- case _ => IterableWrapper(i)
- }
-
- /**
- * Implicitly converts a Scala Iterable to an immutable Java
- * Collection.
- *
- * If the Scala Iterable was previously obtained from an implicit or
- * explicit call of `asSizedIterable(java.util.Collection)` then the original
- * Java Collection will be returned.
- *
- * @param i The SizedIterable to be converted.
- * @return A Java Collection view of the argument.
- */
- implicit def asJavaCollection[A](it: Iterable[A]): ju.Collection[A] = it match {
- case JCollectionWrapper(wrapped) => wrapped.asInstanceOf[ju.Collection[A]]
- case _ => new IterableWrapper(it)
- }
-
- /**
- * Implicitly converts a Scala mutable Buffer to a Java List.
- * The returned Java List is backed by the provided Scala
- * Buffer and any side-effects of using it via the Java interface will
- * be visible via the Scala interface and vice versa.
- *
- * If the Scala Buffer was previously obtained from an implicit or
- * explicit call of `asBuffer(java.util.List)` then the original
- * Java List will be returned.
- *
- * @param b The Buffer to be converted.
- * @return A Java List view of the argument.
- */
- implicit def bufferAsJavaList[A](b: mutable.Buffer[A]): ju.List[A] = b match {
- case JListWrapper(wrapped) => wrapped
- case _ => new MutableBufferWrapper(b)
- }
@deprecated("use bufferAsJavaList instead", "2.9.0")
def asJavaList[A](b : mutable.Buffer[A]): ju.List[A] = bufferAsJavaList[A](b)
- /**
- * Implicitly converts a Scala mutable Seq to a Java List.
- * The returned Java List is backed by the provided Scala
- * Seq and any side-effects of using it via the Java interface will
- * be visible via the Scala interface and vice versa.
- *
- * If the Scala Seq was previously obtained from an implicit or
- * explicit call of `asSeq(java.util.List)` then the original
- * Java List will be returned.
- *
- * @param b The Seq to be converted.
- * @return A Java List view of the argument.
- */
- implicit def mutableSeqAsJavaList[A](seq: mutable.Seq[A]): ju.List[A] = seq match {
- case JListWrapper(wrapped) => wrapped
- case _ => new MutableSeqWrapper(seq)
- }
@deprecated("use mutableSeqAsJavaList instead", "2.9.0")
def asJavaList[A](b : mutable.Seq[A]): ju.List[A] = mutableSeqAsJavaList[A](b)
- /**
- * Implicitly converts a Scala Seq to a Java List.
- * The returned Java List is backed by the provided Scala
- * Seq and any side-effects of using it via the Java interface will
- * be visible via the Scala interface and vice versa.
- *
- * If the Scala Seq was previously obtained from an implicit or
- * explicit call of `asSeq(java.util.List)` then the original
- * Java List will be returned.
- *
- * @param b The Seq to be converted.
- * @return A Java List view of the argument.
- */
- implicit def seqAsJavaList[A](seq: Seq[A]): ju.List[A] = seq match {
- case JListWrapper(wrapped) => wrapped.asInstanceOf[ju.List[A]]
- case _ => new SeqWrapper(seq)
- }
-
@deprecated("use seqAsJavaList instead", "2.9.0")
def asJavaList[A](b : Seq[A]): ju.List[A] = seqAsJavaList[A](b)
- /**
- * Implicitly converts a Scala mutable Set to a Java Set.
- * The returned Java Set is backed by the provided Scala
- * Set and any side-effects of using it via the Java interface will
- * be visible via the Scala interface and vice versa.
- *
- * If the Scala Set was previously obtained from an implicit or
- * explicit call of `asSet(java.util.Set)` then the original
- * Java Set will be returned.
- *
- * @param s The Set to be converted.
- * @return A Java Set view of the argument.
- */
- implicit def mutableSetAsJavaSet[A](s: mutable.Set[A]): ju.Set[A] = s match {
- case JSetWrapper(wrapped) => wrapped
- case _ => new MutableSetWrapper(s)
- }
-
@deprecated("use mutableSetAsJavaSet instead", "2.9.0")
def asJavaSet[A](s : mutable.Set[A]): ju.Set[A] = mutableSetAsJavaSet[A](s)
- /**
- * Implicitly converts a Scala Set to a Java Set.
- * The returned Java Set is backed by the provided Scala
- * Set and any side-effects of using it via the Java interface will
- * be visible via the Scala interface and vice versa.
- *
- * If the Scala Set was previously obtained from an implicit or
- * explicit call of asSet(java.util.Set) then the original
- * Java Set will be returned.
- *
- * @param s The Set to be converted.
- * @return A Java Set view of the argument.
- */
- implicit def setAsJavaSet[A](s: Set[A]): ju.Set[A] = s match {
- case JSetWrapper(wrapped) => wrapped
- case _ => new SetWrapper(s)
- }
-
@deprecated("use setAsJavaSet instead", "2.9.0")
def asJavaSet[A](s: Set[A]): ju.Set[A] = setAsJavaSet[A](s)
- /**
- * Implicitly converts a Scala mutable Map to a Java Map.
- * The returned Java Map is backed by the provided Scala
- * Map and any side-effects of using it via the Java interface will
- * be visible via the Scala interface and vice versa.
- *
- * If the Scala Map was previously obtained from an implicit or
- * explicit call of `asMap(java.util.Map)` then the original
- * Java Map will be returned.
- *
- * @param m The Map to be converted.
- * @return A Java Map view of the argument.
- */
- implicit def mutableMapAsJavaMap[A, B](m: mutable.Map[A, B]): ju.Map[A, B] = m match {
- //case JConcurrentMapWrapper(wrapped) => wrapped
- case JMapWrapper(wrapped) => wrapped
- case _ => new MutableMapWrapper(m)
- }
-
@deprecated("use mutableMapAsJavaMap instead", "2.9.0")
def asJavaMap[A, B](m : mutable.Map[A, B]): ju.Map[A, B] = mutableMapAsJavaMap[A, B](m)
- /**
- * Implicitly converts a Scala mutable `Map` to a Java `Dictionary`.
- *
- * The returned Java `Dictionary` is backed by the provided Scala
- * `Dictionary` and any side-effects of using it via the Java interface
- * will be visible via the Scala interface and vice versa.
- *
- * If the Scala `Dictionary` was previously obtained from an implicit or
- * explicit call of `asMap(java.util.Dictionary)` then the original
- * Java Dictionary will be returned.
- *
- * @param m The `Map` to be converted.
- * @return A Java `Dictionary` view of the argument.
- */
- implicit def asJavaDictionary[A, B](m: mutable.Map[A, B]): ju.Dictionary[A, B] = m match {
- //case JConcurrentMapWrapper(wrapped) => wrapped
- case JDictionaryWrapper(wrapped) => wrapped
- case _ => new DictionaryWrapper(m)
- }
-
- /**
- * Implicitly converts a Scala `Map` to a Java `Map`.
- *
- * The returned Java `Map` is backed by the provided Scala `Map` and
- * any side-effects of using it via the Java interface will be visible
- * via the Scala interface and vice versa.
- *
- * If the Scala `Map` was previously obtained from an implicit or
- * explicit call of `asMap(java.util.Map)` then the original
- * Java `Map` will be returned.
- *
- * @param m The `Map` to be converted.
- * @return A Java `Map` view of the argument.
- */
- implicit def mapAsJavaMap[A, B](m: Map[A, B]): ju.Map[A, B] = m match {
- //case JConcurrentMapWrapper(wrapped) => wrapped
- case JMapWrapper(wrapped) => wrapped.asInstanceOf[ju.Map[A, B]]
- case _ => new MapWrapper(m)
- }
-
@deprecated("use mapAsJavaMap instead", "2.9.0")
def asJavaMap[A, B](m : Map[A, B]): ju.Map[A, B] = mapAsJavaMap[A, B](m)
- /**
- * Implicitly converts a Scala mutable `ConcurrentMap` to a Java
- * `ConcurrentMap`.
- *
- * The returned Java `ConcurrentMap` is backed by the provided Scala
- * `ConcurrentMap` and any side-effects of using it via the Java interface
- * will be visible via the Scala interface and vice versa.
- *
- * If the Scala `ConcurrentMap` was previously obtained from an implicit or
- * explicit call of `asConcurrentMap(java.util.concurrect.ConcurrentMap)`
- * then the original Java ConcurrentMap will be returned.
- *
- * @param m The `ConcurrentMap` to be converted.
- * @return A Java `ConcurrentMap` view of the argument.
- */
- implicit def asJavaConcurrentMap[A, B](m: mutable.ConcurrentMap[A, B]): juc.ConcurrentMap[A, B] = m match {
- case JConcurrentMapWrapper(wrapped) => wrapped
- case _ => new ConcurrentMapWrapper(m)
- }
-
- // Java => Scala
-
- /**
- * Implicitly converts a Java `Iterator` to a Scala `Iterator`.
- *
- * The returned Scala `Iterator` is backed by the provided Java `Iterator`
- * and any side-effects of using it via the Scala interface will be visible
- * via the Java interface and vice versa.
- *
- * If the Java `Iterator` was previously obtained from an implicit or
- * explicit call of `asIterator(scala.collection.Iterator)` then the
- * original Scala `Iterator` will be returned.
- *
- * @param i The `Iterator` to be converted.
- * @return A Scala `Iterator` view of the argument.
- */
- implicit def asScalaIterator[A](it: ju.Iterator[A]): Iterator[A] = it match {
- case IteratorWrapper(wrapped) => wrapped
- case _ => JIteratorWrapper(it)
- }
-
- /**
- * Implicitly converts a Java Enumeration to a Scala Iterator.
- * The returned Scala Iterator is backed by the provided Java
- * Enumeration and any side-effects of using it via the Scala interface will
- * be visible via the Java interface and vice versa.
- *
- * If the Java Enumeration was previously obtained from an implicit or
- * explicit call of `enumerationAsScalaIterator(scala.collection.Iterator)`
- * then the original Scala Iterator will be returned.
- *
- * @param i The Enumeration to be converted.
- * @return A Scala Iterator view of the argument.
- */
- implicit def enumerationAsScalaIterator[A](i: ju.Enumeration[A]): Iterator[A] = i match {
- case IteratorWrapper(wrapped) => wrapped
- case _ => JEnumerationWrapper(i)
- }
-
- /**
- * Implicitly converts a Java `Iterable` to a Scala `Iterable`.
- *
- * The returned Scala `Iterable` is backed by the provided Java `Iterable`
- * and any side-effects of using it via the Scala interface will be visible
- * via the Java interface and vice versa.
- *
- * If the Java `Iterable` was previously obtained from an implicit or
- * explicit call of `iterableAsScalaIterable(scala.collection.Iterable)`
- * then the original Scala Iterable will be returned.
- *
- * @param i The Iterable to be converted.
- * @return A Scala Iterable view of the argument.
- */
- implicit def iterableAsScalaIterable[A](i: jl.Iterable[A]): Iterable[A] = i match {
- case IterableWrapper(wrapped) => wrapped
- case _ => JIterableWrapper(i)
- }
-
@deprecated("use iterableAsScalaIterable instead", "2.9.0")
def asScalaIterable[A](i : jl.Iterable[A]): Iterable[A] = iterableAsScalaIterable[A](i)
- /**
- * Implicitly converts a Java `Collection` to an Scala `Iterable`.
- *
- * If the Java `Collection` was previously obtained from an implicit or
- * explicit call of `collectionAsScalaIterable(scala.collection.SizedIterable)`
- * then the original Scala `Iterable` will be returned.
- *
- * @param i The Collection to be converted.
- * @return A Scala Iterable view of the argument.
- */
- implicit def collectionAsScalaIterable[A](i: ju.Collection[A]): Iterable[A] = i match {
- case IterableWrapper(wrapped) => wrapped
- case _ => JCollectionWrapper(i)
- }
@deprecated("use collectionAsScalaIterable instead", "2.9.0")
def asScalaIterable[A](i : ju.Collection[A]): Iterable[A] = collectionAsScalaIterable[A](i)
- /**
- * Implicitly converts a Java `List` to a Scala mutable `Buffer`.
- *
- * The returned Scala `Buffer` is backed by the provided Java `List`
- * and any side-effects of using it via the Scala interface will
- * be visible via the Java interface and vice versa.
- *
- * If the Java `List` was previously obtained from an implicit or
- * explicit call of `asScalaBuffer(scala.collection.mutable.Buffer)`
- * then the original Scala `Buffer` will be returned.
- *
- * @param l The `List` to be converted.
- * @return A Scala mutable `Buffer` view of the argument.
- */
- implicit def asScalaBuffer[A](l: ju.List[A]): mutable.Buffer[A] = l match {
- case MutableBufferWrapper(wrapped) => wrapped
- case _ =>new JListWrapper(l)
- }
-
- /**
- * Implicitly converts a Java Set to a Scala mutable Set.
- * The returned Scala Set is backed by the provided Java
- * Set and any side-effects of using it via the Scala interface will
- * be visible via the Java interface and vice versa.
- *
- * If the Java Set was previously obtained from an implicit or
- * explicit call of `asScalaSet(scala.collection.mutable.Set)` then
- * the original Scala Set will be returned.
- *
- * @param s The Set to be converted.
- * @return A Scala mutable Set view of the argument.
- */
- implicit def asScalaSet[A](s: ju.Set[A]): mutable.Set[A] = s match {
- case MutableSetWrapper(wrapped) => wrapped
- case _ =>new JSetWrapper(s)
- }
-
- /**
- * Implicitly converts a Java `Map` to a Scala mutable `Map`.
- *
- * The returned Scala `Map` is backed by the provided Java `Map` and any
- * side-effects of using it via the Scala interface will be visible via
- * the Java interface and vice versa.
- *
- * If the Java `Map` was previously obtained from an implicit or
- * explicit call of `mapAsScalaMap(scala.collection.mutable.Map)` then
- * the original Scala Map will be returned.
- *
- * @param m The Map to be converted.
- * @return A Scala mutable Map view of the argument.
- */
- implicit def mapAsScalaMap[A, B](m: ju.Map[A, B]): mutable.Map[A, B] = m match {
- //case ConcurrentMapWrapper(wrapped) => wrapped
- case MutableMapWrapper(wrapped) => wrapped
- case _ => new JMapWrapper(m)
- }
-
@deprecated("use mapAsScalaMap instead", "2.9.0")
def asScalaMap[A, B](m: ju.Map[A, B]): mutable.Map[A, B] = mapAsScalaMap[A, B](m)
- /**
- * Implicitly converts a Java ConcurrentMap to a Scala mutable ConcurrentMap.
- * The returned Scala ConcurrentMap is backed by the provided Java
- * ConcurrentMap and any side-effects of using it via the Scala interface will
- * be visible via the Java interface and vice versa.
- *
- * If the Java ConcurrentMap was previously obtained from an implicit or
- * explicit call of `asConcurrentMap(scala.collection.mutable.ConcurrentMap)`
- * then the original Scala ConcurrentMap will be returned.
- *
- * @param m The ConcurrentMap to be converted.
- * @return A Scala mutable ConcurrentMap view of the argument.
- */
- implicit def asScalaConcurrentMap[A, B](m: juc.ConcurrentMap[A, B]): mutable.ConcurrentMap[A, B] = m match {
- case cmw: ConcurrentMapWrapper[a, b] => cmw.underlying
- case _ => new JConcurrentMapWrapper(m)
- }
-
- /**
- * Implicitly converts a Java `Dictionary` to a Scala mutable
- * `Map[String, String]`.
- *
- * The returned Scala `Map[String, String]` is backed by the provided Java
- * `Dictionary` and any side-effects of using it via the Scala interface
- * will be visible via the Java interface and vice versa.
- *
- * @param m The Dictionary to be converted.
- * @return A Scala mutable Map[String, String] view of the argument.
- */
- implicit def dictionaryAsScalaMap[A, B](p: ju.Dictionary[A, B]): mutable.Map[A, B] = p match {
- case DictionaryWrapper(wrapped) => wrapped
- case _ => new JDictionaryWrapper(p)
- }
-
- /**
- * Implicitly converts a Java `Properties` to a Scala `mutable Map[String, String]`.
- *
- * The returned Scala `Map[String, String]` is backed by the provided Java
- * `Properties` and any side-effects of using it via the Scala interface
- * will be visible via the Java interface and vice versa.
- *
- * @param m The Properties to be converted.
- * @return A Scala mutable Map[String, String] view of the argument.
- */
- implicit def propertiesAsScalaMap(p: ju.Properties): mutable.Map[String, String] = p match {
- case _ => new JPropertiesWrapper(p)
- }
-
@deprecated("use propertiesAsScalaMap instead", "2.9.0")
def asScalaMap(p: ju.Properties): mutable.Map[String, String] = propertiesAsScalaMap(p)
-
- // Private implementations (shared by JavaConverters) ...
-
- trait IterableWrapperTrait[A] extends ju.AbstractCollection[A] {
- val underlying: Iterable[A]
- def size = underlying.size
- override def iterator = IteratorWrapper(underlying.iterator)
- override def isEmpty = underlying.isEmpty
- }
-
- case class IteratorWrapper[A](underlying: Iterator[A])
- extends ju.Iterator[A] with ju.Enumeration[A] {
- def hasNext = underlying.hasNext
- def next() = underlying.next
- def hasMoreElements = underlying.hasNext
- def nextElement() = underlying.next
- def remove() = throw new UnsupportedOperationException
- }
-
- class ToIteratorWrapper[A](underlying : Iterator[A]) {
- def asJava = new IteratorWrapper(underlying)
- }
-
- case class JIteratorWrapper[A](underlying: ju.Iterator[A]) extends AbstractIterator[A] with Iterator[A] {
- def hasNext = underlying.hasNext
- def next() = underlying.next
- }
-
- case class JEnumerationWrapper[A](underlying: ju.Enumeration[A]) extends AbstractIterator[A] with Iterator[A] {
- def hasNext = underlying.hasMoreElements
- def next() = underlying.nextElement
- }
-
- case class IterableWrapper[A](underlying: Iterable[A])
- extends ju.AbstractCollection[A]
- with IterableWrapperTrait[A] { }
-
- case class JIterableWrapper[A](underlying: jl.Iterable[A]) extends AbstractIterable[A] with Iterable[A] {
- def iterator = underlying.iterator
- def newBuilder[B] = new mutable.ArrayBuffer[B]
- }
-
- case class JCollectionWrapper[A](underlying: ju.Collection[A]) extends AbstractIterable[A] with Iterable[A] {
- def iterator = underlying.iterator
- override def size = underlying.size
- override def isEmpty = underlying.isEmpty
- def newBuilder[B] = new mutable.ArrayBuffer[B]
- }
-
- case class SeqWrapper[A](underlying: Seq[A]) extends ju.AbstractList[A] with IterableWrapperTrait[A] {
- def get(i: Int) = underlying(i)
- }
-
- case class MutableSeqWrapper[A](underlying: mutable.Seq[A])
- extends ju.AbstractList[A] with IterableWrapperTrait[A] {
- def get(i: Int) = underlying(i)
- override def set(i: Int, elem: A) = {
- val p = underlying(i)
- underlying(i) = elem
- p
- }
- }
-
- case class MutableBufferWrapper[A](underlying: mutable.Buffer[A])
- extends ju.AbstractList[A] with IterableWrapperTrait[A] {
- def get(i: Int) = underlying(i)
- override def set(i: Int, elem: A) = { val p = underlying(i); underlying(i) = elem; p }
- override def add(elem: A) = { underlying append elem; true }
- override def remove(i: Int) = underlying remove i
- }
-
- case class JListWrapper[A](val underlying: ju.List[A]) extends mutable.AbstractBuffer[A] with mutable.Buffer[A] {
- def length = underlying.size
- override def isEmpty = underlying.isEmpty
- override def iterator: Iterator[A] = underlying.iterator
- def apply(i: Int) = underlying.get(i)
- def update(i: Int, elem: A) = underlying.set(i, elem)
- def +=:(elem: A) = { underlying.subList(0, 0) add elem; this }
- def +=(elem: A): this.type = { underlying add elem; this }
- def insertAll(i: Int, elems: Traversable[A]) = {
- val ins = underlying.subList(0, i)
- elems.seq.foreach(ins.add(_))
- }
- def remove(i: Int) = underlying.remove(i)
- def clear() = underlying.clear()
- def result = this
- }
-
- class SetWrapper[A](underlying: Set[A]) extends ju.AbstractSet[A] {
- self =>
- def size = underlying.size
- def iterator = new ju.Iterator[A] {
- val ui = underlying.iterator
- var prev: Option[A] = None
- def hasNext = ui.hasNext
- def next = { val e = ui.next; prev = Some(e); e }
- def remove = prev match {
- case Some(e) =>
- underlying match {
- case ms: mutable.Set[a] =>
- ms remove e
- prev = None
- case _ =>
- throw new UnsupportedOperationException("remove")
- }
- case _ =>
- throw new IllegalStateException("next must be called at least once before remove")
- }
- }
- }
-
- case class MutableSetWrapper[A](underlying: mutable.Set[A]) extends SetWrapper[A](underlying) {
- override def add(elem: A) = {
- val sz = underlying.size
- underlying += elem
- sz < underlying.size
- }
- override def remove(elem: AnyRef) =
- try underlying remove elem.asInstanceOf[A]
- catch { case ex: ClassCastException => false }
- override def clear() = underlying.clear()
- }
-
- case class JSetWrapper[A](underlying: ju.Set[A])
- extends mutable.AbstractSet[A]
- with mutable.Set[A]
- with mutable.SetLike[A, JSetWrapper[A]] {
-
- override def size = underlying.size
-
- def iterator = underlying.iterator
-
- def contains(elem: A): Boolean = underlying.contains(elem)
-
- def +=(elem: A): this.type = { underlying add elem; this }
- def -=(elem: A): this.type = { underlying remove elem; this }
-
- override def add(elem: A): Boolean = underlying add elem
- override def remove(elem: A): Boolean = underlying remove elem
- override def clear() = underlying.clear()
-
- override def empty = JSetWrapper(new ju.HashSet[A])
- }
-
- class MapWrapper[A, B](underlying: Map[A, B]) extends ju.AbstractMap[A, B] { self =>
- override def size = underlying.size
-
- override def get(key: AnyRef): B = try {
- underlying get key.asInstanceOf[A] match {
- case None => null.asInstanceOf[B]
- case Some(v) => v
- }
- } catch {
- case ex: ClassCastException => null.asInstanceOf[B]
- }
-
- override def entrySet: ju.Set[ju.Map.Entry[A, B]] = new ju.AbstractSet[ju.Map.Entry[A, B]] {
- def size = self.size
-
- def iterator = new ju.Iterator[ju.Map.Entry[A, B]] {
- val ui = underlying.iterator
- var prev : Option[A] = None
-
- def hasNext = ui.hasNext
-
- def next() = {
- val (k, v) = ui.next
- prev = Some(k)
- new ju.Map.Entry[A, B] {
- def getKey = k
- def getValue = v
- def setValue(v1 : B) = self.put(k, v1)
- override def hashCode = k.hashCode + v.hashCode
- override def equals(other: Any) = other match {
- case e: ju.Map.Entry[_, _] => k == e.getKey && v == e.getValue
- case _ => false
- }
- }
- }
-
- def remove() {
- prev match {
- case Some(k) =>
- underlying match {
- case mm: mutable.Map[a, _] =>
- mm remove k
- prev = None
- case _ =>
- throw new UnsupportedOperationException("remove")
- }
- case _ =>
- throw new IllegalStateException("next must be called at least once before remove")
- }
- }
- }
- }
- }
-
- case class MutableMapWrapper[A, B](underlying: mutable.Map[A, B])
- extends MapWrapper[A, B](underlying) {
- override def put(k: A, v: B) = underlying.put(k, v) match {
- case Some(v1) => v1
- case None => null.asInstanceOf[B]
- }
-
- override def remove(k: AnyRef): B = try {
- underlying remove k.asInstanceOf[A] match {
- case None => null.asInstanceOf[B]
- case Some(v) => v
- }
- } catch {
- case ex: ClassCastException => null.asInstanceOf[B]
- }
-
- override def clear() = underlying.clear()
- }
-
- trait JMapWrapperLike[A, B, +Repr <: mutable.MapLike[A, B, Repr] with mutable.Map[A, B]]
- extends mutable.Map[A, B] with mutable.MapLike[A, B, Repr] {
- def underlying: ju.Map[A, B]
-
- override def size = underlying.size
-
- def get(k: A) = {
- val v = underlying get k
- if (v != null)
- Some(v)
- else if (underlying containsKey k)
- Some(null.asInstanceOf[B])
- else
- None
- }
-
- def +=(kv: (A, B)): this.type = { underlying.put(kv._1, kv._2); this }
- def -=(key: A): this.type = { underlying remove key; this }
-
- override def put(k: A, v: B): Option[B] = {
- val r = underlying.put(k, v)
- if (r != null) Some(r) else None
- }
-
- override def update(k: A, v: B) { underlying.put(k, v) }
-
- override def remove(k: A): Option[B] = {
- val r = underlying remove k
- if (r != null) Some(r) else None
- }
-
- def iterator: Iterator[(A, B)] = new AbstractIterator[(A, B)] {
- val ui = underlying.entrySet.iterator
- def hasNext = ui.hasNext
- def next() = { val e = ui.next(); (e.getKey, e.getValue) }
- }
-
- override def clear() = underlying.clear()
-
- override def empty: Repr = null.asInstanceOf[Repr]
- }
-
- case class JMapWrapper[A, B](val underlying : ju.Map[A, B])
- extends mutable.AbstractMap[A, B] with JMapWrapperLike[A, B, JMapWrapper[A, B]] {
- override def empty = JMapWrapper(new ju.HashMap[A, B])
- }
-
- class ConcurrentMapWrapper[A, B](override val underlying: mutable.ConcurrentMap[A, B])
- extends MutableMapWrapper[A, B](underlying) with juc.ConcurrentMap[A, B] {
-
- def putIfAbsent(k: A, v: B) = underlying.putIfAbsent(k, v) match {
- case Some(v) => v
- case None => null.asInstanceOf[B]
- }
-
- def remove(k: AnyRef, v: AnyRef) = try {
- underlying.remove(k.asInstanceOf[A], v.asInstanceOf[B])
- } catch {
- case ex: ClassCastException =>
- false
- }
-
- def replace(k: A, v: B): B = underlying.replace(k, v) match {
- case Some(v) => v
- case None => null.asInstanceOf[B]
- }
-
- def replace(k: A, oldval: B, newval: B) = underlying.replace(k, oldval, newval)
- }
-
- case class JConcurrentMapWrapper[A, B](val underlying: juc.ConcurrentMap[A, B])
- extends mutable.AbstractMap[A, B] with JMapWrapperLike[A, B, JConcurrentMapWrapper[A, B]] with mutable.ConcurrentMap[A, B] {
- override def get(k: A) = {
- val v = underlying get k
- if (v != null) Some(v)
- else None
- }
-
- override def empty = new JConcurrentMapWrapper(new juc.ConcurrentHashMap[A, B])
-
- def putIfAbsent(k: A, v: B): Option[B] = {
- val r = underlying.putIfAbsent(k, v)
- if (r != null) Some(r) else None
- }
-
- def remove(k: A, v: B): Boolean = underlying.remove(k, v)
-
- def replace(k: A, v: B): Option[B] = {
- val prev = underlying.replace(k, v)
- if (prev != null) Some(prev) else None
- }
-
- def replace(k: A, oldvalue: B, newvalue: B): Boolean =
- underlying.replace(k, oldvalue, newvalue)
- }
-
- case class DictionaryWrapper[A, B](underlying: mutable.Map[A, B])
- extends ju.Dictionary[A, B] {
- def size: Int = underlying.size
- def isEmpty: Boolean = underlying.isEmpty
- def keys: ju.Enumeration[A] = asJavaEnumeration(underlying.keysIterator)
- def elements: ju.Enumeration[B] = asJavaEnumeration(underlying.valuesIterator)
- def get(key: AnyRef) = try {
- underlying get key.asInstanceOf[A] match {
- case None => null.asInstanceOf[B]
- case Some(v) => v
- }
- } catch {
- case ex: ClassCastException => null.asInstanceOf[B]
- }
- def put(key: A, value: B): B = underlying.put(key, value) match {
- case Some(v) => v
- case None => null.asInstanceOf[B]
- }
- override def remove(key: AnyRef) = try {
- underlying remove key.asInstanceOf[A] match {
- case None => null.asInstanceOf[B]
- case Some(v) => v
- }
- } catch {
- case ex: ClassCastException => null.asInstanceOf[B]
- }
- }
-
- case class JDictionaryWrapper[A, B](underlying: ju.Dictionary[A, B])
- extends mutable.AbstractMap[A, B] with mutable.Map[A, B] {
- override def size: Int = underlying.size
-
- def get(k: A) = {
- val v = underlying get k
- if (v != null) Some(v) else None
- }
-
- def +=(kv: (A, B)): this.type = { underlying.put(kv._1, kv._2); this }
- def -=(key: A): this.type = { underlying remove key; this }
-
- override def put(k: A, v: B): Option[B] = {
- val r = underlying.put(k, v)
- if (r != null) Some(r) else None
- }
-
- override def update(k: A, v: B) { underlying.put(k, v) }
-
- override def remove(k: A): Option[B] = {
- val r = underlying remove k
- if (r != null) Some(r) else None
- }
-
- def iterator = enumerationAsScalaIterator(underlying.keys) map (k => (k, underlying get k))
-
- override def clear() = underlying.clear()
- }
-
- case class JPropertiesWrapper(underlying: ju.Properties)
- extends mutable.AbstractMap[String, String]
- with mutable.Map[String, String]
- with mutable.MapLike[String, String, JPropertiesWrapper] {
-
- override def size = underlying.size
-
- def get(k: String) = {
- val v = underlying get k
- if (v != null) Some(v.asInstanceOf[String]) else None
- }
-
- def +=(kv: (String, String)): this.type = { underlying.put(kv._1, kv._2); this }
- def -=(key: String): this.type = { underlying remove key; this }
-
- override def put(k: String, v: String): Option[String] = {
- val r = underlying.put(k, v)
- if (r != null) Some(r.asInstanceOf[String]) else None
- }
-
- override def update(k: String, v: String) { underlying.put(k, v) }
-
- override def remove(k: String): Option[String] = {
- val r = underlying remove k
- if (r != null) Some(r.asInstanceOf[String]) else None
- }
-
- def iterator: Iterator[(String, String)] = new AbstractIterator[(String, String)] {
- val ui = underlying.entrySet.iterator
- def hasNext = ui.hasNext
- def next() = {
- val e = ui.next()
- (e.getKey.asInstanceOf[String], e.getValue.asInstanceOf[String])
- }
- }
-
- override def clear() = underlying.clear()
-
- override def empty = JPropertiesWrapper(new ju.Properties)
-
- def getProperty(key: String) = underlying.getProperty(key)
-
- def getProperty(key: String, defaultValue: String) =
- underlying.getProperty(key, defaultValue)
-
- def setProperty(key: String, value: String) =
- underlying.setProperty(key, value)
- }
}
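
The object above now inherits its implicit views from WrapAsScala and WrapAsJava instead of defining them inline. A minimal sketch of how those views behave for callers (the JavaConversionsDemo object is hypothetical, not part of this commit):

    import scala.collection.JavaConversions._
    import scala.collection.mutable

    object JavaConversionsDemo extends App {
      val javaList: java.util.List[String] = new java.util.ArrayList[String]()
      javaList.add("a"); javaList.add("b")

      // Implicit view ju.List => mutable.Buffer; writes are visible on both sides.
      val buf: mutable.Buffer[String] = javaList
      buf += "c"
      println(javaList)                  // [a, b, c]

      // Converting back unwraps to the original Java list rather than re-wrapping it.
      val roundTrip: java.util.List[String] = buf
      println(roundTrip eq javaList)     // true
    }
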
diff --git a/src/library/scala/collection/JavaConverters.scala b/src/library/scala/collection/JavaConverters.scala
index d213e60112..13f1f19f81 100755
--- a/src/library/scala/collection/JavaConverters.scala
+++ b/src/library/scala/collection/JavaConverters.scala
@@ -8,7 +8,15 @@
package scala.collection
-/** A collection of decorators that allow to convert between
+import java.{ lang => jl, util => ju }, java.util.{ concurrent => juc }
+import convert._
+
+// TODO: I cleaned all this documentation up in JavaConversions, but the
+// documentation in here is basically the pre-cleaned-up version with minor
+// additions. Would be nice to have in one place.
+
+
+/** A collection of decorators that allow converting between
* Scala and Java collections using `asScala` and `asJava` methods.
*
* The following conversions are supported via `asJava`, `asScala`
@@ -48,494 +56,48 @@ package scala.collection
* @author Martin Odersky
* @since 2.8.1
*/
-
-trait JavaConverters {
- import java.{ lang => jl, util => ju }
- import java.util.{ concurrent => juc }
- import JavaConversions._
-
- // TODO: I cleaned all this documentation up in JavaConversions, but the
- // documentation in here is basically the pre-cleaned-up version with minor
- // additions. Would be nice to have in one place.
-
- // Conversion decorator classes
-
- /** Generic class containing the `asJava` converter method */
- class AsJava[C](op: => C) {
- /** Converts a Scala collection to the corresponding Java collection */
- def asJava: C = op
- }
-
- /** Generic class containing the `asScala` converter method */
- class AsScala[C](op: => C) {
- /** Converts a Java collection to the corresponding Scala collection */
- def asScala: C = op
- }
-
- /** Generic class containing the `asJavaCollection` converter method */
- class AsJavaCollection[A](i: Iterable[A]) {
- /** Converts a Scala `Iterable` to a Java `Collection` */
- def asJavaCollection: ju.Collection[A] = JavaConversions.asJavaCollection(i)
- }
-
- /** Generic class containing the `asJavaEnumeration` converter method */
- class AsJavaEnumeration[A](i: Iterator[A]) {
- /** Converts a Scala `Iterator` to a Java `Enumeration` */
- def asJavaEnumeration: ju.Enumeration[A] = JavaConversions.asJavaEnumeration(i)
- }
-
- /** Generic class containing the `asJavaDictionary` converter method */
- class AsJavaDictionary[A, B](m : mutable.Map[A, B]) {
- /** Converts a Scala `Map` to a Java `Dictionary` */
- def asJavaDictionary: ju.Dictionary[A, B] = JavaConversions.asJavaDictionary(m)
- }
-
- // Scala => Java
-
- /**
- * Adds an `asJava` method that implicitly converts a Scala `Iterator` to a
- * Java `Iterator`. The returned Java `Iterator` is backed by the provided Scala
- * `Iterator` and any side-effects of using it via the Java interface will
- * be visible via the Scala interface and vice versa.
- *
- * If the Scala `Iterator` was previously obtained from an implicit or explicit
- * call of `asIterator(java.util.Iterator)` then the original Java `Iterator`
- * will be returned by the `asJava` method.
- *
- * @param i The `Iterator` to be converted.
- * @return An object with an `asJava` method that returns a Java `Iterator` view of the argument.
- */
- implicit def asJavaIteratorConverter[A](i : Iterator[A]): AsJava[ju.Iterator[A]] =
- new AsJava(asJavaIterator(i))
-
- /**
- * Adds an `asJavaEnumeration` method that implicitly converts a Scala
- * `Iterator` to a Java `Enumeration`. The returned Java `Enumeration` is
- * backed by the provided Scala `Iterator` and any side-effects of using
- * it via the Java interface will be visible via the Scala interface and
- * vice versa.
- *
- * If the Scala `Iterator` was previously obtained from an implicit or
- * explicit call of `asIterator(java.util.Enumeration)` then the
- * original Java `Enumeration` will be returned.
- *
- * @param i The `Iterator` to be converted.
- * @return An object with an `asJavaEnumeration` method that returns a Java
- * `Enumeration` view of the argument.
- */
- implicit def asJavaEnumerationConverter[A](i : Iterator[A]): AsJavaEnumeration[A] =
- new AsJavaEnumeration(i)
-
- /**
- * Adds an `asJava` method that implicitly converts a Scala `Iterable` to
- * a Java `Iterable`.
- *
- * The returned Java `Iterable` is backed by the provided Scala `Iterable`
- * and any side-effects of using it via the Java interface will be visible
- * via the Scala interface and vice versa.
- *
- * If the Scala `Iterable` was previously obtained from an implicit or
- * explicit call of `asIterable(java.lang.Iterable)` then the original
- * Java `Iterable` will be returned.
- *
- * @param i The `Iterable` to be converted.
- * @return An object with an `asJavaCollection` method that returns a Java
- * `Iterable` view of the argument.
- */
- implicit def asJavaIterableConverter[A](i : Iterable[A]): AsJava[jl.Iterable[A]] =
- new AsJava(asJavaIterable(i))
-
- /**
- * Adds an `asJavaCollection` method that implicitly converts a Scala
- * `Iterable` to an immutable Java `Collection`.
- *
- * If the Scala `Iterable` was previously obtained from an implicit or
- * explicit call of `asSizedIterable(java.util.Collection)` then the
- * original Java `Collection` will be returned.
- *
- * @param i The `SizedIterable` to be converted.
- * @return An object with an `asJava` method that returns a Java
- * `Collection` view of the argument.
- */
- implicit def asJavaCollectionConverter[A](i : Iterable[A]): AsJavaCollection[A] =
- new AsJavaCollection(i)
-
- /**
- * Adds an `asJava` method that implicitly converts a Scala mutable `Buffer`
- * to a Java `List`.
- *
- * The returned Java `List` is backed by the provided Scala `Buffer` and any
- * side-effects of using it via the Java interface will be visible via the
- * Scala interface and vice versa.
- *
- * If the Scala `Buffer` was previously obtained from an implicit or explicit
- * call of `asBuffer(java.util.List)` then the original Java `List` will be
- * returned.
- *
- * @param b The `Buffer` to be converted.
- * @return An object with an `asJava` method that returns a Java `List` view
- * of the argument.
- */
- implicit def bufferAsJavaListConverter[A](b : mutable.Buffer[A]): AsJava[ju.List[A]] =
- new AsJava(bufferAsJavaList(b))
-
- /**
- * Adds an `asJava` method that implicitly converts a Scala mutable `Seq`
- * to a Java `List`.
- *
- * The returned Java `List` is backed by the provided Scala `Seq` and any
- * side-effects of using it via the Java interface will be visible via the
- * Scala interface and vice versa.
- *
- * If the Scala `Seq` was previously obtained from an implicit or explicit
- * call of `asSeq(java.util.List)` then the original Java `List` will be
- * returned.
- *
- * @param b The `Seq` to be converted.
- * @return An object with an `asJava` method that returns a Java `List`
- * view of the argument.
- */
- implicit def mutableSeqAsJavaListConverter[A](b : mutable.Seq[A]): AsJava[ju.List[A]] =
- new AsJava(mutableSeqAsJavaList(b))
-
- /**
- * Adds an `asJava` method that implicitly converts a Scala `Seq` to a
- * Java `List`.
- *
- * The returned Java `List` is backed by the provided Scala `Seq` and any
- * side-effects of using it via the Java interface will be visible via the
- * Scala interface and vice versa.
- *
- * If the Scala `Seq` was previously obtained from an implicit or explicit
- * call of `asSeq(java.util.List)` then the original Java `List` will be
- * returned.
- *
- * @param b The `Seq` to be converted.
- * @return An object with an `asJava` method that returns a Java `List`
- * view of the argument.
- */
- implicit def seqAsJavaListConverter[A](b : Seq[A]): AsJava[ju.List[A]] =
- new AsJava(seqAsJavaList(b))
+object JavaConverters extends DecorateAsJava with DecorateAsScala {
+ @deprecated("Don't access these decorators directly.", "2.10.0")
+ type AsJava[A] = Decorators.AsJava[A]
+ @deprecated("Don't access these decorators directly.", "2.10.0")
+ type AsScala[A] = Decorators.AsScala[A]
+ @deprecated("Don't access these decorators directly.", "2.10.0")
+ type AsJavaCollection[A] = Decorators.AsJavaCollection[A]
+ @deprecated("Don't access these decorators directly.", "2.10.0")
+ type AsJavaEnumeration[A] = Decorators.AsJavaEnumeration[A]
+ @deprecated("Don't access these decorators directly.", "2.10.0")
+ type AsJavaDictionary[A, B] = Decorators.AsJavaDictionary[A, B]
@deprecated("Use bufferAsJavaListConverter instead", "2.9.0")
def asJavaListConverter[A](b : mutable.Buffer[A]): AsJava[ju.List[A]] = bufferAsJavaListConverter(b)
+
@deprecated("Use mutableSeqAsJavaListConverter instead", "2.9.0")
def asJavaListConverter[A](b : mutable.Seq[A]): AsJava[ju.List[A]] = mutableSeqAsJavaListConverter(b)
+
@deprecated("Use seqAsJavaListConverter instead", "2.9.0")
def asJavaListConverter[A](b : Seq[A]): AsJava[ju.List[A]] = seqAsJavaListConverter(b)
- /**
- * Adds an `asJava` method that implicitly converts a Scala mutable `Set`>
- * to a Java `Set`.
- *
- * The returned Java `Set` is backed by the provided Scala `Set` and any
- * side-effects of using it via the Java interface will be visible via
- * the Scala interface and vice versa.
- *
- * If the Scala `Set` was previously obtained from an implicit or explicit
- * call of `asSet(java.util.Set)` then the original Java `Set` will be
- * returned.
- *
- * @param s The `Set` to be converted.
- * @return An object with an `asJava` method that returns a Java `Set` view
- * of the argument.
- */
- implicit def mutableSetAsJavaSetConverter[A](s : mutable.Set[A]): AsJava[ju.Set[A]] =
- new AsJava(mutableSetAsJavaSet(s))
-
@deprecated("Use mutableSetAsJavaSetConverter instead", "2.9.0")
def asJavaSetConverter[A](s : mutable.Set[A]): AsJava[ju.Set[A]] = mutableSetAsJavaSetConverter(s)
- /**
- * Adds an `asJava` method that implicitly converts a Scala `Set` to a
- * Java `Set`.
- *
- * The returned Java `Set` is backed by the provided Scala `Set` and any
- * side-effects of using it via the Java interface will be visible via
- * the Scala interface and vice versa.
- *
- * If the Scala `Set` was previously obtained from an implicit or explicit
- * call of `asSet(java.util.Set)` then the original Java `Set` will be
- * returned.
- *
- * @param s The `Set` to be converted.
- * @return An object with an `asJava` method that returns a Java `Set` view
- * of the argument.
- */
- implicit def setAsJavaSetConverter[A](s : Set[A]): AsJava[ju.Set[A]] =
- new AsJava(setAsJavaSet(s))
-
@deprecated("Use setAsJavaSetConverter instead", "2.9.0")
def asJavaSetConverter[A](s : Set[A]): AsJava[ju.Set[A]] = setAsJavaSetConverter(s)
- /**
- * Adds an `asJava` method that implicitly converts a Scala mutable `Map`
- * to a Java `Map`.
- *
- * The returned Java `Map` is backed by the provided Scala `Map` and any
- * side-effects of using it via the Java interface will be visible via the
- * Scala interface and vice versa.
- *
- * If the Scala `Map` was previously obtained from an implicit or explicit
- * call of `asMap(java.util.Map)` then the original Java `Map` will be
- * returned.
- *
- * @param m The `Map` to be converted.
- * @return An object with an `asJava` method that returns a Java `Map` view
- * of the argument.
- */
- implicit def mutableMapAsJavaMapConverter[A, B](m : mutable.Map[A, B]): AsJava[ju.Map[A, B]] =
- new AsJava(mutableMapAsJavaMap(m))
-
@deprecated("use mutableMapAsJavaMapConverter instead", "2.9.0")
def asJavaMapConverter[A, B](m : mutable.Map[A, B]): AsJava[ju.Map[A, B]] = mutableMapAsJavaMapConverter(m)
- /**
- * Adds an `asJavaDictionary` method that implicitly converts a Scala
- * mutable `Map` to a Java `Dictionary`.
- *
- * The returned Java `Dictionary` is backed by the provided Scala
- * `Dictionary` and any side-effects of using it via the Java interface
- * will be visible via the Scala interface and vice versa.
- *
- * If the Scala `Dictionary` was previously obtained from an implicit or
- * explicit call of `asMap(java.util.Dictionary)` then the original
- * Java `Dictionary` will be returned.
- *
- * @param m The `Map` to be converted.
- * @return An object with an `asJavaDictionary` method that returns a
- * Java `Dictionary` view of the argument.
- */
- implicit def asJavaDictionaryConverter[A, B](m : mutable.Map[A, B]): AsJavaDictionary[A, B] =
- new AsJavaDictionary(m)
-
- /**
- * Adds an `asJava` method that implicitly converts a Scala `Map` to
- * a Java `Map`.
- *
- * The returned Java `Map` is backed by the provided Scala `Map` and any
- * side-effects of using it via the Java interface will be visible via
- * the Scala interface and vice versa.
- *
- * If the Scala `Map` was previously obtained from an implicit or explicit
- * call of `asMap(java.util.Map)` then the original Java `Map` will be
- * returned.
- *
- * @param m The `Map` to be converted.
- * @return An object with an `asJava` method that returns a Java `Map` view
- * of the argument.
- */
- implicit def mapAsJavaMapConverter[A, B](m : Map[A, B]): AsJava[ju.Map[A, B]] =
- new AsJava(mapAsJavaMap(m))
-
@deprecated("Use mapAsJavaMapConverter instead", "2.9.0")
def asJavaMapConverter[A, B](m : Map[A, B]): AsJava[ju.Map[A, B]] = mapAsJavaMapConverter(m)
- /**
- * Adds an `asJava` method that implicitly converts a Scala mutable
- * `ConcurrentMap` to a Java `ConcurrentMap`.
- *
- * The returned Java `ConcurrentMap` is backed by the provided Scala
- * `ConcurrentMap` and any side-effects of using it via the Java interface
- * will be visible via the Scala interface and vice versa.
- *
- * If the Scala `ConcurrentMap` was previously obtained from an implicit or
- * explicit call of `asConcurrentMap(java.util.concurrect.ConcurrentMap)`
- * then the original Java `ConcurrentMap` will be returned.
- *
- * @param m The `ConcurrentMap` to be converted.
- * @return An object with an `asJava` method that returns a Java
- * `ConcurrentMap` view of the argument.
- */
- implicit def asJavaConcurrentMapConverter[A, B](m: mutable.ConcurrentMap[A, B]): AsJava[juc.ConcurrentMap[A, B]] =
- new AsJava(asJavaConcurrentMap(m))
-
- /**
- * Adds an `asScala` method that implicitly converts a Java `Iterator` to
- * a Scala `Iterator`.
- *
- * The returned Scala `Iterator` is backed by the provided Java `Iterator`
- * and any side-effects of using it via the Scala interface will be visible
- * via the Java interface and vice versa.
- *
- * If the Java `Iterator` was previously obtained from an implicit or
- * explicit call of `asIterator(scala.collection.Iterator)` then the
- * original Scala `Iterator` will be returned.
- *
- * @param i The `Iterator` to be converted.
- * @return An object with an `asScala` method that returns a Scala
- * `Iterator` view of the argument.
- */
- implicit def asScalaIteratorConverter[A](i : ju.Iterator[A]): AsScala[Iterator[A]] =
- new AsScala(asScalaIterator(i))
-
- /**
- * Adds an `asScala` method that implicitly converts a Java `Enumeration`
- * to a Scala `Iterator`.
- *
- * The returned Scala `Iterator` is backed by the provided Java
- * `Enumeration` and any side-effects of using it via the Scala interface
- * will be visible via the Java interface and vice versa.
- *
- * If the Java `Enumeration` was previously obtained from an implicit or
- * explicit call of `asEnumeration(scala.collection.Iterator)` then the
- * original Scala `Iterator` will be returned.
- *
- * @param i The `Enumeration` to be converted.
- * @return An object with an `asScala` method that returns a Scala
- * `Iterator` view of the argument.
- */
- implicit def enumerationAsScalaIteratorConverter[A](i : ju.Enumeration[A]): AsScala[Iterator[A]] =
- new AsScala(enumerationAsScalaIterator(i))
-
- /**
- * Adds an `asScala` method that implicitly converts a Java `Iterable` to
- * a Scala `Iterable`.
- *
- * The returned Scala `Iterable` is backed by the provided Java `Iterable`
- * and any side-effects of using it via the Scala interface will be visible
- * via the Java interface and vice versa.
- *
- * If the Java `Iterable` was previously obtained from an implicit or
- * explicit call of `asIterable(scala.collection.Iterable)` then the original
- * Scala `Iterable` will be returned.
- *
- * @param i The `Iterable` to be converted.
- * @return An object with an `asScala` method that returns a Scala `Iterable`
- * view of the argument.
- */
- implicit def iterableAsScalaIterableConverter[A](i : jl.Iterable[A]): AsScala[Iterable[A]] =
- new AsScala(iterableAsScalaIterable(i))
-
@deprecated("Use iterableAsScalaIterableConverter instead", "2.9.0")
def asScalaIterableConverter[A](i : jl.Iterable[A]): AsScala[Iterable[A]] = iterableAsScalaIterableConverter(i)
- /**
- * Adds an `asScala` method that implicitly converts a Java `Collection` to
- * an Scala `Iterable`.
- *
- * If the Java `Collection` was previously obtained from an implicit or
- * explicit call of `asCollection(scala.collection.SizedIterable)` then
- * the original Scala `SizedIterable` will be returned.
- *
- * @param i The `Collection` to be converted.
- * @return An object with an `asScala` method that returns a Scala
- * `SizedIterable` view of the argument.
- */
- implicit def collectionAsScalaIterableConverter[A](i : ju.Collection[A]): AsScala[Iterable[A]] =
- new AsScala(collectionAsScalaIterable(i))
-
@deprecated("Use collectionAsScalaIterableConverter instead", "2.9.0")
def asScalaIterableConverter[A](i : ju.Collection[A]): AsScala[Iterable[A]] = collectionAsScalaIterableConverter(i)
- /**
- * Adds an `asScala` method that implicitly converts a Java `List` to a
- * Scala mutable `Buffer`.
- *
- * The returned Scala `Buffer` is backed by the provided Java `List` and
- * any side-effects of using it via the Scala interface will be visible via
- * the Java interface and vice versa.
- *
- * If the Java `List` was previously obtained from an implicit or explicit
- * call of `asList(scala.collection.mutable.Buffer)` then the original
- * Scala `Buffer` will be returned.
- *
- * @param l The `List` to be converted.
- * @return An object with an `asScala` method that returns a Scala mutable
- * `Buffer` view of the argument.
- */
- implicit def asScalaBufferConverter[A](l : ju.List[A]): AsScala[mutable.Buffer[A]] =
- new AsScala(asScalaBuffer(l))
-
- /**
- * Adds an `asScala` method that implicitly converts a Java `Set` to a
- * Scala mutable `Set`.
- *
- * The returned Scala `Set` is backed by the provided Java `Set` and any
- * side-effects of using it via the Scala interface will be visible via
- * the Java interface and vice versa.
- *
- * If the Java `Set` was previously obtained from an implicit or explicit
- * call of `asSet(scala.collection.mutable.Set)` then the original
- * Scala `Set` will be returned.
- *
- * @param s The `Set` to be converted.
- * @return An object with an `asScala` method that returns a Scala mutable
- * `Set` view of the argument.
- */
- implicit def asScalaSetConverter[A](s : ju.Set[A]): AsScala[mutable.Set[A]] =
- new AsScala(asScalaSet(s))
-
- /**
- * Adds an `asScala` method that implicitly converts a Java `Map` to a Scala
- * mutable `Map`. The returned Scala `Map` is backed by the provided Java
- * `Map` and any side-effects of using it via the Scala interface will
- * be visible via the Java interface and vice versa.
- *
- * If the Java `Map` was previously obtained from an implicit or explicit
- * call of `asMap(scala.collection.mutable.Map)` then the original
- * Scala `Map` will be returned.
- *
- * @param m The `Map` to be converted.
- * @return An object with an `asScala` method that returns a Scala mutable
- * `Map` view of the argument.
- */
- implicit def mapAsScalaMapConverter[A, B](m : ju.Map[A, B]): AsScala[mutable.Map[A, B]] =
- new AsScala(mapAsScalaMap(m))
-
@deprecated("Use mapAsScalaMapConverter instead", "2.9.0")
def asScalaMapConverter[A, B](m : ju.Map[A, B]): AsScala[mutable.Map[A, B]] = mapAsScalaMapConverter(m)
- /**
- * Adds an `asScala` method that implicitly converts a Java `ConcurrentMap`
- * to a Scala mutable `ConcurrentMap`. The returned Scala `ConcurrentMap` is
- * backed by the provided Java `ConcurrentMap` and any side-effects of using
- * it via the Scala interface will be visible via the Java interface and
- * vice versa.
- *
- * If the Java `ConcurrentMap` was previously obtained from an implicit or
- * explicit call of `asConcurrentMap(scala.collection.mutable.ConcurrentMap)`
- * then the original Scala `ConcurrentMap` will be returned.
- *
- * @param m The `ConcurrentMap` to be converted.
- * @return An object with an `asScala` method that returns a Scala mutable
- * `ConcurrentMap` view of the argument.
- */
- implicit def asScalaConcurrentMapConverter[A, B](m: juc.ConcurrentMap[A, B]): AsScala[mutable.ConcurrentMap[A, B]] =
- new AsScala(asScalaConcurrentMap(m))
-
- /**
- * Adds an `asScala` method that implicitly converts a Java `Dictionary`
- * to a Scala mutable `Map[String, String]`. The returned Scala
- * `Map[String, String]` is backed by the provided Java `Dictionary` and
- * any side-effects of using it via the Scala interface will be visible via
- * the Java interface and vice versa.
- *
- * @param m The `Dictionary` to be converted.
- * @return An object with an `asScala` method that returns a Scala mutable
- * `Map[String, String]` view of the argument.
- */
- implicit def dictionaryAsScalaMapConverter[A, B](p: ju.Dictionary[A, B]): AsScala[mutable.Map[A, B]] =
- new AsScala(dictionaryAsScalaMap(p))
-
- /**
- * Adds an `asScala` method that implicitly converts a Java `Properties`
- * to a Scala mutable `Map[String, String]`. The returned Scala
- * `Map[String, String]` is backed by the provided Java `Properties` and
- * any side-effects of using it via the Scala interface will be visible via
- * the Java interface and vice versa.
- *
- * @param m The `Properties` to be converted.
- * @return An object with an `asScala` method that returns a Scala mutable
- * `Map[String, String]` view of the argument.
- */
- implicit def propertiesAsScalaMapConverter(p: ju.Properties): AsScala[mutable.Map[String, String]] =
- new AsScala(propertiesAsScalaMap(p))
-
@deprecated("Use propertiesAsScalaMapConverter instead", "2.9.0")
- def asScalaMapConverter(p: ju.Properties): AsScala[mutable.Map[String, String]] =
- propertiesAsScalaMapConverter(p)
-
-}
-
-object JavaConverters extends JavaConverters
\ No newline at end of file
+ def asScalaMapConverter(p: ju.Properties): AsScala[mutable.Map[String, String]] = propertiesAsScalaMapConverter(p)
+}
\ No newline at end of file
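
Unlike JavaConversions, the decorators inherited here from DecorateAsJava and DecorateAsScala make the conversion explicit at the call site. A minimal sketch under that assumption (the JavaConvertersDemo object is illustrative, not part of this commit):

    import scala.collection.JavaConverters._
    import scala.collection.mutable

    object JavaConvertersDemo extends App {
      val scalaMap = mutable.Map("one" -> 1, "two" -> 2)

      val javaMap: java.util.Map[String, Int] = scalaMap.asJava
      javaMap.put("three", 3)                // visible through the Scala map as well
      println(scalaMap("three"))             // 3

      // Round-tripping returns the original collection instead of wrapping twice.
      println(javaMap.asScala eq scalaMap)   // true
    }
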
diff --git a/src/library/scala/collection/MapLike.scala b/src/library/scala/collection/MapLike.scala
index 07116e99dd..8f88e62791 100644
--- a/src/library/scala/collection/MapLike.scala
+++ b/src/library/scala/collection/MapLike.scala
@@ -91,14 +91,18 @@ self =>
* @param kv the key/value pair
* @tparam B1 the type of the value in the key/value pair.
* @return a new map with the new binding added to this map
+ *
* @usecase def + (kv: (A, B)): Map[A, B]
+ * @inheritdoc
*/
def + [B1 >: B] (kv: (A, B1)): Map[A, B1]
/** Removes a key from this map, returning a new map.
* @param key the key to be removed
* @return a new map without a binding for `key`
+ *
* @usecase def - (key: A): Map[A, B]
+ * @inheritdoc
*/
def - (key: A): This
@@ -115,7 +119,9 @@ self =>
* @tparam B1 the result type of the default computation.
* @return the value associated with `key` if it exists,
* otherwise the result of the `default` computation.
+ *
* @usecase def getOrElse(key: A, default: => B): B
+ * @inheritdoc
*/
def getOrElse[B1 >: B](key: A, default: => B1): B1 = get(key) match {
case Some(v) => v
@@ -255,7 +261,9 @@ self =>
* @param value the value
* @tparam B1 the type of the added value
* @return A new map with the new key/value mapping added to this map.
+ *
* @usecase def updated(key: A, value: B): Map[A, B]
+ * @inheritdoc
*/
def updated [B1 >: B](key: A, value: B1): Map[A, B1] = this + ((key, value))
@@ -269,8 +277,10 @@ self =>
* @param kvs the remaining key/value pairs
* @tparam B1 the type of the added values
* @return a new map with the given bindings added to this map
+ *
* @usecase def + (kvs: (A, B)*): Map[A, B]
- * @param the key/value pairs
+ * @inheritdoc
+ *              @param kvs the key/value pairs
*/
def + [B1 >: B] (kv1: (A, B1), kv2: (A, B1), kvs: (A, B1) *): Map[A, B1] =
this + kv1 + kv2 ++ kvs
@@ -280,7 +290,9 @@ self =>
* @param kvs the collection containing the added key/value pairs
* @tparam B1 the type of the added values
* @return a new map with the given bindings added to this map
+ *
* @usecase def ++ (xs: Traversable[(A, B)]): Map[A, B]
+ * @inheritdoc
*/
def ++[B1 >: B](xs: GenTraversableOnce[(A, B1)]): Map[A, B1] =
((repr: Map[A, B1]) /: xs.seq) (_ + _)
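[editor's note] The MapLike hunks above only rearrange scaladoc (`@usecase` plus `@inheritdoc`); as a reminder of what the documented operations do, here is a small hedged sketch of my own, not taken from the patch.

object MapLikeOps {
  def main(args: Array[String]): Unit = {
    val m  = Map("a" -> 1, "b" -> 2)
    val m2 = m + ("c" -> 3)                  // new map with one extra binding
    val m3 = m2 - "a"                        // new map without the "a" binding
    println(m3.getOrElse("a", 0))            // 0: the default is only computed on a miss
    println(m3.updated("b", 20))             // Map(b -> 20, c -> 3)
    println(m ++ List("d" -> 4, "e" -> 5))   // bulk addition of bindings
  }
}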
diff --git a/src/library/scala/collection/SeqLike.scala b/src/library/scala/collection/SeqLike.scala
index 526ea7e240..fd1d42d7e9 100644
--- a/src/library/scala/collection/SeqLike.scala
+++ b/src/library/scala/collection/SeqLike.scala
@@ -410,13 +410,6 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[
/** Produces a new sequence which contains all elements of this $coll and also all elements of
* a given sequence. `xs union ys` is equivalent to `xs ++ ys`.
- * $willNotTerminateInf
- *
- * Another way to express this
- * is that `xs union ys` computes the order-presevring multi-set union of `xs` and `ys`.
- * `union` is hence a counter-part of `diff` and `intersect` which also work on multi-sets.
- *
- * $willNotTerminateInf
*
* @param that the sequence to add.
* @tparam B the element type of the returned $coll.
@@ -425,14 +418,21 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[
* @return a new collection of type `That` which contains all elements of this $coll
* followed by all elements of `that`.
* @usecase def union(that: Seq[A]): $Coll[A]
- * @return a new $coll which contains all elements of this $coll
- * followed by all elements of `that`.
+ * @inheritdoc
+ *
+ * Another way to express this
+ *    is that `xs union ys` computes the order-preserving multi-set union of `xs` and `ys`.
+ *    `union` is hence a counterpart of `diff` and `intersect`, which also work on multi-sets.
+ *
+ * $willNotTerminateInf
+ *
+ * @return a new $coll which contains all elements of this $coll
+ * followed by all elements of `that`.
*/
override def union[B >: A, That](that: GenSeq[B])(implicit bf: CanBuildFrom[Repr, B, That]): That =
this ++ that
/** Computes the multiset difference between this $coll and another sequence.
- * $willNotTerminateInf
*
* @param that the sequence of elements to remove
* @tparam B the element type of the returned $coll.
@@ -444,11 +444,15 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[
* ''n'' times in `that`, then the first ''n'' occurrences of `x` will not form
* part of the result, but any following occurrences will.
* @usecase def diff(that: Seq[A]): $Coll[A]
- * @return a new $coll which contains all elements of this $coll
- * except some of occurrences of elements that also appear in `that`.
- * If an element value `x` appears
- * ''n'' times in `that`, then the first ''n'' occurrences of `x` will not form
- * part of the result, but any following occurrences will.
+ * @inheritdoc
+ *
+ * $willNotTerminateInf
+ *
+ * @return a new $coll which contains all elements of this $coll
+ *                except some occurrences of elements that also appear in `that`.
+ * If an element value `x` appears
+ * ''n'' times in `that`, then the first ''n'' occurrences of `x` will not form
+ * part of the result, but any following occurrences will.
*/
def diff[B >: A](that: GenSeq[B]): Repr = {
val occ = occCounts(that.seq)
@@ -463,7 +467,6 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[
def diff[B >: A](that: Seq[B]): Repr = diff(that: GenSeq[B])
/** Computes the multiset intersection between this $coll and another sequence.
- * $mayNotTerminateInf
*
* @param that the sequence of elements to intersect with.
* @tparam B the element type of the returned $coll.
@@ -475,11 +478,15 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[
* ''n'' times in `that`, then the first ''n'' occurrences of `x` will be retained
* in the result, but any following occurrences will be omitted.
* @usecase def intersect(that: Seq[A]): $Coll[A]
- * @return a new $coll which contains all elements of this $coll
- * which also appear in `that`.
- * If an element value `x` appears
- * ''n'' times in `that`, then the first ''n'' occurrences of `x` will be retained
- * in the result, but any following occurrences will be omitted.
+ * @inheritdoc
+ *
+ * $mayNotTerminateInf
+ *
+ * @return a new $coll which contains all elements of this $coll
+ * which also appear in `that`.
+ * If an element value `x` appears
+ * ''n'' times in `that`, then the first ''n'' occurrences of `x` will be retained
+ * in the result, but any following occurrences will be omitted.
*/
def intersect[B >: A](that: GenSeq[B]): Repr = {
val occ = occCounts(that.seq)
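[editor's note] A brief illustration (mine, not the patch's) of the multiset semantics that the relocated SeqLike scaladoc describes for `union`, `diff` and `intersect`: occurrences are matched one-for-one and order is preserved.

object MultisetOps {
  def main(args: Array[String]): Unit = {
    val xs = Seq(1, 1, 2, 3, 3, 3)
    val ys = Seq(1, 3, 3)
    println(xs union ys)      // List(1, 1, 2, 3, 3, 3, 1, 3, 3) -- same as xs ++ ys
    println(xs diff ys)       // List(1, 2, 3) -- one 1 and two 3s removed
    println(xs intersect ys)  // List(1, 3, 3) -- first n occurrences retained
  }
}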
diff --git a/src/library/scala/collection/TraversableLike.scala b/src/library/scala/collection/TraversableLike.scala
index 0da1cfb913..1f5beb5109 100644
--- a/src/library/scala/collection/TraversableLike.scala
+++ b/src/library/scala/collection/TraversableLike.scala
@@ -103,10 +103,6 @@ trait TraversableLike[+A, +Repr] extends Any
/** Applies a function `f` to all elements of this $coll.
*
- * Note: this method underlies the implementation of most other bulk operations.
- * It's important to implement this method in an efficient way.
- *
- *
* @param f the function that is applied for its side-effect to every element.
* The result of function `f` is discarded.
*
@@ -115,6 +111,11 @@ trait TraversableLike[+A, +Repr] extends Any
* but this is not necessary.
*
* @usecase def foreach(f: A => Unit): Unit
+ * @inheritdoc
+ *
+ * Note: this method underlies the implementation of most other bulk operations.
+ * It's important to implement this method in an efficient way.
+ *
*/
def foreach[U](f: A => U): Unit
@@ -165,17 +166,6 @@ trait TraversableLike[+A, +Repr] extends Any
* the resulting collection rather than the left one.
* Mnemonic: the COLon is on the side of the new COLlection type.
*
- * Example:
- * {{{
- * scala> val x = List(1)
- * x: List[Int] = List(1)
- *
- * scala> val y = LinkedList(2)
- * y: scala.collection.mutable.LinkedList[Int] = LinkedList(2)
- *
- * scala> val z = x ++: y
- * z: scala.collection.mutable.LinkedList[Int] = LinkedList(1, 2)
- * }}}
* @param that the traversable to append.
* @tparam B the element type of the returned collection.
* @tparam That $thatinfo
@@ -184,9 +174,22 @@ trait TraversableLike[+A, +Repr] extends Any
* of this $coll followed by all elements of `that`.
*
* @usecase def ++:[B](that: TraversableOnce[B]): $Coll[B]
- *
- * @return a new $coll which contains all elements of this $coll
- * followed by all elements of `that`.
+ * @inheritdoc
+ *
+ * Example:
+ * {{{
+ * scala> val x = List(1)
+ * x: List[Int] = List(1)
+ *
+ * scala> val y = LinkedList(2)
+ * y: scala.collection.mutable.LinkedList[Int] = LinkedList(2)
+ *
+ * scala> val z = x ++: y
+ * z: scala.collection.mutable.LinkedList[Int] = LinkedList(1, 2)
+ * }}}
+ *
+ * @return a new $coll which contains all elements of this $coll
+ * followed by all elements of `that`.
*/
def ++:[B >: A, That](that: TraversableOnce[B])(implicit bf: CanBuildFrom[Repr, B, That]): That = {
val b = bf(repr)
@@ -284,11 +287,12 @@ trait TraversableLike[+A, +Repr] extends Any
* The order of the elements is preserved.
*
* @usecase def filterMap[B](f: A => Option[B]): $Coll[B]
+ * @inheritdoc
*
- * @param pf the partial function which filters and maps the $coll.
- * @return a new $coll resulting from applying the given option-valued function
- * `f` to each element and collecting all defined results.
- * The order of the elements is preserved.
+ * @param pf the partial function which filters and maps the $coll.
+ * @return a new $coll resulting from applying the given option-valued function
+ * `f` to each element and collecting all defined results.
+ * The order of the elements is preserved.
def filterMap[B, That](f: A => Option[B])(implicit bf: CanBuildFrom[Repr, B, That]): That = {
val b = bf(repr)
for (x <- this)
@@ -588,8 +592,6 @@ trait TraversableLike[+A, +Repr] extends Any
* Copying will stop once either the end of the current $coll is reached,
* or the end of the array is reached, or `len` elements have been copied.
*
- * $willNotTerminateInf
- *
* @param xs the array to fill.
* @param start the starting index.
* @param len the maximal number of elements to copy.
@@ -597,6 +599,9 @@ trait TraversableLike[+A, +Repr] extends Any
*
*
* @usecase def copyToArray(xs: Array[A], start: Int, len: Int): Unit
+ * @inheritdoc
+ *
+ * $willNotTerminateInf
*/
def copyToArray[B >: A](xs: Array[B], start: Int, len: Int) {
var i = start
@@ -694,10 +699,11 @@ trait TraversableLike[+A, +Repr] extends Any
* that satisfies predicate `p` and collecting the results.
*
* @usecase def map[B](f: A => B): $Coll[B]
+ * @inheritdoc
*
- * @return a new $coll resulting from applying the given function
- * `f` to each element of the outer $coll that satisfies
- * predicate `p` and collecting the results.
+ * @return a new $coll resulting from applying the given function
+ * `f` to each element of the outer $coll that satisfies
+ * predicate `p` and collecting the results.
*/
def map[B, That](f: A => B)(implicit bf: CanBuildFrom[Repr, B, That]): That = {
val b = bf(repr)
@@ -710,9 +716,6 @@ trait TraversableLike[+A, +Repr] extends Any
* outer $coll containing this `WithFilter` instance that satisfy
* predicate `p` and concatenating the results.
*
- * The type of the resulting collection will be guided by the static type
- * of the outer $coll.
- *
* @param f the function to apply to each element.
* @tparam B the element type of the returned collection.
* @tparam That $thatinfo
@@ -723,11 +726,15 @@ trait TraversableLike[+A, +Repr] extends Any
* concatenating the results.
*
* @usecase def flatMap[B](f: A => TraversableOnce[B]): $Coll[B]
+ * @inheritdoc
+ *
+ * The type of the resulting collection will be guided by the static type
+ * of the outer $coll.
*
- * @return a new $coll resulting from applying the given
- * collection-valued function `f` to each element of the
- * outer $coll that satisfies predicate `p` and concatenating
- * the results.
+ * @return a new $coll resulting from applying the given
+ * collection-valued function `f` to each element of the
+ * outer $coll that satisfies predicate `p` and concatenating
+ * the results.
*/
def flatMap[B, That](f: A => GenTraversableOnce[B])(implicit bf: CanBuildFrom[Repr, B, That]): That = {
val b = bf(repr)
@@ -747,6 +754,7 @@ trait TraversableLike[+A, +Repr] extends Any
* but this is not necessary.
*
* @usecase def foreach(f: A => Unit): Unit
+ * @inheritdoc
*/
def foreach[U](f: A => U): Unit =
for (x <- self)
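[editor's note] As with MapLike, the TraversableLike changes are documentation-only. A compact sketch (assuming the standard 2.10 collections API; object name is illustrative) of the operations whose `@usecase` sections are touched above:

object TraversableOps {
  def main(args: Array[String]): Unit = {
    val xs = List(1, 2, 3)

    xs foreach (x => print(x + " "))          // foreach: applied only for its side effect
    println()

    println(xs map (_ * 2))                   // List(2, 4, 6)
    println(xs flatMap (x => List(x, -x)))    // List(1, -1, 2, -2, 3, -3)

    val arr = new Array[Int](5)
    xs.copyToArray(arr, 1, 2)                 // copies at most len = 2 elements
    println(arr.mkString(","))                // 0,1,2,0,0
  }
}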
diff --git a/src/library/scala/collection/concurrent/TrieMap.scala b/src/library/scala/collection/concurrent/TrieMap.scala
index 092598e260..2a908aebb1 100644
--- a/src/library/scala/collection/concurrent/TrieMap.scala
+++ b/src/library/scala/collection/concurrent/TrieMap.scala
@@ -1027,7 +1027,7 @@ private[collection] class TrieMapIterator[K, V](var level: Int, private var ct:
Seq(this)
}
- def printDebug {
+ def printDebug() {
println("ctrie iterator")
println(stackpos.mkString(","))
println("depth: " + depth)
diff --git a/src/library/scala/collection/convert/DecorateAsJava.scala b/src/library/scala/collection/convert/DecorateAsJava.scala
new file mode 100644
index 0000000000..76837d937c
--- /dev/null
+++ b/src/library/scala/collection/convert/DecorateAsJava.scala
@@ -0,0 +1,296 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2006-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://www.scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.collection
+package convert
+
+import java.{ lang => jl, util => ju }, java.util.{ concurrent => juc }
+import Decorators._
+import WrapAsJava._
+
+/** A collection of decorators that allow converting between
+ * Scala and Java collections using `asScala` and `asJava` methods.
+ *
+ * The following conversions are supported via `asJava`, `asScala`
+ *
+ * - `scala.collection.Iterable` <=> `java.lang.Iterable`
+ * - `scala.collection.Iterator` <=> `java.util.Iterator`
+ * - `scala.collection.mutable.Buffer` <=> `java.util.List`
+ * - `scala.collection.mutable.Set` <=> `java.util.Set`
+ * - `scala.collection.mutable.Map` <=> `java.util.Map`
+ * - `scala.collection.mutable.ConcurrentMap` <=> `java.util.concurrent.ConcurrentMap`
+ *
+ * In all cases, converting from a source type to a target type and back
+ * again will return the original source object, e.g.
+ * {{{
+ * import scala.collection.JavaConverters._
+ *
+ * val sl = new scala.collection.mutable.ListBuffer[Int]
+ * val jl : java.util.List[Int] = sl.asJava
+ * val sl2 : scala.collection.mutable.Buffer[Int] = jl.asScala
+ * assert(sl eq sl2)
+ * }}}
+ * The following conversions are also supported, but the
+ * direction from Scala to Java is done by a more specifically named method:
+ * `asJavaCollection`, `asJavaEnumeration`, `asJavaDictionary`.
+ *
+ * - `scala.collection.Iterable` <=> `java.util.Collection`
+ * - `scala.collection.Iterator` <=> `java.util.Enumeration`
+ * - `scala.collection.mutable.Map` <=> `java.util.Dictionary`
+ *
+ * In addition, the following one way conversions are provided via `asJava`:
+ *
+ * - `scala.collection.Seq` => `java.util.List`
+ * - `scala.collection.mutable.Seq` => `java.util.List`
+ * - `scala.collection.Set` => `java.util.Set`
+ * - `scala.collection.Map` => `java.util.Map`
+ *
+ * @author Martin Odersky
+ * @since 2.8.1
+ */
+
+trait DecorateAsJava {
+ /**
+ * Adds an `asJava` method that implicitly converts a Scala `Iterator` to a
+ * Java `Iterator`. The returned Java `Iterator` is backed by the provided Scala
+ * `Iterator` and any side-effects of using it via the Java interface will
+ * be visible via the Scala interface and vice versa.
+ *
+ * If the Scala `Iterator` was previously obtained from an implicit or explicit
+ * call of `asIterator(java.util.Iterator)` then the original Java `Iterator`
+ * will be returned by the `asJava` method.
+ *
+ * @param i The `Iterator` to be converted.
+ * @return An object with an `asJava` method that returns a Java `Iterator` view of the argument.
+ */
+ implicit def asJavaIteratorConverter[A](i : Iterator[A]): AsJava[ju.Iterator[A]] =
+ new AsJava(asJavaIterator(i))
+
+ /**
+ * Adds an `asJavaEnumeration` method that implicitly converts a Scala
+ * `Iterator` to a Java `Enumeration`. The returned Java `Enumeration` is
+ * backed by the provided Scala `Iterator` and any side-effects of using
+ * it via the Java interface will be visible via the Scala interface and
+ * vice versa.
+ *
+ * If the Scala `Iterator` was previously obtained from an implicit or
+ * explicit call of `asIterator(java.util.Enumeration)` then the
+ * original Java `Enumeration` will be returned.
+ *
+ * @param i The `Iterator` to be converted.
+ * @return An object with an `asJavaEnumeration` method that returns a Java
+ * `Enumeration` view of the argument.
+ */
+ implicit def asJavaEnumerationConverter[A](i : Iterator[A]): AsJavaEnumeration[A] =
+ new AsJavaEnumeration(i)
+
+ /**
+ * Adds an `asJava` method that implicitly converts a Scala `Iterable` to
+ * a Java `Iterable`.
+ *
+ * The returned Java `Iterable` is backed by the provided Scala `Iterable`
+ * and any side-effects of using it via the Java interface will be visible
+ * via the Scala interface and vice versa.
+ *
+ * If the Scala `Iterable` was previously obtained from an implicit or
+ * explicit call of `asIterable(java.lang.Iterable)` then the original
+ * Java `Iterable` will be returned.
+ *
+ * @param i The `Iterable` to be converted.
+ * @return An object with an `asJava` method that returns a Java
+ * `Iterable` view of the argument.
+ */
+ implicit def asJavaIterableConverter[A](i : Iterable[A]): AsJava[jl.Iterable[A]] =
+ new AsJava(asJavaIterable(i))
+
+ /**
+ * Adds an `asJavaCollection` method that implicitly converts a Scala
+ * `Iterable` to an immutable Java `Collection`.
+ *
+ * If the Scala `Iterable` was previously obtained from an implicit or
+ * explicit call of `asSizedIterable(java.util.Collection)` then the
+ * original Java `Collection` will be returned.
+ *
+ * @param i The `SizedIterable` to be converted.
+ * @return An object with an `asJavaCollection` method that returns a Java
+ * `Collection` view of the argument.
+ */
+ implicit def asJavaCollectionConverter[A](i : Iterable[A]): AsJavaCollection[A] =
+ new AsJavaCollection(i)
+
+ /**
+ * Adds an `asJava` method that implicitly converts a Scala mutable `Buffer`
+ * to a Java `List`.
+ *
+ * The returned Java `List` is backed by the provided Scala `Buffer` and any
+ * side-effects of using it via the Java interface will be visible via the
+ * Scala interface and vice versa.
+ *
+ * If the Scala `Buffer` was previously obtained from an implicit or explicit
+ * call of `asBuffer(java.util.List)` then the original Java `List` will be
+ * returned.
+ *
+ * @param b The `Buffer` to be converted.
+ * @return An object with an `asJava` method that returns a Java `List` view
+ * of the argument.
+ */
+ implicit def bufferAsJavaListConverter[A](b : mutable.Buffer[A]): AsJava[ju.List[A]] =
+ new AsJava(bufferAsJavaList(b))
+
+ /**
+ * Adds an `asJava` method that implicitly converts a Scala mutable `Seq`
+ * to a Java `List`.
+ *
+ * The returned Java `List` is backed by the provided Scala `Seq` and any
+ * side-effects of using it via the Java interface will be visible via the
+ * Scala interface and vice versa.
+ *
+ * If the Scala `Seq` was previously obtained from an implicit or explicit
+ * call of `asSeq(java.util.List)` then the original Java `List` will be
+ * returned.
+ *
+ * @param b The `Seq` to be converted.
+ * @return An object with an `asJava` method that returns a Java `List`
+ * view of the argument.
+ */
+ implicit def mutableSeqAsJavaListConverter[A](b : mutable.Seq[A]): AsJava[ju.List[A]] =
+ new AsJava(mutableSeqAsJavaList(b))
+
+ /**
+ * Adds an `asJava` method that implicitly converts a Scala `Seq` to a
+ * Java `List`.
+ *
+ * The returned Java `List` is backed by the provided Scala `Seq` and any
+ * side-effects of using it via the Java interface will be visible via the
+ * Scala interface and vice versa.
+ *
+ * If the Scala `Seq` was previously obtained from an implicit or explicit
+ * call of `asSeq(java.util.List)` then the original Java `List` will be
+ * returned.
+ *
+ * @param b The `Seq` to be converted.
+ * @return An object with an `asJava` method that returns a Java `List`
+ * view of the argument.
+ */
+ implicit def seqAsJavaListConverter[A](b : Seq[A]): AsJava[ju.List[A]] =
+ new AsJava(seqAsJavaList(b))
+
+ /**
+ * Adds an `asJava` method that implicitly converts a Scala mutable `Set`
+ * to a Java `Set`.
+ *
+ * The returned Java `Set` is backed by the provided Scala `Set` and any
+ * side-effects of using it via the Java interface will be visible via
+ * the Scala interface and vice versa.
+ *
+ * If the Scala `Set` was previously obtained from an implicit or explicit
+ * call of `asSet(java.util.Set)` then the original Java `Set` will be
+ * returned.
+ *
+ * @param s The `Set` to be converted.
+ * @return An object with an `asJava` method that returns a Java `Set` view
+ * of the argument.
+ */
+ implicit def mutableSetAsJavaSetConverter[A](s : mutable.Set[A]): AsJava[ju.Set[A]] =
+ new AsJava(mutableSetAsJavaSet(s))
+
+ /**
+ * Adds an `asJava` method that implicitly converts a Scala `Set` to a
+ * Java `Set`.
+ *
+ * The returned Java `Set` is backed by the provided Scala `Set` and any
+ * side-effects of using it via the Java interface will be visible via
+ * the Scala interface and vice versa.
+ *
+ * If the Scala `Set` was previously obtained from an implicit or explicit
+ * call of `asSet(java.util.Set)` then the original Java `Set` will be
+ * returned.
+ *
+ * @param s The `Set` to be converted.
+ * @return An object with an `asJava` method that returns a Java `Set` view
+ * of the argument.
+ */
+ implicit def setAsJavaSetConverter[A](s : Set[A]): AsJava[ju.Set[A]] =
+ new AsJava(setAsJavaSet(s))
+
+ /**
+ * Adds an `asJava` method that implicitly converts a Scala mutable `Map`
+ * to a Java `Map`.
+ *
+ * The returned Java `Map` is backed by the provided Scala `Map` and any
+ * side-effects of using it via the Java interface will be visible via the
+ * Scala interface and vice versa.
+ *
+ * If the Scala `Map` was previously obtained from an implicit or explicit
+ * call of `asMap(java.util.Map)` then the original Java `Map` will be
+ * returned.
+ *
+ * @param m The `Map` to be converted.
+ * @return An object with an `asJava` method that returns a Java `Map` view
+ * of the argument.
+ */
+ implicit def mutableMapAsJavaMapConverter[A, B](m : mutable.Map[A, B]): AsJava[ju.Map[A, B]] =
+ new AsJava(mutableMapAsJavaMap(m))
+
+ /**
+ * Adds an `asJavaDictionary` method that implicitly converts a Scala
+ * mutable `Map` to a Java `Dictionary`.
+ *
+ * The returned Java `Dictionary` is backed by the provided Scala
+ * `Dictionary` and any side-effects of using it via the Java interface
+ * will be visible via the Scala interface and vice versa.
+ *
+ * If the Scala `Dictionary` was previously obtained from an implicit or
+ * explicit call of `asMap(java.util.Dictionary)` then the original
+ * Java `Dictionary` will be returned.
+ *
+ * @param m The `Map` to be converted.
+ * @return An object with an `asJavaDictionary` method that returns a
+ * Java `Dictionary` view of the argument.
+ */
+ implicit def asJavaDictionaryConverter[A, B](m : mutable.Map[A, B]): AsJavaDictionary[A, B] =
+ new AsJavaDictionary(m)
+
+ /**
+ * Adds an `asJava` method that implicitly converts a Scala `Map` to
+ * a Java `Map`.
+ *
+ * The returned Java `Map` is backed by the provided Scala `Map` and any
+ * side-effects of using it via the Java interface will be visible via
+ * the Scala interface and vice versa.
+ *
+ * If the Scala `Map` was previously obtained from an implicit or explicit
+ * call of `asMap(java.util.Map)` then the original Java `Map` will be
+ * returned.
+ *
+ * @param m The `Map` to be converted.
+ * @return An object with an `asJava` method that returns a Java `Map` view
+ * of the argument.
+ */
+ implicit def mapAsJavaMapConverter[A, B](m : Map[A, B]): AsJava[ju.Map[A, B]] =
+ new AsJava(mapAsJavaMap(m))
+
+ /**
+ * Adds an `asJava` method that implicitly converts a Scala mutable
+ * `ConcurrentMap` to a Java `ConcurrentMap`.
+ *
+ * The returned Java `ConcurrentMap` is backed by the provided Scala
+ * `ConcurrentMap` and any side-effects of using it via the Java interface
+ * will be visible via the Scala interface and vice versa.
+ *
+ * If the Scala `ConcurrentMap` was previously obtained from an implicit or
+ * explicit call of `asConcurrentMap(java.util.concurrent.ConcurrentMap)`
+ * then the original Java `ConcurrentMap` will be returned.
+ *
+ * @param m The `ConcurrentMap` to be converted.
+ * @return An object with an `asJava` method that returns a Java
+ * `ConcurrentMap` view of the argument.
+ */
+ implicit def asJavaConcurrentMapConverter[A, B](m: mutable.ConcurrentMap[A, B]): AsJava[juc.ConcurrentMap[A, B]] =
+ new AsJava(asJavaConcurrentMap(m))
+}
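[editor's note] A hedged usage sketch for the decorators defined above (illustrative code, not part of the commit): plain `asJava` covers the symmetric cases, while the more specifically named methods handle the Scala-to-Java directions that need them.

import scala.collection.JavaConverters._
import scala.collection.mutable

object DecorateAsJavaDemo {
  def main(args: Array[String]): Unit = {
    val buf = mutable.ListBuffer(1, 2, 3)
    val jlist: java.util.List[Int] = buf.asJava      // live view backed by buf
    jlist.add(4)
    println(buf)                                      // ListBuffer(1, 2, 3, 4)

    val jcoll: java.util.Collection[Int] = buf.asJavaCollection
    val jenum: java.util.Enumeration[Int] = buf.iterator.asJavaEnumeration
    val jdict: java.util.Dictionary[String, Int] = mutable.Map("a" -> 1).asJavaDictionary
    println(List(jcoll.size, jenum.hasMoreElements, jdict.get("a")).mkString(" "))
  }
}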
diff --git a/src/library/scala/collection/convert/DecorateAsScala.scala b/src/library/scala/collection/convert/DecorateAsScala.scala
new file mode 100644
index 0000000000..bb14228e67
--- /dev/null
+++ b/src/library/scala/collection/convert/DecorateAsScala.scala
@@ -0,0 +1,189 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2006-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://www.scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.collection
+package convert
+
+import java.{ lang => jl, util => ju }, java.util.{ concurrent => juc }
+import Decorators._
+import WrapAsScala._
+
+trait DecorateAsScala {
+ /**
+ * Adds an `asScala` method that implicitly converts a Java `Iterator` to
+ * a Scala `Iterator`.
+ *
+ * The returned Scala `Iterator` is backed by the provided Java `Iterator`
+ * and any side-effects of using it via the Scala interface will be visible
+ * via the Java interface and vice versa.
+ *
+ * If the Java `Iterator` was previously obtained from an implicit or
+ * explicit call of `asIterator(scala.collection.Iterator)` then the
+ * original Scala `Iterator` will be returned.
+ *
+ * @param i The `Iterator` to be converted.
+ * @return An object with an `asScala` method that returns a Scala
+ * `Iterator` view of the argument.
+ */
+ implicit def asScalaIteratorConverter[A](i : ju.Iterator[A]): AsScala[Iterator[A]] =
+ new AsScala(asScalaIterator(i))
+
+ /**
+ * Adds an `asScala` method that implicitly converts a Java `Enumeration`
+ * to a Scala `Iterator`.
+ *
+ * The returned Scala `Iterator` is backed by the provided Java
+ * `Enumeration` and any side-effects of using it via the Scala interface
+ * will be visible via the Java interface and vice versa.
+ *
+ * If the Java `Enumeration` was previously obtained from an implicit or
+ * explicit call of `asEnumeration(scala.collection.Iterator)` then the
+ * original Scala `Iterator` will be returned.
+ *
+ * @param i The `Enumeration` to be converted.
+ * @return An object with an `asScala` method that returns a Scala
+ * `Iterator` view of the argument.
+ */
+ implicit def enumerationAsScalaIteratorConverter[A](i : ju.Enumeration[A]): AsScala[Iterator[A]] =
+ new AsScala(enumerationAsScalaIterator(i))
+
+ /**
+ * Adds an `asScala` method that implicitly converts a Java `Iterable` to
+ * a Scala `Iterable`.
+ *
+ * The returned Scala `Iterable` is backed by the provided Java `Iterable`
+ * and any side-effects of using it via the Scala interface will be visible
+ * via the Java interface and vice versa.
+ *
+ * If the Java `Iterable` was previously obtained from an implicit or
+ * explicit call of `asIterable(scala.collection.Iterable)` then the original
+ * Scala `Iterable` will be returned.
+ *
+ * @param i The `Iterable` to be converted.
+ * @return An object with an `asScala` method that returns a Scala `Iterable`
+ * view of the argument.
+ */
+ implicit def iterableAsScalaIterableConverter[A](i : jl.Iterable[A]): AsScala[Iterable[A]] =
+ new AsScala(iterableAsScalaIterable(i))
+
+ /**
+ * Adds an `asScala` method that implicitly converts a Java `Collection` to
+ * a Scala `Iterable`.
+ *
+ * If the Java `Collection` was previously obtained from an implicit or
+ * explicit call of `asCollection(scala.collection.SizedIterable)` then
+ * the original Scala `SizedIterable` will be returned.
+ *
+ * @param i The `Collection` to be converted.
+ * @return An object with an `asScala` method that returns a Scala
+ * `SizedIterable` view of the argument.
+ */
+ implicit def collectionAsScalaIterableConverter[A](i : ju.Collection[A]): AsScala[Iterable[A]] =
+ new AsScala(collectionAsScalaIterable(i))
+
+ /**
+ * Adds an `asScala` method that implicitly converts a Java `List` to a
+ * Scala mutable `Buffer`.
+ *
+ * The returned Scala `Buffer` is backed by the provided Java `List` and
+ * any side-effects of using it via the Scala interface will be visible via
+ * the Java interface and vice versa.
+ *
+ * If the Java `List` was previously obtained from an implicit or explicit
+ * call of `asList(scala.collection.mutable.Buffer)` then the original
+ * Scala `Buffer` will be returned.
+ *
+ * @param l The `List` to be converted.
+ * @return An object with an `asScala` method that returns a Scala mutable
+ * `Buffer` view of the argument.
+ */
+ implicit def asScalaBufferConverter[A](l : ju.List[A]): AsScala[mutable.Buffer[A]] =
+ new AsScala(asScalaBuffer(l))
+
+ /**
+ * Adds an `asScala` method that implicitly converts a Java `Set` to a
+ * Scala mutable `Set`.
+ *
+ * The returned Scala `Set` is backed by the provided Java `Set` and any
+ * side-effects of using it via the Scala interface will be visible via
+ * the Java interface and vice versa.
+ *
+ * If the Java `Set` was previously obtained from an implicit or explicit
+ * call of `asSet(scala.collection.mutable.Set)` then the original
+ * Scala `Set` will be returned.
+ *
+ * @param s The `Set` to be converted.
+ * @return An object with an `asScala` method that returns a Scala mutable
+ * `Set` view of the argument.
+ */
+ implicit def asScalaSetConverter[A](s : ju.Set[A]): AsScala[mutable.Set[A]] =
+ new AsScala(asScalaSet(s))
+
+ /**
+ * Adds an `asScala` method that implicitly converts a Java `Map` to a Scala
+ * mutable `Map`. The returned Scala `Map` is backed by the provided Java
+ * `Map` and any side-effects of using it via the Scala interface will
+ * be visible via the Java interface and vice versa.
+ *
+ * If the Java `Map` was previously obtained from an implicit or explicit
+ * call of `asMap(scala.collection.mutable.Map)` then the original
+ * Scala `Map` will be returned.
+ *
+ * @param m The `Map` to be converted.
+ * @return An object with an `asScala` method that returns a Scala mutable
+ * `Map` view of the argument.
+ */
+ implicit def mapAsScalaMapConverter[A, B](m : ju.Map[A, B]): AsScala[mutable.Map[A, B]] =
+ new AsScala(mapAsScalaMap(m))
+
+ /**
+ * Adds an `asScala` method that implicitly converts a Java `ConcurrentMap`
+ * to a Scala mutable `ConcurrentMap`. The returned Scala `ConcurrentMap` is
+ * backed by the provided Java `ConcurrentMap` and any side-effects of using
+ * it via the Scala interface will be visible via the Java interface and
+ * vice versa.
+ *
+ * If the Java `ConcurrentMap` was previously obtained from an implicit or
+ * explicit call of `asConcurrentMap(scala.collection.mutable.ConcurrentMap)`
+ * then the original Scala `ConcurrentMap` will be returned.
+ *
+ * @param m The `ConcurrentMap` to be converted.
+ * @return An object with an `asScala` method that returns a Scala mutable
+ * `ConcurrentMap` view of the argument.
+ */
+ implicit def asScalaConcurrentMapConverter[A, B](m: juc.ConcurrentMap[A, B]): AsScala[mutable.ConcurrentMap[A, B]] =
+ new AsScala(asScalaConcurrentMap(m))
+
+ /**
+ * Adds an `asScala` method that implicitly converts a Java `Dictionary`
+ * to a Scala mutable `Map[String, String]`. The returned Scala
+ * `Map[String, String]` is backed by the provided Java `Dictionary` and
+ * any side-effects of using it via the Scala interface will be visible via
+ * the Java interface and vice versa.
+ *
+ * @param m The `Dictionary` to be converted.
+ * @return An object with an `asScala` method that returns a Scala mutable
+ * `Map[String, String]` view of the argument.
+ */
+ implicit def dictionaryAsScalaMapConverter[A, B](p: ju.Dictionary[A, B]): AsScala[mutable.Map[A, B]] =
+ new AsScala(dictionaryAsScalaMap(p))
+
+ /**
+ * Adds an `asScala` method that implicitly converts a Java `Properties`
+ * to a Scala mutable `Map[String, String]`. The returned Scala
+ * `Map[String, String]` is backed by the provided Java `Properties` and
+ * any side-effects of using it via the Scala interface will be visible via
+ * the Java interface and vice versa.
+ *
+ * @param m The `Properties` to be converted.
+ * @return An object with an `asScala` method that returns a Scala mutable
+ * `Map[String, String]` view of the argument.
+ */
+ implicit def propertiesAsScalaMapConverter(p: ju.Properties): AsScala[mutable.Map[String, String]] =
+ new AsScala(propertiesAsScalaMap(p))
+}
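[editor's note] And the mirror-image sketch for the `asScala` decorators above, again assuming only the public JavaConverters entry points; the object name is illustrative.

import scala.collection.JavaConverters._
import scala.collection.mutable

object DecorateAsScalaDemo {
  def main(args: Array[String]): Unit = {
    val jl = new java.util.ArrayList[String]()
    jl.add("a"); jl.add("b")
    val buf: mutable.Buffer[String] = jl.asScala
    buf += "c"
    println(jl)                        // [a, b, c] -- the view writes through

    val props = new java.util.Properties()
    props.setProperty("user", "scala")
    val pm: mutable.Map[String, String] = props.asScala
    println(pm("user"))                // scala
  }
}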
diff --git a/src/library/scala/collection/convert/Decorators.scala b/src/library/scala/collection/convert/Decorators.scala
new file mode 100644
index 0000000000..3bdd9a0f1c
--- /dev/null
+++ b/src/library/scala/collection/convert/Decorators.scala
@@ -0,0 +1,46 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2006-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://www.scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.collection
+package convert
+
+import java.{ lang => jl, util => ju }, java.util.{ concurrent => juc }
+
+private[collection] trait Decorators {
+ /** Generic class containing the `asJava` converter method */
+ class AsJava[A](op: => A) {
+ /** Converts a Scala collection to the corresponding Java collection */
+ def asJava: A = op
+ }
+
+ /** Generic class containing the `asScala` converter method */
+ class AsScala[A](op: => A) {
+ /** Converts a Java collection to the corresponding Scala collection */
+ def asScala: A = op
+ }
+
+ /** Generic class containing the `asJavaCollection` converter method */
+ class AsJavaCollection[A](i: Iterable[A]) {
+ /** Converts a Scala `Iterable` to a Java `Collection` */
+ def asJavaCollection: ju.Collection[A] = JavaConversions.asJavaCollection(i)
+ }
+
+ /** Generic class containing the `asJavaEnumeration` converter method */
+ class AsJavaEnumeration[A](i: Iterator[A]) {
+ /** Converts a Scala `Iterator` to a Java `Enumeration` */
+ def asJavaEnumeration: ju.Enumeration[A] = JavaConversions.asJavaEnumeration(i)
+ }
+
+ /** Generic class containing the `asJavaDictionary` converter method */
+ class AsJavaDictionary[A, B](m : mutable.Map[A, B]) {
+ /** Converts a Scala `Map` to a Java `Dictionary` */
+ def asJavaDictionary: ju.Dictionary[A, B] = JavaConversions.asJavaDictionary(m)
+ }
+}
+
+private[collection] object Decorators extends Decorators
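[editor's note] The `AsJava`/`AsScala` holders above are deliberately tiny: they capture the conversion as a by-name argument and only run it when the decorator method is called. A stand-alone sketch of the same pattern, with hypothetical names (`Decorated`, `asJList`) rather than the library's:

class Decorated[A](op: => A) {
  def asJList: A = op                       // the conversion runs only here
}

object DecoratorPattern {
  implicit def decorate(xs: Seq[Int]): Decorated[java.util.List[Int]] =
    new Decorated({
      val jl = new java.util.ArrayList[Int]()
      xs foreach (jl add _)
      jl
    })

  def main(args: Array[String]): Unit = {
    println(Seq(1, 2, 3).asJList.size)      // 3
  }
}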
diff --git a/src/library/scala/collection/convert/WrapAsJava.scala b/src/library/scala/collection/convert/WrapAsJava.scala
new file mode 100644
index 0000000000..6274518d1a
--- /dev/null
+++ b/src/library/scala/collection/convert/WrapAsJava.scala
@@ -0,0 +1,256 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2006-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://www.scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.collection
+package convert
+
+import java.{ lang => jl, util => ju }, java.util.{ concurrent => juc }
+import Wrappers._
+
+trait WrapAsJava {
+ /**
+ * Implicitly converts a Scala Iterator to a Java Iterator.
+ * The returned Java Iterator is backed by the provided Scala
+ * Iterator and any side-effects of using it via the Java interface will
+ * be visible via the Scala interface and vice versa.
+ *
+ * If the Scala Iterator was previously obtained from an implicit or
+ * explicit call of `asIterator(java.util.Iterator)` then the original
+ * Java Iterator will be returned.
+ *
+ * @param i The Iterator to be converted.
+ * @return A Java Iterator view of the argument.
+ */
+ implicit def asJavaIterator[A](it: Iterator[A]): ju.Iterator[A] = it match {
+ case JIteratorWrapper(wrapped) => wrapped.asInstanceOf[ju.Iterator[A]]
+ case _ => IteratorWrapper(it)
+ }
+
+ /**
+ * Implicitly converts a Scala Iterator to a Java Enumeration.
+ * The returned Java Enumeration is backed by the provided Scala
+ * Iterator and any side-effects of using it via the Java interface will
+ * be visible via the Scala interface and vice versa.
+ *
+ * If the Scala Iterator was previously obtained from an implicit or
+ * explicit call of `asIterator(java.util.Enumeration)` then the
+ * original Java Enumeration will be returned.
+ *
+ * @param i The Iterator to be converted.
+ * @return A Java Enumeration view of the argument.
+ */
+ implicit def asJavaEnumeration[A](it: Iterator[A]): ju.Enumeration[A] = it match {
+ case JEnumerationWrapper(wrapped) => wrapped.asInstanceOf[ju.Enumeration[A]]
+ case _ => IteratorWrapper(it)
+ }
+
+ /**
+ * Implicitly converts a Scala Iterable to a Java Iterable.
+ * The returned Java Iterable is backed by the provided Scala
+ * Iterable and any side-effects of using it via the Java interface will
+ * be visible via the Scala interface and vice versa.
+ *
+ * If the Scala Iterable was previously obtained from an implicit or
+ * explicit call of `asIterable(java.lang.Iterable)` then the original
+ * Java Iterable will be returned.
+ *
+ * @param i The Iterable to be converted.
+ * @return A Java Iterable view of the argument.
+ */
+ implicit def asJavaIterable[A](i: Iterable[A]): jl.Iterable[A] = i match {
+ case JIterableWrapper(wrapped) => wrapped.asInstanceOf[jl.Iterable[A]]
+ case _ => IterableWrapper(i)
+ }
+
+ /**
+ * Implicitly converts a Scala Iterable to an immutable Java
+ * Collection.
+ *
+ * If the Scala Iterable was previously obtained from an implicit or
+ * explicit call of `asSizedIterable(java.util.Collection)` then the original
+ * Java Collection will be returned.
+ *
+ * @param i The SizedIterable to be converted.
+ * @return A Java Collection view of the argument.
+ */
+ implicit def asJavaCollection[A](it: Iterable[A]): ju.Collection[A] = it match {
+ case JCollectionWrapper(wrapped) => wrapped.asInstanceOf[ju.Collection[A]]
+ case _ => new IterableWrapper(it)
+ }
+
+ /**
+ * Implicitly converts a Scala mutable Buffer to a Java List.
+ * The returned Java List is backed by the provided Scala
+ * Buffer and any side-effects of using it via the Java interface will
+ * be visible via the Scala interface and vice versa.
+ *
+ * If the Scala Buffer was previously obtained from an implicit or
+ * explicit call of `asBuffer(java.util.List)` then the original
+ * Java List will be returned.
+ *
+ * @param b The Buffer to be converted.
+ * @return A Java List view of the argument.
+ */
+ implicit def bufferAsJavaList[A](b: mutable.Buffer[A]): ju.List[A] = b match {
+ case JListWrapper(wrapped) => wrapped
+ case _ => new MutableBufferWrapper(b)
+ }
+
+ /**
+ * Implicitly converts a Scala mutable Seq to a Java List.
+ * The returned Java List is backed by the provided Scala
+ * Seq and any side-effects of using it via the Java interface will
+ * be visible via the Scala interface and vice versa.
+ *
+ * If the Scala Seq was previously obtained from an implicit or
+ * explicit call of `asSeq(java.util.List)` then the original
+ * Java List will be returned.
+ *
+ * @param b The Seq to be converted.
+ * @return A Java List view of the argument.
+ */
+ implicit def mutableSeqAsJavaList[A](seq: mutable.Seq[A]): ju.List[A] = seq match {
+ case JListWrapper(wrapped) => wrapped
+ case _ => new MutableSeqWrapper(seq)
+ }
+
+ /**
+ * Implicitly converts a Scala Seq to a Java List.
+ * The returned Java List is backed by the provided Scala
+ * Seq and any side-effects of using it via the Java interface will
+ * be visible via the Scala interface and vice versa.
+ *
+ * If the Scala Seq was previously obtained from an implicit or
+ * explicit call of `asSeq(java.util.List)` then the original
+ * Java List will be returned.
+ *
+ * @param b The Seq to be converted.
+ * @return A Java List view of the argument.
+ */
+ implicit def seqAsJavaList[A](seq: Seq[A]): ju.List[A] = seq match {
+ case JListWrapper(wrapped) => wrapped.asInstanceOf[ju.List[A]]
+ case _ => new SeqWrapper(seq)
+ }
+
+ /**
+ * Implicitly converts a Scala mutable Set to a Java Set.
+ * The returned Java Set is backed by the provided Scala
+ * Set and any side-effects of using it via the Java interface will
+ * be visible via the Scala interface and vice versa.
+ *
+ * If the Scala Set was previously obtained from an implicit or
+ * explicit call of `asSet(java.util.Set)` then the original
+ * Java Set will be returned.
+ *
+ * @param s The Set to be converted.
+ * @return A Java Set view of the argument.
+ */
+ implicit def mutableSetAsJavaSet[A](s: mutable.Set[A]): ju.Set[A] = s match {
+ case JSetWrapper(wrapped) => wrapped
+ case _ => new MutableSetWrapper(s)
+ }
+
+ /**
+ * Implicitly converts a Scala Set to a Java Set.
+ * The returned Java Set is backed by the provided Scala
+ * Set and any side-effects of using it via the Java interface will
+ * be visible via the Scala interface and vice versa.
+ *
+ * If the Scala Set was previously obtained from an implicit or
+ * explicit call of asSet(java.util.Set) then the original
+ * Java Set will be returned.
+ *
+ * @param s The Set to be converted.
+ * @return A Java Set view of the argument.
+ */
+ implicit def setAsJavaSet[A](s: Set[A]): ju.Set[A] = s match {
+ case JSetWrapper(wrapped) => wrapped
+ case _ => new SetWrapper(s)
+ }
+
+ /**
+ * Implicitly converts a Scala mutable Map to a Java Map.
+ * The returned Java Map is backed by the provided Scala
+ * Map and any side-effects of using it via the Java interface will
+ * be visible via the Scala interface and vice versa.
+ *
+ * If the Scala Map was previously obtained from an implicit or
+ * explicit call of `asMap(java.util.Map)` then the original
+ * Java Map will be returned.
+ *
+ * @param m The Map to be converted.
+ * @return A Java Map view of the argument.
+ */
+ implicit def mutableMapAsJavaMap[A, B](m: mutable.Map[A, B]): ju.Map[A, B] = m match {
+ //case JConcurrentMapWrapper(wrapped) => wrapped
+ case JMapWrapper(wrapped) => wrapped
+ case _ => new MutableMapWrapper(m)
+ }
+
+ /**
+ * Implicitly converts a Scala mutable `Map` to a Java `Dictionary`.
+ *
+ * The returned Java `Dictionary` is backed by the provided Scala
+ * `Dictionary` and any side-effects of using it via the Java interface
+ * will be visible via the Scala interface and vice versa.
+ *
+ * If the Scala `Dictionary` was previously obtained from an implicit or
+ * explicit call of `asMap(java.util.Dictionary)` then the original
+ * Java Dictionary will be returned.
+ *
+ * @param m The `Map` to be converted.
+ * @return A Java `Dictionary` view of the argument.
+ */
+ implicit def asJavaDictionary[A, B](m: mutable.Map[A, B]): ju.Dictionary[A, B] = m match {
+ //case JConcurrentMapWrapper(wrapped) => wrapped
+ case JDictionaryWrapper(wrapped) => wrapped
+ case _ => new DictionaryWrapper(m)
+ }
+
+ /**
+ * Implicitly converts a Scala `Map` to a Java `Map`.
+ *
+ * The returned Java `Map` is backed by the provided Scala `Map` and
+ * any side-effects of using it via the Java interface will be visible
+ * via the Scala interface and vice versa.
+ *
+ * If the Scala `Map` was previously obtained from an implicit or
+ * explicit call of `asMap(java.util.Map)` then the original
+ * Java `Map` will be returned.
+ *
+ * @param m The `Map` to be converted.
+ * @return A Java `Map` view of the argument.
+ */
+ implicit def mapAsJavaMap[A, B](m: Map[A, B]): ju.Map[A, B] = m match {
+ //case JConcurrentMapWrapper(wrapped) => wrapped
+ case JMapWrapper(wrapped) => wrapped.asInstanceOf[ju.Map[A, B]]
+ case _ => new MapWrapper(m)
+ }
+
+ /**
+ * Implicitly converts a Scala mutable `ConcurrentMap` to a Java
+ * `ConcurrentMap`.
+ *
+ * The returned Java `ConcurrentMap` is backed by the provided Scala
+ * `ConcurrentMap` and any side-effects of using it via the Java interface
+ * will be visible via the Scala interface and vice versa.
+ *
+ * If the Scala `ConcurrentMap` was previously obtained from an implicit or
+ * explicit call of `asConcurrentMap(java.util.concurrent.ConcurrentMap)`
+ * then the original Java ConcurrentMap will be returned.
+ *
+ * @param m The `ConcurrentMap` to be converted.
+ * @return A Java `ConcurrentMap` view of the argument.
+ */
+ implicit def asJavaConcurrentMap[A, B](m: mutable.ConcurrentMap[A, B]): juc.ConcurrentMap[A, B] = m match {
+ case JConcurrentMapWrapper(wrapped) => wrapped
+ case _ => new ConcurrentMapWrapper(m)
+ }
+}
+
+object WrapAsJava extends WrapAsJava { }
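[editor's note] Every conversion above follows the same wrap-or-unwrap idiom: if the value is already one of these wrappers, the original is handed back instead of being wrapped a second time. A hedged check of that behaviour, going through the existing `scala.collection.JavaConversions` functions purely for illustration:

import scala.collection.JavaConversions.{ bufferAsJavaList, asScalaBuffer }
import scala.collection.mutable

object NoDoubleWrapping {
  def main(args: Array[String]): Unit = {
    val buf   = mutable.ArrayBuffer(1, 2, 3)
    val jlist = bufferAsJavaList(buf)   // wraps buf
    val back  = asScalaBuffer(jlist)    // unwraps instead of re-wrapping
    println(back eq buf)                // true: the original buffer comes back
  }
}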
diff --git a/src/library/scala/collection/convert/WrapAsScala.scala b/src/library/scala/collection/convert/WrapAsScala.scala
new file mode 100644
index 0000000000..02b58f55a4
--- /dev/null
+++ b/src/library/scala/collection/convert/WrapAsScala.scala
@@ -0,0 +1,193 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2006-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://www.scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.collection
+package convert
+
+import java.{ lang => jl, util => ju }, java.util.{ concurrent => juc }
+import Wrappers._
+
+trait WrapAsScala {
+ /**
+ * Implicitly converts a Java `Iterator` to a Scala `Iterator`.
+ *
+ * The returned Scala `Iterator` is backed by the provided Java `Iterator`
+ * and any side-effects of using it via the Scala interface will be visible
+ * via the Java interface and vice versa.
+ *
+ * If the Java `Iterator` was previously obtained from an implicit or
+ * explicit call of `asIterator(scala.collection.Iterator)` then the
+ * original Scala `Iterator` will be returned.
+ *
+ * @param i The `Iterator` to be converted.
+ * @return A Scala `Iterator` view of the argument.
+ */
+ implicit def asScalaIterator[A](it: ju.Iterator[A]): Iterator[A] = it match {
+ case IteratorWrapper(wrapped) => wrapped
+ case _ => JIteratorWrapper(it)
+ }
+
+ /**
+ * Implicitly converts a Java Enumeration to a Scala Iterator.
+ * The returned Scala Iterator is backed by the provided Java
+ * Enumeration and any side-effects of using it via the Scala interface will
+ * be visible via the Java interface and vice versa.
+ *
+ * If the Java Enumeration was previously obtained from an implicit or
+ * explicit call of `enumerationAsScalaIterator(scala.collection.Iterator)`
+ * then the original Scala Iterator will be returned.
+ *
+ * @param i The Enumeration to be converted.
+ * @return A Scala Iterator view of the argument.
+ */
+ implicit def enumerationAsScalaIterator[A](i: ju.Enumeration[A]): Iterator[A] = i match {
+ case IteratorWrapper(wrapped) => wrapped
+ case _ => JEnumerationWrapper(i)
+ }
+
+ /**
+ * Implicitly converts a Java `Iterable` to a Scala `Iterable`.
+ *
+ * The returned Scala `Iterable` is backed by the provided Java `Iterable`
+ * and any side-effects of using it via the Scala interface will be visible
+ * via the Java interface and vice versa.
+ *
+ * If the Java `Iterable` was previously obtained from an implicit or
+ * explicit call of `iterableAsScalaIterable(scala.collection.Iterable)`
+ * then the original Scala Iterable will be returned.
+ *
+ * @param i The Iterable to be converted.
+ * @return A Scala Iterable view of the argument.
+ */
+ implicit def iterableAsScalaIterable[A](i: jl.Iterable[A]): Iterable[A] = i match {
+ case IterableWrapper(wrapped) => wrapped
+ case _ => JIterableWrapper(i)
+ }
+
+ /**
+ * Implicitly converts a Java `Collection` to a Scala `Iterable`.
+ *
+ * If the Java `Collection` was previously obtained from an implicit or
+ * explicit call of `collectionAsScalaIterable(scala.collection.SizedIterable)`
+ * then the original Scala `Iterable` will be returned.
+ *
+ * @param i The Collection to be converted.
+ * @return A Scala Iterable view of the argument.
+ */
+ implicit def collectionAsScalaIterable[A](i: ju.Collection[A]): Iterable[A] = i match {
+ case IterableWrapper(wrapped) => wrapped
+ case _ => JCollectionWrapper(i)
+ }
+
+ /**
+ * Implicitly converts a Java `List` to a Scala mutable `Buffer`.
+ *
+ * The returned Scala `Buffer` is backed by the provided Java `List`
+ * and any side-effects of using it via the Scala interface will
+ * be visible via the Java interface and vice versa.
+ *
+ * If the Java `List` was previously obtained from an implicit or
+ * explicit call of `asScalaBuffer(scala.collection.mutable.Buffer)`
+ * then the original Scala `Buffer` will be returned.
+ *
+ * @param l The `List` to be converted.
+ * @return A Scala mutable `Buffer` view of the argument.
+ */
+ implicit def asScalaBuffer[A](l: ju.List[A]): mutable.Buffer[A] = l match {
+ case MutableBufferWrapper(wrapped) => wrapped
+ case _ => new JListWrapper(l)
+ }
+
+ /**
+ * Implicitly converts a Java Set to a Scala mutable Set.
+ * The returned Scala Set is backed by the provided Java
+ * Set and any side-effects of using it via the Scala interface will
+ * be visible via the Java interface and vice versa.
+ *
+ * If the Java Set was previously obtained from an implicit or
+ * explicit call of `asScalaSet(scala.collection.mutable.Set)` then
+ * the original Scala Set will be returned.
+ *
+ * @param s The Set to be converted.
+ * @return A Scala mutable Set view of the argument.
+ */
+ implicit def asScalaSet[A](s: ju.Set[A]): mutable.Set[A] = s match {
+ case MutableSetWrapper(wrapped) => wrapped
+ case _ => new JSetWrapper(s)
+ }
+
+ /**
+ * Implicitly converts a Java `Map` to a Scala mutable `Map`.
+ *
+ * The returned Scala `Map` is backed by the provided Java `Map` and any
+ * side-effects of using it via the Scala interface will be visible via
+ * the Java interface and vice versa.
+ *
+ * If the Java `Map` was previously obtained from an implicit or
+ * explicit call of `mapAsScalaMap(scala.collection.mutable.Map)` then
+ * the original Scala Map will be returned.
+ *
+ * @param m The Map to be converted.
+ * @return A Scala mutable Map view of the argument.
+ */
+ implicit def mapAsScalaMap[A, B](m: ju.Map[A, B]): mutable.Map[A, B] = m match {
+ //case ConcurrentMapWrapper(wrapped) => wrapped
+ case MutableMapWrapper(wrapped) => wrapped
+ case _ => new JMapWrapper(m)
+ }
+
+ /**
+ * Implicitly converts a Java ConcurrentMap to a Scala mutable ConcurrentMap.
+ * The returned Scala ConcurrentMap is backed by the provided Java
+ * ConcurrentMap and any side-effects of using it via the Scala interface will
+ * be visible via the Java interface and vice versa.
+ *
+ * If the Java ConcurrentMap was previously obtained from an implicit or
+ * explicit call of `asConcurrentMap(scala.collection.mutable.ConcurrentMap)`
+ * then the original Scala ConcurrentMap will be returned.
+ *
+ * @param m The ConcurrentMap to be converted.
+ * @return A Scala mutable ConcurrentMap view of the argument.
+ */
+ implicit def asScalaConcurrentMap[A, B](m: juc.ConcurrentMap[A, B]): mutable.ConcurrentMap[A, B] = m match {
+ case cmw: ConcurrentMapWrapper[a, b] => cmw.underlying
+ case _ => new JConcurrentMapWrapper(m)
+ }
+
+ /**
+ * Implicitly converts a Java `Dictionary` to a Scala mutable
+ * `Map[String, String]`.
+ *
+ * The returned Scala `Map[String, String]` is backed by the provided Java
+ * `Dictionary` and any side-effects of using it via the Scala interface
+ * will be visible via the Java interface and vice versa.
+ *
+ * @param m The Dictionary to be converted.
+ * @return A Scala mutable Map[String, String] view of the argument.
+ */
+ implicit def dictionaryAsScalaMap[A, B](p: ju.Dictionary[A, B]): mutable.Map[A, B] = p match {
+ case DictionaryWrapper(wrapped) => wrapped
+ case _ => new JDictionaryWrapper(p)
+ }
+
+ /**
+ * Implicitly converts a Java `Properties` to a Scala mutable `Map[String, String]`.
+ *
+ * The returned Scala `Map[String, String]` is backed by the provided Java
+ * `Properties` and any side-effects of using it via the Scala interface
+ * will be visible via the Java interface and vice versa.
+ *
+ * @param m The Properties to be converted.
+ * @return A Scala mutable Map[String, String] view of the argument.
+ */
+ implicit def propertiesAsScalaMap(p: ju.Properties): mutable.Map[String, String] = p match {
+ case _ => new JPropertiesWrapper(p)
+ }
+}
+
+object WrapAsScala extends WrapAsScala { }
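[editor's note] A matching sketch for the Java-to-Scala direction: the wrapper (a `JMapWrapper` here) is a live view, so writes through the Scala interface land in the underlying Java map. Again this goes through a JavaConversions entry point for illustration only.

import scala.collection.JavaConversions.mapAsScalaMap

object JavaToScalaView {
  def main(args: Array[String]): Unit = {
    val jm = new java.util.HashMap[String, Int]()
    val sm = mapAsScalaMap(jm)            // a JMapWrapper around jm
    sm("answer") = 42                     // writes through to the Java map
    println(jm.get("answer"))             // 42
    sm -= "answer"
    println(jm.containsKey("answer"))     // false
  }
}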
diff --git a/src/library/scala/collection/convert/Wrappers.scala b/src/library/scala/collection/convert/Wrappers.scala
new file mode 100644
index 0000000000..8136e462cb
--- /dev/null
+++ b/src/library/scala/collection/convert/Wrappers.scala
@@ -0,0 +1,422 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2006-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://www.scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.collection
+package convert
+
+import java.{ lang => jl, util => ju }, java.util.{ concurrent => juc }
+import WrapAsScala._
+import WrapAsJava._
+
+/** Don't put the implementations in the same scope as the implicits
+ * which utilize them, or they will stow away into every scope which
+ * extends one of those implementations. See SI-5580.
+ */
+private[collection] trait Wrappers {
+ trait IterableWrapperTrait[A] extends ju.AbstractCollection[A] {
+ val underlying: Iterable[A]
+ def size = underlying.size
+ override def iterator = IteratorWrapper(underlying.iterator)
+ override def isEmpty = underlying.isEmpty
+ }
+
+ case class IteratorWrapper[A](underlying: Iterator[A]) extends ju.Iterator[A] with ju.Enumeration[A] {
+ def hasNext = underlying.hasNext
+ def next() = underlying.next
+ def hasMoreElements = underlying.hasNext
+ def nextElement() = underlying.next
+ def remove() = throw new UnsupportedOperationException
+ }
+
+ class ToIteratorWrapper[A](underlying : Iterator[A]) {
+ def asJava = new IteratorWrapper(underlying)
+ }
+
+ case class JIteratorWrapper[A](underlying: ju.Iterator[A]) extends AbstractIterator[A] with Iterator[A] {
+ def hasNext = underlying.hasNext
+ def next() = underlying.next
+ }
+
+ case class JEnumerationWrapper[A](underlying: ju.Enumeration[A]) extends AbstractIterator[A] with Iterator[A] {
+ def hasNext = underlying.hasMoreElements
+ def next() = underlying.nextElement
+ }
+
+ case class IterableWrapper[A](underlying: Iterable[A]) extends ju.AbstractCollection[A] with IterableWrapperTrait[A] { }
+
+ case class JIterableWrapper[A](underlying: jl.Iterable[A]) extends AbstractIterable[A] with Iterable[A] {
+ def iterator = underlying.iterator
+ def newBuilder[B] = new mutable.ArrayBuffer[B]
+ }
+
+ case class JCollectionWrapper[A](underlying: ju.Collection[A]) extends AbstractIterable[A] with Iterable[A] {
+ def iterator = underlying.iterator
+ override def size = underlying.size
+ override def isEmpty = underlying.isEmpty
+ def newBuilder[B] = new mutable.ArrayBuffer[B]
+ }
+
+ case class SeqWrapper[A](underlying: Seq[A]) extends ju.AbstractList[A] with IterableWrapperTrait[A] {
+ def get(i: Int) = underlying(i)
+ }
+
+ case class MutableSeqWrapper[A](underlying: mutable.Seq[A]) extends ju.AbstractList[A] with IterableWrapperTrait[A] {
+ def get(i: Int) = underlying(i)
+ override def set(i: Int, elem: A) = {
+ val p = underlying(i)
+ underlying(i) = elem
+ p
+ }
+ }
+
+ case class MutableBufferWrapper[A](underlying: mutable.Buffer[A]) extends ju.AbstractList[A] with IterableWrapperTrait[A] {
+ def get(i: Int) = underlying(i)
+ override def set(i: Int, elem: A) = { val p = underlying(i); underlying(i) = elem; p }
+ override def add(elem: A) = { underlying append elem; true }
+ override def remove(i: Int) = underlying remove i
+ }
+
+ case class JListWrapper[A](val underlying: ju.List[A]) extends mutable.AbstractBuffer[A] with mutable.Buffer[A] {
+ def length = underlying.size
+ override def isEmpty = underlying.isEmpty
+ override def iterator: Iterator[A] = underlying.iterator
+ def apply(i: Int) = underlying.get(i)
+ def update(i: Int, elem: A) = underlying.set(i, elem)
+ def +=:(elem: A) = { underlying.subList(0, 0) add elem; this }
+ def +=(elem: A): this.type = { underlying add elem; this }
+ def insertAll(i: Int, elems: Traversable[A]) = {
+ val ins = underlying.subList(0, i)
+ elems.seq.foreach(ins.add(_))
+ }
+ def remove(i: Int) = underlying.remove(i)
+ def clear() = underlying.clear()
+ def result = this
+ }
+
+ class SetWrapper[A](underlying: Set[A]) extends ju.AbstractSet[A] {
+ self =>
+ def size = underlying.size
+ def iterator = new ju.Iterator[A] {
+ val ui = underlying.iterator
+ var prev: Option[A] = None
+ def hasNext = ui.hasNext
+ def next = { val e = ui.next; prev = Some(e); e }
+ def remove = prev match {
+ case Some(e) =>
+ underlying match {
+ case ms: mutable.Set[a] =>
+ ms remove e
+ prev = None
+ case _ =>
+ throw new UnsupportedOperationException("remove")
+ }
+ case _ =>
+ throw new IllegalStateException("next must be called at least once before remove")
+ }
+ }
+ }
+
+ case class MutableSetWrapper[A](underlying: mutable.Set[A]) extends SetWrapper[A](underlying) {
+ override def add(elem: A) = {
+ val sz = underlying.size
+ underlying += elem
+ sz < underlying.size
+ }
+ override def remove(elem: AnyRef) =
+ try underlying remove elem.asInstanceOf[A]
+ catch { case ex: ClassCastException => false }
+ override def clear() = underlying.clear()
+ }
+
+ case class JSetWrapper[A](underlying: ju.Set[A]) extends mutable.AbstractSet[A] with mutable.Set[A] with mutable.SetLike[A, JSetWrapper[A]] {
+
+ override def size = underlying.size
+
+ def iterator = underlying.iterator
+
+ def contains(elem: A): Boolean = underlying.contains(elem)
+
+ def +=(elem: A): this.type = { underlying add elem; this }
+ def -=(elem: A): this.type = { underlying remove elem; this }
+
+ override def add(elem: A): Boolean = underlying add elem
+ override def remove(elem: A): Boolean = underlying remove elem
+ override def clear() = underlying.clear()
+
+ override def empty = JSetWrapper(new ju.HashSet[A])
+ }
+
+ class MapWrapper[A, B](underlying: Map[A, B]) extends ju.AbstractMap[A, B] { self =>
+ override def size = underlying.size
+
+ override def get(key: AnyRef): B = try {
+ underlying get key.asInstanceOf[A] match {
+ case None => null.asInstanceOf[B]
+ case Some(v) => v
+ }
+ } catch {
+ case ex: ClassCastException => null.asInstanceOf[B]
+ }
+
+ override def entrySet: ju.Set[ju.Map.Entry[A, B]] = new ju.AbstractSet[ju.Map.Entry[A, B]] {
+ def size = self.size
+
+ def iterator = new ju.Iterator[ju.Map.Entry[A, B]] {
+ val ui = underlying.iterator
+ var prev : Option[A] = None
+
+ def hasNext = ui.hasNext
+
+ def next() = {
+ val (k, v) = ui.next
+ prev = Some(k)
+ new ju.Map.Entry[A, B] {
+ def getKey = k
+ def getValue = v
+ def setValue(v1 : B) = self.put(k, v1)
+ override def hashCode = k.hashCode + v.hashCode
+ override def equals(other: Any) = other match {
+ case e: ju.Map.Entry[_, _] => k == e.getKey && v == e.getValue
+ case _ => false
+ }
+ }
+ }
+
+ def remove() {
+ prev match {
+ case Some(k) =>
+ underlying match {
+ case mm: mutable.Map[a, _] =>
+ mm remove k
+ prev = None
+ case _ =>
+ throw new UnsupportedOperationException("remove")
+ }
+ case _ =>
+ throw new IllegalStateException("next must be called at least once before remove")
+ }
+ }
+ }
+ }
+ }
+
+ case class MutableMapWrapper[A, B](underlying: mutable.Map[A, B]) extends MapWrapper[A, B](underlying) {
+ override def put(k: A, v: B) = underlying.put(k, v) match {
+ case Some(v1) => v1
+ case None => null.asInstanceOf[B]
+ }
+
+ override def remove(k: AnyRef): B = try {
+ underlying remove k.asInstanceOf[A] match {
+ case None => null.asInstanceOf[B]
+ case Some(v) => v
+ }
+ } catch {
+ case ex: ClassCastException => null.asInstanceOf[B]
+ }
+
+ override def clear() = underlying.clear()
+ }
+
+ trait JMapWrapperLike[A, B, +Repr <: mutable.MapLike[A, B, Repr] with mutable.Map[A, B]] extends mutable.Map[A, B] with mutable.MapLike[A, B, Repr] {
+ def underlying: ju.Map[A, B]
+
+ override def size = underlying.size
+
+ def get(k: A) = {
+ val v = underlying get k
+ if (v != null)
+ Some(v)
+ else if (underlying containsKey k)
+ Some(null.asInstanceOf[B])
+ else
+ None
+ }
+
+ def +=(kv: (A, B)): this.type = { underlying.put(kv._1, kv._2); this }
+ def -=(key: A): this.type = { underlying remove key; this }
+
+ override def put(k: A, v: B): Option[B] = {
+ val r = underlying.put(k, v)
+ if (r != null) Some(r) else None
+ }
+
+ override def update(k: A, v: B) { underlying.put(k, v) }
+
+ override def remove(k: A): Option[B] = {
+ val r = underlying remove k
+ if (r != null) Some(r) else None
+ }
+
+ def iterator: Iterator[(A, B)] = new AbstractIterator[(A, B)] {
+ val ui = underlying.entrySet.iterator
+ def hasNext = ui.hasNext
+ def next() = { val e = ui.next(); (e.getKey, e.getValue) }
+ }
+
+ override def clear() = underlying.clear()
+
+ override def empty: Repr = null.asInstanceOf[Repr]
+ }
+
+ case class JMapWrapper[A, B](val underlying : ju.Map[A, B]) extends mutable.AbstractMap[A, B] with JMapWrapperLike[A, B, JMapWrapper[A, B]] {
+ override def empty = JMapWrapper(new ju.HashMap[A, B])
+ }
+
+ class ConcurrentMapWrapper[A, B](override val underlying: mutable.ConcurrentMap[A, B]) extends MutableMapWrapper[A, B](underlying) with juc.ConcurrentMap[A, B] {
+
+ def putIfAbsent(k: A, v: B) = underlying.putIfAbsent(k, v) match {
+ case Some(v) => v
+ case None => null.asInstanceOf[B]
+ }
+
+ def remove(k: AnyRef, v: AnyRef) = try {
+ underlying.remove(k.asInstanceOf[A], v.asInstanceOf[B])
+ } catch {
+ case ex: ClassCastException =>
+ false
+ }
+
+ def replace(k: A, v: B): B = underlying.replace(k, v) match {
+ case Some(v) => v
+ case None => null.asInstanceOf[B]
+ }
+
+ def replace(k: A, oldval: B, newval: B) = underlying.replace(k, oldval, newval)
+ }
+
+ case class JConcurrentMapWrapper[A, B](val underlying: juc.ConcurrentMap[A, B]) extends mutable.AbstractMap[A, B] with JMapWrapperLike[A, B, JConcurrentMapWrapper[A, B]] with mutable.ConcurrentMap[A, B] {
+ override def get(k: A) = {
+ val v = underlying get k
+ if (v != null) Some(v)
+ else None
+ }
+
+ override def empty = new JConcurrentMapWrapper(new juc.ConcurrentHashMap[A, B])
+
+ def putIfAbsent(k: A, v: B): Option[B] = {
+ val r = underlying.putIfAbsent(k, v)
+ if (r != null) Some(r) else None
+ }
+
+ def remove(k: A, v: B): Boolean = underlying.remove(k, v)
+
+ def replace(k: A, v: B): Option[B] = {
+ val prev = underlying.replace(k, v)
+ if (prev != null) Some(prev) else None
+ }
+
+ def replace(k: A, oldvalue: B, newvalue: B): Boolean =
+ underlying.replace(k, oldvalue, newvalue)
+ }
+
+ case class DictionaryWrapper[A, B](underlying: mutable.Map[A, B]) extends ju.Dictionary[A, B] {
+ def size: Int = underlying.size
+ def isEmpty: Boolean = underlying.isEmpty
+ def keys: ju.Enumeration[A] = asJavaEnumeration(underlying.keysIterator)
+ def elements: ju.Enumeration[B] = asJavaEnumeration(underlying.valuesIterator)
+ def get(key: AnyRef) = try {
+ underlying get key.asInstanceOf[A] match {
+ case None => null.asInstanceOf[B]
+ case Some(v) => v
+ }
+ } catch {
+ case ex: ClassCastException => null.asInstanceOf[B]
+ }
+ def put(key: A, value: B): B = underlying.put(key, value) match {
+ case Some(v) => v
+ case None => null.asInstanceOf[B]
+ }
+ override def remove(key: AnyRef) = try {
+ underlying remove key.asInstanceOf[A] match {
+ case None => null.asInstanceOf[B]
+ case Some(v) => v
+ }
+ } catch {
+ case ex: ClassCastException => null.asInstanceOf[B]
+ }
+ }
+
+ case class JDictionaryWrapper[A, B](underlying: ju.Dictionary[A, B]) extends mutable.AbstractMap[A, B] with mutable.Map[A, B] {
+ override def size: Int = underlying.size
+
+ def get(k: A) = {
+ val v = underlying get k
+ if (v != null) Some(v) else None
+ }
+
+ def +=(kv: (A, B)): this.type = { underlying.put(kv._1, kv._2); this }
+ def -=(key: A): this.type = { underlying remove key; this }
+
+ override def put(k: A, v: B): Option[B] = {
+ val r = underlying.put(k, v)
+ if (r != null) Some(r) else None
+ }
+
+ override def update(k: A, v: B) { underlying.put(k, v) }
+
+ override def remove(k: A): Option[B] = {
+ val r = underlying remove k
+ if (r != null) Some(r) else None
+ }
+
+ def iterator = enumerationAsScalaIterator(underlying.keys) map (k => (k, underlying get k))
+
+ override def clear() = underlying.clear()
+ }
+
+ case class JPropertiesWrapper(underlying: ju.Properties) extends mutable.AbstractMap[String, String]
+ with mutable.Map[String, String]
+ with mutable.MapLike[String, String, JPropertiesWrapper] {
+
+ override def size = underlying.size
+
+ def get(k: String) = {
+ val v = underlying get k
+ if (v != null) Some(v.asInstanceOf[String]) else None
+ }
+
+ def +=(kv: (String, String)): this.type = { underlying.put(kv._1, kv._2); this }
+ def -=(key: String): this.type = { underlying remove key; this }
+
+ override def put(k: String, v: String): Option[String] = {
+ val r = underlying.put(k, v)
+ if (r != null) Some(r.asInstanceOf[String]) else None
+ }
+
+ override def update(k: String, v: String) { underlying.put(k, v) }
+
+ override def remove(k: String): Option[String] = {
+ val r = underlying remove k
+ if (r != null) Some(r.asInstanceOf[String]) else None
+ }
+
+ def iterator: Iterator[(String, String)] = new AbstractIterator[(String, String)] {
+ val ui = underlying.entrySet.iterator
+ def hasNext = ui.hasNext
+ def next() = {
+ val e = ui.next()
+ (e.getKey.asInstanceOf[String], e.getValue.asInstanceOf[String])
+ }
+ }
+
+ override def clear() = underlying.clear()
+
+ override def empty = JPropertiesWrapper(new ju.Properties)
+
+ def getProperty(key: String) = underlying.getProperty(key)
+
+ def getProperty(key: String, defaultValue: String) =
+ underlying.getProperty(key, defaultValue)
+
+ def setProperty(key: String, value: String) =
+ underlying.setProperty(key, value)
+ }
+}
+
+object Wrappers extends Wrappers
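A small, self-contained sketch (hypothetical class name, not part of this commit) of the forwarding pattern every wrapper above follows: the Java view delegates each call to the wrapped Scala collection, so mutations write through and nothing is copied.

import java.{ util => ju }
import scala.collection.mutable

// Hypothetical illustration of the forwarding pattern used by the wrappers above.
class BufferAsJavaList[A](val underlying: mutable.Buffer[A]) extends ju.AbstractList[A] {
  def get(i: Int): A = underlying(i)
  def size: Int = underlying.size
  override def add(elem: A): Boolean = { underlying += elem; true }
}

val buf = mutable.Buffer(1, 2)
val jview: ju.List[Int] = new BufferAsJavaList(buf)
jview.add(3)
// buf is now Buffer(1, 2, 3): the Java view writes through to the wrapped buffer.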
diff --git a/src/library/scala/collection/convert/package.scala b/src/library/scala/collection/convert/package.scala
new file mode 100644
index 0000000000..2f8bca1e1f
--- /dev/null
+++ b/src/library/scala/collection/convert/package.scala
@@ -0,0 +1,18 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2006-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://www.scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.collection
+
+package object convert {
+ val decorateAsJava = new DecorateAsJava { }
+ val decorateAsScala = new DecorateAsScala { }
+ val decorateAll = new DecorateAsJava with DecorateAsScala { }
+ val wrapAsJava = new WrapAsJava { }
+ val wrapAsScala = new WrapAsScala { }
+ val wrapAll = new WrapAsJava with WrapAsScala { }
+}
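A hedged usage sketch of these vals, assuming DecorateAsJava and DecorateAsScala carry the familiar asJava/asScala decorators (their definitions are not part of this hunk):

import scala.collection.convert.decorateAll._

val jmap: java.util.Map[String, Int] = Map("a" -> 1).asJava   // Scala Map viewed as a java.util.Map
val sbuf: scala.collection.mutable.Buffer[Int] =
  new java.util.ArrayList[Int]().asScala                      // java.util.List viewed as a mutable.Buffer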
diff --git a/src/library/scala/collection/generic/GenericTraversableTemplate.scala b/src/library/scala/collection/generic/GenericTraversableTemplate.scala
index 7333074778..6586434924 100644
--- a/src/library/scala/collection/generic/GenericTraversableTemplate.scala
+++ b/src/library/scala/collection/generic/GenericTraversableTemplate.scala
@@ -119,22 +119,25 @@ trait GenericTraversableTemplate[+A, +CC[X] <: GenTraversable[X]] extends HasNew
* a $coll formed by the elements of these traversable
* collections.
*
- * The resulting collection's type will be guided by the
- * static type of $coll. For example:
- *
- * {{{
- * val xs = List(Set(1, 2, 3), Set(1, 2, 3))
- * // xs == List(1, 2, 3, 1, 2, 3)
- *
- * val ys = Set(List(1, 2, 3), List(3, 2, 1))
- * // ys == Set(1, 2, 3)
- * }}}
- *
* @tparam B the type of the elements of each traversable collection.
* @param asTraversable an implicit conversion which asserts that the element
* type of this $coll is a `GenTraversable`.
* @return a new $coll resulting from concatenating all element ${coll}s.
+ *
* @usecase def flatten[B]: $Coll[B]
+ *
+ * @inheritdoc
+ *
+ * The resulting collection's type will be guided by the
+ * static type of $coll. For example:
+ *
+ * {{{
+ * val xs = List(Set(1, 2, 3), Set(1, 2, 3))
+ * // xs == List(1, 2, 3, 1, 2, 3)
+ *
+ * val ys = Set(List(1, 2, 3), List(3, 2, 1))
+ * // ys == Set(1, 2, 3)
+ * }}}
*/
def flatten[B](implicit asTraversable: A => /*<:<!!!*/ GenTraversableOnce[B]): CC[B] = {
val b = genericBuilder[B]
diff --git a/src/library/scala/collection/immutable/List.scala b/src/library/scala/collection/immutable/List.scala
index 381fcf3117..870c179b2d 100644
--- a/src/library/scala/collection/immutable/List.scala
+++ b/src/library/scala/collection/immutable/List.scala
@@ -93,8 +93,12 @@ sealed abstract class List[+A] extends AbstractSeq[A]
* @param x the element to prepend.
* @return a list which contains `x` as first element and
* which continues with this list.
- * @example `1 :: List(2, 3) = List(2, 3).::(1) = List(1, 2, 3)`
+ *
* @usecase def ::(x: A): List[A]
+ * @inheritdoc
+ *
+ * Example:
+ * {{{1 :: List(2, 3) = List(2, 3).::(1) = List(1, 2, 3)}}}
*/
def ::[B >: A] (x: B): List[B] =
new scala.collection.immutable.::(x, this)
@@ -103,8 +107,12 @@ sealed abstract class List[+A] extends AbstractSeq[A]
* @param prefix The list elements to prepend.
* @return a list resulting from the concatenation of the given
* list `prefix` and this list.
- * @example `List(1, 2) ::: List(3, 4) = List(3, 4).:::(List(1, 2)) = List(1, 2, 3, 4)`
+ *
* @usecase def :::(prefix: List[A]): List[A]
+ * @inheritdoc
+ *
+ * Example:
+ * {{{List(1, 2) ::: List(3, 4) = List(3, 4).:::(List(1, 2)) = List(1, 2, 3, 4)}}}
*/
def :::[B >: A](prefix: List[B]): List[B] =
if (isEmpty) prefix
@@ -117,7 +125,9 @@ sealed abstract class List[+A] extends AbstractSeq[A]
*
* @param prefix the prefix to reverse and then prepend
* @return the concatenation of the reversed prefix and the current list.
+ *
* @usecase def reverse_:::(prefix: List[A]): List[A]
+ * @inheritdoc
*/
def reverse_:::[B >: A](prefix: List[B]): List[B] = {
var these: List[B] = this
@@ -137,7 +147,9 @@ sealed abstract class List[+A] extends AbstractSeq[A]
* @tparam B the element type of the returned collection.
* @return a list resulting from applying the given function
* `f` to each element of this list and collecting the results.
+ *
* @usecase def mapConserve(f: A => A): List[A]
+ * @inheritdoc
*/
def mapConserve[B >: A <: AnyRef](f: A => B): List[B] = {
@tailrec
diff --git a/src/library/scala/collection/immutable/StringLike.scala b/src/library/scala/collection/immutable/StringLike.scala
index f9697565de..fc4e7bf0a8 100644
--- a/src/library/scala/collection/immutable/StringLike.scala
+++ b/src/library/scala/collection/immutable/StringLike.scala
@@ -207,9 +207,20 @@ self =>
/** You can follow a string with `.r`, turning it into a `Regex`. E.g.
*
- * """A\w*""".r is the regular expression for identifiers starting with `A`.
+ * `"""A\w*""".r` is the regular expression for identifiers starting with `A`.
*/
- def r: Regex = new Regex(toString)
+ def r: Regex = r()
+
+ /** You can follow a string with `.r(g1, ... , gn)`, turning it into a `Regex`,
+ * with group names g1 through gn.
+ *
+ * `"""(\d\d)-(\d\d)-(\d\d\d\d)""".r("month", "day", "year")` matches dates
+ * and provides its subcomponents through groups named "month", "day" and
+ * "year".
+ *
+ * @param groupNames The names of the groups in the pattern, in the order they appear.
+ */
+ def r(groupNames: String*): Regex = new Regex(toString, groupNames: _*)
def toBoolean: Boolean = parseBoolean(toString)
def toByte: Byte = java.lang.Byte.parseByte(toString)
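A short sketch of the named-group form added above (pattern and group names are illustrative):

val date = """(\d\d\d\d)-(\d\d)-(\d\d)""".r("year", "month", "day")

"2012-03-14" match {
  case date(y, m, d) => println(y + "-" + m + "-" + d)    // positional extraction still works
}
date.findFirstMatchIn("2012-03-14").map(_ group "month")  // Some("03"), looked up by group name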
diff --git a/src/library/scala/collection/mutable/FlatArray.scala b/src/library/scala/collection/mutable/FlatArray.scala
index a7f994bf74..3e43b66ecf 100644
--- a/src/library/scala/collection/mutable/FlatArray.scala
+++ b/src/library/scala/collection/mutable/FlatArray.scala
@@ -60,8 +60,15 @@ extends AbstractSeq[T]
*/
object FlatArray {
- def empty[Boxed, Unboxed](elems: Boxed*)
- (implicit boxings: BoxingConversions[Boxed, Unboxed], elemManifest: ClassManifest[Unboxed]): FlatArray[Boxed] = apply()
+ def ofDim[Boxed, Unboxed](size:Int)
+ (implicit boxings: BoxingConversions[Boxed, Unboxed],
+ manifest: ClassManifest[Unboxed]): FlatArray[Boxed] = {
+ val elems = Array.ofDim[Unboxed](size)
+ new FlatArray.Impl(elems, boxings, manifest)
+ }
+
+ def empty[Boxed, Unboxed](implicit boxings: BoxingConversions[Boxed, Unboxed],
+ elemManifest: ClassManifest[Unboxed]): FlatArray[Boxed] = apply()
def apply[Boxed, Unboxed](elems: Boxed*)
(implicit boxings: BoxingConversions[Boxed, Unboxed], elemManifest: ClassManifest[Unboxed]): FlatArray[Boxed] = {
diff --git a/src/library/scala/collection/mutable/WeakHashMap.scala b/src/library/scala/collection/mutable/WeakHashMap.scala
index 89d7c7a695..4e09755acf 100644
--- a/src/library/scala/collection/mutable/WeakHashMap.scala
+++ b/src/library/scala/collection/mutable/WeakHashMap.scala
@@ -6,14 +6,11 @@
** |/ **
\* */
-
-
package scala.collection
package mutable
-import JavaConversions._
import generic._
-
+import convert.Wrappers._
/** A hash map with references to entries which are weakly reachable. Entries are
* removed from this map when the key is no longer (strongly) referenced. This class wraps
diff --git a/src/library/scala/collection/parallel/ParIterableLike.scala b/src/library/scala/collection/parallel/ParIterableLike.scala
index 7e0fa366ab..5551c04ce2 100644
--- a/src/library/scala/collection/parallel/ParIterableLike.scala
+++ b/src/library/scala/collection/parallel/ParIterableLike.scala
@@ -668,8 +668,9 @@ self: ParIterableLike[T, Repr, Sequential] =>
* @return a collection containing the prefix scan of the elements in the original collection
*
* @usecase def scan(z: T)(op: (T, T) => T): $Coll[T]
+ * @inheritdoc
*
- * @return a new $coll containing the prefix scan of the elements in this $coll
+ * @return a new $coll containing the prefix scan of the elements in this $coll
*/
def scan[U >: T, That](z: U)(op: (U, U) => U)(implicit bf: CanBuildFrom[Repr, U, That]): That = if (bf(repr).isCombiner) {
if (tasksupport.parallelismLevel > 1) {
diff --git a/src/library/scala/collection/parallel/ParSeqLike.scala b/src/library/scala/collection/parallel/ParSeqLike.scala
index 9f28a286ca..b3c527da84 100644
--- a/src/library/scala/collection/parallel/ParSeqLike.scala
+++ b/src/library/scala/collection/parallel/ParSeqLike.scala
@@ -285,7 +285,6 @@ self =>
}
/** Computes the multiset intersection between this $coll and another sequence.
- * $mayNotTerminateInf
*
* @param that the sequence of elements to intersect with.
* @tparam B the element type of the returned $coll.
@@ -296,12 +295,17 @@ self =>
* If an element value `x` appears
* ''n'' times in `that`, then the first ''n'' occurrences of `x` will be retained
* in the result, but any following occurrences will be omitted.
+ *
* @usecase def intersect(that: Seq[T]): $Coll[T]
- * @return a new $coll which contains all elements of this $coll
- * which also appear in `that`.
- * If an element value `x` appears
- * ''n'' times in `that`, then the first ''n'' occurrences of `x` will be retained
- * in the result, but any following occurrences will be omitted.
+ * @inheritdoc
+ *
+ * $mayNotTerminateInf
+ *
+ * @return a new $coll which contains all elements of this $coll
+ * which also appear in `that`.
+ * If an element value `x` appears
+ * ''n'' times in `that`, then the first ''n'' occurrences of `x` will be retained
+ * in the result, but any following occurrences will be omitted.
*/
def intersect[U >: T](that: GenSeq[U]) = sequentially {
_ intersect that
diff --git a/src/library/scala/concurrent/Awaitable.scala b/src/library/scala/concurrent/Awaitable.scala
index c38e668f30..6c9995eb05 100644
--- a/src/library/scala/concurrent/Awaitable.scala
+++ b/src/library/scala/concurrent/Awaitable.scala
@@ -11,7 +11,7 @@ package scala.concurrent
import scala.annotation.implicitNotFound
-import scala.util.Duration
+import scala.concurrent.util.Duration
diff --git a/src/library/scala/concurrent/ConcurrentPackageObject.scala b/src/library/scala/concurrent/ConcurrentPackageObject.scala
index 7d005838d3..3471095051 100644
--- a/src/library/scala/concurrent/ConcurrentPackageObject.scala
+++ b/src/library/scala/concurrent/ConcurrentPackageObject.scala
@@ -12,7 +12,8 @@ package scala.concurrent
import java.util.concurrent.{ Executors, ExecutorService }
import scala.concurrent.forkjoin.ForkJoinPool
-import scala.util.{ Duration, Try, Success, Failure }
+import scala.util.{ Try, Success, Failure }
+import scala.concurrent.util.Duration
import ConcurrentPackageObject._
@@ -26,8 +27,8 @@ abstract class ConcurrentPackageObject {
new impl.ExecutionContextImpl(getExecutorService)
private[concurrent] def getExecutorService: AnyRef =
- if (util.Properties.isJavaAtLeast("1.6")) {
- val vendor = util.Properties.javaVmVendor
+ if (scala.util.Properties.isJavaAtLeast("1.6")) {
+ val vendor = scala.util.Properties.javaVmVendor
if ((vendor contains "Oracle") || (vendor contains "Sun") || (vendor contains "Apple")) new ForkJoinPool
else Executors.newCachedThreadPool()
} else Executors.newCachedThreadPool()
diff --git a/src/library/scala/concurrent/DelayedLazyVal.scala b/src/library/scala/concurrent/DelayedLazyVal.scala
index a17153bad5..96a66d83b6 100644
--- a/src/library/scala/concurrent/DelayedLazyVal.scala
+++ b/src/library/scala/concurrent/DelayedLazyVal.scala
@@ -40,9 +40,8 @@ class DelayedLazyVal[T](f: () => T, body: => Unit) {
def apply(): T = if (isDone) complete else f()
// TODO replace with scala.concurrent.future { ... }
- ops.future {
+ future {
body
_isDone = true
}
-
}
diff --git a/src/library/scala/concurrent/ExecutionContext.scala b/src/library/scala/concurrent/ExecutionContext.scala
index eb1b3355c0..c4a45f9fb5 100644
--- a/src/library/scala/concurrent/ExecutionContext.scala
+++ b/src/library/scala/concurrent/ExecutionContext.scala
@@ -12,7 +12,7 @@ package scala.concurrent
import java.util.concurrent.atomic.{ AtomicInteger }
import java.util.concurrent.{ Executors, Future => JFuture, Callable }
-import scala.util.Duration
+import scala.concurrent.util.Duration
import scala.util.{ Try, Success, Failure }
import scala.concurrent.forkjoin.{ ForkJoinPool, RecursiveTask => FJTask, RecursiveAction, ForkJoinWorkerThread }
import scala.collection.generic.CanBuildFrom
diff --git a/src/library/scala/concurrent/Future.scala b/src/library/scala/concurrent/Future.scala
index eb54b61db0..1dc8e38355 100644
--- a/src/library/scala/concurrent/Future.scala
+++ b/src/library/scala/concurrent/Future.scala
@@ -17,7 +17,8 @@ import java.util.{ LinkedList => JLinkedList }
import java.{ lang => jl }
import java.util.concurrent.atomic.{ AtomicReferenceFieldUpdater, AtomicInteger, AtomicBoolean }
-import scala.util.{ Timeout, Duration, Try, Success, Failure }
+import scala.util.{ Try, Success, Failure }
+import scala.concurrent.util.Duration
import scala.Option
import scala.annotation.tailrec
diff --git a/src/library/scala/concurrent/Scheduler.scala b/src/library/scala/concurrent/Scheduler.scala
index 39d798e6b4..e2eb4d69fe 100644
--- a/src/library/scala/concurrent/Scheduler.scala
+++ b/src/library/scala/concurrent/Scheduler.scala
@@ -8,7 +8,7 @@
package scala.concurrent
-import scala.util.Duration
+import scala.concurrent.util.Duration
/** A service for scheduling tasks and thunks for one-time, or periodic execution.
*/
diff --git a/src/library/scala/concurrent/default/SchedulerImpl.scala.disabled b/src/library/scala/concurrent/default/SchedulerImpl.scala.disabled
index 745d2d1a15..241efa8857 100644
--- a/src/library/scala/concurrent/default/SchedulerImpl.scala.disabled
+++ b/src/library/scala/concurrent/default/SchedulerImpl.scala.disabled
@@ -9,7 +9,7 @@
package scala.concurrent
package default
-import scala.util.Duration
+import scala.concurrent.util.Duration
private[concurrent] final class SchedulerImpl extends Scheduler {
private val timer =
diff --git a/src/library/scala/concurrent/default/TaskImpl.scala.disabled b/src/library/scala/concurrent/default/TaskImpl.scala.disabled
index 94e54cb372..50753a7154 100644
--- a/src/library/scala/concurrent/default/TaskImpl.scala.disabled
+++ b/src/library/scala/concurrent/default/TaskImpl.scala.disabled
@@ -7,7 +7,7 @@ import java.util.concurrent.atomic.AtomicReferenceFieldUpdater
import scala.concurrent.forkjoin.{ ForkJoinPool, RecursiveAction, ForkJoinWorkerThread }
import scala.util.Try
import scala.util
-import scala.util.Duration
+import scala.concurrent.util.Duration
import scala.annotation.tailrec
diff --git a/src/library/scala/concurrent/impl/ExecutionContextImpl.scala b/src/library/scala/concurrent/impl/ExecutionContextImpl.scala
index 6201de14d7..8ac745fd25 100644
--- a/src/library/scala/concurrent/impl/ExecutionContextImpl.scala
+++ b/src/library/scala/concurrent/impl/ExecutionContextImpl.scala
@@ -13,7 +13,8 @@ package scala.concurrent.impl
import java.util.concurrent.{Callable, ExecutorService}
import scala.concurrent.forkjoin._
import scala.concurrent.{ExecutionContext, resolver, Awaitable, body2awaitable}
-import scala.util.{ Duration, Try, Success, Failure }
+import scala.util.{ Try, Success, Failure }
+import scala.concurrent.util.{ Duration }
import scala.collection.mutable.Stack
diff --git a/src/library/scala/concurrent/impl/Promise.scala b/src/library/scala/concurrent/impl/Promise.scala
index 5238bc51db..4a983b5001 100644
--- a/src/library/scala/concurrent/impl/Promise.scala
+++ b/src/library/scala/concurrent/impl/Promise.scala
@@ -14,7 +14,7 @@ import java.util.concurrent.TimeUnit.{ NANOSECONDS, MILLISECONDS }
import java.util.concurrent.atomic.AtomicReferenceFieldUpdater
import scala.concurrent.{Awaitable, ExecutionContext, resolve, resolver, blocking, CanAwait, TimeoutException}
//import scala.util.continuations._
-import scala.util.Duration
+import scala.concurrent.util.Duration
import scala.util.Try
import scala.util
import scala.annotation.tailrec
diff --git a/src/library/scala/concurrent/package.scala b/src/library/scala/concurrent/package.scala
index 7cc48c09b2..204b3f2673 100644
--- a/src/library/scala/concurrent/package.scala
+++ b/src/library/scala/concurrent/package.scala
@@ -8,7 +8,8 @@
package scala
-import scala.util.{ Duration, Try, Success, Failure }
+import scala.util.{ Try, Success, Failure }
+import scala.concurrent.util.Duration
/** This package object contains primitives for concurrent and parallel programming.
*/
diff --git a/src/library/scala/concurrent/util/Duration.scala b/src/library/scala/concurrent/util/Duration.scala
new file mode 100644
index 0000000000..33d034da76
--- /dev/null
+++ b/src/library/scala/concurrent/util/Duration.scala
@@ -0,0 +1,578 @@
+/**
+ * Copyright (C) 2009-2012 Typesafe Inc. <http://www.typesafe.com>
+ */
+
+package scala.concurrent.util
+
+import java.util.concurrent.TimeUnit
+import TimeUnit._
+import java.lang.{ Double ⇒ JDouble }
+
+object DurationImplicits {
+ trait Classifier[C] {
+ type R
+ def convert(d: FiniteDuration): R
+ }
+
+ object span
+ implicit object spanConvert extends Classifier[span.type] {
+ type R = FiniteDuration
+ def convert(d: FiniteDuration) = d
+ }
+
+ object fromNow
+ implicit object fromNowConvert extends Classifier[fromNow.type] {
+ type R = Deadline
+ def convert(d: FiniteDuration) = Deadline.now + d
+ }
+
+ implicit def intToDurationInt(n: Int) = new DurationInt(n)
+ implicit def longToDurationLong(n: Long) = new DurationLong(n)
+ implicit def doubleToDurationDouble(d: Double) = new DurationDouble(d)
+
+ implicit def pairIntToDuration(p: (Int, TimeUnit)) = Duration(p._1, p._2)
+ implicit def pairLongToDuration(p: (Long, TimeUnit)) = Duration(p._1, p._2)
+ implicit def durationToPair(d: Duration) = (d.length, d.unit)
+
+ /*
+ * Avoid reflection-based invocation by providing concrete multiplier classes instead of structural (duck) types
+ */
+ class IntMult(i: Int) {
+ def *(d: Duration) = d * i
+ }
+ implicit def intMult(i: Int) = new IntMult(i)
+
+ class LongMult(l: Long) {
+ def *(d: Duration) = d * l
+ }
+ implicit def longMult(l: Long) = new LongMult(l)
+
+ class DoubleMult(f: Double) {
+ def *(d: Duration) = d * f
+ }
+ implicit def doubleMult(f: Double) = new DoubleMult(f)
+}
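A hedged sketch of the implicit syntax these definitions enable (values are illustrative):

import scala.concurrent.util.Duration
import scala.concurrent.util.DurationImplicits._

val t1 = 5.seconds              // FiniteDuration, via intToDurationInt
val t2 = 100.millis + 1.second  // arithmetic stays within FiniteDuration
val t3 = 2 * t1                 // IntMult supplies Int * Duration without structural types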
+
+case class Deadline private (time: Duration) {
+ def +(other: Duration): Deadline = copy(time = time + other)
+ def -(other: Duration): Deadline = copy(time = time - other)
+ def -(other: Deadline): Duration = time - other.time
+ def timeLeft: Duration = this - Deadline.now
+ def hasTimeLeft(): Boolean = !isOverdue()
+ def isOverdue(): Boolean = (time.toNanos - System.nanoTime()) < 0
+}
+
+object Deadline {
+ def now: Deadline = Deadline(Duration(System.nanoTime, NANOSECONDS))
+}
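A small hedged sketch of how a Deadline is meant to be used, as an absolute point derived from System.nanoTime:

import scala.concurrent.util.{ Deadline, Duration }
import java.util.concurrent.TimeUnit.SECONDS

val deadline = Deadline.now + Duration(30, SECONDS)
if (deadline.hasTimeLeft()) {
  val remaining: Duration = deadline.timeLeft   // shrinks towards zero, then isOverdue() flips
}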
+
+object Duration {
+ implicit def timeLeft(implicit d: Deadline): Duration = d.timeLeft
+
+ def apply(length: Long, unit: TimeUnit): FiniteDuration = new FiniteDuration(length, unit)
+ def apply(length: Double, unit: TimeUnit): FiniteDuration = fromNanos(unit.toNanos(1) * length)
+ def apply(length: Long, unit: String): FiniteDuration = {
+ val (mult, timeUnit) = Duration.timeUnit(unit)
+ new FiniteDuration(length * mult, timeUnit)
+ }
+
+ /**
+ * Construct a Duration by parsing a String. In case of a format error, a
+ * RuntimeException is thrown. See `unapply(String)` for more information.
+ */
+ def apply(s: String): Duration = unapply(s) getOrElse sys.error("format error " + s)
+
+ private val RE = ("""^\s*([\+|-]?\d+(?:\.\d+)?)\s*""" + // length part
+ "(?:" + // units are distinguished in separate match groups
+ "(d|day|days)|" +
+ "(h|hour|hours)|" +
+ "(min|minute|minutes)|" +
+ "(s|sec|second|seconds)|" +
+ "(ms|milli|millis|millisecond|milliseconds)|" +
+ "(µs|micro|micros|microsecond|microseconds)|" +
+ "(ns|nano|nanos|nanosecond|nanoseconds)" +
+ """)\s*$""").r // close the non-capturing group
+ private val REinf = """^\s*(?:\+|Plus)?Inf\s*$""".r
+ private val REminf = """^\s*(?:-|Minus)Inf\s*""".r
+
+ /**
+ * Deconstruct a Duration into `Long` length and [[java.util.concurrent.TimeUnit]] if it is a
+ * [[scala.concurrent.util.FiniteDuration]].
+ *
+ * @param d Duration to be deconstructed.
+ */
+ def unapply(d: Duration): Option[(Long, TimeUnit)] = {
+ if (d.finite_?) {
+ Some((d.length, d.unit))
+ } else {
+ None
+ }
+ }
+
+ /**
+ * Parse String, return None if no match. Format is `"<length><unit>"`, where
+ * whitespace is allowed before, between and after the parts. Infinities are
+ * designated by `"Inf"`, `"PlusInf"`, `"+Inf"` and `"-Inf"` or `"MinusInf"`.
+ */
+ def unapply(s: String): Option[Duration] = s match {
+ case RE(length, d, h, m, s, ms, mus, ns) ⇒
+ if (d ne null)
+ Some(Duration(JDouble.parseDouble(length) * 86400, SECONDS))
+ else if (h ne null)
+ Some(Duration(JDouble.parseDouble(length) * 3600, SECONDS))
+ else if (m ne null)
+ Some(Duration(JDouble.parseDouble(length) * 60, SECONDS))
+ else if (s ne null)
+ Some(Duration(JDouble.parseDouble(length), SECONDS))
+ else if (ms ne null)
+ Some(Duration(JDouble.parseDouble(length), MILLISECONDS))
+ else if (mus ne null)
+ Some(Duration(JDouble.parseDouble(length), MICROSECONDS))
+ else if (ns ne null)
+ Some(Duration(JDouble.parseDouble(length), NANOSECONDS))
+ else
+ sys.error("made some error in regex (should not be possible)")
+ case REinf() ⇒ Some(Inf)
+ case REminf() ⇒ Some(MinusInf)
+ case _ ⇒ None
+ }
+
+ def fromNanos(nanos: Double): FiniteDuration =
+ fromNanos((nanos + 0.5).asInstanceOf[Long])
+
+ def fromNanos(nanos: Long): FiniteDuration = {
+ if (nanos % 1000000000L == 0) {
+ Duration(nanos / 1000000000L, SECONDS)
+ } else if (nanos % 1000000L == 0) {
+ Duration(nanos / 1000000L, MILLISECONDS)
+ } else if (nanos % 1000L == 0) {
+ Duration(nanos / 1000L, MICROSECONDS)
+ } else {
+ Duration(nanos, NANOSECONDS)
+ }
+ }
+
+ /**
+ * Parse TimeUnit from string representation.
+ */
+ protected[util] def timeUnit(unit: String): (Long, TimeUnit) = unit.toLowerCase match {
+ case "d" | "day" | "days" ⇒ (86400, SECONDS)
+ case "h" | "hour" | "hours" ⇒ (3600, SECONDS)
+ case "min" | "minute" | "minutes" ⇒ (60, SECONDS)
+ case "s" | "sec" | "second" | "seconds" ⇒ (1, SECONDS)
+ case "ms" | "milli" | "millis" | "millisecond" | "milliseconds" ⇒ (1, MILLISECONDS)
+ case "µs" | "micro" | "micros" | "microsecond" | "microseconds" ⇒ (1, MICROSECONDS)
+ case "ns" | "nano" | "nanos" | "nanosecond" | "nanoseconds" ⇒ (1, NANOSECONDS)
+ }
+
+ val Zero: FiniteDuration = new FiniteDuration(0, NANOSECONDS)
+ val Undefined: Duration = new Duration with Infinite {
+ override def toString = "Duration.Undefined"
+ override def equals(other: Any) = other.asInstanceOf[AnyRef] eq this
+ override def +(other: Duration): Duration = throw new IllegalArgumentException("cannot add Undefined duration")
+ override def -(other: Duration): Duration = throw new IllegalArgumentException("cannot subtract Undefined duration")
+ override def *(factor: Double): Duration = throw new IllegalArgumentException("cannot multiply Undefined duration")
+ override def /(factor: Double): Duration = throw new IllegalArgumentException("cannot divide Undefined duration")
+ override def /(other: Duration): Double = throw new IllegalArgumentException("cannot divide Undefined duration")
+ def compare(other: Duration) = throw new IllegalArgumentException("cannot compare Undefined duration")
+ def unary_- : Duration = throw new IllegalArgumentException("cannot negate Undefined duration")
+ }
+
+ trait Infinite {
+ this: Duration ⇒
+
+ def +(other: Duration): Duration =
+ other match {
+ case _: this.type ⇒ this
+ case _: Infinite ⇒ throw new IllegalArgumentException("illegal addition of infinities")
+ case _ ⇒ this
+ }
+ def -(other: Duration): Duration =
+ other match {
+ case _: this.type ⇒ throw new IllegalArgumentException("illegal subtraction of infinities")
+ case _ ⇒ this
+ }
+ def *(factor: Double): Duration = this
+ def /(factor: Double): Duration = this
+ def /(other: Duration): Double =
+ other match {
+ case _: Infinite ⇒ throw new IllegalArgumentException("illegal division of infinities")
+ // maybe questionable but pragmatic: Inf / 0 => Inf
+ case x ⇒ Double.PositiveInfinity * (if ((this > Zero) ^ (other >= Zero)) -1 else 1)
+ }
+
+ def finite_? = false
+
+ def length: Long = throw new IllegalArgumentException("length not allowed on infinite Durations")
+ def unit: TimeUnit = throw new IllegalArgumentException("unit not allowed on infinite Durations")
+ def toNanos: Long = throw new IllegalArgumentException("toNanos not allowed on infinite Durations")
+ def toMicros: Long = throw new IllegalArgumentException("toMicros not allowed on infinite Durations")
+ def toMillis: Long = throw new IllegalArgumentException("toMillis not allowed on infinite Durations")
+ def toSeconds: Long = throw new IllegalArgumentException("toSeconds not allowed on infinite Durations")
+ def toMinutes: Long = throw new IllegalArgumentException("toMinutes not allowed on infinite Durations")
+ def toHours: Long = throw new IllegalArgumentException("toHours not allowed on infinite Durations")
+ def toDays: Long = throw new IllegalArgumentException("toDays not allowed on infinite Durations")
+ def toUnit(unit: TimeUnit): Double = throw new IllegalArgumentException("toUnit not allowed on infinite Durations")
+
+ }
+
+ /**
+ * Infinite duration: greater than any other and not equal to any other,
+ * including itself.
+ */
+ val Inf: Duration = new Duration with Infinite {
+ override def toString = "Duration.Inf"
+ def compare(other: Duration) = if (other eq this) 0 else 1
+ def unary_- : Duration = MinusInf
+ }
+
+ /**
+ * Infinite negative duration: lesser than any other and not equal to any other,
+ * including itself.
+ */
+ val MinusInf: Duration = new Duration with Infinite {
+ override def toString = "Duration.MinusInf"
+ def compare(other: Duration) = if (other eq this) 0 else -1
+ def unary_- : Duration = Inf
+ }
+
+ // Java Factories
+ def create(length: Long, unit: TimeUnit): FiniteDuration = apply(length, unit)
+ def create(length: Double, unit: TimeUnit): FiniteDuration = apply(length, unit)
+ def create(length: Long, unit: String): FiniteDuration = apply(length, unit)
+ def parse(s: String): Duration = unapply(s).get
+
+ implicit object DurationIsOrdered extends Ordering[Duration] {
+ def compare(a: Duration, b: Duration) = a compare b
+ }
+}
+
+/**
+ * Utility for working with java.util.concurrent.TimeUnit durations.
+ *
+ * <p/>
+ * Examples:
+ * <pre>
+ * import scala.concurrent.util.Duration
+ * import java.util.concurrent.TimeUnit._
+ *
+ * val duration = Duration(100, MILLISECONDS)
+ * val duration = Duration(100, "millis")
+ *
+ * duration.toNanos
+ * duration < 1.second
+ * duration <= Duration.Inf
+ * </pre>
+ *
+ * <p/>
+ * Implicits are also provided for Int, Long and Double. Example usage:
+ * <pre>
+ * import scala.concurrent.util.DurationImplicits._
+ *
+ * val duration = 100 millis
+ * </pre>
+ *
+ * Extractors, parsing and arithmetic are also included:
+ * <pre>
+ * val d = Duration("1.2 µs")
+ * val Duration(length, unit) = 5 millis
+ * val d2 = d * 2.5
+ * val d3 = d2 + 1.millisecond
+ * </pre>
+ */
+abstract class Duration extends Serializable with Ordered[Duration] {
+ def length: Long
+ def unit: TimeUnit
+ def toNanos: Long
+ def toMicros: Long
+ def toMillis: Long
+ def toSeconds: Long
+ def toMinutes: Long
+ def toHours: Long
+ def toDays: Long
+ def toUnit(unit: TimeUnit): Double
+
+ def +(other: Duration): Duration
+ def -(other: Duration): Duration
+ def *(factor: Double): Duration
+ def /(factor: Double): Duration
+ def /(other: Duration): Double
+ def unary_- : Duration
+ def finite_? : Boolean
+ def min(other: Duration): Duration = if (this < other) this else other
+ def max(other: Duration): Duration = if (this > other) this else other
+ def fromNow: Deadline = Deadline.now + this
+
+ // Java API
+ def lt(other: Duration) = this < other
+ def lteq(other: Duration) = this <= other
+ def gt(other: Duration) = this > other
+ def gteq(other: Duration) = this >= other
+ def plus(other: Duration) = this + other
+ def minus(other: Duration) = this - other
+ def mul(factor: Double) = this * factor
+ def div(factor: Double) = this / factor
+ def div(other: Duration) = this / other
+ def neg() = -this
+ def isFinite() = finite_?
+}
+
+object FiniteDuration {
+ implicit object FiniteDurationIsOrdered extends Ordering[FiniteDuration] {
+ def compare(a: FiniteDuration, b: FiniteDuration) = a compare b
+ }
+
+ def apply(length: Long, unit: TimeUnit) =
+ new FiniteDuration(length, unit)
+
+ def apply(length: Long, unit: String) = {
+ val (mult, timeUnit) = Duration.timeUnit(unit)
+ new FiniteDuration(length * mult, timeUnit)
+ }
+
+}
+
+class FiniteDuration(val length: Long, val unit: TimeUnit) extends Duration {
+ import Duration._
+
+ def toNanos = unit.toNanos(length)
+ def toMicros = unit.toMicros(length)
+ def toMillis = unit.toMillis(length)
+ def toSeconds = unit.toSeconds(length)
+ def toMinutes = unit.toMinutes(length)
+ def toHours = unit.toHours(length)
+ def toDays = unit.toDays(length)
+ def toUnit(u: TimeUnit) = long2double(toNanos) / NANOSECONDS.convert(1, u)
+
+ override def toString = this match {
+ case Duration(1, DAYS) ⇒ "1 day"
+ case Duration(x, DAYS) ⇒ x + " days"
+ case Duration(1, HOURS) ⇒ "1 hour"
+ case Duration(x, HOURS) ⇒ x + " hours"
+ case Duration(1, MINUTES) ⇒ "1 minute"
+ case Duration(x, MINUTES) ⇒ x + " minutes"
+ case Duration(1, SECONDS) ⇒ "1 second"
+ case Duration(x, SECONDS) ⇒ x + " seconds"
+ case Duration(1, MILLISECONDS) ⇒ "1 millisecond"
+ case Duration(x, MILLISECONDS) ⇒ x + " milliseconds"
+ case Duration(1, MICROSECONDS) ⇒ "1 microsecond"
+ case Duration(x, MICROSECONDS) ⇒ x + " microseconds"
+ case Duration(1, NANOSECONDS) ⇒ "1 nanosecond"
+ case Duration(x, NANOSECONDS) ⇒ x + " nanoseconds"
+ }
+
+ def compare(other: Duration) =
+ if (other.finite_?) {
+ val me = toNanos
+ val o = other.toNanos
+ if (me > o) 1 else if (me < o) -1 else 0
+ } else -other.compare(this)
+
+ def +(other: Duration) = {
+ if (!other.finite_?) {
+ other
+ } else {
+ val nanos = toNanos + other.asInstanceOf[FiniteDuration].toNanos
+ fromNanos(nanos)
+ }
+ }
+
+ def -(other: Duration) = {
+ if (!other.finite_?) {
+ other
+ } else {
+ val nanos = toNanos - other.asInstanceOf[FiniteDuration].toNanos
+ fromNanos(nanos)
+ }
+ }
+
+ def *(factor: Double) = fromNanos(long2double(toNanos) * factor)
+
+ def /(factor: Double) = fromNanos(long2double(toNanos) / factor)
+
+ def /(other: Duration) = if (other.finite_?) long2double(toNanos) / other.toNanos else 0
+
+ def unary_- = Duration(-length, unit)
+
+ def finite_? = true
+
+ override def equals(other: Any) =
+ other.isInstanceOf[FiniteDuration] &&
+ toNanos == other.asInstanceOf[FiniteDuration].toNanos
+
+ override def hashCode = toNanos.asInstanceOf[Int]
+}
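A brief hedged sketch of the equality and printing behaviour defined above: equals compares the nanosecond value, so different unit representations of the same span are equal.

import scala.concurrent.util.Duration
import java.util.concurrent.TimeUnit._

Duration(1000, MILLISECONDS) == Duration(1, SECONDS)   // true: both normalise to the same toNanos
Duration(1, SECONDS).toString                          // "1 second"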
+
+class DurationInt(n: Int) {
+ import DurationImplicits.Classifier
+
+ def nanoseconds = Duration(n, NANOSECONDS)
+ def nanos = Duration(n, NANOSECONDS)
+ def nanosecond = Duration(n, NANOSECONDS)
+ def nano = Duration(n, NANOSECONDS)
+
+ def microseconds = Duration(n, MICROSECONDS)
+ def micros = Duration(n, MICROSECONDS)
+ def microsecond = Duration(n, MICROSECONDS)
+ def micro = Duration(n, MICROSECONDS)
+
+ def milliseconds = Duration(n, MILLISECONDS)
+ def millis = Duration(n, MILLISECONDS)
+ def millisecond = Duration(n, MILLISECONDS)
+ def milli = Duration(n, MILLISECONDS)
+
+ def seconds = Duration(n, SECONDS)
+ def second = Duration(n, SECONDS)
+
+ def minutes = Duration(n * 60, SECONDS)
+ def minute = Duration(n * 60, SECONDS)
+
+ def hours = Duration(n * 3600, SECONDS)
+ def hour = Duration(n * 3600, SECONDS)
+
+ def days = Duration(n * 86400, SECONDS)
+ def day = Duration(n * 86400, SECONDS)
+
+ def nanoseconds[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, NANOSECONDS))
+ def nanos[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, NANOSECONDS))
+ def nanosecond[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, NANOSECONDS))
+ def nano[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, NANOSECONDS))
+
+ def microseconds[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, MICROSECONDS))
+ def micros[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, MICROSECONDS))
+ def microsecond[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, MICROSECONDS))
+ def micro[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, MICROSECONDS))
+
+ def milliseconds[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, MILLISECONDS))
+ def millis[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, MILLISECONDS))
+ def millisecond[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, MILLISECONDS))
+ def milli[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, MILLISECONDS))
+
+ def seconds[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, SECONDS))
+ def second[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, SECONDS))
+
+ def minutes[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n * 60, SECONDS))
+ def minute[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n * 60, SECONDS))
+
+ def hours[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n * 3600, SECONDS))
+ def hour[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n * 3600, SECONDS))
+
+ def days[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n * 86400, SECONDS))
+ def day[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n * 86400, SECONDS))
+}
+
+class DurationLong(n: Long) {
+ import DurationImplicits.Classifier
+
+ def nanoseconds = Duration(n, NANOSECONDS)
+ def nanos = Duration(n, NANOSECONDS)
+ def nanosecond = Duration(n, NANOSECONDS)
+ def nano = Duration(n, NANOSECONDS)
+
+ def microseconds = Duration(n, MICROSECONDS)
+ def micros = Duration(n, MICROSECONDS)
+ def microsecond = Duration(n, MICROSECONDS)
+ def micro = Duration(n, MICROSECONDS)
+
+ def milliseconds = Duration(n, MILLISECONDS)
+ def millis = Duration(n, MILLISECONDS)
+ def millisecond = Duration(n, MILLISECONDS)
+ def milli = Duration(n, MILLISECONDS)
+
+ def seconds = Duration(n, SECONDS)
+ def second = Duration(n, SECONDS)
+
+ def minutes = Duration(n * 60, SECONDS)
+ def minute = Duration(n * 60, SECONDS)
+
+ def hours = Duration(n * 3600, SECONDS)
+ def hour = Duration(n * 3600, SECONDS)
+
+ def days = Duration(n * 86400, SECONDS)
+ def day = Duration(n * 86400, SECONDS)
+
+ def nanoseconds[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, NANOSECONDS))
+ def nanos[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, NANOSECONDS))
+ def nanosecond[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, NANOSECONDS))
+ def nano[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, NANOSECONDS))
+
+ def microseconds[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, MICROSECONDS))
+ def micros[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, MICROSECONDS))
+ def microsecond[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, MICROSECONDS))
+ def micro[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, MICROSECONDS))
+
+ def milliseconds[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, MILLISECONDS))
+ def millis[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, MILLISECONDS))
+ def millisecond[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, MILLISECONDS))
+ def milli[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, MILLISECONDS))
+
+ def seconds[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, SECONDS))
+ def second[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, SECONDS))
+
+ def minutes[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n * 60, SECONDS))
+ def minute[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n * 60, SECONDS))
+
+ def hours[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n * 3600, SECONDS))
+ def hour[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n * 3600, SECONDS))
+
+ def days[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n * 86400, SECONDS))
+ def day[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n * 86400, SECONDS))
+}
+
+class DurationDouble(d: Double) {
+ import DurationImplicits.Classifier
+
+ def nanoseconds = Duration(d, NANOSECONDS)
+ def nanos = Duration(d, NANOSECONDS)
+ def nanosecond = Duration(d, NANOSECONDS)
+ def nano = Duration(d, NANOSECONDS)
+
+ def microseconds = Duration(d, MICROSECONDS)
+ def micros = Duration(d, MICROSECONDS)
+ def microsecond = Duration(d, MICROSECONDS)
+ def micro = Duration(d, MICROSECONDS)
+
+ def milliseconds = Duration(d, MILLISECONDS)
+ def millis = Duration(d, MILLISECONDS)
+ def millisecond = Duration(d, MILLISECONDS)
+ def milli = Duration(d, MILLISECONDS)
+
+ def seconds = Duration(d, SECONDS)
+ def second = Duration(d, SECONDS)
+
+ def minutes = Duration(d * 60, SECONDS)
+ def minute = Duration(d * 60, SECONDS)
+
+ def hours = Duration(d * 3600, SECONDS)
+ def hour = Duration(d * 3600, SECONDS)
+
+ def days = Duration(d * 86400, SECONDS)
+ def day = Duration(d * 86400, SECONDS)
+
+ def nanoseconds[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d, NANOSECONDS))
+ def nanos[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d, NANOSECONDS))
+ def nanosecond[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d, NANOSECONDS))
+ def nano[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d, NANOSECONDS))
+
+ def microseconds[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d, MICROSECONDS))
+ def micros[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d, MICROSECONDS))
+ def microsecond[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d, MICROSECONDS))
+ def micro[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d, MICROSECONDS))
+
+ def milliseconds[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d, MILLISECONDS))
+ def millis[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d, MILLISECONDS))
+ def millisecond[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d, MILLISECONDS))
+ def milli[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d, MILLISECONDS))
+
+ def seconds[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d, SECONDS))
+ def second[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d, SECONDS))
+
+ def minutes[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d * 60, SECONDS))
+ def minute[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d * 60, SECONDS))
+
+ def hours[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d * 3600, SECONDS))
+ def hour[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d * 3600, SECONDS))
+
+ def days[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d * 86400, SECONDS))
+ def day[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d * 86400, SECONDS))
+}
diff --git a/src/library/scala/reflect/api/Positions.scala b/src/library/scala/reflect/api/Positions.scala
index 181e7c1525..4c8c33bec8 100644
--- a/src/library/scala/reflect/api/Positions.scala
+++ b/src/library/scala/reflect/api/Positions.scala
@@ -1,9 +1,21 @@
package scala.reflect
package api
-trait Positions {
+trait Positions { self: Universe =>
+ /** TreeAnnotation is a generalisation of Position.
+ *
+ * TreeAnnotation cannot be an upper bound of Position since the corresponding classes
+ * must live outside of the universe for backwards compatibility (see scala.tools.nsc.util.Position).
+ * Thus, the subtyping relationship is represented by explicit coercions.
+ *
+ * Typically, positionToAnnotation is the identity, and annotationToPosition returns annot.pos
+ */
+ type TreeAnnotation // <: { def pos: Position }
+ val NoTreeAnnotation: TreeAnnotation
+ implicit def positionToAnnotation(pos: Position): TreeAnnotation // = pos
+ def annotationToPosition(annot: TreeAnnotation): Position // = annot.pos
+ def _checkSetAnnotation(tree: Tree, annot: TreeAnnotation): Unit = () // check that annot may overwrite tree.annot
- type Position
+ type Position // <: TreeAnnotation, but not practical to enforce this (would require moving Position, SourceFile, Reporter,... into the universe)
val NoPosition: Position
-
} \ No newline at end of file
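A hedged sketch (hypothetical trait name, not part of this change) of the "typical" wiring the comment above describes, in which TreeAnnotation is simply Position itself:

trait PositionsAsAnnotations extends Positions { self: Universe =>
  type TreeAnnotation = Position
  val NoTreeAnnotation: TreeAnnotation = NoPosition
  implicit def positionToAnnotation(pos: Position): TreeAnnotation = pos
  def annotationToPosition(annot: TreeAnnotation): Position = annot
  // _checkSetAnnotation keeps its default no-op implementation
}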
diff --git a/src/library/scala/reflect/api/TreePrinters.scala b/src/library/scala/reflect/api/TreePrinters.scala
index 21b55e9c0e..43865915d3 100644
--- a/src/library/scala/reflect/api/TreePrinters.scala
+++ b/src/library/scala/reflect/api/TreePrinters.scala
@@ -41,7 +41,7 @@ trait TreePrinters { self: Universe =>
else if (tree.original != null)
print(".setOriginal(", tree.original, ")")
case tree: Tree =>
- print(tree.productPrefix+"(")
+ print(tree.printingPrefix+"(")
val it = tree.productIterator
while (it.hasNext) {
it.next() match {
diff --git a/src/library/scala/reflect/api/Trees.scala b/src/library/scala/reflect/api/Trees.scala
index a355207ff0..4ee13adf52 100644
--- a/src/library/scala/reflect/api/Trees.scala
+++ b/src/library/scala/reflect/api/Trees.scala
@@ -74,11 +74,24 @@ trait Trees { self: Universe =>
val id = nodeCount
nodeCount += 1
- private[this] var rawpos: Position = NoPosition
+ /** Prefix under which to print this tree type. Defaults to product
+ * prefix (e.g. DefTree) but because that is used in reification
+ * it cannot be altered without breaking reflection.
+ */
+ def printingPrefix = productPrefix
+
+ def pos: Position = annotationToPosition(rawannot)
+ def pos_=(pos: Position): Unit = annotation = pos
+ def setPos(newpos: Position): this.type = { pos = newpos; this }
+
+ private[this] var rawannot: TreeAnnotation = NoTreeAnnotation
+ def annotation: TreeAnnotation = rawannot
+ def annotation_=(annot: TreeAnnotation): Unit = {
+ _checkSetAnnotation(this, annot)
+ rawannot = annot
+ }
- def pos = rawpos
- def pos_=(pos: Position) = rawpos = pos
- def setPos(pos: Position): this.type = { rawpos = pos; this }
+ def setAnnotation(annot: TreeAnnotation): this.type = { annotation = annot; this }
private[this] var rawtpe: Type = _
@@ -159,11 +172,12 @@ trait Trees { self: Universe =>
def foreach(f: Tree => Unit) { new ForeachTreeTraverser(f).traverse(this) }
/** Find all subtrees matching predicate `p` */
- def filter(f: Tree => Boolean): List[Tree] = {
+ def withFilter(f: Tree => Boolean): List[Tree] = {
val ft = new FilterTreeTraverser(f)
ft.traverse(this)
ft.hits.toList
}
+ def filter(f: Tree => Boolean): List[Tree] = withFilter(f)
/** Returns optionally first tree (in a preorder traversal) which satisfies predicate `p`,
* or None if none exists.
@@ -216,7 +230,7 @@ trait Trees { self: Universe =>
duplicateTree(this).asInstanceOf[this.type]
private[scala] def copyAttrs(tree: Tree): this.type = {
- pos = tree.pos
+ annotation = tree.annotation
tpe = tree.tpe
if (hasSymbol) symbol = tree.symbol
this
@@ -249,6 +263,7 @@ trait Trees { self: Universe =>
* are in DefTrees.
*/
trait RefTree extends SymTree {
+ def qualifier: Tree // empty for Idents
def name: Name
}
@@ -489,16 +504,14 @@ trait Trees { self: Universe =>
/** Factory method for object creation `new tpt(args_1)...(args_n)`
* A `New(t, as)` is expanded to: `(new t).<init>(as)`
*/
- def New(tpt: Tree, argss: List[List[Tree]]): Tree = {
- // todo. we need to expose names in scala.reflect.api
- val superRef: Tree = Select(New(tpt), nme.CONSTRUCTOR)
- if (argss.isEmpty) Apply(superRef, Nil)
- else (superRef /: argss) (Apply)
+ def New(tpt: Tree, argss: List[List[Tree]]): Tree = argss match {
+ case Nil => new ApplyConstructor(tpt, Nil)
+ case xs :: rest => rest.foldLeft(new ApplyConstructor(tpt, xs): Tree)(Apply)
}
/** 0-1 argument list new, based on a type.
*/
def New(tpe: Type, args: Tree*): Tree =
- New(TypeTree(tpe), List(args.toList))
+ new ApplyConstructor(TypeTree(tpe), args.toList)
/** Type annotation, eliminated by explicit outer */
case class Typed(expr: Tree, tpt: Tree)
@@ -537,6 +550,10 @@ trait Trees { self: Universe =>
class ApplyImplicitView(fun: Tree, args: List[Tree]) extends Apply(fun, args)
+ class ApplyConstructor(tpt: Tree, args: List[Tree]) extends Apply(Select(New(tpt), nme.CONSTRUCTOR), args) {
+ override def printingPrefix = "ApplyConstructor"
+ }
+
/** Dynamic value application.
* In a dynamic application q.f(as)
* - q is stored in qual
@@ -575,7 +592,9 @@ trait Trees { self: Universe =>
Select(qualifier, sym.name) setSymbol sym
/** Identifier <name> */
- case class Ident(name: Name) extends RefTree
+ case class Ident(name: Name) extends RefTree {
+ def qualifier: Tree = EmptyTree
+ }
def Ident(name: String): Ident =
Ident(newTermName(name))
diff --git a/src/library/scala/testing/Show.scala b/src/library/scala/testing/Show.scala
index b0d094e466..7570bf705c 100644
--- a/src/library/scala/testing/Show.scala
+++ b/src/library/scala/testing/Show.scala
@@ -11,7 +11,7 @@
package scala.testing
/** Classes inheriting trait `Show` can test their member methods using the
- * notattion `meth(arg,,1,,, ..., arg,,n,,)`, where `meth` is the name of
+ * notation `meth(arg,,1,,, ..., arg,,n,,)`, where `meth` is the name of
* the method and `arg,,1,,,...,arg,,n,,` are the arguments.
*
* The only difference to a normal method call is the leading quote
diff --git a/src/library/scala/util/Duration.scala b/src/library/scala/util/Duration.scala
deleted file mode 100644
index 4c118f8b3b..0000000000
--- a/src/library/scala/util/Duration.scala
+++ /dev/null
@@ -1,485 +0,0 @@
-/**
- * Copyright (C) 2009-2011 Typesafe Inc. <http://www.typesafe.com>
- */
-
-package scala.util
-
-import java.util.concurrent.TimeUnit
-import TimeUnit._
-import java.lang.{ Long ⇒ JLong, Double ⇒ JDouble }
-//import akka.actor.ActorSystem (commented methods)
-
-class TimerException(message: String) extends RuntimeException(message)
-
-/**
- * Simple timer class.
- * Usage:
- * <pre>
- * import akka.util.duration._
- * import akka.util.Timer
- *
- * val timer = Timer(30.seconds)
- * while (timer.isTicking) { ... }
- * </pre>
- */
-case class Timer(duration: Duration, throwExceptionOnTimeout: Boolean = false) {
- val startTimeInMillis = System.currentTimeMillis
- val timeoutInMillis = duration.toMillis
-
- /**
- * Returns true while the timer is ticking. After that it either throws and exception or
- * returns false. Depending on if the 'throwExceptionOnTimeout' argument is true or false.
- */
- def isTicking: Boolean = {
- if (!(timeoutInMillis > (System.currentTimeMillis - startTimeInMillis))) {
- if (throwExceptionOnTimeout) throw new TimerException("Time out after " + duration)
- else false
- } else true
- }
-}
-
-object Duration {
- def apply(length: Long, unit: TimeUnit): Duration = new FiniteDuration(length, unit)
- def apply(length: Double, unit: TimeUnit): Duration = fromNanos(unit.toNanos(1) * length)
- def apply(length: Long, unit: String): Duration = new FiniteDuration(length, timeUnit(unit))
-
- def fromNanos(nanos: Long): Duration = {
- if (nanos % 86400000000000L == 0) {
- Duration(nanos / 86400000000000L, DAYS)
- } else if (nanos % 3600000000000L == 0) {
- Duration(nanos / 3600000000000L, HOURS)
- } else if (nanos % 60000000000L == 0) {
- Duration(nanos / 60000000000L, MINUTES)
- } else if (nanos % 1000000000L == 0) {
- Duration(nanos / 1000000000L, SECONDS)
- } else if (nanos % 1000000L == 0) {
- Duration(nanos / 1000000L, MILLISECONDS)
- } else if (nanos % 1000L == 0) {
- Duration(nanos / 1000L, MICROSECONDS)
- } else {
- Duration(nanos, NANOSECONDS)
- }
- }
-
- def fromNanos(nanos: Double): Duration = fromNanos((nanos + 0.5).asInstanceOf[Long])
-
- /**
- * Construct a Duration by parsing a String. In case of a format error, a
- * RuntimeException is thrown. See `unapply(String)` for more information.
- */
- def apply(s: String): Duration = unapply(s) getOrElse sys.error("format error")
-
- /**
- * Deconstruct a Duration into length and unit if it is finite.
- */
- def unapply(d: Duration): Option[(Long, TimeUnit)] = {
- if (d.finite_?) {
- Some((d.length, d.unit))
- } else {
- None
- }
- }
-
- private val RE = ("""^\s*(\d+(?:\.\d+)?)\s*""" + // length part
- "(?:" + // units are distinguished in separate match groups
- "(d|day|days)|" +
- "(h|hour|hours)|" +
- "(min|minute|minutes)|" +
- "(s|sec|second|seconds)|" +
- "(ms|milli|millis|millisecond|milliseconds)|" +
- "(µs|micro|micros|microsecond|microseconds)|" +
- "(ns|nano|nanos|nanosecond|nanoseconds)" +
- """)\s*$""").r // close the non-capturing group
- private val REinf = """^\s*Inf\s*$""".r
- private val REminf = """^\s*(?:-\s*|Minus)Inf\s*""".r
-
- /**
- * Parse String, return None if no match. Format is `"<length><unit>"`, where
- * whitespace is allowed before, between and after the parts. Infinities are
- * designated by `"Inf"` and `"-Inf"` or `"MinusInf"`.
- */
- def unapply(s: String): Option[Duration] = s match {
- case RE(length, d, h, m, s, ms, mus, ns) ⇒
- if (d ne null) Some(Duration(JDouble.parseDouble(length), DAYS)) else if (h ne null) Some(Duration(JDouble.parseDouble(length), HOURS)) else if (m ne null) Some(Duration(JDouble.parseDouble(length), MINUTES)) else if (s ne null) Some(Duration(JDouble.parseDouble(length), SECONDS)) else if (ms ne null) Some(Duration(JDouble.parseDouble(length), MILLISECONDS)) else if (mus ne null) Some(Duration(JDouble.parseDouble(length), MICROSECONDS)) else if (ns ne null) Some(Duration(JDouble.parseDouble(length), NANOSECONDS)) else
- sys.error("made some error in regex (should not be possible)")
- case REinf() ⇒ Some(Inf)
- case REminf() ⇒ Some(MinusInf)
- case _ ⇒ None
- }
-
- /**
- * Parse TimeUnit from string representation.
- */
- def timeUnit(unit: String) = unit.toLowerCase match {
- case "d" | "day" | "days" ⇒ DAYS
- case "h" | "hour" | "hours" ⇒ HOURS
- case "min" | "minute" | "minutes" ⇒ MINUTES
- case "s" | "sec" | "second" | "seconds" ⇒ SECONDS
- case "ms" | "milli" | "millis" | "millisecond" | "milliseconds" ⇒ MILLISECONDS
- case "µs" | "micro" | "micros" | "microsecond" | "microseconds" ⇒ MICROSECONDS
- case "ns" | "nano" | "nanos" | "nanosecond" | "nanoseconds" ⇒ NANOSECONDS
- }
-
- val Zero: Duration = new FiniteDuration(0, NANOSECONDS)
- val Undefined: Duration = new Duration with Infinite {
- override def toString = "Duration.Undefined"
- override def equals(other: Any) = other.asInstanceOf[AnyRef] eq this
- override def +(other: Duration): Duration = throw new IllegalArgumentException("cannot add Undefined duration")
- override def -(other: Duration): Duration = throw new IllegalArgumentException("cannot subtract Undefined duration")
- override def *(factor: Double): Duration = throw new IllegalArgumentException("cannot multiply Undefined duration")
- override def /(factor: Double): Duration = throw new IllegalArgumentException("cannot divide Undefined duration")
- override def /(other: Duration): Double = throw new IllegalArgumentException("cannot divide Undefined duration")
- def >(other: Duration) = throw new IllegalArgumentException("cannot compare Undefined duration")
- def >=(other: Duration) = throw new IllegalArgumentException("cannot compare Undefined duration")
- def <(other: Duration) = throw new IllegalArgumentException("cannot compare Undefined duration")
- def <=(other: Duration) = throw new IllegalArgumentException("cannot compare Undefined duration")
- def unary_- : Duration = throw new IllegalArgumentException("cannot negate Undefined duration")
- }
-
- trait Infinite {
- this: Duration ⇒
-
- override def equals(other: Any) = false
-
- def +(other: Duration): Duration =
- other match {
- case _: this.type ⇒ this
- case _: Infinite ⇒ throw new IllegalArgumentException("illegal addition of infinities")
- case _ ⇒ this
- }
- def -(other: Duration): Duration =
- other match {
- case _: this.type ⇒ throw new IllegalArgumentException("illegal subtraction of infinities")
- case _ ⇒ this
- }
- def *(factor: Double): Duration = this
- def /(factor: Double): Duration = this
- def /(other: Duration): Double =
- other match {
- case _: Infinite ⇒ throw new IllegalArgumentException("illegal division of infinities")
- // maybe questionable but pragmatic: Inf / 0 => Inf
- case x ⇒ Double.PositiveInfinity * (if ((this > Zero) ^ (other >= Zero)) -1 else 1)
- }
-
- def finite_? = false
-
- def length: Long = throw new IllegalArgumentException("length not allowed on infinite Durations")
- def unit: TimeUnit = throw new IllegalArgumentException("unit not allowed on infinite Durations")
- def toNanos: Long = throw new IllegalArgumentException("toNanos not allowed on infinite Durations")
- def toMicros: Long = throw new IllegalArgumentException("toMicros not allowed on infinite Durations")
- def toMillis: Long = throw new IllegalArgumentException("toMillis not allowed on infinite Durations")
- def toSeconds: Long = throw new IllegalArgumentException("toSeconds not allowed on infinite Durations")
- def toMinutes: Long = throw new IllegalArgumentException("toMinutes not allowed on infinite Durations")
- def toHours: Long = throw new IllegalArgumentException("toHours not allowed on infinite Durations")
- def toDays: Long = throw new IllegalArgumentException("toDays not allowed on infinite Durations")
- def toUnit(unit: TimeUnit): Double = throw new IllegalArgumentException("toUnit not allowed on infinite Durations")
-
- def printHMS = toString
- }
-
- /**
- * Infinite duration: greater than any other and not equal to any other,
- * including itself.
- */
- val Inf: Duration = new Duration with Infinite {
- override def toString = "Duration.Inf"
- def >(other: Duration) = true
- def >=(other: Duration) = true
- def <(other: Duration) = false
- def <=(other: Duration) = false
- def unary_- : Duration = MinusInf
- }
-
- /**
- * Infinite negative duration: lesser than any other and not equal to any other,
- * including itself.
- */
- val MinusInf: Duration = new Duration with Infinite {
- override def toString = "Duration.MinusInf"
- def >(other: Duration) = false
- def >=(other: Duration) = false
- def <(other: Duration) = true
- def <=(other: Duration) = true
- def unary_- : Duration = Inf
- }
-
- // Java Factories
- def create(length: Long, unit: TimeUnit): Duration = apply(length, unit)
- def create(length: Double, unit: TimeUnit): Duration = apply(length, unit)
- def create(length: Long, unit: String): Duration = apply(length, unit)
- def parse(s: String): Duration = unapply(s).get
-}
-
-/**
- * Utility for working with java.util.concurrent.TimeUnit durations.
- *
- * <p/>
- * Examples of usage from Java:
- * <pre>
- * import akka.util.FiniteDuration;
- * import java.util.concurrent.TimeUnit;
- *
- * Duration duration = new FiniteDuration(100, MILLISECONDS);
- * Duration duration = new FiniteDuration(5, "seconds");
- *
- * duration.toNanos();
- * </pre>
- *
- * <p/>
- * Examples of usage from Scala:
- * <pre>
- * import akka.util.Duration
- * import java.util.concurrent.TimeUnit
- *
- * val duration = Duration(100, MILLISECONDS)
- * val duration = Duration(100, "millis")
- *
- * duration.toNanos
- * duration < 1.second
- * duration <= Duration.Inf
- * </pre>
- *
- * <p/>
- * Implicits are also provided for Int, Long and Double. Example usage:
- * <pre>
- * import akka.util.duration._
- *
- * val duration = 100 millis
- * </pre>
- *
- * Extractors, parsing and arithmetic are also included:
- * <pre>
- * val d = Duration("1.2 µs")
- * val Duration(length, unit) = 5 millis
- * val d2 = d * 2.5
- * val d3 = d2 + 1.millisecond
- * </pre>
- */
-abstract class Duration extends Serializable {
- def length: Long
- def unit: TimeUnit
- def toNanos: Long
- def toMicros: Long
- def toMillis: Long
- def toSeconds: Long
- def toMinutes: Long
- def toHours: Long
- def toDays: Long
- def toUnit(unit: TimeUnit): Double
- def printHMS: String
- def <(other: Duration): Boolean
- def <=(other: Duration): Boolean
- def >(other: Duration): Boolean
- def >=(other: Duration): Boolean
- def +(other: Duration): Duration
- def -(other: Duration): Duration
- def *(factor: Double): Duration
- def /(factor: Double): Duration
- def /(other: Duration): Double
- def unary_- : Duration
- def finite_? : Boolean
-// def dilated(implicit system: ActorSystem): Duration = this * system.settings.TestTimeFactor
- def min(other: Duration): Duration = if (this < other) this else other
- def max(other: Duration): Duration = if (this > other) this else other
- def sleep(): Unit = Thread.sleep(toMillis)
-
- // Java API
- def lt(other: Duration) = this < other
- def lteq(other: Duration) = this <= other
- def gt(other: Duration) = this > other
- def gteq(other: Duration) = this >= other
- def plus(other: Duration) = this + other
- def minus(other: Duration) = this - other
- def mul(factor: Double) = this * factor
- def div(factor: Double) = this / factor
- def div(other: Duration) = this / other
- def neg() = -this
- def isFinite() = finite_?
-}
-
-class FiniteDuration(val length: Long, val unit: TimeUnit) extends Duration {
- import Duration._
-
- def this(length: Long, unit: String) = this(length, Duration.timeUnit(unit))
-
- def toNanos = unit.toNanos(length)
- def toMicros = unit.toMicros(length)
- def toMillis = unit.toMillis(length)
- def toSeconds = unit.toSeconds(length)
- def toMinutes = unit.toMinutes(length)
- def toHours = unit.toHours(length)
- def toDays = unit.toDays(length)
- def toUnit(u: TimeUnit) = long2double(toNanos) / NANOSECONDS.convert(1, u)
-
- override def toString = this match {
- case Duration(1, DAYS) ⇒ "1 day"
- case Duration(x, DAYS) ⇒ x + " days"
- case Duration(1, HOURS) ⇒ "1 hour"
- case Duration(x, HOURS) ⇒ x + " hours"
- case Duration(1, MINUTES) ⇒ "1 minute"
- case Duration(x, MINUTES) ⇒ x + " minutes"
- case Duration(1, SECONDS) ⇒ "1 second"
- case Duration(x, SECONDS) ⇒ x + " seconds"
- case Duration(1, MILLISECONDS) ⇒ "1 millisecond"
- case Duration(x, MILLISECONDS) ⇒ x + " milliseconds"
- case Duration(1, MICROSECONDS) ⇒ "1 microsecond"
- case Duration(x, MICROSECONDS) ⇒ x + " microseconds"
- case Duration(1, NANOSECONDS) ⇒ "1 nanosecond"
- case Duration(x, NANOSECONDS) ⇒ x + " nanoseconds"
- }
-
- def printHMS = "%02d:%02d:%06.3f".format(toHours, toMinutes % 60, toMillis / 1000.0 % 60)
-
- def <(other: Duration) = {
- if (other.finite_?) {
- toNanos < other.asInstanceOf[FiniteDuration].toNanos
- } else {
- other > this
- }
- }
-
- def <=(other: Duration) = {
- if (other.finite_?) {
- toNanos <= other.asInstanceOf[FiniteDuration].toNanos
- } else {
- other >= this
- }
- }
-
- def >(other: Duration) = {
- if (other.finite_?) {
- toNanos > other.asInstanceOf[FiniteDuration].toNanos
- } else {
- other < this
- }
- }
-
- def >=(other: Duration) = {
- if (other.finite_?) {
- toNanos >= other.asInstanceOf[FiniteDuration].toNanos
- } else {
- other <= this
- }
- }
-
- def +(other: Duration) = {
- if (!other.finite_?) {
- other
- } else {
- val nanos = toNanos + other.asInstanceOf[FiniteDuration].toNanos
- fromNanos(nanos)
- }
- }
-
- def -(other: Duration) = {
- if (!other.finite_?) {
- other
- } else {
- val nanos = toNanos - other.asInstanceOf[FiniteDuration].toNanos
- fromNanos(nanos)
- }
- }
-
- def *(factor: Double) = fromNanos(long2double(toNanos) * factor)
-
- def /(factor: Double) = fromNanos(long2double(toNanos) / factor)
-
- def /(other: Duration) = if (other.finite_?) long2double(toNanos) / other.toNanos else 0
-
- def unary_- = Duration(-length, unit)
-
- def finite_? = true
-
- override def equals(other: Any) =
- other.isInstanceOf[FiniteDuration] &&
- toNanos == other.asInstanceOf[FiniteDuration].toNanos
-
- override def hashCode = toNanos.asInstanceOf[Int]
-}
-
-class DurationInt(n: Int) {
- def nanoseconds = Duration(n, NANOSECONDS)
- def nanos = Duration(n, NANOSECONDS)
- def nanosecond = Duration(n, NANOSECONDS)
- def nano = Duration(n, NANOSECONDS)
-
- def microseconds = Duration(n, MICROSECONDS)
- def micros = Duration(n, MICROSECONDS)
- def microsecond = Duration(n, MICROSECONDS)
- def micro = Duration(n, MICROSECONDS)
-
- def milliseconds = Duration(n, MILLISECONDS)
- def millis = Duration(n, MILLISECONDS)
- def millisecond = Duration(n, MILLISECONDS)
- def milli = Duration(n, MILLISECONDS)
-
- def seconds = Duration(n, SECONDS)
- def second = Duration(n, SECONDS)
-
- def minutes = Duration(n, MINUTES)
- def minute = Duration(n, MINUTES)
-
- def hours = Duration(n, HOURS)
- def hour = Duration(n, HOURS)
-
- def days = Duration(n, DAYS)
- def day = Duration(n, DAYS)
-}
-
-class DurationLong(n: Long) {
- def nanoseconds = Duration(n, NANOSECONDS)
- def nanos = Duration(n, NANOSECONDS)
- def nanosecond = Duration(n, NANOSECONDS)
- def nano = Duration(n, NANOSECONDS)
-
- def microseconds = Duration(n, MICROSECONDS)
- def micros = Duration(n, MICROSECONDS)
- def microsecond = Duration(n, MICROSECONDS)
- def micro = Duration(n, MICROSECONDS)
-
- def milliseconds = Duration(n, MILLISECONDS)
- def millis = Duration(n, MILLISECONDS)
- def millisecond = Duration(n, MILLISECONDS)
- def milli = Duration(n, MILLISECONDS)
-
- def seconds = Duration(n, SECONDS)
- def second = Duration(n, SECONDS)
-
- def minutes = Duration(n, MINUTES)
- def minute = Duration(n, MINUTES)
-
- def hours = Duration(n, HOURS)
- def hour = Duration(n, HOURS)
-
- def days = Duration(n, DAYS)
- def day = Duration(n, DAYS)
-}
-
-class DurationDouble(d: Double) {
- def nanoseconds = Duration(d, NANOSECONDS)
- def nanos = Duration(d, NANOSECONDS)
- def nanosecond = Duration(d, NANOSECONDS)
- def nano = Duration(d, NANOSECONDS)
-
- def microseconds = Duration(d, MICROSECONDS)
- def micros = Duration(d, MICROSECONDS)
- def microsecond = Duration(d, MICROSECONDS)
- def micro = Duration(d, MICROSECONDS)
-
- def milliseconds = Duration(d, MILLISECONDS)
- def millis = Duration(d, MILLISECONDS)
- def millisecond = Duration(d, MILLISECONDS)
- def milli = Duration(d, MILLISECONDS)
-
- def seconds = Duration(d, SECONDS)
- def second = Duration(d, SECONDS)
-
- def minutes = Duration(d, MINUTES)
- def minute = Duration(d, MINUTES)
-
- def hours = Duration(d, HOURS)
- def hour = Duration(d, HOURS)
-
- def days = Duration(d, DAYS)
- def day = Duration(d, DAYS)
-}
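
The removed Duration.fromNanos normalizes a nanosecond count to the coarsest TimeUnit that divides it evenly, which is what lets toString report "1 minute" instead of a raw nanosecond figure. A small standalone sketch of that normalization idea (not the library API, which this commit deletes from scala.util; the scala-concurrent-tck test further down switches its import to scala.concurrent.util.Duration):

// Pick the coarsest TimeUnit that divides the nanosecond count evenly.
import java.util.concurrent.TimeUnit
import java.util.concurrent.TimeUnit._

object CoarsestUnitSketch extends App {
  // Units from coarsest to finest, tried in order.
  private val units = List(DAYS, HOURS, MINUTES, SECONDS, MILLISECONDS, MICROSECONDS, NANOSECONDS)

  def normalize(nanos: Long): (Long, TimeUnit) =
    units.collectFirst {
      case u if nanos % u.toNanos(1) == 0 => (nanos / u.toNanos(1), u)
    }.get // NANOSECONDS always divides evenly, so collectFirst cannot be empty

  println(normalize(86400000000000L)) // (1,DAYS)
  println(normalize(90000000000L))    // (90,SECONDS) -- not an even number of minutes
  println(normalize(1500000L))        // (1500,MICROSECONDS)
}
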
diff --git a/src/library/scala/util/Timeout.scala b/src/library/scala/util/Timeout.scala
deleted file mode 100644
index 0190675344..0000000000
--- a/src/library/scala/util/Timeout.scala
+++ /dev/null
@@ -1,33 +0,0 @@
-/**
- * Copyright (C) 2009-2011 Typesafe Inc. <http://www.typesafe.com>
- */
-package scala.util
-
-import java.util.concurrent.TimeUnit
-
-case class Timeout(duration: Duration) {
- def this(timeout: Long) = this(Duration(timeout, TimeUnit.MILLISECONDS))
- def this(length: Long, unit: TimeUnit) = this(Duration(length, unit))
-}
-
-object Timeout {
- /**
- * A timeout with zero duration, will cause most requests to always timeout.
- */
- val zero = new Timeout(Duration.Zero)
-
- /**
- * A Timeout with infinite duration. Will never timeout. Use extreme caution with this
- * as it may cause memory leaks, blocked threads, or may not even be supported by
- * the receiver, which would result in an exception.
- */
- val never = new Timeout(Duration.Inf)
-
- def apply(timeout: Long) = new Timeout(timeout)
- def apply(length: Long, unit: TimeUnit) = new Timeout(length, unit)
-
- implicit def durationToTimeout(duration: Duration) = new Timeout(duration)
- implicit def intToTimeout(timeout: Int) = new Timeout(timeout)
- implicit def longToTimeout(timeout: Long) = new Timeout(timeout)
- //implicit def defaultTimeout(implicit system: ActorSystem) = system.settings.ActorTimeout (have to introduce this in ActorSystem)
-}
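
The removed Timeout is a thin wrapper over Duration whose companion supplies implicit conversions, so plain numbers and durations can be passed where a Timeout is expected. A self-contained sketch of that implicit-widening pattern, with local stand-in types rather than the deleted scala.util classes:

import scala.language.implicitConversions

object TimeoutSketch extends App {
  // Stand-in for the deleted wrapper: only holds a millisecond count here.
  case class Timeout(millis: Long)
  object Timeout {
    // Companion-object conversion, analogous to longToTimeout above.
    implicit def longToTimeout(ms: Long): Timeout = Timeout(ms)
  }

  def ask(question: String)(implicit t: Timeout): String =
    question + " (waiting at most " + t.millis + " ms)"

  implicit val defaultTimeout: Timeout = 5000L // widened via the implicit conversion
  println(ask("ready?"))
}
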
diff --git a/src/library/scala/util/matching/Regex.scala b/src/library/scala/util/matching/Regex.scala
index ca97515e23..3f21cc9724 100644
--- a/src/library/scala/util/matching/Regex.scala
+++ b/src/library/scala/util/matching/Regex.scala
@@ -133,6 +133,15 @@ import java.util.regex.{ Pattern, Matcher }
*
* @param regex A string representing a regular expression
* @param groupNames A mapping from names to indices in capture groups
+ *
+ * @define replacementString
+ * In the replacement String, a dollar sign (`$`) followed by a number will be
+ * interpreted as a reference to a group in the matched pattern, with numbers
+ * 1 through 9 corresponding to the first nine groups, and 0 standing for the
+ * whole match. Any other character is an error. The backslash (`\`) character
+ * will be interpreted as an escape character, and can be used to escape the
+ * dollar sign. One can use [[scala.util.matching.Regex]]'s `quoteReplacement`
+ * to automatically escape these characters.
*/
@SerialVersionUID(-2094783597747625537L)
class Regex(regex: String, groupNames: String*) extends Serializable {
@@ -180,7 +189,7 @@ class Regex(regex: String, groupNames: String*) extends Serializable {
None
}
- /** Return all matches of this regexp in given character sequence as a [[scala.util.mathcing.Regex.MatchIterator]],
+ /** Return all matches of this regexp in given character sequence as a [[scala.util.matching.Regex.MatchIterator]],
* which is a special [[scala.collection.Iterator]] that returns the
* matched strings, but can also be converted into a normal iterator
* that returns objects of type [[scala.util.matching.Regex.Match]]
@@ -193,6 +202,25 @@ class Regex(regex: String, groupNames: String*) extends Serializable {
*/
def findAllIn(source: java.lang.CharSequence) = new Regex.MatchIterator(source, this, groupNames)
+
+ /** Return all matches of this regexp in given character sequence as a
+ * [[scala.collection.Iterator]] of [[scala.util.matching.Regex.Match].
+ *
+ * @param source The text to match against.
+ * @return A [[scala.collection.Iterator]] of [[scala.util.matching.Regex.Match]] for all matches.
+ * @example {{{for (words <- """\w+""".r findAllMatchIn "A simple example.") yield words.start}}}
+ */
+ def findAllMatchIn(source: java.lang.CharSequence): Iterator[Match] = {
+ val matchIterator = findAllIn(source)
+ new Iterator[Match] {
+ def hasNext = matchIterator.hasNext
+ def next: Match = {
+ matchIterator.next;
+ new Match(matchIterator.source, matchIterator.matcher, matchIterator.groupNames).force
+ }
+ }
+ }
+
/** Return optionally first matching string of this regexp in given character sequence,
* or None if it does not exist.
*
@@ -258,6 +286,8 @@ class Regex(regex: String, groupNames: String*) extends Serializable {
/** Replaces all matches by a string.
*
+ * $replacementString
+ *
* @param target The string to match
* @param replacement The string that will replace each match
* @return The resulting string
@@ -280,6 +310,8 @@ class Regex(regex: String, groupNames: String*) extends Serializable {
* val repl = datePattern replaceAllIn (text, m => m.group("month")+"/"+m.group("day"))
* }}}
*
+ * $replacementString
+ *
* @param target The string to match.
* @param replacer The function which maps a match to another string.
* @return The target string after replacements.
@@ -298,13 +330,15 @@ class Regex(regex: String, groupNames: String*) extends Serializable {
* {{{
* import scala.util.matching.Regex._
*
- * val map = Map("x" -> "a var", "y" -> "another var")
+ * val map = Map("x" -> "a var", "y" -> """some $ and \ signs""")
* val text = "A text with variables %x, %y and %z."
* val varPattern = """%(\w+)""".r
- * val mapper = (m: Match) => map get (m group 1)
+ * val mapper = (m: Match) => map get (m group 1) map (quoteReplacement(_))
* val repl = varPattern replaceSomeIn (text, mapper)
* }}}
*
+ * $replacementString
+ *
* @param target The string to match.
* @param replacer The function which optionally maps a match to another string.
* @return The target string after replacements.
@@ -319,6 +353,8 @@ class Regex(regex: String, groupNames: String*) extends Serializable {
/** Replaces the first match by a string.
*
+ * $replacementString
+ *
* @param target The string to match
* @param replacement The string that will replace the match
* @return The resulting string
@@ -505,7 +541,7 @@ object Regex {
class MatchIterator(val source: java.lang.CharSequence, val regex: Regex, val groupNames: Seq[String])
extends AbstractIterator[String] with Iterator[String] with MatchData { self =>
- protected val matcher = regex.pattern.matcher(source)
+ protected[Regex] val matcher = regex.pattern.matcher(source)
private var nextSeen = false
/** Is there another match? */
@@ -569,4 +605,18 @@ object Regex {
def replace(rs: String) = matcher.appendReplacement(sb, rs)
}
+
+ /** Quotes replacement strings to be used in replacement methods.
+ *
+ * Replacement methods give special meaning to backslashes (`\`) and
+ * dollar signs (`$`) in replacement strings, so they are not treated
+ * as literals. This method escapes these characters so the resulting
+ * string can be used as a literal replacement representing the input
+ * string.
+ *
+ * @param text The string one wishes to use as literal replacement.
+ * @return A string that can be used to replace matches with `text`.
+ * @example {{{"CURRENCY".r.replaceAllIn(input, Regex quoteReplacement "US$")}}}
+ */
+ def quoteReplacement(text: String): String = Matcher quoteReplacement text
}
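
The replacement-string rules documented by the new $replacementString define, together with the quoteReplacement and findAllMatchIn methods added in this patch, can be exercised as follows (a usage sketch; the sample pattern and text are made up):

import scala.util.matching.Regex

object RegexReplacementDemo extends App {
  val date = new Regex("""(\d{4})-(\d{2})-(\d{2})""", "year", "month", "day")
  val text = "Released on 2012-01-04, patched on 2012-03-15."

  // $1..$9 refer to capture groups, $0 to the whole match.
  println(date.replaceAllIn(text, "$2/$3/$1"))

  // A replacement containing a literal dollar sign must be quoted,
  // otherwise the `$` would be read as a group reference.
  println("PRICE".r.replaceAllIn("The PRICE is right", Regex.quoteReplacement("US$ 9.99")))

  // findAllMatchIn yields Match objects, so offsets and named groups are available.
  for (m <- date.findAllMatchIn(text))
    println("match at " + m.start + ": year=" + m.group("year"))
}
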
diff --git a/src/library/scala/xml/Elem.scala b/src/library/scala/xml/Elem.scala
index cc244a5b88..5b6b9f2bb9 100755
--- a/src/library/scala/xml/Elem.scala
+++ b/src/library/scala/xml/Elem.scala
@@ -23,12 +23,12 @@ object Elem {
* @deprecated This factory method is retained for backward compatibility; please use the other one, with which you
* can specify your own preference for minimizeEmpty.
*/
- @deprecated
- def apply(prefix: String,label: String, attributes: MetaData, scope: NamespaceBinding, child: Node*): Elem =
+ @deprecated("Use the other apply method in this object", "2.10.0")
+ def apply(prefix: String, label: String, attributes: MetaData, scope: NamespaceBinding, child: Node*): Elem =
apply(prefix, label, attributes, scope, child.isEmpty, child: _*)
- def apply(prefix: String,label: String, attributes: MetaData, scope: NamespaceBinding, minimizeEmpty: Boolean, child: Node*): Elem =
- new Elem(prefix,label,attributes,scope, minimizeEmpty, child:_*)
+ def apply(prefix: String, label: String, attributes: MetaData, scope: NamespaceBinding, minimizeEmpty: Boolean, child: Node*): Elem =
+ new Elem(prefix, label, attributes, scope, minimizeEmpty, child: _*)
def unapplySeq(n: Node) = n match {
case _: SpecialNode | _: Group => None
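
A short usage sketch of the new six-argument Elem.apply: the extra Boolean decides whether a childless element is serialized in minimized form. The rendered strings in the comments are expectations rather than guarantees, since serialization happens elsewhere in the library:

import scala.xml.{ Elem, Null, TopScope }

object ElemMinimizeDemo extends App {
  val minimized = Elem(null, "br", Null, TopScope, true)   // minimizeEmpty = true
  val expanded  = Elem(null, "br", Null, TopScope, false)  // minimizeEmpty = false

  println(minimized) // expected to render as <br/>
  println(expanded)  // expected to render as <br></br>
}
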
diff --git a/src/library/scala/xml/XML.scala b/src/library/scala/xml/XML.scala
index 4beba91899..f6955c6612 100755
--- a/src/library/scala/xml/XML.scala
+++ b/src/library/scala/xml/XML.scala
@@ -15,8 +15,7 @@ import java.io.{ InputStream, Reader, StringReader, Writer }
import java.nio.channels.Channels
import scala.util.control.Exception.ultimately
-object Source
-{
+object Source {
def fromFile(file: File) = new InputSource(new FileInputStream(file))
def fromFile(fd: FileDescriptor) = new InputSource(new FileInputStream(fd))
def fromFile(name: String) = new InputSource(new FileInputStream(name))
@@ -31,13 +30,18 @@ object Source
* Governs how empty elements (i.e. those without child elements) should be serialized.
*/
object MinimizeMode extends Enumeration {
- /** Minimize empty tags if they were originally empty when parsed, or if they were constructed with [[scala.xml.Elem]]`#minimizeEmpty` == true */
+ /** Minimize empty tags if they were originally empty when parsed, or if they were constructed
+ * with [[scala.xml.Elem]]`#minimizeEmpty` == true
+ */
val Default = Value
- /** Always minimize empty tags. Note that this may be problematic for XHTML, in which case [[scala.xml.Xhtml]]`#toXhtml` should be used instead. */
+ /** Always minimize empty tags. Note that this may be problematic for XHTML, in which
+ * case [[scala.xml.Xhtml]]`#toXhtml` should be used instead.
+ */
val Always = Value
- /** Never minimize empty tags. */
+ /** Never minimize empty tags.
+ */
val Never = Value
}
@@ -50,8 +54,7 @@ import Source._
* @author Burak Emir
* @version 1.0, 25/04/2005
*/
-object XML extends XMLLoader[Elem]
-{
+object XML extends XMLLoader[Elem] {
val xml = "xml"
val xmlns = "xmlns"
val namespace = "http://www.w3.org/XML/1998/namespace"
diff --git a/src/library/scala/xml/factory/LoggedNodeFactory.scala b/src/library/scala/xml/factory/LoggedNodeFactory.scala
index c15a9ebe7b..abf8f97f03 100644
--- a/src/library/scala/xml/factory/LoggedNodeFactory.scala
+++ b/src/library/scala/xml/factory/LoggedNodeFactory.scala
@@ -18,7 +18,7 @@ object testLogged extends Application {
with scala.util.logging.ConsoleLogger
Console.println("Start")
- val doc = x.load(new java.net.URL("http://lampsvn.epfl.ch/svn-repos/scala/scala/trunk/build.xml"))
+ val doc = x.load(new java.net.URL("http://example.com/file.xml"))
Console.println("End")
Console.println(doc)
}
diff --git a/src/library/scala/xml/parsing/MarkupParser.scala b/src/library/scala/xml/parsing/MarkupParser.scala
index 32feaa2209..af9b5f47cf 100755
--- a/src/library/scala/xml/parsing/MarkupParser.scala
+++ b/src/library/scala/xml/parsing/MarkupParser.scala
@@ -134,7 +134,7 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
//
/** {{{
- * &lt;? prolog ::= xml S ... ?&gt;
+ * <? prolog ::= xml S ... ?>
* }}} */
def xmlProcInstr(): MetaData = {
xToken("xml")
@@ -195,7 +195,7 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
}
/** {{{
- * &lt;? prolog ::= xml S?
+ * <? prolog ::= xml S?
* // this is a bit more lenient than necessary...
* }}} */
def prolog(): (Option[String], Option[String], Option[Boolean]) =
@@ -355,7 +355,7 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
}
/** {{{
- * '&lt;! CharData ::= [CDATA[ ( {char} - {char}"]]&gt;"{char} ) ']]&gt;'
+ * '<! CharData ::= [CDATA[ ( {char} - {char}"]]>"{char} ) ']]>'
*
* see [15]
* }}} */
@@ -369,7 +369,7 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
}
/** {{{
- * Comment ::= '&lt;!--' ((Char - '-') | ('-' (Char - '-')))* '--&gt;'
+ * Comment ::= '<!--' ((Char - '-') | ('-' (Char - '-')))* '-->'
*
* see [15]
* }}} */
@@ -399,7 +399,7 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
}
/** {{{
- * '&lt;' content1 ::= ...
+ * '<' content1 ::= ...
* }}} */
def content1(pscope: NamespaceBinding, ts: NodeBuffer) {
ch match {
@@ -420,7 +420,7 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
}
/** {{{
- * content1 ::= '&lt;' content1 | '&amp;' charref ...
+ * content1 ::= '<' content1 | '&' charref ...
* }}} */
def content(pscope: NamespaceBinding): NodeSeq = {
var ts = new NodeBuffer
@@ -490,7 +490,7 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
/** parses document type declaration and assigns it to instance variable
* dtd.
* {{{
- * &lt;! parseDTD ::= DOCTYPE name ... >
+ * <! parseDTD ::= DOCTYPE name ... >
* }}} */
def parseDTD() { // dirty but fast
var extID: ExternalID = null
@@ -545,8 +545,8 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
}
/** {{{
- * '&lt;' element ::= xmlTag1 '&gt;' { xmlExpr | '{' simpleExpr '}' } ETag
- * | xmlTag1 '/' '&gt;'
+ * '<' element ::= xmlTag1 '>' { xmlExpr | '{' simpleExpr '}' } ETag
+ * | xmlTag1 '/' '>'
* }}} */
def element1(pscope: NamespaceBinding): NodeSeq = {
val pos = this.pos
@@ -778,7 +778,7 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
}
/** {{{
- * &lt;! attlist := ATTLIST
+ * <! attlist := ATTLIST
* }}} */
def attrDecl() = {
xToken("TTLIST")
@@ -824,7 +824,7 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
}
/** {{{
- * &lt;! element := ELEMENT
+ * <! element := ELEMENT
* }}} */
def entityDecl() = {
var isParameterEntity = false
diff --git a/src/partest/scala/tools/partest/PartestTask.scala b/src/partest/scala/tools/partest/PartestTask.scala
index 524dc06327..ad2e155182 100644
--- a/src/partest/scala/tools/partest/PartestTask.scala
+++ b/src/partest/scala/tools/partest/PartestTask.scala
@@ -299,6 +299,16 @@ class PartestTask extends Task with CompilationPathProperty {
}
} getOrElse sys.error("Provided classpath does not contain a Scala partest.")
+ val scalaActors = {
+ (classpath.list map { fs => new File(fs) }) find { f =>
+ f.getName match {
+ case "scala-actors.jar" => true
+ case "actors" if (f.getParentFile.getName == "classes") => true
+ case _ => false
+ }
+ }
+ } getOrElse sys.error("Provided classpath does not contain a Scala actors.")
+
def scalacArgsFlat: Option[Seq[String]] = scalacArgs map (_ flatMap { a =>
val parts = a.getParts
if(parts eq null) Seq[String]() else parts.toSeq
@@ -324,6 +334,7 @@ class PartestTask extends Task with CompilationPathProperty {
antFileManager.LATEST_LIB = scalaLibrary.getAbsolutePath
antFileManager.LATEST_COMP = scalaCompiler.getAbsolutePath
antFileManager.LATEST_PARTEST = scalaPartest.getAbsolutePath
+ antFileManager.LATEST_ACTORS = scalaActors.getAbsolutePath
javacmd foreach (x => antFileManager.JAVACMD = x.getAbsolutePath)
javaccmd foreach (x => antFileManager.JAVAC_CMD = x.getAbsolutePath)
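
The scalaActors probe added above accepts either the packed scala-actors.jar or an unpacked build-style classes/actors directory. A standalone sketch of the same lookup over an arbitrary classpath list (the paths are illustrative):

import java.io.File

object FindActorsEntry extends App {
  def findActors(classpath: Seq[String]): Option[File] =
    classpath.map(new File(_)).find { f =>
      f.getName match {
        case "scala-actors.jar" => true
        case "actors"           => f.getParentFile != null && f.getParentFile.getName == "classes"
        case _                  => false
      }
    }

  val cp = Seq("/opt/scala/lib/scala-library.jar", "/opt/scala/lib/scala-actors.jar")
  println(findActors(cp)) // Some(/opt/scala/lib/scala-actors.jar)
}
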
diff --git a/src/partest/scala/tools/partest/ScaladocModelTest.scala b/src/partest/scala/tools/partest/ScaladocModelTest.scala
new file mode 100644
index 0000000000..2eb026ceee
--- /dev/null
+++ b/src/partest/scala/tools/partest/ScaladocModelTest.scala
@@ -0,0 +1,124 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2011 LAMP/EPFL
+ * @author Vlad Ureche
+ */
+
+package scala.tools.partest
+
+import scala.tools.partest._
+import java.io._
+import scala.tools.nsc._
+import scala.tools.nsc.util.CommandLineParser
+import scala.tools.nsc.doc.{Settings, DocFactory, Universe}
+import scala.tools.nsc.doc.model._
+import scala.tools.nsc.reporters.ConsoleReporter
+
+/** A class for testing scaladoc model generation
+ * - you need to specify the code in the `code` method
+ * - you need to override the testModel method to test the model
+ * - you may specify extra parameters to send to scaladoc in `scaladocSettings`
+ * {{{
+ import scala.tools.nsc.doc.model._
+ import scala.tools.partest.ScaladocModelTest
+
+ object Test extends ScaladocModelTest {
+
+ def code = """ ... """
+ def scaladocSettings = ""
+ def testModel(rootPackage: Package) = {
+ // get the quick access implicit defs in scope (_package(s), _class(es), _trait(s), object(s) _method(s), _value(s))
+ import access._
+
+ // just need to check the member exists, access methods will throw an error if there's a problem
+ rootPackage._package("scala")._package("test")._class("C")._method("foo")
+ }
+ }
+ * }}}
+ */
+abstract class ScaladocModelTest extends DirectTest {
+
+ /** Override this to give scaladoc command line parameters */
+ def scaladocSettings: String
+
+ /** Override this to test the model */
+ def testModel(root: Package): Unit
+
+ // Implementation follows:
+ override def extraSettings: String = "-usejavacp"
+
+ override def show(): Unit = {
+ // redirect err to out, for logging
+ val prevErr = System.err
+ System.setErr(System.out)
+
+ try {
+ // 1 - compile with scaladoc and get the model out
+ val args = scaladocSettings.split(" ")
+ val universe = model(args:_*).getOrElse({sys.error("Scaladoc Model Test ERROR: No universe generated!")})
+ // 2 - check the model generated
+ testModel(universe.rootPackage)
+ } catch {
+ case e =>
+ println(e)
+ e.printStackTrace
+ }
+ // set err back to the real err handler
+ System.setErr(prevErr)
+ }
+
+ // create a new scaladoc compiler
+ def newDocFactory(args: String*): DocFactory = {
+ val settings = new Settings(_ => ())
+ val command = new ScalaDoc.Command((CommandLineParser tokenize extraSettings) ++ args.toList, settings)
+ val docFact = new DocFactory(new ConsoleReporter(settings), settings)
+ docFact
+ }
+
+ // compile with scaladoc and output the result
+ def model(args: String*): Option[Universe] = newDocFactory(args: _*).makeUniverse(Right(code))
+
+ // so we don't get the newSettings warning
+ override def isDebug = false
+
+
+ // finally, enable easy navigation inside the entities
+ object access {
+
+ // Make it easy to access things
+ class TemplateAccess(tpl: DocTemplateEntity) {
+
+ def _class(name: String): DocTemplateEntity = getTheFirst(_classes(name), tpl.qualifiedName + ".class(" + name + ")")
+ def _classes(name: String): List[DocTemplateEntity] = tpl.templates.filter(_.name == name).flatMap({ case c: Class => List(c)})
+
+ def _trait(name: String): DocTemplateEntity = getTheFirst(_traits(name), tpl.qualifiedName + ".trait(" + name + ")")
+ def _traits(name: String): List[DocTemplateEntity] = tpl.templates.filter(_.name == name).flatMap({ case t: Trait => List(t)})
+
+ def _object(name: String): DocTemplateEntity = getTheFirst(_objects(name), tpl.qualifiedName + ".object(" + name + ")")
+ def _objects(name: String): List[DocTemplateEntity] = tpl.templates.filter(_.name == name).flatMap({ case o: Object => List(o)})
+
+ def _method(name: String): Def = getTheFirst(_methods(name), tpl.qualifiedName + ".method(" + name + ")")
+ def _methods(name: String): List[Def] = tpl.methods.filter(_.name == name)
+
+ def _value(name: String): Val = getTheFirst(_values(name), tpl.qualifiedName + ".value(" + name + ")")
+ def _values(name: String): List[Val] = tpl.values.filter(_.name == name)
+
+ def getTheFirst[T](list: List[T], expl: String): T = {
+ if (list.length == 1)
+ list.head
+ else if (list.length == 0)
+ sys.error("Error getting " + expl + ": No such element. All elements in list: [" + list.mkString(", ") + "]")
+ else
+ sys.error("Error getting " + expl + ": " + list.length + " elements with this name. " +
+ "All elements in list: [" + list.mkString(", ") + "]")
+ }
+ }
+
+ class PackageAccess(pack: Package) extends TemplateAccess(pack) {
+ def _package(name: String): Package = getTheFirst(_packages(name), pack.qualifiedName + ".package(" + name + ")")
+ def _packages(name: String): List[Package] = pack.packages.filter(_.name == name)
+ }
+
+ implicit def templateAccess(tpl: DocTemplateEntity) = new TemplateAccess(tpl)
+ implicit def packageAccess(pack: Package) = new PackageAccess(pack)
+ }
+}
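
A concrete test following the pattern documented in the new ScaladocModelTest scaladoc might look like the sketch below; the package, class and method names are invented for illustration:

import scala.tools.nsc.doc.model._
import scala.tools.partest.ScaladocModelTest

object Test extends ScaladocModelTest {

  def code = """
    package test
    /** A documented class. */
    class C {
      /** A documented method. */
      def foo(x: Int): Int = x
    }
  """

  def scaladocSettings = ""   // no extra scaladoc flags needed here

  def testModel(rootPackage: Package) = {
    import access._
    // The accessors throw with a descriptive message if an entity is missing,
    // so reaching the end of this method is the success condition.
    rootPackage._package("test")._class("C")._method("foo")
  }
}
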
diff --git a/src/partest/scala/tools/partest/nest/AntRunner.scala b/src/partest/scala/tools/partest/nest/AntRunner.scala
index 4795e5551a..e77385d6e9 100644
--- a/src/partest/scala/tools/partest/nest/AntRunner.scala
+++ b/src/partest/scala/tools/partest/nest/AntRunner.scala
@@ -22,6 +22,7 @@ class AntRunner extends DirectRunner {
var LATEST_LIB: String = _
var LATEST_COMP: String = _
var LATEST_PARTEST: String = _
+ var LATEST_ACTORS: String = _
val testRootPath: String = "test"
val testRootDir: Directory = Directory(testRootPath)
}
diff --git a/src/partest/scala/tools/partest/nest/ConsoleFileManager.scala b/src/partest/scala/tools/partest/nest/ConsoleFileManager.scala
index 3d72227b04..fa533eeb10 100644
--- a/src/partest/scala/tools/partest/nest/ConsoleFileManager.scala
+++ b/src/partest/scala/tools/partest/nest/ConsoleFileManager.scala
@@ -83,6 +83,7 @@ class ConsoleFileManager extends FileManager {
latestFile = testClassesDir.parent / "bin"
latestLibFile = testClassesDir / "library"
+ latestActorsFile = testClassesDir / "library" / "actors"
latestCompFile = testClassesDir / "compiler"
latestPartestFile = testClassesDir / "partest"
latestFjbgFile = testParent / "lib" / "fjbg.jar"
@@ -92,6 +93,7 @@ class ConsoleFileManager extends FileManager {
NestUI.verbose("Running on "+dir)
latestFile = dir / "bin"
latestLibFile = dir / "lib/scala-library.jar"
+ latestActorsFile = dir / "lib/scala-actors.jar"
latestCompFile = dir / "lib/scala-compiler.jar"
latestPartestFile = dir / "lib/scala-partest.jar"
}
@@ -100,6 +102,7 @@ class ConsoleFileManager extends FileManager {
NestUI.verbose("Running build/quick")
latestFile = prefixFile("build/quick/bin")
latestLibFile = prefixFile("build/quick/classes/library")
+ latestActorsFile = prefixFile("build/quick/classes/library/actors")
latestCompFile = prefixFile("build/quick/classes/compiler")
latestPartestFile = prefixFile("build/quick/classes/partest")
}
@@ -109,6 +112,7 @@ class ConsoleFileManager extends FileManager {
val p = testParent.getParentFile
latestFile = prefixFileWith(p, "bin")
latestLibFile = prefixFileWith(p, "lib/scala-library.jar")
+ latestActorsFile = prefixFileWith(p, "lib/scala-actors.jar")
latestCompFile = prefixFileWith(p, "lib/scala-compiler.jar")
latestPartestFile = prefixFileWith(p, "lib/scala-partest.jar")
}
@@ -117,6 +121,7 @@ class ConsoleFileManager extends FileManager {
NestUI.verbose("Running dists/latest")
latestFile = prefixFile("dists/latest/bin")
latestLibFile = prefixFile("dists/latest/lib/scala-library.jar")
+ latestActorsFile = prefixFile("dists/latest/lib/scala-actors.jar")
latestCompFile = prefixFile("dists/latest/lib/scala-compiler.jar")
latestPartestFile = prefixFile("dists/latest/lib/scala-partest.jar")
}
@@ -125,6 +130,7 @@ class ConsoleFileManager extends FileManager {
NestUI.verbose("Running build/pack")
latestFile = prefixFile("build/pack/bin")
latestLibFile = prefixFile("build/pack/lib/scala-library.jar")
+ latestActorsFile = prefixFile("build/pack/lib/scala-actors.jar")
latestCompFile = prefixFile("build/pack/lib/scala-compiler.jar")
latestPartestFile = prefixFile("build/pack/lib/scala-partest.jar")
}
@@ -159,14 +165,17 @@ class ConsoleFileManager extends FileManager {
LATEST_LIB = latestLibFile.getAbsolutePath
LATEST_COMP = latestCompFile.getAbsolutePath
LATEST_PARTEST = latestPartestFile.getAbsolutePath
+ LATEST_ACTORS = latestActorsFile.getAbsolutePath
}
var LATEST_LIB: String = ""
var LATEST_COMP: String = ""
var LATEST_PARTEST: String = ""
+ var LATEST_ACTORS: String = ""
var latestFile: File = _
var latestLibFile: File = _
+ var latestActorsFile: File = _
var latestCompFile: File = _
var latestPartestFile: File = _
var latestFjbgFile: File = _
diff --git a/src/partest/scala/tools/partest/nest/DirectRunner.scala b/src/partest/scala/tools/partest/nest/DirectRunner.scala
index d3d50ca58c..20f435cfbb 100644
--- a/src/partest/scala/tools/partest/nest/DirectRunner.scala
+++ b/src/partest/scala/tools/partest/nest/DirectRunner.scala
@@ -57,13 +57,14 @@ trait DirectRunner {
// for example, see how it's done in ReflectiveRunner
//val consFM = new ConsoleFileManager
//import consFM.{ latestCompFile, latestLibFile, latestPartestFile }
- val latestCompFile = new File(fileManager.LATEST_COMP);
- val latestLibFile = new File(fileManager.LATEST_LIB);
- val latestPartestFile = new File(fileManager.LATEST_PARTEST);
+ val latestCompFile = new File(fileManager.LATEST_COMP)
+ val latestLibFile = new File(fileManager.LATEST_LIB)
+ val latestPartestFile = new File(fileManager.LATEST_PARTEST)
+ val latestActorsFile = new File(fileManager.LATEST_ACTORS)
val scalacheckURL = PathSettings.scalaCheck.toURL
val scalaCheckParentClassLoader = ScalaClassLoader.fromURLs(
- List(scalacheckURL, latestCompFile.toURI.toURL, latestLibFile.toURI.toURL, latestPartestFile.toURI.toURL)
+ scalacheckURL :: (List(latestCompFile, latestLibFile, latestActorsFile, latestPartestFile).map(_.toURI.toURL))
)
Output.init()
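
The classloader setup above amounts to turning the bootstrap jars into URLs and building an isolated URLClassLoader with a null parent, so test code resolves classes only against those jars. A minimal sketch with placeholder paths:

import java.io.File
import java.net.URLClassLoader

object IsolatedLoaderSketch extends App {
  val jars = List("scala-compiler.jar", "scala-library.jar", "scala-actors.jar", "scala-partest.jar")
    .map(name => new File("dists/latest/lib", name))

  // Null parent: nothing is delegated to the application classloader.
  val loader = new URLClassLoader(jars.map(_.toURI.toURL).toArray, null)
  println(loader.getURLs.mkString("\n"))
}
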
diff --git a/src/partest/scala/tools/partest/nest/FileManager.scala b/src/partest/scala/tools/partest/nest/FileManager.scala
index a4a94fe93e..6d9e64730f 100644
--- a/src/partest/scala/tools/partest/nest/FileManager.scala
+++ b/src/partest/scala/tools/partest/nest/FileManager.scala
@@ -62,6 +62,7 @@ trait FileManager extends FileUtil {
var LATEST_LIB: String
var LATEST_COMP: String
var LATEST_PARTEST: String
+ var LATEST_ACTORS: String
var showDiff = false
var updateCheck = false
diff --git a/src/partest/scala/tools/partest/nest/ReflectiveRunner.scala b/src/partest/scala/tools/partest/nest/ReflectiveRunner.scala
index 5cde63dc81..a0511774a9 100644
--- a/src/partest/scala/tools/partest/nest/ReflectiveRunner.scala
+++ b/src/partest/scala/tools/partest/nest/ReflectiveRunner.scala
@@ -48,9 +48,9 @@ class ReflectiveRunner {
new ConsoleFileManager
import fileManager.
- { latestCompFile, latestLibFile, latestPartestFile, latestFjbgFile, latestScalapFile }
+ { latestCompFile, latestLibFile, latestPartestFile, latestFjbgFile, latestScalapFile, latestActorsFile }
val files =
- Array(latestCompFile, latestLibFile, latestPartestFile, latestFjbgFile, latestScalapFile) map (x => io.File(x))
+ Array(latestCompFile, latestLibFile, latestPartestFile, latestFjbgFile, latestScalapFile, latestActorsFile) map (x => io.File(x))
val sepUrls = files map (_.toURL)
var sepLoader = new URLClassLoader(sepUrls, null)
diff --git a/src/partest/scala/tools/partest/nest/SBTRunner.scala b/src/partest/scala/tools/partest/nest/SBTRunner.scala
index 4c6f417df5..750e270c18 100644
--- a/src/partest/scala/tools/partest/nest/SBTRunner.scala
+++ b/src/partest/scala/tools/partest/nest/SBTRunner.scala
@@ -15,6 +15,7 @@ object SBTRunner extends DirectRunner {
var LATEST_LIB: String = _
var LATEST_COMP: String = _
var LATEST_PARTEST: String = _
+ var LATEST_ACTORS: String = _
val testRootPath: String = "test"
val testRootDir: Directory = Directory(testRootPath)
}
@@ -60,6 +61,9 @@ object SBTRunner extends DirectRunner {
fileManager.LATEST_COMP = comp getOrElse sys.error("No scala-compiler found! Classpath = " + fileManager.CLASSPATH)
val partest: Option[String] = (fileManager.CLASSPATH split File.pathSeparator filter (_ matches ".*scala-partest.*\\.jar")).headOption
fileManager.LATEST_PARTEST = partest getOrElse sys.error("No scala-partest found! Classpath = " + fileManager.CLASSPATH)
+ val actors: Option[String] = (fileManager.CLASSPATH split File.pathSeparator filter (_ matches ".*scala-actors.*\\.jar")).headOption
+ fileManager.LATEST_ACTORS = actors getOrElse sys.error("No scala-actors found! Classpath = " + fileManager.CLASSPATH)
+
// TODO - Do something useful here!!!
fileManager.JAVAC_CMD = "javac"
fileManager.failed = config.justFailedTests
diff --git a/src/partest/scala/tools/partest/nest/Worker.scala b/src/partest/scala/tools/partest/nest/Worker.scala
index 3f2cb16082..cb6f2a0edc 100644
--- a/src/partest/scala/tools/partest/nest/Worker.scala
+++ b/src/partest/scala/tools/partest/nest/Worker.scala
@@ -55,6 +55,7 @@ class ScalaCheckFileManager(val origmanager: FileManager) extends FileManager {
var LATEST_LIB: String = origmanager.LATEST_LIB
var LATEST_COMP: String = origmanager.LATEST_COMP
var LATEST_PARTEST: String = origmanager.LATEST_PARTEST
+ var LATEST_ACTORS: String = origmanager.LATEST_ACTORS
}
object Output {
diff --git a/test/files/jvm/scala-concurrent-tck.scala b/test/files/jvm/scala-concurrent-tck.scala
index ba7dffbcb0..70221c0de1 100644
--- a/test/files/jvm/scala-concurrent-tck.scala
+++ b/test/files/jvm/scala-concurrent-tck.scala
@@ -13,7 +13,7 @@ import scala.concurrent.promise
import scala.concurrent.blocking
import scala.util.{ Try, Success, Failure }
-import scala.util.Duration
+import scala.concurrent.util.Duration
trait TestBase {
diff --git a/test/files/neg/saferJavaConversions.check b/test/files/neg/saferJavaConversions.check
new file mode 100644
index 0000000000..0e53d2c437
--- /dev/null
+++ b/test/files/neg/saferJavaConversions.check
@@ -0,0 +1,6 @@
+saferJavaConversions.scala:13: error: type mismatch;
+ found : String("a")
+ required: Foo
+ val v = map.get("a") // now this is a type error
+ ^
+one error found
diff --git a/test/files/neg/saferJavaConversions.scala b/test/files/neg/saferJavaConversions.scala
new file mode 100644
index 0000000000..f0611204e6
--- /dev/null
+++ b/test/files/neg/saferJavaConversions.scala
@@ -0,0 +1,20 @@
+
+case class Foo(s: String)
+
+object Test {
+ def f1 = {
+ import scala.collection.JavaConversions._
+ val map: Map[Foo, String] = Map(Foo("a") -> "a", Foo("b") -> "b")
+ val v = map.get("a") // should be a type error, actually returns null
+ }
+ def f2 = {
+ import scala.collection.convert.wrapAsScala._
+ val map: Map[Foo, String] = Map(Foo("a") -> "a", Foo("b") -> "b")
+ val v = map.get("a") // now this is a type error
+ }
+ def f3 = {
+ import scala.collection.convert.wrapAsJava._
+ val map: Map[Foo, String] = Map(Foo("a") -> "a", Foo("b") -> "b")
+ val v = map.get("a")
+ }
+}
diff --git a/test/files/neg/t1364.check b/test/files/neg/t1364.check
index 78375333c2..cb8803abdc 100644
--- a/test/files/neg/t1364.check
+++ b/test/files/neg/t1364.check
@@ -1,4 +1,4 @@
-t1364.scala:9: error: overriding type T in trait A with bounds >: Nothing <: AnyRef{type S[-U]};
+t1364.scala:9: error: overriding type T in trait A with bounds <: AnyRef{type S[-U]};
type T has incompatible type
type T = { type S[U] = U }
^
diff --git a/test/files/neg/t1477.check b/test/files/neg/t1477.check
index e497637857..72bffa3270 100644
--- a/test/files/neg/t1477.check
+++ b/test/files/neg/t1477.check
@@ -1,4 +1,4 @@
-t1477.scala:13: error: overriding type V in trait C with bounds >: Nothing <: Middle.this.D;
+t1477.scala:13: error: overriding type V in trait C with bounds <: Middle.this.D;
type V is a volatile type; cannot override a type with non-volatile upper bound
type V <: (D with U)
^
diff --git a/test/files/neg/t2070.check b/test/files/neg/t2070.check
index bd049409a8..ef1d08f7b7 100644
--- a/test/files/neg/t2070.check
+++ b/test/files/neg/t2070.check
@@ -1,5 +1,6 @@
t2070.scala:8: error: The kind of trait T does not conform to the expected kind of type T[X] in trait A.
-t2070.B.T's type parameters do not match type T's expected parameters: type X (in object B) has one type parameter, but type X (in trait A) has none
+t2070.B.T's type parameters do not match type T's expected parameters:
+type X (in object B) has one type parameter, but type X (in trait A) has none
trait T[X[_]]
^
one error found
diff --git a/test/files/neg/t3015.check b/test/files/neg/t3015.check
index 53221b7ca0..6948392bb0 100644
--- a/test/files/neg/t3015.check
+++ b/test/files/neg/t3015.check
@@ -1,5 +1,5 @@
t3015.scala:7: error: scrutinee is incompatible with pattern type;
- found : _$1 where type +_$1
+ found : _$1
required: String
val b(foo) = "foo"
^
diff --git a/test/files/neg/t3481.check b/test/files/neg/t3481.check
index 48e6ff357b..debe07275b 100644
--- a/test/files/neg/t3481.check
+++ b/test/files/neg/t3481.check
@@ -1,17 +1,17 @@
t3481.scala:5: error: type mismatch;
found : String("hello")
- required: _$1 where type +_$1
+ required: _$1
f[A[Int]]("hello")
^
t3481.scala:11: error: type mismatch;
- found : _$2 where type +_$2
+ found : _$2
required: b.T
(which expands to) _$2
def f[T <: B[_]](a: T#T, b: T) = b.m(a)
^
t3481.scala:12: error: type mismatch;
found : String("Hello")
- required: _$2 where type +_$2
+ required: _$2
f("Hello", new B[Int])
^
t3481.scala:18: error: type mismatch;
diff --git a/test/files/neg/t4044.check b/test/files/neg/t4044.check
index 75dcf63bfe..41a04f69b9 100644
--- a/test/files/neg/t4044.check
+++ b/test/files/neg/t4044.check
@@ -2,15 +2,18 @@ t4044.scala:9: error: AnyRef takes no type parameters, expected: one
M[AnyRef] // error, (AnyRef :: *) not kind-conformant to (N :: * -> * -> *)
^
t4044.scala:9: error: kinds of the type arguments (<error>) do not conform to the expected kinds of the type parameters (type N).
-<error>'s type parameters do not match type N's expected parameters: <none> has no type parameters, but type N has one
+<error>'s type parameters do not match type N's expected parameters:
+<none> has no type parameters, but type N has one
M[AnyRef] // error, (AnyRef :: *) not kind-conformant to (N :: * -> * -> *)
^
t4044.scala:11: error: kinds of the type arguments (Test.A) do not conform to the expected kinds of the type parameters (type N).
-Test.A's type parameters do not match type N's expected parameters: type _ has no type parameters, but type O has one
+Test.A's type parameters do not match type N's expected parameters:
+type _ has no type parameters, but type O has one
M[A] // error, (A :: (* -> *) not kind-conformant to (N :: * -> * -> *)
^
t4044.scala:15: error: kinds of the type arguments (Test.C) do not conform to the expected kinds of the type parameters (type N).
-Test.C's type parameters do not match type N's expected parameters: type _ has one type parameter, but type _ has none
+Test.C's type parameters do not match type N's expected parameters:
+type _ has one type parameter, but type _ has none
M[C] // error, (C :: (* -> * -> * -> *) not kind-conformant to (N :: * -> * -> *)
^
four errors found
diff --git a/test/files/neg/t4515.check b/test/files/neg/t4515.check
index ce5350b35f..a60d16295f 100644
--- a/test/files/neg/t4515.check
+++ b/test/files/neg/t4515.check
@@ -1,6 +1,6 @@
t4515.scala:37: error: type mismatch;
found : _0(in value $anonfun) where type _0(in value $anonfun)
- required: (some other)_0(in value $anonfun) where type +(some other)_0(in value $anonfun)
+ required: (some other)_0(in value $anonfun)
handler.onEvent(target, ctx.getEvent, node, ctx)
^
one error found
diff --git a/test/files/neg/t4987.check b/test/files/neg/t4987.check
new file mode 100644
index 0000000000..8d7344d27b
--- /dev/null
+++ b/test/files/neg/t4987.check
@@ -0,0 +1,4 @@
+t4987.scala:2: error: constructor Foo2 in class Foo2 cannot be accessed in object Bar2
+object Bar2 { new Foo2(0, 0) }
+ ^
+one error found
diff --git a/test/files/neg/t4987.scala b/test/files/neg/t4987.scala
new file mode 100644
index 0000000000..e55acd4127
--- /dev/null
+++ b/test/files/neg/t4987.scala
@@ -0,0 +1,2 @@
+class Foo2 private (a: Int, b: Int)
+object Bar2 { new Foo2(0, 0) }
diff --git a/test/files/neg/t5063.check b/test/files/neg/t5063.check
new file mode 100644
index 0000000000..84690d0a1d
--- /dev/null
+++ b/test/files/neg/t5063.check
@@ -0,0 +1,4 @@
+t5063.scala:2: error: value + is not a member of Object
+ super.+("")
+ ^
+one error found
diff --git a/test/files/neg/t5063.scala b/test/files/neg/t5063.scala
new file mode 100644
index 0000000000..5b34b53fb7
--- /dev/null
+++ b/test/files/neg/t5063.scala
@@ -0,0 +1,3 @@
+class A {
+ super.+("")
+}
diff --git a/test/files/neg/t5152.check b/test/files/neg/t5152.check
index 80e0141b64..fd510dbae0 100644
--- a/test/files/neg/t5152.check
+++ b/test/files/neg/t5152.check
@@ -1,9 +1,11 @@
t5152.scala:7: error: kinds of the type arguments (Test.B) do not conform to the expected kinds of the type parameters (type E) in class A.
-Test.B's type parameters do not match type E's expected parameters: type E has one type parameter, but type _ has none
+Test.B's type parameters do not match type E's expected parameters:
+type E has one type parameter, but type _ has none
class B[E[_]] extends A[B] { } // B is depth 2 but A requires 1
^
t5152.scala:11: error: kinds of the type arguments (Test.B1) do not conform to the expected kinds of the type parameters (type E) in class A1.
-Test.B1's type parameters do not match type E's expected parameters: type _ has no type parameters, but type G has one
+Test.B1's type parameters do not match type E's expected parameters:
+type _ has no type parameters, but type G has one
class B1[E[_]] extends A1[B1] // B1 is depth 2 but A1 requires 3
^
two errors found
diff --git a/test/files/neg/t5189b.check b/test/files/neg/t5189b.check
index 7f78cbb438..46996e96d0 100644
--- a/test/files/neg/t5189b.check
+++ b/test/files/neg/t5189b.check
@@ -1,8 +1,11 @@
-t5189b.scala:25: error: type mismatch;
- found : TestNeg.Wrapped[?T2] where type ?T2 <: T
+t5189b.scala:38: error: type mismatch;
+ found : TestNeg.Wrapped[?T7] where type ?T7 <: T (this is a GADT skolem)
required: TestNeg.Wrapped[T]
-Note: ?T2 <: T, but class Wrapped is invariant in type W.
+Note: ?T7 <: T, but class Wrapped is invariant in type W.
You may wish to define W as +W instead. (SLS 4.5)
case Wrapper/*[_ <: T ]*/(wrapped) => wrapped // : Wrapped[_ <: T], which is a subtype of Wrapped[T] if and only if Wrapped is covariant in its type parameter
^
-one error found
+t5189b.scala:51: error: value foo is not a member of type parameter T
+ case Some(xs) => xs.foo // the error message should not refer to a skolem (testing extrapolation)
+ ^
+two errors found
diff --git a/test/files/neg/t5189b.scala b/test/files/neg/t5189b.scala
index 1750f14084..7c1871dc97 100644
--- a/test/files/neg/t5189b.scala
+++ b/test/files/neg/t5189b.scala
@@ -5,8 +5,21 @@ class TestPos {
def unwrap[T](x: AbsWrapperCov[T]): T = x match {
case Wrapper/*[_ <: T ]*/(x) => x // _ <: T, which is a subtype of T
}
+
+ def unwrapOption[T](x: Option[T]): T = x match {
+ case Some(xs) => xs
+ }
+
+
+ case class Down[+T](x: T)
+ case class Up[-T](f: T => Unit)
+
+ def f1[T](x1: Down[T])(x2: Up[T]) = ((x1, x2)) match {
+ case (Down(x), Up(f)) => f(x)
+ }
}
+
object TestNeg extends App {
class AbsWrapperCov[+A]
case class Wrapper[B](x: Wrapped[B]) extends AbsWrapperCov[B]
@@ -33,6 +46,11 @@ object TestNeg extends App {
// val w = new Wrapped(new A)
// unwrap[Any](Wrapper(w)).cell = new B
// w.cell.imNotAB
+
+ def unwrapOption[T](x: Option[T]): T = x match {
+ case Some(xs) => xs.foo // the error message should not refer to a skolem (testing extrapolation)
+ }
+
}
// class TestPos1 {
diff --git a/test/files/neg/t5580a.check b/test/files/neg/t5580a.check
new file mode 100644
index 0000000000..50a31857d5
--- /dev/null
+++ b/test/files/neg/t5580a.check
@@ -0,0 +1,6 @@
+t5580a.scala:9: error: polymorphic expression cannot be instantiated to expected type;
+ found : [A]scala.collection.mutable.Set[A]
+ required: scala.collection.mutable.Map[bar,scala.collection.mutable.Set[bar]]
+ if (map.get(tmp).isEmpty) map.put(tmp,collection.mutable.Set())
+ ^
+one error found
diff --git a/test/files/neg/t5580a.scala b/test/files/neg/t5580a.scala
new file mode 100644
index 0000000000..742f0e85ea
--- /dev/null
+++ b/test/files/neg/t5580a.scala
@@ -0,0 +1,11 @@
+import scala.collection.mutable.WeakHashMap
+
+class bar{ }
+class foo{
+ val map = WeakHashMap[AnyRef, collection.mutable.Map[bar, collection.mutable.Set[bar]]]()
+
+ def test={
+ val tmp:bar=null
+ if (map.get(tmp).isEmpty) map.put(tmp,collection.mutable.Set())
+ }
+}
diff --git a/test/files/neg/t5589neg.check b/test/files/neg/t5589neg.check
new file mode 100644
index 0000000000..b3ff16d7e4
--- /dev/null
+++ b/test/files/neg/t5589neg.check
@@ -0,0 +1,37 @@
+t5589neg.scala:2: warning: `withFilter' method does not yet exist on Either.RightProjection[Int,String], using `filter' method instead
+ def f5(x: Either[Int, String]) = for ((y1, y2: String) <- x.right) yield ((y1, y2))
+ ^
+t5589neg.scala:2: error: constructor cannot be instantiated to expected type;
+ found : (T1, T2)
+ required: String
+ def f5(x: Either[Int, String]) = for ((y1, y2: String) <- x.right) yield ((y1, y2))
+ ^
+t5589neg.scala:3: warning: `withFilter' method does not yet exist on Either.RightProjection[Int,String], using `filter' method instead
+ def f6(x: Either[Int, String]) = for ((y1, y2: Any) <- x.right) yield ((y1, y2))
+ ^
+t5589neg.scala:3: error: constructor cannot be instantiated to expected type;
+ found : (T1, T2)
+ required: String
+ def f6(x: Either[Int, String]) = for ((y1, y2: Any) <- x.right) yield ((y1, y2))
+ ^
+t5589neg.scala:4: error: constructor cannot be instantiated to expected type;
+ found : (T1,)
+ required: (String, Int)
+ def f7(x: Either[Int, (String, Int)]) = for (y1 @ Tuple1(y2) <- x.right) yield ((y1, y2))
+ ^
+t5589neg.scala:4: error: not found: value y2
+ def f7(x: Either[Int, (String, Int)]) = for (y1 @ Tuple1(y2) <- x.right) yield ((y1, y2))
+ ^
+t5589neg.scala:5: error: constructor cannot be instantiated to expected type;
+ found : (T1, T2, T3)
+ required: (String, Int)
+ def f8(x: Either[Int, (String, Int)]) = for ((y1, y2, y3) <- x.right) yield ((y1, y2))
+ ^
+t5589neg.scala:5: error: not found: value y1
+ def f8(x: Either[Int, (String, Int)]) = for ((y1, y2, y3) <- x.right) yield ((y1, y2))
+ ^
+t5589neg.scala:5: error: not found: value y2
+ def f8(x: Either[Int, (String, Int)]) = for ((y1, y2, y3) <- x.right) yield ((y1, y2))
+ ^
+two warnings found
+7 errors found
diff --git a/test/files/neg/t5589neg.scala b/test/files/neg/t5589neg.scala
new file mode 100644
index 0000000000..31ff2c3693
--- /dev/null
+++ b/test/files/neg/t5589neg.scala
@@ -0,0 +1,6 @@
+class A {
+ def f5(x: Either[Int, String]) = for ((y1, y2: String) <- x.right) yield ((y1, y2))
+ def f6(x: Either[Int, String]) = for ((y1, y2: Any) <- x.right) yield ((y1, y2))
+ def f7(x: Either[Int, (String, Int)]) = for (y1 @ Tuple1(y2) <- x.right) yield ((y1, y2))
+ def f8(x: Either[Int, (String, Int)]) = for ((y1, y2, y3) <- x.right) yield ((y1, y2))
+}
diff --git a/test/files/neg/t5589neg2.check b/test/files/neg/t5589neg2.check
new file mode 100644
index 0000000000..6af4955a83
--- /dev/null
+++ b/test/files/neg/t5589neg2.check
@@ -0,0 +1,9 @@
+t5589neg2.scala:7: error: constructor cannot be instantiated to expected type;
+ found : (T1, T2)
+ required: String
+ for (((((a, (b, (c, (d1, d2)))), es), fs), gs) <- x) yield (d :: es).mkString(", ") // not ok
+ ^
+t5589neg2.scala:7: error: not found: value d
+ for (((((a, (b, (c, (d1, d2)))), es), fs), gs) <- x) yield (d :: es).mkString(", ") // not ok
+ ^
+two errors found
diff --git a/test/files/neg/t5589neg2.scala b/test/files/neg/t5589neg2.scala
new file mode 100644
index 0000000000..b7c7ab7218
--- /dev/null
+++ b/test/files/neg/t5589neg2.scala
@@ -0,0 +1,13 @@
+class A {
+ def f1(x: List[((((Int, (Double, (Float, String))), List[String]), List[Int]), List[Float])]) = {
+ for (((((a, (b, (c, d))), es), fs), gs) <- x) yield (d :: es).mkString(", ") // ok
+ }
+
+ def f2(x: List[((((Int, (Double, (Float, String))), List[String]), List[Int]), List[Float])]) = {
+ for (((((a, (b, (c, (d1, d2)))), es), fs), gs) <- x) yield (d :: es).mkString(", ") // not ok
+ }
+
+ def f3(x: List[((((Int, (Double, (Float, String))), List[String]), List[Int]), List[Float])]) = {
+ for (((((a, (b, _)), es), fs), gs) <- x) yield (es ::: fs).mkString(", ") // ok
+ }
+} \ No newline at end of file
diff --git a/test/files/neg/t708.check b/test/files/neg/t708.check
index 15a9c9ed93..4983aab613 100644
--- a/test/files/neg/t708.check
+++ b/test/files/neg/t708.check
@@ -1,4 +1,4 @@
-t708.scala:8: error: overriding type S in trait X with bounds >: Nothing <: A.this.T;
+t708.scala:8: error: overriding type S in trait X with bounds <: A.this.T;
type S has incompatible type
override private[A] type S = Any;
^
diff --git a/test/files/neg/t742.check b/test/files/neg/t742.check
index f587948ef1..d355715442 100644
--- a/test/files/neg/t742.check
+++ b/test/files/neg/t742.check
@@ -1,5 +1,6 @@
t742.scala:5: error: kinds of the type arguments (Crash._1,Crash._2,Any) do not conform to the expected kinds of the type parameters (type m,type n,type z).
-Crash._1's type parameters do not match type m's expected parameters: type s1 has one type parameter, but type n has two
+Crash._1's type parameters do not match type m's expected parameters:
+type s1 has one type parameter, but type n has two
type p = mul[_1, _2, Any] // mul[_1, _1, Any] needs -Yrecursion
^
one error found
diff --git a/test/files/neg/tcpoly_override.check b/test/files/neg/tcpoly_override.check
index 95529329e8..dbc3ff9461 100644
--- a/test/files/neg/tcpoly_override.check
+++ b/test/files/neg/tcpoly_override.check
@@ -1,5 +1,6 @@
tcpoly_override.scala:9: error: The kind of type T does not conform to the expected kind of type T[_] in trait A.
-C.this.T's type parameters do not match type T's expected parameters: type T (in class C) has no type parameters, but type T (in trait A) has one
+C.this.T's type parameters do not match type T's expected parameters:
+type T (in class C) has no type parameters, but type T (in trait A) has one
type T = B // This compiles well (@M: ... but it shouldn't)
^
one error found
diff --git a/test/files/neg/tcpoly_typealias.check b/test/files/neg/tcpoly_typealias.check
index 670add2c04..4beac0e440 100644
--- a/test/files/neg/tcpoly_typealias.check
+++ b/test/files/neg/tcpoly_typealias.check
@@ -1,13 +1,16 @@
tcpoly_typealias.scala:37: error: The kind of type m does not conform to the expected kind of type m[+x] in trait A.
-BInv.this.m's type parameters do not match type m's expected parameters: type x (in trait BInv) is invariant, but type x (in trait A) is declared covariant
+BInv.this.m's type parameters do not match type m's expected parameters:
+type x (in trait BInv) is invariant, but type x (in trait A) is declared covariant
type m[x] = FooCov[x] // error: invariant x in alias def
^
tcpoly_typealias.scala:41: error: The kind of type m does not conform to the expected kind of type m[+x] in trait A.
-BCon.this.m's type parameters do not match type m's expected parameters: type x (in trait BCon) is contravariant, but type x (in trait A) is declared covariant
+BCon.this.m's type parameters do not match type m's expected parameters:
+type x (in trait BCon) is contravariant, but type x (in trait A) is declared covariant
type m[-x] = FooCon[x] // error: contravariant x
^
tcpoly_typealias.scala:45: error: The kind of type m does not conform to the expected kind of type m[+x] in trait A.
-BBound.this.m's type parameters do not match type m's expected parameters: type x (in trait BBound)'s bounds >: Nothing <: String are stricter than type x (in trait A)'s declared bounds >: Nothing <: Any
+BBound.this.m's type parameters do not match type m's expected parameters:
+type x (in trait BBound)'s bounds <: String are stricter than type x (in trait A)'s declared bounds >: Nothing <: Any
type m[+x <: String] = FooBound[x] // error: x with stricter bound
^
three errors found
diff --git a/test/files/neg/tcpoly_variance_enforce.check b/test/files/neg/tcpoly_variance_enforce.check
index 44b5b2c15c..3299cc3435 100644
--- a/test/files/neg/tcpoly_variance_enforce.check
+++ b/test/files/neg/tcpoly_variance_enforce.check
@@ -1,45 +1,57 @@
tcpoly_variance_enforce.scala:15: error: kinds of the type arguments (FooInvar) do not conform to the expected kinds of the type parameters (type m) in trait coll.
-FooInvar's type parameters do not match type m's expected parameters: type x (in class FooInvar) is invariant, but type x is declared covariant
+FooInvar's type parameters do not match type m's expected parameters:
+type x (in class FooInvar) is invariant, but type x is declared covariant
object fcollinv extends coll[FooInvar] // error
^
tcpoly_variance_enforce.scala:16: error: kinds of the type arguments (FooContra) do not conform to the expected kinds of the type parameters (type m) in trait coll.
-FooContra's type parameters do not match type m's expected parameters: type x (in class FooContra) is contravariant, but type x is declared covariant
+FooContra's type parameters do not match type m's expected parameters:
+type x (in class FooContra) is contravariant, but type x is declared covariant
object fcollcon extends coll[FooContra] // error
^
tcpoly_variance_enforce.scala:17: error: kinds of the type arguments (FooString) do not conform to the expected kinds of the type parameters (type m) in trait coll.
-FooString's type parameters do not match type m's expected parameters: type x (in class FooString)'s bounds >: Nothing <: String are stricter than type x's declared bounds >: Nothing <: Any
+FooString's type parameters do not match type m's expected parameters:
+type x (in class FooString)'s bounds <: String are stricter than type x's declared bounds >: Nothing <: Any
object fcollwb extends coll[FooString] // error
^
tcpoly_variance_enforce.scala:19: error: kinds of the type arguments (FooCov) do not conform to the expected kinds of the type parameters (type m) in trait coll2.
-FooCov's type parameters do not match type m's expected parameters: type x (in class FooCov) is covariant, but type x is declared contravariant
+FooCov's type parameters do not match type m's expected parameters:
+type x (in class FooCov) is covariant, but type x is declared contravariant
object fcoll2ok extends coll2[FooCov] // error
^
tcpoly_variance_enforce.scala:20: error: kinds of the type arguments (FooInvar) do not conform to the expected kinds of the type parameters (type m) in trait coll2.
-FooInvar's type parameters do not match type m's expected parameters: type x (in class FooInvar) is invariant, but type x is declared contravariant
+FooInvar's type parameters do not match type m's expected parameters:
+type x (in class FooInvar) is invariant, but type x is declared contravariant
object fcoll2inv extends coll2[FooInvar] // error
^
tcpoly_variance_enforce.scala:22: error: kinds of the type arguments (FooString) do not conform to the expected kinds of the type parameters (type m) in trait coll2.
-FooString's type parameters do not match type m's expected parameters: type x (in class FooString) is covariant, but type x is declared contravarianttype x (in class FooString)'s bounds >: Nothing <: String are stricter than type x's declared bounds >: Nothing <: Any
+FooString's type parameters do not match type m's expected parameters:
+type x (in class FooString) is covariant, but type x is declared contravariant
+type x (in class FooString)'s bounds <: String are stricter than type x's declared bounds >: Nothing <: Any
object fcoll2wb extends coll2[FooString] // error
^
tcpoly_variance_enforce.scala:27: error: kinds of the type arguments (FooString) do not conform to the expected kinds of the type parameters (type m) in trait coll3.
-FooString's type parameters do not match type m's expected parameters: type x (in class FooString)'s bounds >: Nothing <: String are stricter than type x's declared bounds >: Nothing <: Any
+FooString's type parameters do not match type m's expected parameters:
+type x (in class FooString)'s bounds <: String are stricter than type x's declared bounds >: Nothing <: Any
object fcoll3wb extends coll3[FooString] // error
^
tcpoly_variance_enforce.scala:30: error: kinds of the type arguments (FooString,Int) do not conform to the expected kinds of the type parameters (type m,type y) in trait coll4.
-FooString's type parameters do not match type m's expected parameters: type x (in class FooString)'s bounds >: Nothing <: String are stricter than type x's declared bounds >: Nothing <: y
+FooString's type parameters do not match type m's expected parameters:
+type x (in class FooString)'s bounds <: String are stricter than type x's declared bounds <: y
object fcoll4_1 extends coll4[FooString, Int] // error
^
tcpoly_variance_enforce.scala:31: error: kinds of the type arguments (FooString,Any) do not conform to the expected kinds of the type parameters (type m,type y) in trait coll4.
-FooString's type parameters do not match type m's expected parameters: type x (in class FooString)'s bounds >: Nothing <: String are stricter than type x's declared bounds >: Nothing <: y
+FooString's type parameters do not match type m's expected parameters:
+type x (in class FooString)'s bounds <: String are stricter than type x's declared bounds <: y
object fcoll4_2 extends coll4[FooString, Any] // error
^
tcpoly_variance_enforce.scala:37: error: kinds of the type arguments (FooInvar) do not conform to the expected kinds of the type parameters (type m) in trait coll.
-FooInvar's type parameters do not match type m's expected parameters: type x (in class FooInvar) is invariant, but type x is declared covariant
+FooInvar's type parameters do not match type m's expected parameters:
+type x (in class FooInvar) is invariant, but type x is declared covariant
def x: coll[FooInvar] = sys.error("foo") // error
^
tcpoly_variance_enforce.scala:38: error: kinds of the type arguments (FooContra) do not conform to the expected kinds of the type parameters (type m) in trait coll.
-FooContra's type parameters do not match type m's expected parameters: type x (in class FooContra) is contravariant, but type x is declared covariant
+FooContra's type parameters do not match type m's expected parameters:
+type x (in class FooContra) is contravariant, but type x is declared covariant
def y: coll[FooContra] = sys.error("foo") // error
^
11 errors found
diff --git a/test/files/neg/valueclasses.check b/test/files/neg/valueclasses.check
index 756a0474fa..4f042faded 100644
--- a/test/files/neg/valueclasses.check
+++ b/test/files/neg/valueclasses.check
@@ -40,7 +40,4 @@ class V12[@specialized T, U](val x: (T, U)) extends AnyVal // fail
valueclasses.scala:31: error: value class needs to have exactly one public val parameter
class V13(x: Int) extends AnyVal // fail
^
-valueclasses.scala:45: error: value class must have public primary constructor
-final class TOD private (val secondsOfDay: Int) extends AnyVal { // should fail with private constructor
- ^
-15 errors found
+14 errors found
diff --git a/test/files/neg/valueclasses.scala b/test/files/neg/valueclasses.scala
index e405d95489..7cac94ab11 100644
--- a/test/files/neg/valueclasses.scala
+++ b/test/files/neg/valueclasses.scala
@@ -29,26 +29,3 @@ class V11[T](val x: List[T]) extends AnyVal // ok
class V12[@specialized T, U](val x: (T, U)) extends AnyVal // fail
class V13(x: Int) extends AnyVal // fail
-
-
-package time {
-
-object TOD {
- final val SecondsPerDay = 86400
-
- def apply(seconds: Int) = {
- val n = seconds % SecondsPerDay
- new TOD(if (n >= 0) n else n + SecondsPerDay)
- }
-}
-
-final class TOD private (val secondsOfDay: Int) extends AnyVal { // should fail with private constructor
- def hours = secondsOfDay / 3600
- def minutes = (secondsOfDay / 60) % 60
- def seconds = secondsOfDay % 60
-
- override def toString = "%02d:%02d:%02d".format(hours, minutes, seconds)
-}
-}
-
-
diff --git a/test/files/pos/irrefutable.scala b/test/files/pos/irrefutable.scala
new file mode 100644
index 0000000000..0a792b644a
--- /dev/null
+++ b/test/files/pos/irrefutable.scala
@@ -0,0 +1,22 @@
+// The test which this should perform but does not
+// is that f1 is recognized as irrefutable and f2 is not
+// This can be recognized via the generated classes:
+//
+// A$$anonfun$f1$1.class
+// A$$anonfun$f2$1.class
+// A$$anonfun$f2$2.class
+//
+// The extra one in $f2$ is the filter.
+//
+// !!! Marking with exclamation points so maybe someday
+// this test will be finished.
+class A {
+ case class Foo[T](x: T)
+
+ def f1(xs: List[Foo[Int]]) = {
+ for (Foo(x: Int) <- xs) yield x
+ }
+ def f2(xs: List[Foo[Any]]) = {
+ for (Foo(x: Int) <- xs) yield x
+ }
+}
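A rough sketch of the desugaring the comment above alludes to (assuming the standard for-comprehension translation, not part of the changeset): the irrefutable pattern in `f1` needs only a `map`, while the refutable type test in `f2` inserts a `withFilter` step first, which is where the extra anonymous-function class comes from.

    class Desugared {
      case class Foo[T](x: T)

      // f1: Foo(x) always matches a Foo[Int], so a single map suffices
      def f1(xs: List[Foo[Int]]) = xs map { case Foo(x) => x }

      // f2: the `x: Int` test can fail on a Foo[Any], so a filter runs first
      // (the match in the map step may draw an exhaustivity warning; the
      // preceding filter guarantees it succeeds at run time)
      def f2(xs: List[Foo[Any]]) =
        xs withFilter {
          case Foo(_: Int) => true
          case _           => false
        } map {
          case Foo(x: Int) => x
        }
    }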
diff --git a/test/files/pos/lookupswitch.scala b/test/files/pos/lookupswitch.scala
new file mode 100644
index 0000000000..33594c0ea6
--- /dev/null
+++ b/test/files/pos/lookupswitch.scala
@@ -0,0 +1,37 @@
+// There's not a real test here, but on compilation the
+// switch should have the cases arranged in order from 1-30.
+class A {
+ def f(x: Int) = x match {
+ case 6 => "6"
+ case 18 => "18"
+ case 7 => "7"
+ case 2 => "2"
+ case 13 => "13"
+ case 11 => "11"
+ case 26 => "26"
+ case 27 => "27"
+ case 29 => "29"
+ case 25 => "25"
+ case 9 => "9"
+ case 17 => "17"
+ case 16 => "16"
+ case 1 => "1"
+ case 30 => "30"
+ case 15 => "15"
+ case 22 => "22"
+ case 19 => "19"
+ case 23 => "23"
+ case 8 => "8"
+ case 28 => "28"
+ case 5 => "5"
+ case 12 => "12"
+ case 10 => "10"
+ case 21 => "21"
+ case 24 => "24"
+ case 4 => "4"
+ case 14 => "14"
+ case 3 => "3"
+ case 20 => "20"
+ }
+}
+ \ No newline at end of file
diff --git a/test/files/pos/t1336.scala b/test/files/pos/t1336.scala
new file mode 100644
index 0000000000..63967985c7
--- /dev/null
+++ b/test/files/pos/t1336.scala
@@ -0,0 +1,10 @@
+object Foo {
+ def foreach( f : ((Int,Int)) => Unit ) {
+ println("foreach")
+ f(1,2)
+ }
+
+ for( (a,b) <- this ) {
+ println((a,b))
+ }
+}
diff --git a/test/files/pos/t5580b.scala b/test/files/pos/t5580b.scala
new file mode 100644
index 0000000000..d5a4a0a2b2
--- /dev/null
+++ b/test/files/pos/t5580b.scala
@@ -0,0 +1,19 @@
+/** It's a pos test because it does indeed compile,
+ * not so much because I'm glad it does. Testing
+ * that error messages created and discarded during
+ * implicit search don't blow it up.
+ */
+
+import scala.collection.mutable.WeakHashMap
+import scala.collection.JavaConversions._
+
+class bar { }
+
+class foo {
+ val map = WeakHashMap[AnyRef, collection.mutable.Map[bar, collection.mutable.Set[bar]]]()
+
+ def test={
+ val tmp:bar=null
+ if (map.get(tmp).isEmpty) map.put(tmp,collection.mutable.Set())
+ }
+}
diff --git a/test/files/pos/t5589.scala b/test/files/pos/t5589.scala
new file mode 100644
index 0000000000..69cbb20391
--- /dev/null
+++ b/test/files/pos/t5589.scala
@@ -0,0 +1,22 @@
+class A {
+ // First three compile.
+ def f1(x: Either[Int, String]) = x.right map (y => y)
+ def f2(x: Either[Int, String]) = for (y <- x.right) yield y
+ def f3(x: Either[Int, (String, Int)]) = x.right map { case (y1, y2) => (y1, y2) }
+ // Last one fails.
+ def f4(x: Either[Int, (String, Int)]) = for ((y1, y2) <- x.right) yield ((y1, y2))
+/**
+./a.scala:5: error: constructor cannot be instantiated to expected type;
+ found : (T1, T2)
+ required: Either[Nothing,(String, Int)]
+ def f4(x: Either[Int, (String, Int)]) = for ((y1, y2) <- x.right) yield ((y1, y2))
+ ^
+./a.scala:5: error: not found: value y1
+ def f4(x: Either[Int, (String, Int)]) = for ((y1, y2) <- x.right) yield ((y1, y2))
+ ^
+./a.scala:5: error: not found: value y2
+ def f4(x: Either[Int, (String, Int)]) = for ((y1, y2) <- x.right) yield ((y1, y2))
+ ^
+three errors found
+**/
+}
diff --git a/test/files/pos/t5604/ReplConfig.scala b/test/files/pos/t5604/ReplConfig.scala
new file mode 100644
index 0000000000..8c589eba60
--- /dev/null
+++ b/test/files/pos/t5604/ReplConfig.scala
@@ -0,0 +1,53 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2011 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.tools.nsc
+package interpreter
+
+import util.Exceptional.unwrap
+import util.stackTraceString
+
+trait ReplConfig {
+ lazy val replProps = new ReplProps
+
+ class TapMaker[T](x: T) {
+ def tapInfo(msg: => String): T = tap(x => replinfo(parens(x)))
+ def tapDebug(msg: => String): T = tap(x => repldbg(parens(x)))
+ def tapTrace(msg: => String): T = tap(x => repltrace(parens(x)))
+ def tap[U](f: T => U): T = {
+ f(x)
+ x
+ }
+ }
+
+ private def parens(x: Any) = "(" + x + ")"
+ private def echo(msg: => String) =
+ try Console println msg
+ catch { case x: AssertionError => Console.println("Assertion error printing debugging output: " + x) }
+
+ private[nsc] def repldbg(msg: => String) = if (isReplDebug) echo(msg)
+ private[nsc] def repltrace(msg: => String) = if (isReplTrace) echo(msg)
+ private[nsc] def replinfo(msg: => String) = if (isReplInfo) echo(msg)
+
+ private[nsc] def logAndDiscard[T](label: String, alt: => T): PartialFunction[Throwable, T] = {
+ case t =>
+ repldbg(label + ": " + unwrap(t))
+ repltrace(stackTraceString(unwrap(t)))
+ alt
+ }
+ private[nsc] def substituteAndLog[T](alt: => T)(body: => T): T =
+ substituteAndLog("" + alt, alt)(body)
+ private[nsc] def substituteAndLog[T](label: String, alt: => T)(body: => T): T = {
+ try body
+ catch logAndDiscard(label, alt)
+ }
+ private[nsc] def squashAndLog(label: String)(body: => Unit): Unit =
+ substituteAndLog(label, ())(body)
+
+ def isReplTrace: Boolean = replProps.trace
+ def isReplDebug: Boolean = replProps.debug || isReplTrace
+ def isReplInfo: Boolean = replProps.info || isReplDebug
+ def isReplPower: Boolean = replProps.power
+}
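`TapMaker.tap` above is the usual tap combinator: run a side effect on a value and hand the value back unchanged, so logging can be threaded into an expression without splitting it up. The same operation later landed in the standard library; a small usage sketch (assuming Scala 2.13's `scala.util.chaining`, not part of the changeset):

    import scala.util.chaining._

    object TapDemo {
      def parseAndDouble(s: String): Int = {
        val n = s.trim.toInt.tap(i => println("parsed: " + i))  // logs, returns i unchanged
        n * 2
      }
    }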
diff --git a/test/files/pos/t5604/ReplReporter.scala b/test/files/pos/t5604/ReplReporter.scala
new file mode 100644
index 0000000000..130af990ad
--- /dev/null
+++ b/test/files/pos/t5604/ReplReporter.scala
@@ -0,0 +1,30 @@
+/* NSC -- new Scala compiler
+ * Copyright 2002-2011 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.tools.nsc
+package interpreter
+
+import reporters._
+import IMain._
+
+class ReplReporter(intp: IMain) extends ConsoleReporter(intp.settings, Console.in, new ReplStrippingWriter(intp)) {
+ override def printMessage(msg: String) {
+ // Avoiding deadlock if the compiler starts logging before
+ // the lazy val is complete.
+ if (intp.isInitializeComplete) {
+ if (intp.totalSilence) {
+ if (isReplTrace)
+ super.printMessage("[silent] " + msg)
+ }
+ else super.printMessage(msg)
+ }
+ else Console.println("[init] " + msg)
+ }
+
+ override def displayPrompt() {
+ if (intp.totalSilence) ()
+ else super.displayPrompt()
+ }
+}
diff --git a/test/files/pos/virtpatmat_anonfun_for.flags b/test/files/pos/virtpatmat_anonfun_for.flags
new file mode 100644
index 0000000000..23e3dc7d26
--- /dev/null
+++ b/test/files/pos/virtpatmat_anonfun_for.flags
@@ -0,0 +1 @@
+-Yvirtpatmat \ No newline at end of file
diff --git a/test/files/pos/virtpatmat_anonfun_for.scala b/test/files/pos/virtpatmat_anonfun_for.scala
new file mode 100644
index 0000000000..8623cd97ba
--- /dev/null
+++ b/test/files/pos/virtpatmat_anonfun_for.scala
@@ -0,0 +1,8 @@
+trait Foo {
+ def bla = {
+ val tvs = "tvs"
+ Nil.foreach(x => x match {
+ case _ => println(tvs)
+ })
+ }
+} \ No newline at end of file
diff --git a/test/files/pos/virtpatmat_exist4.scala b/test/files/pos/virtpatmat_exist4.scala
new file mode 100644
index 0000000000..a04d0e3229
--- /dev/null
+++ b/test/files/pos/virtpatmat_exist4.scala
@@ -0,0 +1,35 @@
+trait Global {
+ trait Tree
+ trait Symbol { def foo: Boolean }
+}
+
+trait IMain { self: MemberHandlers =>
+ val global: Global
+ def handlers: List[MemberHandler]
+}
+
+trait MemberHandlers {
+ val intp: IMain
+ import intp.global._
+ sealed abstract class MemberHandler(val member: Tree) {
+ def importedSymbols: List[Symbol]
+ }
+}
+
+object Test {
+ var intp: IMain with MemberHandlers = null
+
+ val handlers = intp.handlers
+ handlers.filterNot(_.importedSymbols.isEmpty).zipWithIndex foreach {
+ case (handler, idx) =>
+ val (types, terms) = handler.importedSymbols partition (_.foo)
+ }
+}
+
+object Test2 {
+ type JClass = java.lang.Class[_]
+
+ def tvarString(bounds: List[AnyRef]) = {
+ bounds collect { case x: JClass => x }
+ }
+} \ No newline at end of file
diff --git a/test/files/pos/virtpatmat_instof_valuetype.flags b/test/files/pos/virtpatmat_instof_valuetype.flags
new file mode 100644
index 0000000000..9769db9257
--- /dev/null
+++ b/test/files/pos/virtpatmat_instof_valuetype.flags
@@ -0,0 +1 @@
+ -Yvirtpatmat -Xexperimental
diff --git a/test/files/pos/virtpatmat_instof_valuetype.scala b/test/files/pos/virtpatmat_instof_valuetype.scala
new file mode 100644
index 0000000000..1dda9bf57c
--- /dev/null
+++ b/test/files/pos/virtpatmat_instof_valuetype.scala
@@ -0,0 +1,8 @@
+case class Data(private val t: Option[String] = None, only: Boolean = false) {
+ def add(other: Data) = {
+ other match {
+ case Data(None, b) => ()
+ case Data(Some(_), b) => ()
+ }
+ }
+} \ No newline at end of file
diff --git a/test/files/presentation/shutdown-deadlock/ShutdownDeadlockTest.scala b/test/files/presentation/shutdown-deadlock/ShutdownDeadlockTest.scala
index 53af84541a..cef9d2a5ed 100644
--- a/test/files/presentation/shutdown-deadlock/ShutdownDeadlockTest.scala
+++ b/test/files/presentation/shutdown-deadlock/ShutdownDeadlockTest.scala
@@ -20,7 +20,7 @@ object Test extends InteractiveTest {
}
for ((j, i) <- jobs1.zipWithIndex) {
- j.get(5000) match {
+ j.get(40000) match {
case None =>
println(i + ": TIMEOUT")
exit(1) // no need to delay the test any longer
diff --git a/test/files/run/delay-bad.check b/test/files/run/delay-bad.check
new file mode 100644
index 0000000000..9d9c828a03
--- /dev/null
+++ b/test/files/run/delay-bad.check
@@ -0,0 +1,47 @@
+
+
+// new C { }
+-A -B -C
+
+// new C { 5 }
+-A -B -C
+ A+ B+ C+
+
+// new D()
+-A -B -C -D
+ A+ B+ C+ D+
+
+// new D() { }
+-A -B -C -D
+ A+ B+ C+ D+
+
+// new D() { val x = 5 }
+-A -B -C -D
+ A+ B+ C+ D+
+ A+ B+ C+ D+
+
+// new { val x = 5 } with D()
+-A -B -C -D
+ A+ B+ C+ D+
+
+// new E() { val x = 5 }
+-A -B -C -D
+ A+ B+ C+ D+ E+ -E
+ A+ B+ C+ D+ E+
+ A+ B+ C+ D+ E+
+
+// new { val x = 5 } with E()
+-A -B -C -D
+ A+ B+ C+ D+ E+ -E
+ A+ B+ C+ D+ E+
+
+// new { val x = 5 } with E() { }
+-A -B -C -D
+ A+ B+ C+ D+ E+ -E
+ A+ B+ C+ D+ E+
+
+// new { val x = 5 } with E() { 5 }
+-A -B -C -D
+ A+ B+ C+ D+ E+ -E
+ A+ B+ C+ D+ E+
+ A+ B+ C+ D+ E+
diff --git a/test/files/run/delay-bad.scala b/test/files/run/delay-bad.scala
new file mode 100644
index 0000000000..43acc1ea3d
--- /dev/null
+++ b/test/files/run/delay-bad.scala
@@ -0,0 +1,77 @@
+trait A extends DelayedInit
+{
+ print("-A")
+
+ def delayedInit(body: => Unit) = {
+ body
+ postConstructionCode
+ }
+ def postConstructionCode: Unit = {
+ print("\n A+")
+ }
+}
+trait B extends A {
+ print(" -B")
+ override def postConstructionCode: Unit = {
+ super.postConstructionCode
+ print(" B+")
+ }
+}
+
+trait C extends B {
+ print(" -C")
+ override def postConstructionCode: Unit = {
+ super.postConstructionCode
+ print(" C+")
+ }
+}
+
+class D() extends C {
+ print(" -D")
+ override def postConstructionCode: Unit = {
+ super.postConstructionCode
+ print(" D+")
+ }
+}
+class E() extends D() {
+ print(" -E")
+ override def postConstructionCode: Unit = {
+ super.postConstructionCode
+ print(" E+")
+ }
+}
+
+object Test {
+ def p(msg: String) = println("\n\n// " + msg)
+
+ def main(args: Array[String]) {
+ val f: A => Unit = _ => ()
+
+ p("new C { }")
+ f(new C { })
+ p("new C { 5 }")
+ f(new C { 5 })
+
+ p("new D()")
+ f(new D())
+ p("new D() { }")
+ f(new D() { })
+
+ p("new D() { val x = 5 }")
+ f(new D() { val x = 5 })
+ p("new { val x = 5 } with D()")
+ f(new { val x = 5 } with D())
+
+ p("new E() { val x = 5 }")
+ f(new E() { val x = 5 })
+ p("new { val x = 5 } with E()")
+ f(new { val x = 5 } with E())
+
+ p("new { val x = 5 } with E() { }")
+ f(new { val x = 5 } with E() { })
+ p("new { val x = 5 } with E() { 5 }")
+ f(new { val x = 5 } with E() { 5 })
+
+ println("")
+ }
+}
diff --git a/test/files/run/delay-good.check b/test/files/run/delay-good.check
new file mode 100644
index 0000000000..8eb04c7cff
--- /dev/null
+++ b/test/files/run/delay-good.check
@@ -0,0 +1,41 @@
+
+
+// new C { }
+-A -B -C
+ A+ B+ C+
+
+// new C { 5 }
+-A -B -C
+ A+ B+ C+
+
+// new D()
+-A -B -C -D
+ A+ B+ C+ D+
+
+// new D() { }
+-A -B -C -D
+ A+ B+ C+ D+
+
+// new D() { val x = 5 }
+-A -B -C -D
+ A+ B+ C+ D+
+
+// new { val x = 5 } with D()
+-A -B -C -D
+ A+ B+ C+ D+
+
+// new E() { val x = 5 }
+-A -B -C -D -E
+ A+ B+ C+ D+ E+
+
+// new { val x = 5 } with E()
+-A -B -C -D -E
+ A+ B+ C+ D+ E+
+
+// new { val x = 5 } with E() { }
+-A -B -C -D -E
+ A+ B+ C+ D+ E+
+
+// new { val x = 5 } with E() { 5 }
+-A -B -C -D -E
+ A+ B+ C+ D+ E+
diff --git a/test/files/run/delay-good.scala b/test/files/run/delay-good.scala
new file mode 100644
index 0000000000..2e4487b92c
--- /dev/null
+++ b/test/files/run/delay-good.scala
@@ -0,0 +1,77 @@
+trait A
+{
+ print("-A")
+
+ def delayedInit(body: => Unit) = {
+ body
+ postConstructionCode
+ }
+ def postConstructionCode: Unit = {
+ print("\n A+")
+ }
+}
+trait B extends A {
+ print(" -B")
+ override def postConstructionCode: Unit = {
+ super.postConstructionCode
+ print(" B+")
+ }
+}
+
+trait C extends B {
+ print(" -C")
+ override def postConstructionCode: Unit = {
+ super.postConstructionCode
+ print(" C+")
+ }
+}
+
+class D() extends C {
+ print(" -D")
+ override def postConstructionCode: Unit = {
+ super.postConstructionCode
+ print(" D+")
+ }
+}
+class E() extends D() {
+ print(" -E")
+ override def postConstructionCode: Unit = {
+ super.postConstructionCode
+ print(" E+")
+ }
+}
+
+object Test {
+ def p(msg: String) = println("\n\n// " + msg)
+
+ def main(args: Array[String]) {
+ val f: A => Unit = _.postConstructionCode
+
+ p("new C { }")
+ f(new C { })
+ p("new C { 5 }")
+ f(new C { 5 })
+
+ p("new D()")
+ f(new D())
+ p("new D() { }")
+ f(new D() { })
+
+ p("new D() { val x = 5 }")
+ f(new D() { val x = 5 })
+ p("new { val x = 5 } with D()")
+ f(new { val x = 5 } with D())
+
+ p("new E() { val x = 5 }")
+ f(new E() { val x = 5 })
+ p("new { val x = 5 } with E()")
+ f(new { val x = 5 } with E())
+
+ p("new { val x = 5 } with E() { }")
+ f(new { val x = 5 } with E() { })
+ p("new { val x = 5 } with E() { 5 }")
+ f(new { val x = 5 } with E() { 5 })
+
+ println("")
+ }
+}
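Both tests exercise `DelayedInit` (deprecated since Scala 2.11): for a class that inherits it, the compiler hands the class's constructor statements to `delayedInit` as a by-name block instead of running them directly, while trait bodies (the `-A -B -C` prints above) still run eagerly. A minimal sketch of that mechanism, not tied to either test:

    trait InitLogger extends DelayedInit {
      def delayedInit(body: => Unit): Unit = {
        println("before init")
        body                       // the captured constructor statements run here
        println("after init")
      }
    }

    class Worker extends InitLogger {
      println("constructing Worker")   // captured and passed to delayedInit
    }

    object DelayedInitDemo {
      def main(args: Array[String]): Unit = {
        new Worker
        // prints: before init / constructing Worker / after init
      }
    }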
diff --git a/test/files/run/existentials-in-compiler.check b/test/files/run/existentials-in-compiler.check
index c8040a4cb1..83e3cdf435 100644
--- a/test/files/run/existentials-in-compiler.check
+++ b/test/files/run/existentials-in-compiler.check
@@ -1,156 +1,156 @@
-abstract trait Bippy[A <: AnyRef,B] extends Object
+abstract trait Bippy[A <: AnyRef, B] extends Object
extest.Bippy[_ <: AnyRef, _]
-abstract trait BippyBud[A <: AnyRef,B,C <: List[A]] extends Object
+abstract trait BippyBud[A <: AnyRef, B, C <: List[A]] extends Object
extest.BippyBud[A,B,C] forSome { A <: AnyRef; B; C <: List[A] }
-abstract trait BippyLike[A <: AnyRef,B <: List[A],This <: extest.BippyLike[A,B,This] with extest.Bippy[A,B]] extends Object
+abstract trait BippyLike[A <: AnyRef, B <: List[A], This <: extest.BippyLike[A,B,This] with extest.Bippy[A,B]] extends Object
extest.BippyLike[A,B,This] forSome { A <: AnyRef; B <: List[A]; This <: extest.BippyLike[A,B,This] with extest.Bippy[A,B] }
-abstract trait Contra[-A >: AnyRef,-B] extends Object
+abstract trait Contra[-A >: AnyRef, -B] extends Object
extest.Contra[_ >: AnyRef, _]
-abstract trait ContraLike[-A >: AnyRef,-B >: List[A]] extends Object
+abstract trait ContraLike[-A >: AnyRef, -B >: List[A]] extends Object
extest.ContraLike[A,B] forSome { -A >: AnyRef; -B >: List[A] }
-abstract trait Cov01[+A <: AnyRef,+B] extends Object
+abstract trait Cov01[+A <: AnyRef, +B] extends Object
extest.Cov01[_ <: AnyRef, _]
-abstract trait Cov02[+A <: AnyRef,B] extends Object
+abstract trait Cov02[+A <: AnyRef, B] extends Object
extest.Cov02[_ <: AnyRef, _]
-abstract trait Cov03[+A <: AnyRef,-B] extends Object
+abstract trait Cov03[+A <: AnyRef, -B] extends Object
extest.Cov03[_ <: AnyRef, _]
-abstract trait Cov04[A <: AnyRef,+B] extends Object
+abstract trait Cov04[A <: AnyRef, +B] extends Object
extest.Cov04[_ <: AnyRef, _]
-abstract trait Cov05[A <: AnyRef,B] extends Object
+abstract trait Cov05[A <: AnyRef, B] extends Object
extest.Cov05[_ <: AnyRef, _]
-abstract trait Cov06[A <: AnyRef,-B] extends Object
+abstract trait Cov06[A <: AnyRef, -B] extends Object
extest.Cov06[_ <: AnyRef, _]
-abstract trait Cov07[-A <: AnyRef,+B] extends Object
+abstract trait Cov07[-A <: AnyRef, +B] extends Object
extest.Cov07[_ <: AnyRef, _]
-abstract trait Cov08[-A <: AnyRef,B] extends Object
+abstract trait Cov08[-A <: AnyRef, B] extends Object
extest.Cov08[_ <: AnyRef, _]
-abstract trait Cov09[-A <: AnyRef,-B] extends Object
+abstract trait Cov09[-A <: AnyRef, -B] extends Object
extest.Cov09[_ <: AnyRef, _]
-abstract trait Cov11[+A <: AnyRef,+B <: List[_]] extends Object
+abstract trait Cov11[+A <: AnyRef, +B <: List[_]] extends Object
extest.Cov11[_ <: AnyRef, _ <: List[_]]
-abstract trait Cov12[+A <: AnyRef,B <: List[_]] extends Object
+abstract trait Cov12[+A <: AnyRef, B <: List[_]] extends Object
extest.Cov12[_ <: AnyRef, _ <: List[_]]
-abstract trait Cov13[+A <: AnyRef,-B <: List[_]] extends Object
+abstract trait Cov13[+A <: AnyRef, -B <: List[_]] extends Object
extest.Cov13[_ <: AnyRef, _ <: List[_]]
-abstract trait Cov14[A <: AnyRef,+B <: List[_]] extends Object
+abstract trait Cov14[A <: AnyRef, +B <: List[_]] extends Object
extest.Cov14[_ <: AnyRef, _ <: List[_]]
-abstract trait Cov15[A <: AnyRef,B <: List[_]] extends Object
+abstract trait Cov15[A <: AnyRef, B <: List[_]] extends Object
extest.Cov15[_ <: AnyRef, _ <: List[_]]
-abstract trait Cov16[A <: AnyRef,-B <: List[_]] extends Object
+abstract trait Cov16[A <: AnyRef, -B <: List[_]] extends Object
extest.Cov16[_ <: AnyRef, _ <: List[_]]
-abstract trait Cov17[-A <: AnyRef,+B <: List[_]] extends Object
+abstract trait Cov17[-A <: AnyRef, +B <: List[_]] extends Object
extest.Cov17[_ <: AnyRef, _ <: List[_]]
-abstract trait Cov18[-A <: AnyRef,B <: List[_]] extends Object
+abstract trait Cov18[-A <: AnyRef, B <: List[_]] extends Object
extest.Cov18[_ <: AnyRef, _ <: List[_]]
-abstract trait Cov19[-A <: AnyRef,-B <: List[_]] extends Object
+abstract trait Cov19[-A <: AnyRef, -B <: List[_]] extends Object
extest.Cov19[_ <: AnyRef, _ <: List[_]]
-abstract trait Cov21[+A,+B] extends Object
+abstract trait Cov21[+A, +B] extends Object
extest.Cov21[_, _]
-abstract trait Cov22[+A,B] extends Object
+abstract trait Cov22[+A, B] extends Object
extest.Cov22[_, _]
-abstract trait Cov23[+A,-B] extends Object
+abstract trait Cov23[+A, -B] extends Object
extest.Cov23[_, _]
-abstract trait Cov24[A,+B] extends Object
+abstract trait Cov24[A, +B] extends Object
extest.Cov24[_, _]
-abstract trait Cov25[A,B] extends Object
+abstract trait Cov25[A, B] extends Object
extest.Cov25[_, _]
-abstract trait Cov26[A,-B] extends Object
+abstract trait Cov26[A, -B] extends Object
extest.Cov26[_, _]
-abstract trait Cov27[-A,+B] extends Object
+abstract trait Cov27[-A, +B] extends Object
extest.Cov27[_, _]
-abstract trait Cov28[-A,B] extends Object
+abstract trait Cov28[-A, B] extends Object
extest.Cov28[_, _]
-abstract trait Cov29[-A,-B] extends Object
+abstract trait Cov29[-A, -B] extends Object
extest.Cov29[_, _]
-abstract trait Cov31[+A,+B,C <: (A, B)] extends Object
+abstract trait Cov31[+A, +B, C <: (A, B)] extends Object
extest.Cov31[A,B,C] forSome { +A; +B; C <: (A, B) }
-abstract trait Cov32[+A,B,C <: (A, B)] extends Object
+abstract trait Cov32[+A, B, C <: (A, B)] extends Object
extest.Cov32[A,B,C] forSome { +A; B; C <: (A, B) }
-abstract trait Cov33[+A,-B,C <: (A, _$10) forSome { type _$10 }] extends Object
+abstract trait Cov33[+A, -B, C <: (A, _$10) forSome { type _$10 }] extends Object
extest.Cov33[A,B,C] forSome { +A; -B; C <: (A, _$10) forSome { type _$10 } }
-abstract trait Cov34[A,+B,C <: (A, B)] extends Object
+abstract trait Cov34[A, +B, C <: (A, B)] extends Object
extest.Cov34[A,B,C] forSome { A; +B; C <: (A, B) }
-abstract trait Cov35[A,B,C <: (A, B)] extends Object
+abstract trait Cov35[A, B, C <: (A, B)] extends Object
extest.Cov35[A,B,C] forSome { A; B; C <: (A, B) }
-abstract trait Cov36[A,-B,C <: (A, _$11) forSome { type _$11 }] extends Object
+abstract trait Cov36[A, -B, C <: (A, _$11) forSome { type _$11 }] extends Object
extest.Cov36[A,B,C] forSome { A; -B; C <: (A, _$11) forSome { type _$11 } }
-abstract trait Cov37[-A,+B,C <: (_$12, B) forSome { type _$12 }] extends Object
+abstract trait Cov37[-A, +B, C <: (_$12, B) forSome { type _$12 }] extends Object
extest.Cov37[A,B,C] forSome { -A; +B; C <: (_$12, B) forSome { type _$12 } }
-abstract trait Cov38[-A,B,C <: (_$13, B) forSome { type _$13 }] extends Object
+abstract trait Cov38[-A, B, C <: (_$13, B) forSome { type _$13 }] extends Object
extest.Cov38[A,B,C] forSome { -A; B; C <: (_$13, B) forSome { type _$13 } }
-abstract trait Cov39[-A,-B,C <: Tuple2[_, _]] extends Object
+abstract trait Cov39[-A, -B, C <: Tuple2[_, _]] extends Object
extest.Cov39[_, _, _ <: Tuple2[_, _]]
-abstract trait Cov41[+A >: Null,+B] extends Object
+abstract trait Cov41[+A >: Null, +B] extends Object
extest.Cov41[_ >: Null, _]
-abstract trait Cov42[+A >: Null,B] extends Object
+abstract trait Cov42[+A >: Null, B] extends Object
extest.Cov42[_ >: Null, _]
-abstract trait Cov43[+A >: Null,-B] extends Object
+abstract trait Cov43[+A >: Null, -B] extends Object
extest.Cov43[_ >: Null, _]
-abstract trait Cov44[A >: Null,+B] extends Object
+abstract trait Cov44[A >: Null, +B] extends Object
extest.Cov44[_ >: Null, _]
-abstract trait Cov45[A >: Null,B] extends Object
+abstract trait Cov45[A >: Null, B] extends Object
extest.Cov45[_ >: Null, _]
-abstract trait Cov46[A >: Null,-B] extends Object
+abstract trait Cov46[A >: Null, -B] extends Object
extest.Cov46[_ >: Null, _]
-abstract trait Cov47[-A >: Null,+B] extends Object
+abstract trait Cov47[-A >: Null, +B] extends Object
extest.Cov47[_ >: Null, _]
-abstract trait Cov48[-A >: Null,B] extends Object
+abstract trait Cov48[-A >: Null, B] extends Object
extest.Cov48[_ >: Null, _]
-abstract trait Cov49[-A >: Null,-B] extends Object
+abstract trait Cov49[-A >: Null, -B] extends Object
extest.Cov49[_ >: Null, _]
-abstract trait Covariant[+A <: AnyRef,+B] extends Object
+abstract trait Covariant[+A <: AnyRef, +B] extends Object
extest.Covariant[_ <: AnyRef, _]
-abstract trait CovariantLike[+A <: AnyRef,+B <: List[A],+This <: extest.CovariantLike[A,B,This] with extest.Covariant[A,B]] extends Object
+abstract trait CovariantLike[+A <: AnyRef, +B <: List[A], +This <: extest.CovariantLike[A,B,This] with extest.Covariant[A,B]] extends Object
extest.CovariantLike[A,B,This] forSome { +A <: AnyRef; +B <: List[A]; +This <: extest.CovariantLike[A,B,This] with extest.Covariant[A,B] }
diff --git a/test/files/run/reify_ann1a.check b/test/files/run/reify_ann1a.check
index 97d4848a49..66dce778a8 100644
--- a/test/files/run/reify_ann1a.check
+++ b/test/files/run/reify_ann1a.check
@@ -1,5 +1,5 @@
{
- @new ann(immutable.this.List.apply[String]("1a")) @new ann(immutable.this.List.apply[String]("1b")) class C[@new ann(immutable.this.List.apply[String]("2a")) @new ann(immutable.this.List.apply[String]("2b")) T>: Nothing <: Any] extends scala.AnyRef {
+ @new ann(immutable.this.List.apply[String]("1a")) @new ann(immutable.this.List.apply[String]("1b")) class C[@new ann(immutable.this.List.apply[String]("2a")) @new ann(immutable.this.List.apply[String]("2b")) T] extends scala.AnyRef {
@new ann(immutable.this.List.apply[String]("3a")) @new ann(immutable.this.List.apply[String]("3b")) <paramaccessor> private[this] val x: T @ann(immutable.this.List.apply[String]("4a")) @ann(immutable.this.List.apply[String]("4b")) = _;
def <init>(@new ann(immutable.this.List.apply[String]("3a")) @new ann(immutable.this.List.apply[String]("3b")) x: T @ann(immutable.this.List.apply[String]("4a")) @ann(immutable.this.List.apply[String]("4b"))) = {
super.<init>();
@@ -14,7 +14,7 @@
()
}
{
- @ann(immutable.this.List.apply[String]("1a")) @ann(immutable.this.List.apply[String]("1b")) class C[@ann(immutable.this.List.apply[String]("2a")) @ann(immutable.this.List.apply[String]("2b")) T>: Nothing <: Any] extends scala.AnyRef {
+ @ann(immutable.this.List.apply[String]("1a")) @ann(immutable.this.List.apply[String]("1b")) class C[@ann(immutable.this.List.apply[String]("2a")) @ann(immutable.this.List.apply[String]("2b")) T] extends scala.AnyRef {
@ann(immutable.this.List.apply[String]("3a")) @ann(immutable.this.List.apply[String]("3b")) <paramaccessor> private[this] val x: T @ann(immutable.this.List.apply[String]("4b")) @ann(immutable.this.List.apply[String]("4a")) = _;
def <init>(@ann(immutable.this.List.apply[String]("3a")) @ann(immutable.this.List.apply[String]("3b")) x: T @ann(immutable.this.List.apply[String]("4b")) @ann(immutable.this.List.apply[String]("4a"))): C[T] = {
C.super.<init>();
diff --git a/test/files/run/reify_ann1b.check b/test/files/run/reify_ann1b.check
index ceebc0e2ed..9bc65a422e 100644
--- a/test/files/run/reify_ann1b.check
+++ b/test/files/run/reify_ann1b.check
@@ -1,5 +1,5 @@
{
- @new ann(bar = "1a") @new ann(bar = "1b") class C[@new ann(bar = "2a") @new ann(bar = "2b") T>: Nothing <: Any] extends scala.AnyRef {
+ @new ann(bar = "1a") @new ann(bar = "1b") class C[@new ann(bar = "2a") @new ann(bar = "2b") T] extends scala.AnyRef {
@new ann(bar = "3a") @new ann(bar = "3b") <paramaccessor> private[this] val x: T @ann(bar = "4a") @ann(bar = "4b") = _;
def <init>(@new ann(bar = "3a") @new ann(bar = "3b") x: T @ann(bar = "4a") @ann(bar = "4b")) = {
super.<init>();
@@ -14,7 +14,7 @@
()
}
{
- @ann(bar = "1a") @ann(bar = "1b") class C[@ann(bar = "2a") @ann(bar = "2b") T>: Nothing <: Any] extends scala.AnyRef {
+ @ann(bar = "1a") @ann(bar = "1b") class C[@ann(bar = "2a") @ann(bar = "2b") T] extends scala.AnyRef {
@ann(bar = "3a") @ann(bar = "3b") <paramaccessor> private[this] val x: T @ann(bar = "4b") @ann(bar = "4a") = _;
def <init>(@ann(bar = "3a") @ann(bar = "3b") x: T @ann(bar = "4b") @ann(bar = "4a")): C[T] = {
C.super.<init>();
diff --git a/test/files/run/si4750.check b/test/files/run/si4750.check
new file mode 100644
index 0000000000..bf55f70df3
--- /dev/null
+++ b/test/files/run/si4750.check
@@ -0,0 +1 @@
+US$ 5.80
diff --git a/test/files/run/si4750.scala b/test/files/run/si4750.scala
new file mode 100644
index 0000000000..96d2c4fec7
--- /dev/null
+++ b/test/files/run/si4750.scala
@@ -0,0 +1,7 @@
+import scala.util.matching.Regex
+
+object Test extends App {
+ val input = "CURRENCY 5.80"
+ println("CURRENCY".r.replaceAllIn(input, Regex quoteReplacement "US$"))
+}
+
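`Regex.quoteReplacement` is needed here because `$` is special in `java.util.regex` replacement strings (it introduces a group reference), so an unescaped `US$` would fail inside `replaceAllIn`. A short sketch of the difference (not part of the changeset):

    import scala.util.matching.Regex

    object QuoteReplacementDemo extends App {
      val input = "CURRENCY 5.80"
      // "CURRENCY".r.replaceAllIn(input, "US$") would throw: the trailing "$"
      // is parsed as the start of a group reference by the Java regex engine.
      println("CURRENCY".r.replaceAllIn(input, Regex.quoteReplacement("US$")))  // US$ 5.80
    }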
diff --git a/test/files/run/t4574.check b/test/files/run/t4574.check
new file mode 100644
index 0000000000..a4522fff24
--- /dev/null
+++ b/test/files/run/t4574.check
@@ -0,0 +1,2 @@
+I hereby refute null!
+I denounce null as unListLike!
diff --git a/test/files/run/t4574.scala b/test/files/run/t4574.scala
new file mode 100644
index 0000000000..1dde496aca
--- /dev/null
+++ b/test/files/run/t4574.scala
@@ -0,0 +1,13 @@
+object Test {
+ val xs: List[(Int, Int)] = List((2, 2), null)
+
+ def expectMatchError[T](msg: String)(body: => T) {
+ try { body ; assert(false, "Should not succeed.") }
+ catch { case _: MatchError => println(msg) }
+ }
+
+ def main(args: Array[String]): Unit = {
+ expectMatchError("I hereby refute null!")( for ((x, y) <- xs) yield x )
+ expectMatchError("I denounce null as unListLike!")( (null: Any) match { case List(_*) => true } )
+ }
+}
diff --git a/test/files/run/valueclasses-constr.check b/test/files/run/valueclasses-constr.check
index df37fbc723..785e6fa25b 100644
--- a/test/files/run/valueclasses-constr.check
+++ b/test/files/run/valueclasses-constr.check
@@ -1,2 +1,10 @@
-0
+16
+00:16:40
+16
+00:16:40
+16
+00:16:40
+16
+00:16:40
+16
00:16:40
diff --git a/test/files/run/valueclasses-constr.scala b/test/files/run/valueclasses-constr.scala
index 7a10299386..652d8d8d22 100644
--- a/test/files/run/valueclasses-constr.scala
+++ b/test/files/run/valueclasses-constr.scala
@@ -1,25 +1,79 @@
-object TOD {
- final val SecondsPerDay = 86400
+package test1 {
+ object TOD {
+ final val SecondsPerDay = 86400
- def apply(seconds: Int) = {
- val n = seconds % SecondsPerDay
- new TOD(if (n >= 0) n else n + SecondsPerDay)
- }
+ def apply(seconds: Int) = {
+ val n = seconds % SecondsPerDay
+ new TOD(if (n >= 0) n else n + SecondsPerDay)
+ }
+ }
+
+ final class TOD (val secondsOfDay: Int) extends AnyVal {
+ def hours = secondsOfDay / 3600
+ def minutes = (secondsOfDay / 60) % 60
+ def seconds = secondsOfDay % 60
+
+ override def toString = "%02d:%02d:%02d".format(hours, minutes, seconds)
+ }
}
+package test2 {
+ object TOD {
+ final val SecondsPerDay = 86400
+
+ def apply(seconds: Int) = {
+ val n = seconds % SecondsPerDay
+ new TOD(if (n >= 0) n else n + SecondsPerDay)
+ }
+ }
+
+ final class TOD private[test2] (val secondsOfDay: Int) extends AnyVal {
+ def hours = secondsOfDay / 3600
+ def minutes = (secondsOfDay / 60) % 60
+ def seconds = secondsOfDay % 60
+
+ override def toString = "%02d:%02d:%02d".format(hours, minutes, seconds)
+ }
+
+ object Client {
+ def newTOD(x: Int) = new TOD(x)
+ }
+}
+
+package test3 {
+ object TOD {
+ final val SecondsPerDay = 86400
+
+ def apply(seconds: Int) = {
+ val n = seconds % SecondsPerDay
+ new TOD(if (n >= 0) n else n + SecondsPerDay)
+ }
+ }
-final class TOD (val secondsOfDay: Int) extends AnyVal {
- def hours = secondsOfDay / 3600
- def minutes = (secondsOfDay / 60) % 60
- def seconds = secondsOfDay % 60
+ final class TOD private (val secondsOfDay: Int) extends AnyVal {
+ def hours = secondsOfDay / 3600
+ def minutes = (secondsOfDay / 60) % 60
+ def seconds = secondsOfDay % 60
- override def toString = "%02d:%02d:%02d".format(hours, minutes, seconds)
+ override def toString = "%02d:%02d:%02d".format(hours, minutes, seconds)
+ }
}
object Test extends App {
- val y: TOD = new TOD(1000)
- val x: TOD = TOD(1000)
- println(x.hours)
- println(x)
+ val y1: test1.TOD = new test1.TOD(1000)
+ val y2: test2.TOD = test2.Client.newTOD(1000)
+ val x1: test1.TOD = test1.TOD(1000)
+ val x2: test2.TOD = test2.TOD(1000)
+ val x3: test3.TOD = test3.TOD(1000)
+ println(y1.minutes)
+ println(y1)
+ println(y2.minutes)
+ println(y2)
+ println(x1.minutes)
+ println(x1)
+ println(x2.minutes)
+ println(x2)
+ println(x3.minutes)
+ println(x3)
}
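The pattern these packages exercise: value classes (`extends AnyVal`) constructed through a companion factory, with the constructor left public or package-qualified now that the neg test above rejects a fully `private` one. A generic sketch of the shape (hypothetical `Meters` class, not from the changeset):

    object Meters {
      // normalize on construction, as TOD.apply does with the seconds value
      def apply(raw: Double): Meters = new Meters(math.max(0.0, raw))
    }

    final class Meters(val value: Double) extends AnyVal {
      def +(other: Meters): Meters = new Meters(value + other.value)
      override def toString = s"$value m"
    }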
diff --git a/test/files/run/virtpatmat_extends_product.check b/test/files/run/virtpatmat_extends_product.check
new file mode 100644
index 0000000000..c07e8385a7
--- /dev/null
+++ b/test/files/run/virtpatmat_extends_product.check
@@ -0,0 +1 @@
+AnnotationInfo(a,1)
diff --git a/test/files/run/virtpatmat_extends_product.flags b/test/files/run/virtpatmat_extends_product.flags
new file mode 100644
index 0000000000..ac6b805bd0
--- /dev/null
+++ b/test/files/run/virtpatmat_extends_product.flags
@@ -0,0 +1 @@
+-Yvirtpatmat
diff --git a/test/files/run/virtpatmat_extends_product.scala b/test/files/run/virtpatmat_extends_product.scala
new file mode 100644
index 0000000000..e564f4430b
--- /dev/null
+++ b/test/files/run/virtpatmat_extends_product.scala
@@ -0,0 +1,11 @@
+object Test extends App {
+ case class AnnotationInfo(a: String, b: Int) extends Product2[String, Int]
+
+ // if we're not careful in unapplyTypeListFromReturnType, the generated unapply is
+ // thought to return two components instead of one, since AnnotationInfo (the result of the unapply) is a Product2
+ case class NestedAnnotArg(ai: AnnotationInfo)
+
+ NestedAnnotArg(AnnotationInfo("a", 1)) match {
+ case NestedAnnotArg(x) => println(x)
+ }
+} \ No newline at end of file
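The point of the comment above, spelled out: even though `AnnotationInfo` is itself a `Product2`, the synthesized `unapply` of `NestedAnnotArg` yields a single component, so the match binds the whole `AnnotationInfo` rather than its two fields. A sketch of both arities side by side, mirroring the declarations from the test (illustration only):

    case class AnnotationInfo2(a: String, b: Int) extends Product2[String, Int]
    case class NestedAnnotArg2(ai: AnnotationInfo2)

    object UnapplyArity {
      val arg = NestedAnnotArg2(AnnotationInfo2("a", 1))

      val whole: AnnotationInfo2 = arg match {
        case NestedAnnotArg2(x) => x          // one component: the AnnotationInfo2 itself
      }
      val fields: (String, Int) = arg.ai match {
        case AnnotationInfo2(s, n) => (s, n)  // two components: the case-class fields
      }
    }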
diff --git a/test/files/run/virtpatmat_partial.check b/test/files/run/virtpatmat_partial.check
index 1555eca82b..137d16da79 100644
--- a/test/files/run/virtpatmat_partial.check
+++ b/test/files/run/virtpatmat_partial.check
@@ -1,4 +1,17 @@
Map(a -> Some(1), b -> None)
-79
-undefined
Map(a -> 1)
+a
+undefined
+a
+undefined
+a
+undefined
+a
+undefined
+hai!
+hai!
+2
+hai!
+undefined
+1
+undefined
diff --git a/test/files/run/virtpatmat_partial.scala b/test/files/run/virtpatmat_partial.scala
index 6597f2f5ae..a235314610 100644
--- a/test/files/run/virtpatmat_partial.scala
+++ b/test/files/run/virtpatmat_partial.scala
@@ -2,95 +2,180 @@ object Test extends App {
val a = Map("a" -> Some(1), "b" -> None)
println(a)
+// inferred type should be Map[String, Int]
val res = a collect {case (p, Some(a)) => (p, a)}
- final val GT = 79
- final val GTGT = 93
- final val GTGTGT = 94
- final val GTEQ = 81
- final val GTGTEQ = 113
- final val GTGTGTEQ = 114
- final val ASSIGN = 75
-
- def acceptClosingAngle(in: Int) {
- val closers: PartialFunction[Int, Int] = {
- case GTGTGTEQ => GTGTEQ
- case GTGTGT => GTGT
- case GTGTEQ => GTEQ
- case GTGT => GT
- case GTEQ => ASSIGN
+// variations: const target -> switch, non-const -> normal match, char target --> scrut needs toInt,
+// eta-expanded --> work is done by typedFunction, non-eta-expanded --> typedMatch
+
+ object nonConstCharEta {
+ final val GT : Char = 'a'
+ final val GTGT : Char = 'b'
+ final val GTGTGT : Char = 'c'
+ final val GTEQ : Char = 'd'
+ final val GTGTEQ : Char = 'e'
+ final val GTGTGTEQ: Char = 'f'
+ final val ASSIGN : Char = 'g'
+
+ def acceptClosingAngle(in: Char) {
+ val closers: PartialFunction[Char, Char] = {
+ case GTGTGTEQ => GTGTEQ
+ case GTGTGT => GTGT
+ case GTGTEQ => GTEQ
+ case GTGT => GT
+ case GTEQ => ASSIGN
+ }
+ if (closers isDefinedAt in) println(closers(in))
+ else println("undefined")
+ }
+
+ def test() = {
+ acceptClosingAngle(GTGT)
+ acceptClosingAngle(ASSIGN)
+ }
+ }
+
+ object nonConstChar {
+ final val GT : Char = 'a'
+ final val GTGT : Char = 'b'
+ final val GTGTGT : Char = 'c'
+ final val GTEQ : Char = 'd'
+ final val GTGTEQ : Char = 'e'
+ final val GTGTGTEQ: Char = 'f'
+ final val ASSIGN : Char = 'g'
+
+ def acceptClosingAngle(in: Char) {
+ val closers: PartialFunction[Char, Char] = x => x match {
+ case GTGTGTEQ => GTGTEQ
+ case GTGTGT => GTGT
+ case GTGTEQ => GTEQ
+ case GTGT => GT
+ case GTEQ => ASSIGN
+ }
+ if (closers isDefinedAt in) println(closers(in))
+ else println("undefined")
+ }
+
+ def test() = {
+ acceptClosingAngle(GTGT)
+ acceptClosingAngle(ASSIGN)
}
- if (closers isDefinedAt in) println(closers(in))
- else println("undefined")
}
- acceptClosingAngle(GTGT)
- acceptClosingAngle(ASSIGN)
-
- // should uncurry to:
- // val res: Map[String,Int] = a.collect[(String, Int), Map[String,Int]](
- // new PartialFunction[(String, Option[Int]),(String, Int)] {
- // def apply(x0_1: (String, Option[Int])): (String, Int) = MatchingStrategy.OptionMatchingStrategy.runOrElse[(String, Option[Int]), (String, Int)](x0_1)(
- // (x1: (String, Option[Int])) => {
- // val o9: Option[(String, Int)] = ({
- // val o8: Option[(String, Option[Int])] = Tuple2.unapply[String, Option[Int]](x1);
- // if (o8.isEmpty)
- // MatchingStrategy.OptionMatchingStrategy.zero
- // else
- // {
- // val o7: Option[Some[Int]] = if (o8.get._2.isInstanceOf[Some[Int]])
- // MatchingStrategy.OptionMatchingStrategy.one[Some[Int]](o8.get._2.asInstanceOf[Some[Int]])
- // else
- // MatchingStrategy.OptionMatchingStrategy.zero;
- // if (o7.isEmpty)
- // MatchingStrategy.OptionMatchingStrategy.zero
- // else
- // {
- // val o6: Option[Int] = Some.unapply[Int](o7.get);
- // if (o6.isEmpty)
- // MatchingStrategy.OptionMatchingStrategy.zero
- // else
- // MatchingStrategy.OptionMatchingStrategy.one[(String, Int)]((o8.get._1, o6.get).asInstanceOf[(String, Int)])
- // }
- // }
- // }: Option[(String, Int)]);
- // if (o9.isEmpty)
- // (MatchingStrategy.OptionMatchingStrategy.zero: Option[(String, Int)])
- // else
- // o9
- // })
- //
- // def isDefinedAt(x_1: (String, Option[Int])): Boolean = MatchingStrategy.OptionMatchingStrategy.isSuccess[(String, Option[Int]), (String, Int)](x_1)(
- // (x1: (String, Option[Int])) => {
- // val o9: Option[(String, Int)] = ({
- // val o8: Option[(String, Option[Int])] = scala.Tuple2.unapply[String, Option[Int]](x1);
- // if (o8.isEmpty)
- // MatchingStrategy.OptionMatchingStrategy.zero
- // else
- // {
- // val o7: Option[Some[Int]] = if (o8.get._2.isInstanceOf[Some[Int]])
- // MatchingStrategy.OptionMatchingStrategy.one[Some[Int]](o8.get._2.asInstanceOf[Some[Int]]) // XXX
- // else
- // MatchingStrategy.OptionMatchingStrategy.zero;
- // if (o7.isEmpty)
- // MatchingStrategy.OptionMatchingStrategy.zero
- // else
- // {
- // val o6: Option[Int] = scala.Some.unapply[Int](o7.get);
- // if (o6.isEmpty)
- // MatchingStrategy.OptionMatchingStrategy.zero
- // else
- // MatchingStrategy.OptionMatchingStrategy.one[(String, Int)](null.asInstanceOf[(String, Int)])
- // }
- // }
- // }: Option[(String, Int)]);
- // if (o9.isEmpty)
- // (MatchingStrategy.OptionMatchingStrategy.zero: Option[(String, Int)])
- // else
- // o9
- // })
- // }
- // )
-
- println(res)
+ object constCharEta {
+ final val GT = 'a'
+ final val GTGT = 'b'
+ final val GTGTGT = 'c'
+ final val GTEQ = 'd'
+ final val GTGTEQ = 'e'
+ final val GTGTGTEQ= 'f'
+ final val ASSIGN = 'g'
+
+ def acceptClosingAngle(in: Char) {
+ val closers: PartialFunction[Char, Char] = x => x match {
+ case GTGTGTEQ => GTGTEQ
+ case GTGTGT => GTGT
+ case GTGTEQ => GTEQ
+ case GTGT => GT
+ case GTEQ => ASSIGN
+ }
+ if (closers isDefinedAt in) println(closers(in))
+ else println("undefined")
+ }
+
+ def test() = {
+ acceptClosingAngle(GTGT)
+ acceptClosingAngle(ASSIGN)
+ }
+ }
+
+ object constChar {
+ final val GT = 'a'
+ final val GTGT = 'b'
+ final val GTGTGT = 'c'
+ final val GTEQ = 'd'
+ final val GTGTEQ = 'e'
+ final val GTGTGTEQ= 'f'
+ final val ASSIGN = 'g'
+
+ def acceptClosingAngle(in: Char) {
+ val closers: PartialFunction[Char, Char] = {
+ case GTGTGTEQ => GTGTEQ
+ case GTGTGT => GTGT
+ case GTGTEQ => GTEQ
+ case GTGT => GT
+ case GTEQ => ASSIGN
+ }
+ if (closers isDefinedAt in) println(closers(in))
+ else println("undefined")
+ }
+
+ def test() = {
+ acceptClosingAngle(GTGT)
+ acceptClosingAngle(ASSIGN)
+ }
+ }
+
+ object constIntEta {
+ final val GT = 1
+ final val GTGT = 2
+ final val GTGTGT = 3
+ final val GTEQ = 4
+ final val GTGTEQ = 5
+ final val GTGTGTEQ = 6
+ final val ASSIGN = 7
+
+ def acceptClosingAngle(in: Int) {
+ val closers: PartialFunction[Int, Int] = x => {println("hai!"); (x + 1)} match {
+ case GTGTGTEQ => GTGTEQ
+ case GTGTGT => GTGT
+ case GTGTEQ => GTEQ
+ case GTGT => GT
+ case GTEQ => ASSIGN
+ }
+ if (closers isDefinedAt in) println(closers(in))
+ else println("undefined")
+ }
+
+ def test() = {
+ acceptClosingAngle(GTGT)
+ acceptClosingAngle(ASSIGN)
+ }
+ }
+
+ object constInt {
+ final val GT = 1
+ final val GTGT = 2
+ final val GTGTGT = 3
+ final val GTEQ = 4
+ final val GTGTEQ = 5
+ final val GTGTGTEQ = 6
+ final val ASSIGN = 7
+
+ def acceptClosingAngle(in: Int) {
+ val closers: PartialFunction[Int, Int] = {
+ case GTGTGTEQ => GTGTEQ
+ case GTGTGT => GTGT
+ case GTGTEQ => GTEQ
+ case GTGT => GT
+ case GTEQ => ASSIGN
+ }
+ if (closers isDefinedAt in) println(closers(in))
+ else println("undefined")
+ }
+
+ def test() = {
+ acceptClosingAngle(GTGT)
+ acceptClosingAngle(ASSIGN)
+ }
+ }
+
+ println(res) // prints "Map(a -> 1)"
+
+ nonConstCharEta.test()
+ nonConstChar.test()
+ constCharEta.test()
+ constChar.test()
+ constIntEta.test()
+ constInt.test()
}
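Whichever translation the compiler picks for the variations above (a switch for constant patterns, a chain of tests otherwise), the resulting `PartialFunction` exposes the same interface, and `collect`, `isDefinedAt`, and `lift` all ride on it. A small sketch of that interface, independent of the test (not part of the changeset):

    object PartialFunctionDemo {
      val closers: PartialFunction[Int, Int] = {
        case 6 => 5
        case 5 => 4
        case 4 => 2
      }

      def describe(in: Int): String =
        closers.lift(in) match {       // lift bundles isDefinedAt + apply into an Option
          case Some(out) => out.toString
          case None      => "undefined"
        }

      val collected: List[Int] = List(3, 4, 5, 6) collect closers   // List(2, 4, 5)
    }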
diff --git a/test/files/run/virtpatmat_tailcalls_verifyerror.check b/test/files/run/virtpatmat_tailcalls_verifyerror.check
new file mode 100644
index 0000000000..c508d5366f
--- /dev/null
+++ b/test/files/run/virtpatmat_tailcalls_verifyerror.check
@@ -0,0 +1 @@
+false
diff --git a/test/files/run/virtpatmat_tailcalls_verifyerror.flags b/test/files/run/virtpatmat_tailcalls_verifyerror.flags
new file mode 100644
index 0000000000..9769db9257
--- /dev/null
+++ b/test/files/run/virtpatmat_tailcalls_verifyerror.flags
@@ -0,0 +1 @@
+ -Yvirtpatmat -Xexperimental
diff --git a/test/files/run/virtpatmat_tailcalls_verifyerror.scala b/test/files/run/virtpatmat_tailcalls_verifyerror.scala
new file mode 100644
index 0000000000..5ce91e8dce
--- /dev/null
+++ b/test/files/run/virtpatmat_tailcalls_verifyerror.scala
@@ -0,0 +1,14 @@
+// shouldn't result in a verify error when run...
+object Test extends App {
+ @annotation.tailrec
+ final def test(meh: Boolean): Boolean = {
+ Some("a") match {
+ case x =>
+ x match {
+ case Some(_) => if(meh) test(false) else false
+ case _ => test(false)
+ }
+ }
+ }
+ println(test(true))
+} \ No newline at end of file
diff --git a/test/files/scalacheck/t2460.scala b/test/files/scalacheck/t2460.scala
new file mode 100644
index 0000000000..196b43789f
--- /dev/null
+++ b/test/files/scalacheck/t2460.scala
@@ -0,0 +1,32 @@
+import org.scalacheck.Prop.forAll
+import org.scalacheck.Properties
+import org.scalacheck.ConsoleReporter.testStatsEx
+import org.scalacheck.{Test => SCTest}
+import org.scalacheck.Gen
+
+object Test extends Properties("Regex : Ticket 2460") {
+
+ val vowel = Gen.oneOf("a", "z")
+
+ val numberOfMatch = forAll(vowel) {
+ (s: String) => "\\s*([a-z])\\s*".r("data").findAllMatchIn((1 to 20).map(_ => s).mkString).size == 20
+ }
+
+ val numberOfGroup = forAll(vowel) {
+ (s: String) => "\\s*([a-z])\\s*([a-z])\\s*".r("data").findAllMatchIn((1 to 20).map(_ => s).mkString).next.groupCount == 2
+ }
+
+ val nameOfGroup = forAll(vowel) {
+ (s: String) => "([a-z])".r("data").findAllMatchIn(s).next.group("data") == s
+ }
+
+ val tests = List(
+ ("numberOfMatch", numberOfMatch),
+ ("numberOfGroup", numberOfGroup),
+ ("nameOfGroup", nameOfGroup)
+ )
+
+ /*tests foreach {
+ case (name, p) => testStatsEx(name, SCTest.check(p))
+ }*/
+}
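The properties above exercise the named-group API: `StringOps.r(groupNames: String*)` attaches names to capture groups and `Match.group(name)` retrieves them. A short usage sketch with a hypothetical `date` pattern (not part of the changeset):

    object NamedGroupsDemo extends App {
      val date = """(\d{4})-(\d{2})-(\d{2})""".r("year", "month", "day")

      "2012-03-15" match {
        case date(y, m, d) => println(y + " / " + m + " / " + d)
      }

      val m = date.findFirstMatchIn("released on 2012-03-15").get
      println(m.group("year"))   // 2012
    }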
diff --git a/test/pending/run/implicit-classes.scala b/test/pending/run/implicit-classes.scala
new file mode 100644
index 0000000000..02b74de2b0
--- /dev/null
+++ b/test/pending/run/implicit-classes.scala
@@ -0,0 +1,17 @@
+object O {
+ implicit class C(s: String) {
+ def twice = s + s
+ }
+}
+
+/**
+//
+// We'd like to augment object O in Namers so that it also has an implicit method
+object O {
+ implicit class C(s: String) {
+ def twice = s + s
+ }
+ implicit def C(s: String): C = new C(s)
+}
+
+**/
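Usage-wise, the desugaring sketched in the comment is what makes the extension-method call work: the implicit def (whether written by hand or synthesized for the implicit class) converts the receiver. A minimal sketch (not part of the changeset):

    object ImplicitClassDemo {
      implicit class RichString(val s: String) extends AnyVal {
        def twice: String = s + s
      }

      def demo(): Unit =
        println("ab".twice)   // equivalent to new RichString("ab").twice, prints "abab"
    }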
diff --git a/test/pending/run/virtpatmat_anonfun_underscore.check b/test/pending/run/virtpatmat_anonfun_underscore.check
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/test/pending/run/virtpatmat_anonfun_underscore.check
diff --git a/test/pending/run/virtpatmat_anonfun_underscore.flags b/test/pending/run/virtpatmat_anonfun_underscore.flags
new file mode 100644
index 0000000000..23e3dc7d26
--- /dev/null
+++ b/test/pending/run/virtpatmat_anonfun_underscore.flags
@@ -0,0 +1 @@
+-Yvirtpatmat \ No newline at end of file
diff --git a/test/pending/run/virtpatmat_anonfun_underscore.scala b/test/pending/run/virtpatmat_anonfun_underscore.scala
new file mode 100644
index 0000000000..db6705d025
--- /dev/null
+++ b/test/pending/run/virtpatmat_anonfun_underscore.scala
@@ -0,0 +1,4 @@
+object Test extends App {
+ List(1,2,3) map (_ match { case x => x + 1} ) // `_ match` is redundant but shouldn't crash the compiler
+ List((1,2)) map (_ match { case (x, z) => x + z})
+} \ No newline at end of file
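As the comment in the test says, the `_ match { ... }` wrapper is redundant: the case clauses can be passed as the anonymous function directly. The equivalent shorter spelling (illustration only, not part of the changeset):

    object AnonfunMatchDemo {
      val incremented = List(1, 2, 3) map { case x => x + 1 }       // List(2, 3, 4)
      val summed      = List((1, 2)) map { case (x, z) => x + z }   // List(3)
    }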
diff --git a/test/scaladoc/resources/code-indent.scala b/test/scaladoc/resources/code-indent.scala
new file mode 100644
index 0000000000..88946ffc7f
--- /dev/null
+++ b/test/scaladoc/resources/code-indent.scala
@@ -0,0 +1,37 @@
+/**
+ * This is an example of indented comments:
+ * {{{
+ * a typical indented
+ * comment on multiple
+ * comment lines
+ * }}}
+ * {{{ one liner }}}
+ * {{{ two lines, one useful
+ * }}}
+ * {{{
+ * line1
+ * line2
+ * line3
+ * line4}}}
+ * {{{
+ * a ragged example
+ * a (condition)
+ * the t h e n branch
+ * an alternative
+ * the e l s e branch
+ * }}}
+ * NB: Trailing spaces are necessary for this test!
+ * {{{
+ * l1
+ *
+ * l2
+ *
+ * l3
+ *
+ * l4
+ *
+ * l5
+ * }}}
+
+ */
+class C
diff --git a/test/scaladoc/run/SI-5373.check b/test/scaladoc/run/SI-5373.check
new file mode 100644
index 0000000000..c55eb001cf
--- /dev/null
+++ b/test/scaladoc/run/SI-5373.check
@@ -0,0 +1 @@
+model contains 6 documentable templates
diff --git a/test/scaladoc/run/SI-5373.scala b/test/scaladoc/run/SI-5373.scala
new file mode 100644
index 0000000000..af433a1844
--- /dev/null
+++ b/test/scaladoc/run/SI-5373.scala
@@ -0,0 +1,34 @@
+import scala.tools.nsc.doc.model._
+import scala.tools.partest.ScaladocModelTest
+
+object Test extends ScaladocModelTest {
+
+ def code = """
+ import scala.annotation.bridge
+
+ package scala.test {
+
+ trait A {
+ def foo = ()
+ }
+
+ trait B {
+ @bridge()
+ def foo = ()
+ }
+
+ class C extends A with B
+ }
+ """
+
+ // no need for special settings
+ def scaladocSettings = ""
+
+ def testModel(rootPackage: Package) = {
+ // get the quick access implicit defs in scope (_package(s), _class(es), _trait(s), _object(s), _method(s), _value(s))
+ import access._
+
+ // just need to check that the member exists; the access methods will throw an error if there's a problem
+ rootPackage._package("scala")._package("test")._class("C")._method("foo")
+ }
+} \ No newline at end of file
diff --git a/test/scaladoc/scala/html.flags b/test/scaladoc/scala/html.flags
deleted file mode 100644
index b2264ec4f4..0000000000
--- a/test/scaladoc/scala/html.flags
+++ /dev/null
@@ -1 +0,0 @@
--encoding UTF-8 \ No newline at end of file
diff --git a/test/scaladoc/scala/model/CommentFactoryTest.scala b/test/scaladoc/scalacheck/CommentFactoryTest.scala
index 69c314a64c..69c314a64c 100644
--- a/test/scaladoc/scala/model/CommentFactoryTest.scala
+++ b/test/scaladoc/scalacheck/CommentFactoryTest.scala
diff --git a/test/scaladoc/scala/html/HtmlFactoryTest.flags b/test/scaladoc/scalacheck/HtmlFactoryTest.flags
index b2264ec4f4..b2264ec4f4 100644
--- a/test/scaladoc/scala/html/HtmlFactoryTest.flags
+++ b/test/scaladoc/scalacheck/HtmlFactoryTest.flags
diff --git a/test/scaladoc/scala/html/HtmlFactoryTest.scala b/test/scaladoc/scalacheck/HtmlFactoryTest.scala
index 67358e6e70..5b6f75426e 100644
--- a/test/scaladoc/scala/html/HtmlFactoryTest.scala
+++ b/test/scaladoc/scalacheck/HtmlFactoryTest.scala
@@ -50,7 +50,7 @@ object Test extends Properties("HtmlFactory") {
def createTemplates(basename: String) = {
val result = scala.collection.mutable.Map[String, scala.xml.NodeSeq]()
- createFactory.makeUniverse(List(RESOURCES+basename)) match {
+ createFactory.makeUniverse(Left(List(RESOURCES+basename))) match {
case Some(universe) => {
val index = IndexModelFactory.makeIndex(universe)
(new HtmlFactory(universe, index)).writeTemplates((page) => {
@@ -64,7 +64,7 @@ object Test extends Properties("HtmlFactory") {
}
def createReferenceIndex(basename: String) = {
- createFactory.makeUniverse(List(RESOURCES+basename)) match {
+ createFactory.makeUniverse(Left(List(RESOURCES+basename))) match {
case Some(universe) => {
val index = IndexModelFactory.makeIndex(universe)
val pages = index.firstLetterIndex.map({
@@ -442,8 +442,9 @@ object Test extends Properties("HtmlFactory") {
property("Use cases should override their original members") =
checkText("SI_5054_q1.scala")(
- (None,"""def test(): Int""", true),
- (None,"""def test(implicit lost: Int): Int""", false)
+ (None,"""def test(): Int""", true)
+ //Disabled because the full signature is now displayed
+ //(None,"""def test(implicit lost: Int): Int""", false)
)
property("Use cases should keep their flags - final should not be lost") =
@@ -599,8 +600,9 @@ object Test extends Properties("HtmlFactory") {
T StartT the type of the first argument EndT
arg1 Start1 The T term comment End1
arg2 Start2 The string comment End2
- returns StartRet The return comment EndRet
- Definition Classes InheritDocDerived → InheritDocBase
+ returns StartRet The return comment EndRet""", true),
+ (Some("InheritDocDerived"),
+ """Definition Classes InheritDocDerived → InheritDocBase
Example: StartExample function[Int](3, "something") EndExample
Version StartVer 0.0.2 EndVer
Since StartSince 0.0.1 EndSince
@@ -624,8 +626,9 @@ object Test extends Properties("HtmlFactory") {
T StartT the type of the first argument EndT
arg1 Start1 The T term comment End1
arg2 Start2 The string comment End2
- returns StartRet The return comment EndRet
- Example: StartExample function[Int](3,"something") EndExample
+ returns StartRet The return comment EndRet""", true),
+ (Some("UseCaseInheritDoc"),
+ """Example: StartExample function[Int](3,"something") EndExample
Version StartVer 0.0.2 EndVer
Since StartSince 0.0.1 EndSince
Exceptions thrown
@@ -671,6 +674,23 @@ object Test extends Properties("HtmlFactory") {
// traits E, F and H shouldn't crash scaladoc but we don't need to check the output
)
+ property("Indentation normalization for code blocks") = {
+ val files = createTemplates("code-indent.scala")
+
+ files("C.html") match {
+ case node: scala.xml.Node => {
+ val s = node.toString
+ s.contains("<pre>a typicial indented\ncomment on multiple\ncomment lines</pre>") &&
+ s.contains("<pre>one liner</pre>") &&
+ s.contains("<pre>two lines, one useful</pre>") &&
+ s.contains("<pre>line1\nline2\nline3\nline4</pre>") &&
+ s.contains("<pre>a ragged example\na (condition)\n the t h e n branch\nan alternative\n the e l s e branch</pre>") &&
+ s.contains("<pre>l1\n\nl2\n\nl3\n\nl4\n\nl5</pre>")
+ }
+ case _ => false
+ }
+ }
+
{
val files = createTemplates("basic.scala")
//println(files)
diff --git a/test/scaladoc/scala/IndexScriptTest.scala b/test/scaladoc/scalacheck/IndexScriptTest.scala
index e0372020fd..5aef38e00a 100644
--- a/test/scaladoc/scala/IndexScriptTest.scala
+++ b/test/scaladoc/scalacheck/IndexScriptTest.scala
@@ -24,7 +24,7 @@ object Test extends Properties("IndexScript") {
val indexModelFactory = doc.model.IndexModelFactory
def createIndexScript(path: String) =
- docFactory.makeUniverse(List(path)) match {
+ docFactory.makeUniverse(Left(List(path))) match {
case Some(universe) => {
val index = new IndexScript(universe,
indexModelFactory.makeIndex(universe))
diff --git a/test/scaladoc/scala/IndexTest.scala b/test/scaladoc/scalacheck/IndexTest.scala
index c14fd98297..29e337da2b 100644
--- a/test/scaladoc/scala/IndexTest.scala
+++ b/test/scaladoc/scalacheck/IndexTest.scala
@@ -37,7 +37,7 @@ object Test extends Properties("Index") {
//val original = Console.out
//Console.setOut(stream)
- val result = docFactory.makeUniverse(List(path))
+ val result = docFactory.makeUniverse(Left(List(path)))
// assert(stream.toString == "model contains 2 documentable templates\n")
//Console.setOut(original)
diff --git a/tools/get-scala-commit-date b/tools/get-scala-commit-date
index ef5b0f540d..b2e4e10770 100755
--- a/tools/get-scala-commit-date
+++ b/tools/get-scala-commit-date
@@ -11,6 +11,7 @@
[[ $# -eq 0 ]] || cd "$1"
lastcommitdate=$(git log --format="%ci" HEAD | head -n 1 | cut -d ' ' -f 1)
+lastcommithours=$(git log --format="%ci" HEAD | head -n 1 | cut -d ' ' -f 2)
# 20120324
-echo "${lastcommitdate//-/}"
+echo "${lastcommitdate//-/}-${lastcommithours//:/}"
diff --git a/tools/get-scala-commit-drift b/tools/get-scala-commit-drift
deleted file mode 100755
index 4959826ec1..0000000000
--- a/tools/get-scala-commit-drift
+++ /dev/null
@@ -1,40 +0,0 @@
-#!/usr/bin/env bash
-#
-# Usage: get-scala-commit-drift [dir]
-# Figures out current commit drift of a git clone.
-# If no dir is given, current working dir is used.
-#
-# Example output string:
-# 123
-#
-# Build drift = # of commits since last tag.
-
-[[ $# -eq 0 ]] || cd "$1"
-
-ensure_tag () {
- sha=$1
- rev=$2
-
- [[ -n $(git tag -l $rev) ]] || {
- git tag -a -m "generated by get-scala-revision" $rev $sha
- }
-}
-
-# Ensure some baseline tags are present so if this repository's
-# tags are screwed up or stale, we should still have a reference
-# point for a build string.
-ensure_tag 58cb15c40d v2.10.0-M1
-ensure_tag 29f3eace1e v2.9.1
-ensure_tag b0d78f6b9c v2.8.2
-
-# the closest tag, obtained separately because we have to
-# reconstruct the string around the padded distance.
-tag=$(git describe --tags --match 'v2*' --abbrev=0)
-
-# printf %016s is not portable for 0-padding, has to be a digit.
-# so we're stuck disassembling it.
-described=$(git describe --tags --match 'v2*' --abbrev=10)
-suffix="${described##${tag}-}"
-counter=$(echo $suffix | cut -d - -f 1)
-
-echo "$counter"
diff --git a/tools/get-scala-commit-drift.bat b/tools/get-scala-commit-drift.bat
deleted file mode 100644
index ac289d3481..0000000000
--- a/tools/get-scala-commit-drift.bat
+++ /dev/null
@@ -1,21 +0,0 @@
-@echo off
-rem
-rem Usage: get-scala-commit-drift.bat [dir]
-rem Figures out current scala commit drift, of a clone.
-rem
-rem If no dir is given, current working dir is used.
-
-@setlocal
-set _DIR=
-if "%*"=="" (
- for /f "delims=;" %%i in ('cd') do set "_DIR=%%i"
-) else (
- set "_DIR=%~1"
-)
-cd %_DIR%
-
-rem TODO - WRITE THIS
-echo "TODO"
-
-:end
-@endlocal
diff --git a/tools/get-scala-commit-sha b/tools/get-scala-commit-sha
index 0abe31a53c..eab90a4215 100755
--- a/tools/get-scala-commit-sha
+++ b/tools/get-scala-commit-sha
@@ -10,31 +10,9 @@
[[ $# -eq 0 ]] || cd "$1"
-ensure_tag () {
- sha=$1
- rev=$2
-
- [[ -n $(git tag -l $rev) ]] || {
- git tag -a -m "generated by get-scala-revision" $rev $sha
- }
-}
-
-# Ensure some baseline tags are present so if this repository's
-# tags are screwed up or stale, we should still have a reference
-# point for a build string.
-ensure_tag 58cb15c40d v2.10.0-M1
-ensure_tag 29f3eace1e v2.9.1
-ensure_tag b0d78f6b9c v2.8.2
-
-# the closest tag, obtained separately because we have to
-# reconstruct the string around the padded distance.
-tag=$(git describe --tags --match 'v2*' --abbrev=0)
-
# printf %016s is not portable for 0-padding, has to be a digit.
# so we're stuck disassembling it.
-described=$(git describe --tags --match 'v2*' --abbrev=10)
-suffix="${described##${tag}-}"
-hash=$(echo $suffix | cut -d - -f 2)
+hash=$(git log -1 --format="%H" HEAD)
hash=${hash#g}
-
+hash=${hash:0:10}
echo "$hash"
diff --git a/tools/get-scala-commit-sha.bat b/tools/get-scala-commit-sha.bat
index 80d9aa34b1..7a5afa11b1 100644
--- a/tools/get-scala-commit-sha.bat
+++ b/tools/get-scala-commit-sha.bat
@@ -15,7 +15,7 @@ if "%*"=="" (
cd %_DIR%
rem TODO - truncate chars.
-git log -1 --format="%T"
+git log -1 --format="%H
:end
@endlocal
diff --git a/tools/make-release-notes b/tools/make-release-notes
new file mode 100755
index 0000000000..dcd206f7fc
--- /dev/null
+++ b/tools/make-release-notes
@@ -0,0 +1,49 @@
+#!/usr/bin/env bash
+
+# This tool builds a *scaffold* of a release note that you can fill in with details before posting to the list.
+# It aims to provide *all* the information you need, so you will probably need to prune it before releasing.
+# Author: jsuereth
+
+fixMessages() {
+ local tag1="$1"
+ local tag2="$2"
+ git log $tag1..$tag2 "--format=format: * %h - %s" --no-merges --grep "SI-"
+}
+
+allcommitMessages() {
+ local tag1="$1"
+ local tag2="$2"
+ git log $tag1..$tag2 "--format=format: * %h - %s" --no-merges
+}
+
+authors() {
+ local tag1="$1"
+ local tag2="$2"
+ git log $tag1..$tag2 --format=format:%an --no-merges | sort | uniq -c | sort -rh
+}
+
+
+message() {
+ local tag1="$1"
+ local tag2="$2"
+
+ echo "A new release of Scala is available! Please point your build tools at ${tag2#v}"
+ echo
+ echo "Here's a list of the issues that have been fixed since ${tag1#v}: "
+ fixMessages "$tag1" "$tag2"
+ echo
+ echo
+ echo "Special thanks to all the contributions!"
+ echo "------- --------------------------------"
+ authors "$tag1" "$tag2"
+ echo "------- --------------------------------"
+ echo
+ echo
+ echo "Here's a complete list of changes:"
+ allcommitMessages "$tag1" "$tag2"
+}
+
+
+message "$1" "$2"
+
+