-rw-r--r--  build.number | 4
-rw-r--r--  build.xml | 82
-rw-r--r--  lib/scala-compiler.jar.desired.sha1 | 2
-rw-r--r--  lib/scala-library-src.jar.desired.sha1 | 2
-rw-r--r--  lib/scala-library.jar.desired.sha1 | 2
-rw-r--r--  src/actors/scala/actors/threadpool/AbstractExecutorService.java | 10
-rw-r--r--  src/actors/scala/actors/threadpool/BlockingQueue.java | 24
-rw-r--r--  src/actors/scala/actors/threadpool/Executors.java | 6
-rw-r--r--  src/actors/scala/actors/threadpool/LinkedBlockingQueue.java | 716
-rw-r--r--  src/actors/scala/actors/threadpool/ThreadPoolExecutor.java | 2
-rw-r--r--  src/actors/scala/actors/threadpool/helpers/FIFOWaitQueue.java | 2
-rw-r--r--  src/actors/scala/actors/threadpool/helpers/Utils.java | 12
-rw-r--r--  src/actors/scala/actors/threadpool/locks/CondVar.java | 1
-rw-r--r--  src/actors/scala/actors/threadpool/locks/FIFOCondVar.java | 1
-rw-r--r--  src/actors/scala/actors/threadpool/locks/ReentrantReadWriteLock.java | 8
-rw-r--r--  src/compiler/scala/tools/nsc/Global.scala | 6
-rw-r--r--  src/compiler/scala/tools/nsc/Interpreter.scala | 17
-rwxr-xr-x  src/compiler/scala/tools/nsc/ast/DocComments.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/ast/TreeDSL.scala | 22
-rw-r--r--  src/compiler/scala/tools/nsc/ast/TreeGen.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/ast/TreeInfo.scala | 6
-rw-r--r--  src/compiler/scala/tools/nsc/ast/TreePrinters.scala | 6
-rw-r--r--  src/compiler/scala/tools/nsc/ast/Trees.scala | 15
-rw-r--r--  src/compiler/scala/tools/nsc/ast/parser/Parsers.scala | 15
-rw-r--r--  src/compiler/scala/tools/nsc/backend/opt/Inliners.scala | 1006
-rw-r--r--  src/compiler/scala/tools/nsc/doc/DocFactory.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/doc/html/HtmlFactory.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/doc/html/HtmlPage.scala | 7
-rw-r--r--  src/compiler/scala/tools/nsc/doc/html/page/Index.scala | 11
-rw-r--r--  src/compiler/scala/tools/nsc/doc/html/page/Template.scala | 127
-rw-r--r--  src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_left.png | bin 3519 -> 481 bytes
-rw-r--r--  src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_left.psd | bin 31923 -> 30823 bytes
-rw-r--r--  src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_right.png | bin 2977 -> 533 bytes
-rw-r--r--  src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_right.psd | bin 28574 -> 31295 bytes
-rw-r--r--  src/compiler/scala/tools/nsc/doc/html/resource/lib/index.css | 37
-rw-r--r--  src/compiler/scala/tools/nsc/doc/html/resource/lib/index.js | 8
-rw-r--r--  src/compiler/scala/tools/nsc/doc/html/resource/lib/template.css | 72
-rw-r--r--  src/compiler/scala/tools/nsc/doc/html/resource/lib/template.js | 148
-rw-r--r--  src/compiler/scala/tools/nsc/doc/model/Entity.scala | 63
-rw-r--r--  src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala | 82
-rw-r--r--  src/compiler/scala/tools/nsc/doc/model/comment/Comment.scala | 1
-rw-r--r--  src/compiler/scala/tools/nsc/doc/model/comment/CommentFactory.scala | 3
-rw-r--r--  src/compiler/scala/tools/nsc/interpreter/Completion.scala | 9
-rw-r--r--  src/compiler/scala/tools/nsc/io/AbstractFile.scala | 3
-rw-r--r--  src/compiler/scala/tools/nsc/io/NullPrintStream.scala | 19
-rw-r--r--  src/compiler/scala/tools/nsc/io/Path.scala | 7
-rw-r--r--  src/compiler/scala/tools/nsc/io/ZipArchive.scala | 34
-rw-r--r--  src/compiler/scala/tools/nsc/matching/ParallelMatching.scala | 1
-rw-r--r--  src/compiler/scala/tools/nsc/plugins/Plugin.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/settings/StandardScalaSettings.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/symtab/Definitions.scala | 1
-rw-r--r--  src/compiler/scala/tools/nsc/symtab/StdNames.scala | 1
-rw-r--r--  src/compiler/scala/tools/nsc/symtab/Symbols.scala | 3
-rw-r--r--  src/compiler/scala/tools/nsc/symtab/Types.scala | 81
-rw-r--r--  src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/transform/CleanUp.scala | 105
-rw-r--r--  src/compiler/scala/tools/nsc/transform/Erasure.scala | 289
-rw-r--r--  src/compiler/scala/tools/nsc/transform/Mixin.scala | 19
-rw-r--r--  src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala | 10
-rw-r--r--  src/compiler/scala/tools/nsc/transform/TailCalls.scala | 29
-rw-r--r--  src/compiler/scala/tools/nsc/transform/UnCurry.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Analyzer.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Implicits.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Infer.scala | 126
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Namers.scala | 36
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/RefChecks.scala | 109
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala | 3
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Typers.scala | 107
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Unapplies.scala | 24
-rw-r--r--  src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala | 1
-rw-r--r--  src/continuations/plugin/scala/tools/selectivecps/CPSUtils.scala | 3
-rw-r--r--  src/library/scala/Enumeration.scala | 154
-rw-r--r--  src/library/scala/Option.scala | 10
-rw-r--r--  src/library/scala/collection/Parallel.scala (renamed from src/parallel-collections/scala/collection/Parallel.scala) | 0
-rw-r--r--  src/library/scala/collection/Parallelizable.scala | 38
-rw-r--r--  src/library/scala/collection/Sequentializable.scala (renamed from src/parallel-collections/scala/collection/Sequentializable.scala) | 0
-rw-r--r--  src/library/scala/collection/SetLike.scala | 1
-rw-r--r--  src/library/scala/collection/TraversableLike.scala | 17
-rw-r--r--  src/library/scala/collection/TraversableProxyLike.scala | 2
-rw-r--r--  src/library/scala/collection/TraversableViewLike.scala | 2
-rw-r--r--  src/library/scala/collection/generic/CanCombineFrom.scala (renamed from src/parallel-collections/scala/collection/generic/CanBuildFromParallel.scala) | 7
-rw-r--r--  src/library/scala/collection/generic/GenericParallelCompanion.scala | 29
-rw-r--r--  src/library/scala/collection/generic/GenericParallelTemplate.scala (renamed from src/parallel-collections/scala/collection/generic/GenericParallelTemplate.scala) | 11
-rw-r--r--  src/library/scala/collection/generic/HasNewCombiner.scala (renamed from src/parallel-collections/scala/collection/generic/HasNewCombiner.scala) | 0
-rw-r--r--  src/library/scala/collection/generic/ParallelFactory.scala (renamed from src/parallel-collections/scala/collection/generic/ParallelFactory.scala) | 4
-rw-r--r--  src/library/scala/collection/generic/ParallelMapFactory.scala (renamed from src/parallel-collections/scala/collection/generic/ParallelMapFactory.scala) | 11
-rw-r--r--  src/library/scala/collection/generic/Signalling.scala (renamed from src/parallel-collections/scala/collection/generic/Signalling.scala) | 0
-rw-r--r--  src/library/scala/collection/generic/Sizing.scala (renamed from src/parallel-collections/scala/collection/generic/Sizing.scala) | 0
-rw-r--r--  src/library/scala/collection/immutable/HashMap.scala | 104
-rw-r--r--  src/library/scala/collection/immutable/IntMap.scala | 16
-rw-r--r--  src/library/scala/collection/immutable/LongMap.scala | 25
-rw-r--r--  src/library/scala/collection/immutable/StringOps.scala | 12
-rw-r--r--  src/library/scala/collection/immutable/package.scala (renamed from src/parallel-collections/scala/collection/immutable/package.scala) | 0
-rw-r--r--  src/library/scala/collection/mutable/ArrayBuffer.scala | 6
-rw-r--r--  src/library/scala/collection/mutable/ArrayOps.scala | 7
-rw-r--r--  src/library/scala/collection/mutable/BufferLike.scala | 7
-rw-r--r--  src/library/scala/collection/mutable/BufferProxy.scala | 10
-rw-r--r--  src/library/scala/collection/mutable/PriorityQueue.scala | 1
-rw-r--r--  src/library/scala/collection/mutable/Seq.scala | 8
-rw-r--r--  src/library/scala/collection/mutable/SeqLike.scala | 31
-rw-r--r--  src/library/scala/collection/mutable/Stack.scala | 5
-rw-r--r--  src/library/scala/collection/parallel/Combiner.scala | 66
-rw-r--r--  src/library/scala/collection/parallel/Combiners.scala (renamed from src/parallel-collections/scala/collection/parallel/Combiners.scala) | 3
-rw-r--r--  src/library/scala/collection/parallel/Iterators.scala (renamed from src/parallel-collections/scala/collection/parallel/Iterators.scala) | 14
-rw-r--r--  src/library/scala/collection/parallel/ParallelIterable.scala (renamed from src/parallel-collections/scala/collection/parallel/ParallelIterable.scala) | 4
-rw-r--r--  src/library/scala/collection/parallel/ParallelIterableLike.scala (renamed from src/parallel-collections/scala/collection/parallel/ParallelIterableLike.scala) | 90
-rw-r--r--  src/library/scala/collection/parallel/ParallelIterableView.scala (renamed from src/parallel-collections/scala/collection/parallel/ParallelIterableView.scala) | 0
-rw-r--r--  src/library/scala/collection/parallel/ParallelIterableViewLike.scala (renamed from src/parallel-collections/scala/collection/parallel/ParallelIterableViewLike.scala) | 0
-rw-r--r--  src/library/scala/collection/parallel/ParallelMap.scala | 71
-rw-r--r--  src/library/scala/collection/parallel/ParallelMapLike.scala (renamed from src/parallel-collections/scala/collection/parallel/ParallelMapLike.scala) | 4
-rw-r--r--  src/library/scala/collection/parallel/ParallelSeq.scala (renamed from src/parallel-collections/scala/collection/parallel/ParallelSeq.scala) | 4
-rw-r--r--  src/library/scala/collection/parallel/ParallelSeqLike.scala (renamed from src/parallel-collections/scala/collection/parallel/ParallelSeqLike.scala) | 22
-rw-r--r--  src/library/scala/collection/parallel/ParallelSeqView.scala (renamed from src/parallel-collections/scala/collection/parallel/ParallelSeqView.scala) | 6
-rw-r--r--  src/library/scala/collection/parallel/ParallelSeqViewLike.scala (renamed from src/parallel-collections/scala/collection/parallel/ParallelSeqViewLike.scala) | 6
-rw-r--r--  src/library/scala/collection/parallel/RemainsIterator.scala | 438
-rw-r--r--  src/library/scala/collection/parallel/Splitter.scala (renamed from src/parallel-collections/scala/collection/parallel/Splitters.scala) | 0
-rw-r--r--  src/library/scala/collection/parallel/Splitters.scala | 86
-rw-r--r--  src/library/scala/collection/parallel/TaskSupport.scala (renamed from src/parallel-collections/scala/collection/parallel/TaskSupport.scala) | 0
-rw-r--r--  src/library/scala/collection/parallel/Tasks.scala (renamed from src/parallel-collections/scala/collection/parallel/Tasks.scala) | 15
-rw-r--r--  src/library/scala/collection/parallel/immutable/ParallelHashTrie.scala | 248
-rw-r--r--  src/library/scala/collection/parallel/immutable/ParallelIterable.scala (renamed from src/parallel-collections/scala/collection/parallel/immutable/ParallelIterable.scala) | 0
-rw-r--r--  src/library/scala/collection/parallel/immutable/ParallelIterable.scala.disabled | 53
-rw-r--r--  src/library/scala/collection/parallel/immutable/ParallelRange.scala (renamed from src/parallel-collections/scala/collection/parallel/immutable/ParallelRange.scala) | 6
-rw-r--r--  src/library/scala/collection/parallel/immutable/ParallelSeq.scala (renamed from src/parallel-collections/scala/collection/parallel/immutable/ParallelSeq.scala) | 2
-rw-r--r--  src/library/scala/collection/parallel/immutable/ParallelSeq.scala.disabled | 44
-rw-r--r--  src/library/scala/collection/parallel/immutable/package.scala (renamed from src/parallel-collections/scala/collection/parallel/immutable/package.scala) | 0
-rw-r--r--  src/library/scala/collection/parallel/mutable/LazyCombiner.scala (renamed from src/parallel-collections/scala/collection/parallel/mutable/LazyCombiner.scala) | 6
-rw-r--r--  src/library/scala/collection/parallel/mutable/ParallelArray.scala (renamed from src/parallel-collections/scala/collection/parallel/mutable/ParallelArray.scala) | 60
-rw-r--r--  src/library/scala/collection/parallel/mutable/ParallelArrayCombiner.scala (renamed from src/parallel-collections/scala/collection/parallel/mutable/ParallelArrayCombiner.scala) | 2
-rw-r--r--  src/library/scala/collection/parallel/mutable/ParallelIterable.scala (renamed from src/parallel-collections/scala/collection/parallel/mutable/ParallelIterable.scala) | 4
-rw-r--r--  src/library/scala/collection/parallel/mutable/ParallelSeq.scala (renamed from src/parallel-collections/scala/collection/parallel/mutable/ParallelSeq.scala) | 4
-rw-r--r--  src/library/scala/collection/parallel/mutable/package.scala (renamed from src/parallel-collections/scala/collection/parallel/mutable/package.scala) | 0
-rw-r--r--  src/library/scala/collection/parallel/package.scala (renamed from src/parallel-collections/scala/collection/parallel/package.scala) | 8
-rw-r--r--  src/library/scala/concurrent/SyncVar.scala | 35
-rw-r--r--  src/library/scala/io/Source.scala | 18
-rw-r--r--  src/library/scala/math/Ordering.scala | 18
-rwxr-xr-x  src/library/scala/reflect/generic/Symbols.scala | 1
-rw-r--r--  src/library/scala/runtime/AnyValCompanion.scala | 2
-rw-r--r--  src/library/scala/runtime/ScalaRunTime.scala | 8
-rw-r--r--  src/library/scala/util/Random.scala | 44
-rw-r--r--  src/library/scala/xml/parsing/MarkupParser.scala | 6
-rw-r--r--  src/parallel-collections/scala/collection/Parallelizable.scala | 40
-rw-r--r--  src/parallel-collections/scala/collection/generic/GenericParallelCompanion.scala | 31
-rw-r--r--  src/parallel-collections/scala/collection/parallel/ParallelMap.scala | 61
-rw-r--r--  src/parallel-collections/scala/collection/parallel/immutable/ParallelHashTrie.scala | 60
-rw-r--r--  src/partest/scala/tools/partest/PartestTask.scala | 4
-rw-r--r--  src/partest/scala/tools/partest/nest/Diff.java | 8
-rw-r--r--  src/partest/scala/tools/partest/nest/DiffPrint.java | 2
-rw-r--r--  src/partest/scala/tools/partest/nest/Worker.scala | 2
-rw-r--r--  src/swing/scala/swing/Font.scala.disabled (renamed from src/swing/scala/swing/Font.scala) | 30
-rw-r--r--  test/benchmarks/src/scala/collection/parallel/Benchmarking.scala | 7
-rw-r--r--  test/benchmarks/src/scala/collection/parallel/benchmarks/generic/Dummy.scala | 1
-rw-r--r--  test/benchmarks/src/scala/collection/parallel/benchmarks/generic/Operators.scala | 1
-rw-r--r--  test/benchmarks/src/scala/collection/parallel/benchmarks/generic/ParallelBenches.scala | 16
-rw-r--r--  test/benchmarks/src/scala/collection/parallel/benchmarks/hashtries/Combine.scala | 13
-rw-r--r--  test/benchmarks/src/scala/collection/parallel/benchmarks/hashtries/MultipleCombine.scala | 87
-rw-r--r--  test/benchmarks/src/scala/collection/parallel/benchmarks/hashtries/ParallelHashTries.scala | 121
-rw-r--r--  test/files/jvm/actor-executor2.scala | 12
-rw-r--r--  test/files/neg/abstract-vars.check | 21
-rw-r--r--  test/files/neg/abstract-vars.scala | 29
-rw-r--r--  test/files/neg/bug1275.check | 8
-rw-r--r--  test/files/neg/bug1275.scala | 26
-rw-r--r--  test/files/neg/bug1845.check | 4
-rw-r--r--  test/files/neg/bug1845.scala | 10
-rw-r--r--  test/files/neg/bug3209.check | 4
-rw-r--r--  test/files/neg/bug3209.scala | 2
-rw-r--r--  test/files/neg/bug3631.check | 4
-rw-r--r--  test/files/neg/bug3631.scala | 3
-rw-r--r--  test/files/neg/bug882.check | 2
-rw-r--r--  test/files/neg/names-defaults-neg.check | 13
-rw-r--r--  test/files/neg/names-defaults-neg.scala | 5
-rw-r--r--  test/files/neg/t2416.check | 10
-rw-r--r--  test/files/neg/t2416.scala | 14
-rw-r--r--  test/files/neg/t3399.check | 4
-rw-r--r--  test/files/neg/t3399.scala | 24
-rw-r--r--  test/files/neg/t3507.check | 4
-rw-r--r--  test/files/neg/t3507.scala | 15
-rw-r--r--  test/files/neg/t3604.check | 7
-rw-r--r--  test/files/neg/t3604.scala | 6
-rw-r--r--  test/files/neg/t3653.check | 7
-rw-r--r--  test/files/neg/t3653.scala | 4
-rw-r--r--  test/files/neg/t742.check | 5
-rw-r--r--  test/files/neg/t742.scala | 8
-rw-r--r--  test/files/neg/tailrec-2.check | 4
-rw-r--r--  test/files/neg/tailrec-2.scala | 26
-rw-r--r--  test/files/pos/bug0013.scala | 31
-rw-r--r--  test/files/pos/bug0095.scala | 15
-rw-r--r--  test/files/pos/bug1974.scala | 20
-rw-r--r--  test/files/pos/bug261-ab.scala | 9
-rw-r--r--  test/files/pos/bug261-ba.scala | 9
-rw-r--r--  test/files/pos/bug3234.flags | 1
-rw-r--r--  test/files/pos/bug3234.scala | 19
-rw-r--r--  test/files/pos/bug3440.scala | 18
-rw-r--r--  test/files/pos/bug3570.scala | 7
-rw-r--r--  test/files/pos/t1263/Test.java | 2
-rw-r--r--  test/files/pos/t2331.scala | 11
-rw-r--r--  test/files/pos/t2413/TestJava.java | 7
-rw-r--r--  test/files/pos/t2413/TestScalac.scala | 23
-rw-r--r--  test/files/pos/t3249/Test.java | 5
-rw-r--r--  test/files/pos/t3249/a.scala | 11
-rw-r--r--  test/files/pos/t3374.scala | 6
-rw-r--r--  test/files/pos/t3477.scala | 7
-rw-r--r--  test/files/pos/t3486/JTest.java | 3
-rw-r--r--  test/files/pos/t3486/test.scala | 6
-rw-r--r--  test/files/pos/t3494.scala | 7
-rw-r--r--  test/files/pos/t3622/test/AsyncTask.java | 5
-rw-r--r--  test/files/pos/t3622/test/MyAsyncTask.java | 9
-rw-r--r--  test/files/pos/t3622/test/Test.scala | 5
-rw-r--r--  test/files/run/bug1766.scala | 16
-rw-r--r--  test/files/run/bug2106.flags | 1
-rw-r--r--  test/files/run/bug2106.scala | 8
-rw-r--r--  test/files/run/bug3616.check | 1
-rw-r--r--  test/files/run/bug3616.scala | 12
-rw-r--r--  test/files/run/colltest1.scala | 4
-rw-r--r--  test/files/run/names-defaults.scala | 5
-rw-r--r--  test/files/run/slice-strings.scala | 19
-rw-r--r--  test/files/run/t0432.scala | 15
-rw-r--r--  test/files/run/t3493.scala | 15
-rw-r--r--  test/files/run/t3580.scala | 17
-rw-r--r--  test/files/run/t3603.scala | 18
-rw-r--r--  test/files/run/t3645.scala | 6
-rw-r--r--  test/files/run/xml-loop-bug.scala | 6
-rw-r--r--  test/pending/continuations-neg/t3628.check | 3
-rw-r--r--  test/pending/continuations-neg/t3628.scala | 11
-rw-r--r--  test/pending/continuations-pos/t3620.scala | 73
-rw-r--r--  test/pending/jvm/serialization.check | 198
-rw-r--r--  test/pending/jvm/serialization.scala | 411
-rw-r--r--  test/pending/pos/t3636.scala | 49
229 files changed, 4682 insertions(+), 2695 deletions(-)
diff --git a/build.number b/build.number
index 31a2b93530..e00461fe6c 100644
--- a/build.number
+++ b/build.number
@@ -1,5 +1,5 @@
#Tue Sep 11 19:21:09 CEST 2007
version.minor=8
-version.patch=0
-version.suffix=final
+version.patch=1
+version.suffix=alpha
version.major=2
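
build.number is a plain java.util.Properties file; this hunk bumps the patch level and moves the suffix back to alpha for the 2.8.1 development cycle. A minimal sketch of reading it with the standard Properties API (how the Ant build actually composes these keys into the full version string is outside this diff, so the separator used below is illustrative):

import java.io.FileReader;
import java.util.Properties;

public class ReadBuildNumber {
    public static void main(String[] args) throws Exception {
        Properties props = new Properties();
        try (FileReader in = new FileReader("build.number")) {
            props.load(in); // standard key=value properties format
        }
        // After this commit: major=2, minor=8, patch=1, suffix=alpha
        System.out.printf("%s.%s.%s-%s%n",
                props.getProperty("version.major"),
                props.getProperty("version.minor"),
                props.getProperty("version.patch"),
                props.getProperty("version.suffix"));
    }
}
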
diff --git a/build.xml b/build.xml
index c0593415fc..656c45a8c5 100644
--- a/build.xml
+++ b/build.xml
@@ -261,6 +261,15 @@ INITIALISATION
<pathelement location="${lib.dir}/forkjoin.jar"/>
<pathelement location="${ant.jar}"/>
</path>
+ <!-- What to have on the compilation path when compiling during certain phases -->
+ <path id="quick.compilation.path">
+ <pathelement location="${build-quick.dir}/classes/library"/>
+ <pathelement location="${lib.dir}/forkjoin.jar"/>
+ </path>
+ <path id="strap.compilation.path">
+ <pathelement location="${build-strap.dir}/classes/library"/>
+ <pathelement location="${lib.dir}/forkjoin.jar"/>
+ </path>
<taskdef resource="scala/tools/ant/sabbus/antlib.xml" classpathref="starr.classpath"/>
</target>
@@ -304,6 +313,7 @@ LOCAL REFERENCE BUILD (LOCKER)
<include name="**/*.scala"/>
<compilationpath>
<pathelement location="${build-locker.dir}/classes/library"/>
+ <pathelement location="${lib.dir}/forkjoin.jar"/>
</compilationpath>
</scalacfork>
<propertyfile file="${build-locker.dir}/classes/library/library.properties">
@@ -437,14 +447,14 @@ QUICK BUILD (QUICK)
============================================================================ -->
<target name="quick.start" depends="locker.done"/>
-
+
<target name="quick.pre-lib" depends="quick.start">
<uptodate property="quick.lib.available" targetfile="${build-quick.dir}/library.complete">
<srcfiles dir="${src.dir}">
<include name="library/**"/>
<include name="dbc/**"/>
<include name="actors/**"/>
- <include name="parallel-collections/**"/>
+ <!--<include name="parallel-collections/**"/>-->
<include name="continuations/**"/>
<include name="swing/**"/>
</srcfiles>
@@ -478,9 +488,7 @@ QUICK BUILD (QUICK)
srcdir="${src.dir}/library"
jvmargs="${scalacfork.jvmargs}">
<include name="**/*.scala"/>
- <compilationpath>
- <pathelement location="${build-quick.dir}/classes/library"/>
- </compilationpath>
+ <compilationpath refid="quick.compilation.path"/>
</scalacfork>
<scalacfork
destdir="${build-quick.dir}/classes/library"
@@ -489,12 +497,9 @@ QUICK BUILD (QUICK)
srcdir="${src.dir}/actors"
jvmargs="${scalacfork.jvmargs}">
<include name="**/*.scala"/>
- <compilationpath>
- <pathelement location="${build-quick.dir}/classes/library"/>
- <pathelement location="${lib.dir}/forkjoin.jar"/>
- </compilationpath>
+ <compilationpath refid="quick.compilation.path"/>
</scalacfork>
- <scalacfork
+ <!--<scalacfork
destdir="${build-quick.dir}/classes/library"
compilerpathref="locker.classpath"
params="${scalac.args.quick}"
@@ -505,7 +510,7 @@ QUICK BUILD (QUICK)
<pathelement location="${build-quick.dir}/classes/library"/>
<pathelement location="${lib.dir}/forkjoin.jar"/>
</compilationpath>
- </scalacfork>
+ </scalacfork>-->
<scalacfork
destdir="${build-quick.dir}/classes/library"
compilerpathref="locker.classpath"
@@ -513,9 +518,7 @@ QUICK BUILD (QUICK)
srcdir="${src.dir}/dbc"
jvmargs="${scalacfork.jvmargs}">
<include name="**/*.scala"/>
- <compilationpath>
- <pathelement location="${build-quick.dir}/classes/library"/>
- </compilationpath>
+ <compilationpath refid="quick.compilation.path"/>
</scalacfork>
<scalacfork
destdir="${build-quick.dir}/classes/library"
@@ -524,9 +527,7 @@ QUICK BUILD (QUICK)
srcdir="${src.dir}/swing"
jvmargs="${scalacfork.jvmargs}">
<include name="**/*.scala"/>
- <compilationpath>
- <pathelement location="${build-quick.dir}/classes/library"/>
- </compilationpath>
+ <compilationpath refid="quick.compilation.path"/>
</scalacfork>
<propertyfile file="${build-quick.dir}/classes/library/library.properties">
<entry key="version.number" value="${version.number}"/>
@@ -629,6 +630,7 @@ QUICK BUILD (QUICK)
<pathelement location="${build-quick.dir}/classes/library"/>
<pathelement location="${build-quick.dir}/classes/compiler"/>
<pathelement location="${build-quick.dir}/classes/continuations-plugin"/>
+ <pathelement location="${lib.dir}/forkjoin.jar"/>
</compilationpath>
</scalacfork>
<copy
@@ -647,10 +649,7 @@ QUICK BUILD (QUICK)
srcdir="${src.dir}/continuations/library"
jvmargs="${scalacfork.jvmargs}">
<include name="**/*.scala"/>
- <compilationpath>
- <pathelement location="${build-quick.dir}/classes/library"/>
- <pathelement location="${lib.dir}/forkjoin.jar"/>
- </compilationpath>
+ <compilationpath refid="quick.compilation.path"/>
</scalacfork>
<touch file="${build-quick.dir}/plugins.complete" verbose="no"/>
<stopwatch name="quick.plugins.timer" action="total"/>
@@ -678,6 +677,7 @@ QUICK BUILD (QUICK)
<pathelement location="${build-quick.dir}/classes/scalap"/>
<pathelement location="${build-quick.dir}/classes/partest"/>
<pathelement location="${ant.jar}"/>
+ <pathelement location="${lib.dir}/forkjoin.jar"/>
</compilationpath>
</scalacfork>
<touch file="${build-quick.dir}/scalap.complete" verbose="no"/>
@@ -719,6 +719,7 @@ QUICK BUILD (QUICK)
<pathelement location="${build-quick.dir}/classes/scalap"/>
<pathelement location="${build-quick.dir}/classes/partest"/>
<pathelement location="${ant.jar}"/>
+ <pathelement location="${lib.dir}/forkjoin.jar"/>
<pathelement location="${scalacheck.jar}"/>
</compilationpath>
</scalacfork>
@@ -1009,9 +1010,7 @@ BOOTSTRAPPING BUILD (STRAP)
srcdir="${src.dir}/library"
jvmargs="${scalacfork.jvmargs}">
<include name="**/*.scala"/>
- <compilationpath>
- <pathelement location="${build-strap.dir}/classes/library"/>
- </compilationpath>
+ <compilationpath refid="strap.compilation.path"/>
</scalacfork>
<scalacfork
destdir="${build-strap.dir}/classes/library"
@@ -1020,12 +1019,9 @@ BOOTSTRAPPING BUILD (STRAP)
srcdir="${src.dir}/actors"
jvmargs="${scalacfork.jvmargs}">
<include name="**/*.scala"/>
- <compilationpath>
- <pathelement location="${build-strap.dir}/classes/library"/>
- <pathelement location="${forkjoin.jar}"/>
- </compilationpath>
+ <compilationpath refid="strap.compilation.path"/>
</scalacfork>
- <scalacfork
+ <!--<scalacfork
destdir="${build-strap.dir}/classes/library"
compilerpathref="pack.classpath"
params="${scalac.args.all}"
@@ -1036,7 +1032,7 @@ BOOTSTRAPPING BUILD (STRAP)
<pathelement location="${build-strap.dir}/classes/library"/>
<pathelement location="${forkjoin.jar}"/>
</compilationpath>
- </scalacfork>
+ </scalacfork>-->
<scalacfork
destdir="${build-strap.dir}/classes/library"
compilerpathref="pack.classpath"
@@ -1044,9 +1040,7 @@ BOOTSTRAPPING BUILD (STRAP)
srcdir="${src.dir}/dbc"
jvmargs="${scalacfork.jvmargs}">
<include name="**/*.scala"/>
- <compilationpath>
- <pathelement location="${build-strap.dir}/classes/library"/>
- </compilationpath>
+ <compilationpath refid="strap.compilation.path"/>
</scalacfork>
<scalacfork
destdir="${build-strap.dir}/classes/library"
@@ -1055,9 +1049,7 @@ BOOTSTRAPPING BUILD (STRAP)
srcdir="${src.dir}/swing"
jvmargs="${scalacfork.jvmargs}">
<include name="**/*.scala"/>
- <compilationpath>
- <pathelement location="${build-strap.dir}/classes/library"/>
- </compilationpath>
+ <compilationpath refid="strap.compilation.path"/>
</scalacfork>
<propertyfile file="${build-strap.dir}/classes/library/library.properties">
<entry key="version.number" value="${version.number}"/>
@@ -1140,6 +1132,7 @@ BOOTSTRAPPING BUILD (STRAP)
<pathelement location="${build-strap.dir}/classes/library"/>
<pathelement location="${build-strap.dir}/classes/compiler"/>
<pathelement location="${build-strap.dir}/classes/continuations-plugin"/>
+ <pathelement location="${lib.dir}/forkjoin.jar"/>
</compilationpath>
</scalacfork>
<copy
@@ -1158,10 +1151,7 @@ BOOTSTRAPPING BUILD (STRAP)
srcdir="${src.dir}/continuations/library"
jvmargs="${scalacfork.jvmargs}">
<include name="**/*.scala"/>
- <compilationpath>
- <pathelement location="${build-strap.dir}/classes/library"/>
- <pathelement location="${lib.dir}/forkjoin.jar"/>
- </compilationpath>
+ <compilationpath refid="strap.compilation.path"/>
</scalacfork>
<touch file="${build-strap.dir}/plugins.complete" verbose="no"/>
<stopwatch name="strap.plugins.timer" action="total"/>
@@ -1189,6 +1179,7 @@ BOOTSTRAPPING BUILD (STRAP)
<pathelement location="${build-strap.dir}/classes/scalap"/>
<pathelement location="${build-strap.dir}/classes/partest"/>
<pathelement location="${ant.jar}"/>
+ <pathelement location="${lib.dir}/forkjoin.jar"/>
</compilationpath>
</scalacfork>
<touch file="${build-strap.dir}/scalap.complete" verbose="no"/>
@@ -1229,8 +1220,9 @@ BOOTSTRAPPING BUILD (STRAP)
<pathelement location="${build-strap.dir}/classes/compiler"/>
<pathelement location="${build-strap.dir}/classes/scalap"/>
<pathelement location="${build-strap.dir}/classes/partest"/>
- <pathelement location="${scalacheck.jar}"/>
<pathelement location="${ant.jar}"/>
+ <pathelement location="${lib.dir}/forkjoin.jar"/>
+ <pathelement location="${scalacheck.jar}"/>
</compilationpath>
</scalacfork>
<copy todir="${build-strap.dir}/classes/partest">
@@ -1393,7 +1385,7 @@ DOCUMENTATION
<include name="library/**"/>
<include name="dbc/**"/>
<include name="actors/**"/>
- <include name="parallel-collections/**"/>
+ <!--<include name="parallel-collections/**"/>-->
<include name="swing/**"/>
</srcfiles>
</uptodate>
@@ -1411,7 +1403,7 @@ DOCUMENTATION
classpathref="pack.classpath">
<src>
<files includes="${src.dir}/actors"/>
- <files includes="${src.dir}/parallel-collections"/>
+ <!--<files includes="${src.dir}/parallel-collections"/>-->
<files includes="${src.dir}/library/scala"/>
<files includes="${src.dir}/swing"/>
<files includes="${src.dir}/continuations/library"/>
@@ -1650,7 +1642,7 @@ DISTRIBUTION
<jar destfile="${dist.dir}/src/scala-library-src.jar">
<fileset dir="${src.dir}/library"/>
<fileset dir="${src.dir}/actors"/>
- <fileset dir="${src.dir}/parallel-collections"/>
+ <!--<fileset dir="${src.dir}/parallel-collections"/>-->
<fileset dir="${src.dir}/continuations/library"/>
</jar>
<jar destfile="${dist.dir}/src/scala-dbc-src.jar">
@@ -1739,7 +1731,7 @@ STABLE REFERENCE (STARR)
<jar destfile="${basedir}/lib/scala-library-src.jar">
<fileset dir="${basedir}/src/library"/>
<fileset dir="${basedir}/src/actors"/>
- <fileset dir="${basedir}/src/parallel-collections"/>
+ <!--<fileset dir="${basedir}/src/parallel-collections"/>-->
<fileset dir="${basedir}/src/swing"/>
<fileset dir="${basedir}/src/dbc"/>
</jar>
diff --git a/lib/scala-compiler.jar.desired.sha1 b/lib/scala-compiler.jar.desired.sha1
index f2a687e44e..7f771c65d9 100644
--- a/lib/scala-compiler.jar.desired.sha1
+++ b/lib/scala-compiler.jar.desired.sha1
@@ -1 +1 @@
-47cd1c12567af0aa7d93b4cf2877db26bd908fe7 ?scala-compiler.jar
+dac47f86e72a0d4d1c196b30e5fab00671ae72cc ?scala-compiler.jar
diff --git a/lib/scala-library-src.jar.desired.sha1 b/lib/scala-library-src.jar.desired.sha1
index f25112f05b..3ca7e74721 100644
--- a/lib/scala-library-src.jar.desired.sha1
+++ b/lib/scala-library-src.jar.desired.sha1
@@ -1 +1 @@
-d42ea573aada13c9ea6b05da483c3e08522ea1fe ?scala-library-src.jar
+4d83364ebab2a2d6da1cc9c7401e9fce40868b58 ?scala-library-src.jar
diff --git a/lib/scala-library.jar.desired.sha1 b/lib/scala-library.jar.desired.sha1
index 10dba65a67..80d5c6b541 100644
--- a/lib/scala-library.jar.desired.sha1
+++ b/lib/scala-library.jar.desired.sha1
@@ -1 +1 @@
-82a0de3721dc7299d57d385b1d19286d63a5e763 ?scala-library.jar
+01733ad3ce01044f067f06de66a3aba634077b59 ?scala-library.jar
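
Each *.desired.sha1 file pins the expected checksum of a prebuilt bootstrap jar, one "<sha1-hex> ?<filename>" line per jar; the three hashes above are updated to match freshly built binaries. A hedged sketch of computing such a digest with the standard java.security API (the build's own fetch-and-verify tooling is not part of this diff):

import java.io.FileInputStream;
import java.io.InputStream;
import java.security.MessageDigest;

public class Sha1Of {
    // Computes the hex SHA-1 digest of a file, e.g. lib/scala-library.jar
    static String sha1(String path) throws Exception {
        MessageDigest md = MessageDigest.getInstance("SHA-1");
        byte[] buf = new byte[8192];
        try (InputStream in = new FileInputStream(path)) {
            int n;
            while ((n = in.read(buf)) != -1)
                md.update(buf, 0, n);
        }
        StringBuilder hex = new StringBuilder();
        for (byte b : md.digest())
            hex.append(String.format("%02x", b));
        return hex.toString();
    }

    public static void main(String[] args) throws Exception {
        // Prints a line in the same "<sha1> ?<name>" format as the files above
        System.out.println(sha1("lib/scala-library.jar") + " ?scala-library.jar");
    }
}
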
diff --git a/src/actors/scala/actors/threadpool/AbstractExecutorService.java b/src/actors/scala/actors/threadpool/AbstractExecutorService.java
index 7953bfe30f..4a12aa3c28 100644
--- a/src/actors/scala/actors/threadpool/AbstractExecutorService.java
+++ b/src/actors/scala/actors/threadpool/AbstractExecutorService.java
@@ -120,7 +120,7 @@ public abstract class AbstractExecutorService implements ExecutorService {
int ntasks = tasks.size();
if (ntasks == 0)
throw new IllegalArgumentException();
- List futures= new ArrayList(ntasks);
+ List<Future> futures = new ArrayList<Future>(ntasks);
ExecutorCompletionService ecs =
new ExecutorCompletionService(this);
@@ -203,10 +203,10 @@ public abstract class AbstractExecutorService implements ExecutorService {
return doInvokeAny(tasks, true, unit.toNanos(timeout));
}
- public List invokeAll(Collection tasks) throws InterruptedException {
+ public List<Future> invokeAll(Collection tasks) throws InterruptedException {
if (tasks == null)
throw new NullPointerException();
- List futures = new ArrayList(tasks.size());
+ List<Future> futures = new ArrayList<Future>(tasks.size());
boolean done = false;
try {
for (Iterator t = tasks.iterator(); t.hasNext();) {
@@ -235,13 +235,13 @@ public abstract class AbstractExecutorService implements ExecutorService {
}
}
- public List invokeAll(Collection tasks,
+ public List<Future> invokeAll(Collection tasks,
long timeout, TimeUnit unit)
throws InterruptedException {
if (tasks == null || unit == null)
throw new NullPointerException();
long nanos = unit.toNanos(timeout);
- List futures = new ArrayList(tasks.size());
+ List<Future> futures = new ArrayList<Future>(tasks.size());
boolean done = false;
try {
for (Iterator t = tasks.iterator(); t.hasNext();)
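
The hunks above generify the bulk-execution methods so invokeAll returns List<Future> rather than a raw List (the task collection parameter itself stays raw in this backport). A small usage sketch against java.util.concurrent, whose fully generic signatures this scala.actors.threadpool code mirrors:

import java.util.Arrays;
import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;

public class InvokeAllDemo {
    public static void main(String[] args) throws Exception {
        ExecutorService pool = Executors.newFixedThreadPool(2);
        List<Callable<Integer>> tasks = Arrays.asList(
                () -> 1 + 1,
                () -> 2 + 2);
        // invokeAll blocks until every task has completed (or failed),
        // so each Future below is already done when get() is called.
        List<Future<Integer>> futures = pool.invokeAll(tasks);
        for (Future<Integer> f : futures)
            System.out.println(f.get());
        pool.shutdown();
    }
}
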
diff --git a/src/actors/scala/actors/threadpool/BlockingQueue.java b/src/actors/scala/actors/threadpool/BlockingQueue.java
index 880c2580da..1b4e808d84 100644
--- a/src/actors/scala/actors/threadpool/BlockingQueue.java
+++ b/src/actors/scala/actors/threadpool/BlockingQueue.java
@@ -7,9 +7,10 @@
package scala.actors.threadpool;
import java.util.Collection;
+import java.util.Queue;
/**
- * A {@link edu.emory.mathcs.backport.java.util.Queue} that additionally supports operations
+ * A {@link java.util.Queue} that additionally supports operations
* that wait for the queue to become non-empty when retrieving an
* element, and wait for space to become available in the queue when
* storing an element.
@@ -146,8 +147,9 @@ import java.util.Collection;
*
* @since 1.5
* @author Doug Lea
+ * @param <E> the type of elements held in this collection
*/
-public interface BlockingQueue extends Queue {
+public interface BlockingQueue<E> extends Queue<E> {
/**
* Inserts the specified element into this queue if it is possible to do
* so immediately without violating capacity restrictions, returning
@@ -157,7 +159,7 @@ public interface BlockingQueue extends Queue {
* use {@link #offer(Object) offer}.
*
* @param e the element to add
- * @return <tt>true</tt> (as specified by {@link java.util.Collection#add})
+ * @return <tt>true</tt> (as specified by {@link Collection#add})
* @throws IllegalStateException if the element cannot be added at this
* time due to capacity restrictions
* @throws ClassCastException if the class of the specified element
@@ -166,7 +168,7 @@ public interface BlockingQueue extends Queue {
* @throws IllegalArgumentException if some property of the specified
* element prevents it from being added to this queue
*/
- boolean add(Object e);
+ boolean add(E e);
/**
* Inserts the specified element into this queue if it is possible to do
@@ -185,7 +187,7 @@ public interface BlockingQueue extends Queue {
* @throws IllegalArgumentException if some property of the specified
* element prevents it from being added to this queue
*/
- boolean offer(Object e);
+ boolean offer(E e);
/**
* Inserts the specified element into this queue, waiting if necessary
@@ -199,7 +201,7 @@ public interface BlockingQueue extends Queue {
* @throws IllegalArgumentException if some property of the specified
* element prevents it from being added to this queue
*/
- void put(Object e) throws InterruptedException;
+ void put(E e) throws InterruptedException;
/**
* Inserts the specified element into this queue, waiting up to the
@@ -219,7 +221,7 @@ public interface BlockingQueue extends Queue {
* @throws IllegalArgumentException if some property of the specified
* element prevents it from being added to this queue
*/
- boolean offer(Object e, long timeout, TimeUnit unit)
+ boolean offer(E e, long timeout, TimeUnit unit)
throws InterruptedException;
/**
@@ -229,7 +231,7 @@ public interface BlockingQueue extends Queue {
* @return the head of this queue
* @throws InterruptedException if interrupted while waiting
*/
- Object take() throws InterruptedException;
+ E take() throws InterruptedException;
/**
* Retrieves and removes the head of this queue, waiting up to the
@@ -243,7 +245,7 @@ public interface BlockingQueue extends Queue {
* specified waiting time elapses before an element is available
* @throws InterruptedException if interrupted while waiting
*/
- Object poll(long timeout, TimeUnit unit)
+ E poll(long timeout, TimeUnit unit)
throws InterruptedException;
/**
@@ -313,7 +315,7 @@ public interface BlockingQueue extends Queue {
* queue, or some property of an element of this queue prevents
* it from being added to the specified collection
*/
- int drainTo(Collection c);
+ int drainTo(Collection<? super E> c);
/**
* Removes at most the given number of available elements from
@@ -338,5 +340,5 @@ public interface BlockingQueue extends Queue {
* queue, or some property of an element of this queue prevents
* it from being added to the specified collection
*/
- int drainTo(Collection c, int maxElements);
+ int drainTo(Collection<? super E> c, int maxElements);
}
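
With the interface parameterized as BlockingQueue<E>, insertion and retrieval are statically typed and the raw Object overloads disappear. A minimal producer/consumer sketch using the standard java.util.concurrent types, which have the same shape as the generified interface above:

import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;

public class PingPong {
    public static void main(String[] args) throws InterruptedException {
        BlockingQueue<String> queue = new LinkedBlockingQueue<String>(10);

        Thread producer = new Thread(() -> {
            try {
                queue.put("hello"); // blocks while the queue is full
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
            }
        });
        producer.start();

        // Timed retrieval: returns null if nothing arrives in time,
        // instead of blocking indefinitely like take().
        String msg = queue.poll(1, TimeUnit.SECONDS);
        System.out.println(msg != null ? msg : "timed out");
        producer.join();
    }
}
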
diff --git a/src/actors/scala/actors/threadpool/Executors.java b/src/actors/scala/actors/threadpool/Executors.java
index e74d665f33..49a127a8db 100644
--- a/src/actors/scala/actors/threadpool/Executors.java
+++ b/src/actors/scala/actors/threadpool/Executors.java
@@ -605,12 +605,12 @@ public class Executors {
public Future submit(Runnable task, Object result) {
return e.submit(task, result);
}
- public List invokeAll(Collection tasks)
+ public List<Future> invokeAll(Collection tasks)
throws InterruptedException {
return e.invokeAll(tasks);
}
- public List invokeAll(Collection tasks,
- long timeout, TimeUnit unit)
+ public List<Future> invokeAll(Collection tasks,
+ long timeout, TimeUnit unit)
throws InterruptedException {
return e.invokeAll(tasks, timeout, unit);
}
diff --git a/src/actors/scala/actors/threadpool/LinkedBlockingQueue.java b/src/actors/scala/actors/threadpool/LinkedBlockingQueue.java
index 87fecff09c..f434ab0e7b 100644
--- a/src/actors/scala/actors/threadpool/LinkedBlockingQueue.java
+++ b/src/actors/scala/actors/threadpool/LinkedBlockingQueue.java
@@ -6,11 +6,13 @@
package scala.actors.threadpool;
+import java.util.concurrent.atomic.AtomicInteger;
+import java.util.concurrent.locks.Condition;
+import java.util.concurrent.locks.ReentrantLock;
+import java.util.AbstractQueue;
import java.util.Collection;
import java.util.Iterator;
import java.util.NoSuchElementException;
-//import edu.emory.mathcs.backport.java.util.*;
-import scala.actors.threadpool.helpers.*;
/**
* An optionally-bounded {@linkplain BlockingQueue blocking queue} based on
@@ -41,10 +43,11 @@ import scala.actors.threadpool.helpers.*;
*
* @since 1.5
* @author Doug Lea
+ * @param <E> the type of elements held in this collection
*
*/
-public class LinkedBlockingQueue extends AbstractQueue
- implements BlockingQueue, java.io.Serializable {
+public class LinkedBlockingQueue<E> extends AbstractQueue<E>
+ implements BlockingQueue<E>, java.io.Serializable {
private static final long serialVersionUID = -6903933977591709194L;
/*
@@ -59,43 +62,87 @@ public class LinkedBlockingQueue extends AbstractQueue
* items have been entered since the signal. And symmetrically for
* takes signalling puts. Operations such as remove(Object) and
* iterators acquire both locks.
+ *
+ * Visibility between writers and readers is provided as follows:
+ *
+ * Whenever an element is enqueued, the putLock is acquired and
+ * count updated. A subsequent reader guarantees visibility to the
+ * enqueued Node by either acquiring the putLock (via fullyLock)
+ * or by acquiring the takeLock, and then reading n = count.get();
+ * this gives visibility to the first n items.
+ *
+ * To implement weakly consistent iterators, it appears we need to
+ * keep all Nodes GC-reachable from a predecessor dequeued Node.
+ * That would cause two problems:
+ * - allow a rogue Iterator to cause unbounded memory retention
+ * - cause cross-generational linking of old Nodes to new Nodes if
+ * a Node was tenured while live, which generational GCs have a
+ * hard time dealing with, causing repeated major collections.
+ * However, only non-deleted Nodes need to be reachable from
+ * dequeued Nodes, and reachability does not necessarily have to
+ * be of the kind understood by the GC. We use the trick of
+ * linking a Node that has just been dequeued to itself. Such a
+ * self-link implicitly means to advance to head.next.
*/
/**
* Linked list node class
*/
- static class Node {
- /** The item, volatile to ensure barrier separating write and read */
- volatile Object item;
- Node next;
- Node(Object x) { item = x; }
+ static class Node<E> {
+ E item;
+
+ /**
+ * One of:
+ * - the real successor Node
+ * - this Node, meaning the successor is head.next
+ * - null, meaning there is no successor (this is the last node)
+ */
+ Node<E> next;
+
+ Node(E x) { item = x; }
}
/** The capacity bound, or Integer.MAX_VALUE if none */
private final int capacity;
/** Current number of elements */
- private volatile int count = 0;
+ private final AtomicInteger count = new AtomicInteger(0);
- /** Head of linked list */
- private transient Node head;
+ /**
+ * Head of linked list.
+ * Invariant: head.item == null
+ */
+ private transient Node<E> head;
- /** Tail of linked list */
- private transient Node last;
+ /**
+ * Tail of linked list.
+ * Invariant: last.next == null
+ */
+ private transient Node<E> last;
/** Lock held by take, poll, etc */
- private final Object takeLock = new SerializableLock();
+ private final ReentrantLock takeLock = new ReentrantLock();
+
+ /** Wait queue for waiting takes */
+ private final Condition notEmpty = takeLock.newCondition();
/** Lock held by put, offer, etc */
- private final Object putLock = new SerializableLock();
+ private final ReentrantLock putLock = new ReentrantLock();
+
+ /** Wait queue for waiting puts */
+ private final Condition notFull = putLock.newCondition();
/**
* Signals a waiting take. Called only from put/offer (which do not
* otherwise ordinarily lock takeLock.)
*/
private void signalNotEmpty() {
- synchronized (takeLock) {
- takeLock.notify();
+ final ReentrantLock takeLock = this.takeLock;
+ takeLock.lock();
+ try {
+ notEmpty.signal();
+ } finally {
+ takeLock.unlock();
}
}
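
The hunk above swaps the raw Object monitors for the two-lock queue design of java.util.concurrent.LinkedBlockingQueue: a ReentrantLock plus notFull condition for producers, a ReentrantLock plus notEmpty condition for consumers, and an AtomicInteger count as the only state shared between the two sides. A stripped-down sketch of just that skeleton (hypothetical class, not part of the patch), to make the invariant visible:

import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.locks.Condition;
import java.util.concurrent.locks.ReentrantLock;

// Skeleton of the two-lock queue: producers and consumers never
// contend on the same lock; only `count` is shared between them.
class TwoLockSkeleton<E> {
    private final ReentrantLock putLock = new ReentrantLock();
    private final Condition notFull = putLock.newCondition();
    private final ReentrantLock takeLock = new ReentrantLock();
    private final Condition notEmpty = takeLock.newCondition();
    // Atomic because it is incremented under putLock, decremented
    // under takeLock, and read without any lock at all by size().
    private final AtomicInteger count = new AtomicInteger(0);

    public int size() {
        return count.get(); // a single atomic read, no locking needed
    }
}
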
@@ -103,34 +150,69 @@ public class LinkedBlockingQueue extends AbstractQueue
* Signals a waiting put. Called only from take/poll.
*/
private void signalNotFull() {
- synchronized (putLock) {
- putLock.notify();
+ final ReentrantLock putLock = this.putLock;
+ putLock.lock();
+ try {
+ notFull.signal();
+ } finally {
+ putLock.unlock();
}
}
/**
* Creates a node and links it at end of queue.
+ *
* @param x the item
*/
- private void insert(Object x) {
- last = last.next = new Node(x);
+ private void enqueue(E x) {
+ // assert putLock.isHeldByCurrentThread();
+ // assert last.next == null;
+ last = last.next = new Node<E>(x);
}
/**
- * Removes a node from head of queue,
+ * Removes a node from head of queue.
+ *
* @return the node
*/
- private Object extract() {
- Node first = head.next;
+ private E dequeue() {
+ // assert takeLock.isHeldByCurrentThread();
+ // assert head.item == null;
+ Node<E> h = head;
+ Node<E> first = h.next;
+ h.next = h; // help GC
head = first;
- Object x = first.item;
+ E x = first.item;
first.item = null;
return x;
}
+ /**
+ * Lock to prevent both puts and takes.
+ */
+ void fullyLock() {
+ putLock.lock();
+ takeLock.lock();
+ }
/**
- * Creates a <tt>LinkedBlockingQueue</tt> with a capacity of
+ * Unlock to allow both puts and takes.
+ */
+ void fullyUnlock() {
+ takeLock.unlock();
+ putLock.unlock();
+ }
+
+// /**
+// * Tells whether both locks are held by current thread.
+// */
+// boolean isFullyLocked() {
+// return (putLock.isHeldByCurrentThread() &&
+// takeLock.isHeldByCurrentThread());
+// }
+
+ /**
+ * Creates a {@code LinkedBlockingQueue} with a capacity of
* {@link Integer#MAX_VALUE}.
*/
public LinkedBlockingQueue() {
@@ -138,20 +220,20 @@ public class LinkedBlockingQueue extends AbstractQueue
}
/**
- * Creates a <tt>LinkedBlockingQueue</tt> with the given (fixed) capacity.
+ * Creates a {@code LinkedBlockingQueue} with the given (fixed) capacity.
*
* @param capacity the capacity of this queue
- * @throws IllegalArgumentException if <tt>capacity</tt> is not greater
+ * @throws IllegalArgumentException if {@code capacity} is not greater
* than zero
*/
public LinkedBlockingQueue(int capacity) {
if (capacity <= 0) throw new IllegalArgumentException();
this.capacity = capacity;
- last = head = new Node(null);
+ last = head = new Node<E>(null);
}
/**
- * Creates a <tt>LinkedBlockingQueue</tt> with a capacity of
+ * Creates a {@code LinkedBlockingQueue} with a capacity of
* {@link Integer#MAX_VALUE}, initially containing the elements of the
* given collection,
* added in traversal order of the collection's iterator.
@@ -160,11 +242,23 @@ public class LinkedBlockingQueue extends AbstractQueue
* @throws NullPointerException if the specified collection or any
* of its elements are null
*/
- public LinkedBlockingQueue(Collection c) {
+ public LinkedBlockingQueue(Collection<? extends E> c) {
this(Integer.MAX_VALUE);
- for (Iterator itr = c.iterator(); itr.hasNext();) {
- Object e = itr.next();
- add(e);
+ final ReentrantLock putLock = this.putLock;
+ putLock.lock(); // Never contended, but necessary for visibility
+ try {
+ int n = 0;
+ for (E e : c) {
+ if (e == null)
+ throw new NullPointerException();
+ if (n == capacity)
+ throw new IllegalStateException("Queue full");
+ enqueue(e);
+ ++n;
+ }
+ count.set(n);
+ } finally {
+ putLock.unlock();
}
}
@@ -177,7 +271,7 @@ public class LinkedBlockingQueue extends AbstractQueue
* @return the number of elements in this queue
*/
public int size() {
- return count;
+ return count.get();
}
// this doc comment is a modified copy of the inherited doc comment,
@@ -186,15 +280,15 @@ public class LinkedBlockingQueue extends AbstractQueue
* Returns the number of additional elements that this queue can ideally
* (in the absence of memory or resource constraints) accept without
* blocking. This is always equal to the initial capacity of this queue
- * less the current <tt>size</tt> of this queue.
+ * less the current {@code size} of this queue.
*
* <p>Note that you <em>cannot</em> always tell if an attempt to insert
- * an element will succeed by inspecting <tt>remainingCapacity</tt>
+ * an element will succeed by inspecting {@code remainingCapacity}
* because it may be the case that another thread is about to
* insert or remove an element.
*/
public int remainingCapacity() {
- return capacity - count;
+ return capacity - count.get();
}
/**
@@ -204,34 +298,33 @@ public class LinkedBlockingQueue extends AbstractQueue
* @throws InterruptedException {@inheritDoc}
* @throws NullPointerException {@inheritDoc}
*/
- public void put(Object e) throws InterruptedException {
+ public void put(E e) throws InterruptedException {
if (e == null) throw new NullPointerException();
- // Note: convention in all put/take/etc is to preset
- // local var holding count negative to indicate failure unless set.
+ // Note: convention in all put/take/etc is to preset local var
+ // holding count negative to indicate failure unless set.
int c = -1;
- synchronized (putLock) {
+ final ReentrantLock putLock = this.putLock;
+ final AtomicInteger count = this.count;
+ putLock.lockInterruptibly();
+ try {
/*
* Note that count is used in wait guard even though it is
* not protected by lock. This works because count can
* only decrease at this point (all other puts are shut
* out by lock), and we (or some other waiting put) are
- * signalled if it ever changes from
- * capacity. Similarly for all other uses of count in
- * other wait guards.
+ * signalled if it ever changes from capacity. Similarly
+ * for all other uses of count in other wait guards.
*/
- try {
- while (count == capacity)
- putLock.wait();
- } catch (InterruptedException ie) {
- putLock.notify(); // propagate to a non-interrupted thread
- throw ie;
+ while (count.get() == capacity) {
+ notFull.await();
}
- insert(e);
- synchronized (this) { c = count++; }
+ enqueue(e);
+ c = count.getAndIncrement();
if (c + 1 < capacity)
- putLock.notify();
+ notFull.signal();
+ } finally {
+ putLock.unlock();
}
-
if (c == 0)
signalNotEmpty();
}
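
put() above now follows the standard Lock/Condition idiom: lockInterruptibly, await in a loop that re-checks the predicate, signal a further waiter on the same condition while still short of capacity, and unlock in finally; the cross-lock signalNotEmpty() runs only on the empty-to-non-empty transition (c == 0). The same idiom in isolation, as a hypothetical bounded counter:

import java.util.concurrent.locks.Condition;
import java.util.concurrent.locks.ReentrantLock;

// The acquire / await-loop / signal / unlock idiom used by put() above.
class BoundedCounter {
    private final ReentrantLock lock = new ReentrantLock();
    private final Condition notFull = lock.newCondition();
    private final int capacity;
    private int value;

    BoundedCounter(int capacity) { this.capacity = capacity; }

    void increment() throws InterruptedException {
        lock.lockInterruptibly();     // stays responsive to interruption
        try {
            while (value == capacity) // always re-check after waking
                notFull.await();
            value++;
            if (value < capacity)
                notFull.signal();     // cascade to the next waiter
        } finally {
            lock.unlock();            // never leak the lock
        }
    }

    void decrement() {
        lock.lock();
        try {
            if (value > 0 && value-- == capacity)
                notFull.signal();     // leaving the full state wakes a waiter
        } finally {
            lock.unlock();
        }
    }
}
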
@@ -240,37 +333,32 @@ public class LinkedBlockingQueue extends AbstractQueue
* Inserts the specified element at the tail of this queue, waiting if
* necessary up to the specified wait time for space to become available.
*
- * @return <tt>true</tt> if successful, or <tt>false</tt> if
+ * @return {@code true} if successful, or {@code false} if
* the specified waiting time elapses before space is available.
* @throws InterruptedException {@inheritDoc}
* @throws NullPointerException {@inheritDoc}
*/
- public boolean offer(Object e, long timeout, TimeUnit unit)
+ public boolean offer(E e, long timeout, TimeUnit unit)
throws InterruptedException {
if (e == null) throw new NullPointerException();
long nanos = unit.toNanos(timeout);
int c = -1;
- synchronized (putLock) {
- long deadline = Utils.nanoTime() + nanos;
- for (;;) {
- if (count < capacity) {
- insert(e);
- synchronized (this) { c = count++; }
- if (c + 1 < capacity)
- putLock.notify();
- break;
- }
+ final ReentrantLock putLock = this.putLock;
+ final AtomicInteger count = this.count;
+ putLock.lockInterruptibly();
+ try {
+ while (count.get() == capacity) {
if (nanos <= 0)
return false;
- try {
- TimeUnit.NANOSECONDS.timedWait(putLock, nanos);
- nanos = deadline - Utils.nanoTime();
- } catch (InterruptedException ie) {
- putLock.notify(); // propagate to a non-interrupted thread
- throw ie;
- }
+ nanos = notFull.awaitNanos(nanos);
}
+ enqueue(e);
+ c = count.getAndIncrement();
+ if (c + 1 < capacity)
+ notFull.signal();
+ } finally {
+ putLock.unlock();
}
if (c == 0)
signalNotEmpty();
@@ -280,7 +368,7 @@ public class LinkedBlockingQueue extends AbstractQueue
/**
* Inserts the specified element at the tail of this queue if it is
* possible to do so immediately without exceeding the queue's capacity,
- * returning <tt>true</tt> upon success and <tt>false</tt> if this queue
+ * returning {@code true} upon success and {@code false} if this queue
* is full.
* When using a capacity-restricted queue, this method is generally
* preferable to method {@link BlockingQueue#add add}, which can fail to
@@ -288,18 +376,23 @@ public class LinkedBlockingQueue extends AbstractQueue
*
* @throws NullPointerException if the specified element is null
*/
- public boolean offer(Object e) {
+ public boolean offer(E e) {
if (e == null) throw new NullPointerException();
- if (count == capacity)
+ final AtomicInteger count = this.count;
+ if (count.get() == capacity)
return false;
int c = -1;
- synchronized (putLock) {
- if (count < capacity) {
- insert(e);
- synchronized (this) { c = count++; }
+ final ReentrantLock putLock = this.putLock;
+ putLock.lock();
+ try {
+ if (count.get() < capacity) {
+ enqueue(e);
+ c = count.getAndIncrement();
if (c + 1 < capacity)
- putLock.notify();
+ notFull.signal();
}
+ } finally {
+ putLock.unlock();
}
if (c == 0)
signalNotEmpty();
@@ -307,128 +400,134 @@ public class LinkedBlockingQueue extends AbstractQueue
}
- public Object take() throws InterruptedException {
- Object x;
+ public E take() throws InterruptedException {
+ E x;
int c = -1;
- synchronized (takeLock) {
- try {
- while (count == 0)
- takeLock.wait();
- } catch (InterruptedException ie) {
- takeLock.notify(); // propagate to a non-interrupted thread
- throw ie;
+ final AtomicInteger count = this.count;
+ final ReentrantLock takeLock = this.takeLock;
+ takeLock.lockInterruptibly();
+ try {
+ while (count.get() == 0) {
+ notEmpty.await();
}
-
- x = extract();
- synchronized (this) { c = count--; }
+ x = dequeue();
+ c = count.getAndDecrement();
if (c > 1)
- takeLock.notify();
+ notEmpty.signal();
+ } finally {
+ takeLock.unlock();
}
if (c == capacity)
signalNotFull();
return x;
}
- public Object poll(long timeout, TimeUnit unit) throws InterruptedException {
- Object x = null;
+ public E poll(long timeout, TimeUnit unit) throws InterruptedException {
+ E x = null;
int c = -1;
long nanos = unit.toNanos(timeout);
- synchronized (takeLock) {
- long deadline = Utils.nanoTime() + nanos;
- for (;;) {
- if (count > 0) {
- x = extract();
- synchronized (this) { c = count--; }
- if (c > 1)
- takeLock.notify();
- break;
- }
+ final AtomicInteger count = this.count;
+ final ReentrantLock takeLock = this.takeLock;
+ takeLock.lockInterruptibly();
+ try {
+ while (count.get() == 0) {
if (nanos <= 0)
return null;
- try {
- TimeUnit.NANOSECONDS.timedWait(takeLock, nanos);
- nanos = deadline - Utils.nanoTime();
- } catch (InterruptedException ie) {
- takeLock.notify(); // propagate to a non-interrupted thread
- throw ie;
- }
+ nanos = notEmpty.awaitNanos(nanos);
}
+ x = dequeue();
+ c = count.getAndDecrement();
+ if (c > 1)
+ notEmpty.signal();
+ } finally {
+ takeLock.unlock();
}
if (c == capacity)
signalNotFull();
return x;
}
- public Object poll() {
- if (count == 0)
+ public E poll() {
+ final AtomicInteger count = this.count;
+ if (count.get() == 0)
return null;
- Object x = null;
+ E x = null;
int c = -1;
- synchronized (takeLock) {
- if (count > 0) {
- x = extract();
- synchronized (this) { c = count--; }
+ final ReentrantLock takeLock = this.takeLock;
+ takeLock.lock();
+ try {
+ if (count.get() > 0) {
+ x = dequeue();
+ c = count.getAndDecrement();
if (c > 1)
- takeLock.notify();
+ notEmpty.signal();
}
+ } finally {
+ takeLock.unlock();
}
if (c == capacity)
signalNotFull();
return x;
}
-
- public Object peek() {
- if (count == 0)
+ public E peek() {
+ if (count.get() == 0)
return null;
- synchronized (takeLock) {
- Node first = head.next;
+ final ReentrantLock takeLock = this.takeLock;
+ takeLock.lock();
+ try {
+ Node<E> first = head.next;
if (first == null)
return null;
else
return first.item;
+ } finally {
+ takeLock.unlock();
}
}
/**
+ * Unlinks interior Node p with predecessor trail.
+ */
+ void unlink(Node<E> p, Node<E> trail) {
+ // assert isFullyLocked();
+ // p.next is not changed, to allow iterators that are
+ // traversing p to maintain their weak-consistency guarantee.
+ p.item = null;
+ trail.next = p.next;
+ if (last == p)
+ last = trail;
+ if (count.getAndDecrement() == capacity)
+ notFull.signal();
+ }
+
+ /**
* Removes a single instance of the specified element from this queue,
- * if it is present. More formally, removes an element <tt>e</tt> such
- * that <tt>o.equals(e)</tt>, if this queue contains one or more such
+ * if it is present. More formally, removes an element {@code e} such
+ * that {@code o.equals(e)}, if this queue contains one or more such
* elements.
- * Returns <tt>true</tt> if this queue contained the specified element
+ * Returns {@code true} if this queue contained the specified element
* (or equivalently, if this queue changed as a result of the call).
*
* @param o element to be removed from this queue, if present
- * @return <tt>true</tt> if this queue changed as a result of the call
+ * @return {@code true} if this queue changed as a result of the call
*/
public boolean remove(Object o) {
if (o == null) return false;
- boolean removed = false;
- synchronized (putLock) {
- synchronized (takeLock) {
- Node trail = head;
- Node p = head.next;
- while (p != null) {
- if (o.equals(p.item)) {
- removed = true;
- break;
- }
- trail = p;
- p = p.next;
- }
- if (removed) {
- p.item = null;
- trail.next = p.next;
- if (last == p)
- last = trail;
- synchronized (this) {
- if (count-- == capacity)
- putLock.notifyAll();
- }
+ fullyLock();
+ try {
+ for (Node<E> trail = head, p = trail.next;
+ p != null;
+ trail = p, p = p.next) {
+ if (o.equals(p.item)) {
+ unlink(p, trail);
+ return true;
}
}
+ return false;
+ } finally {
+ fullyUnlock();
}
- return removed;
}
/**
@@ -445,15 +544,16 @@ public class LinkedBlockingQueue extends AbstractQueue
* @return an array containing all of the elements in this queue
*/
public Object[] toArray() {
- synchronized (putLock) {
- synchronized (takeLock) {
- int size = count;
- Object[] a = new Object[size];
- int k = 0;
- for (Node p = head.next; p != null; p = p.next)
- a[k++] = p.item;
- return a;
- }
+ fullyLock();
+ try {
+ int size = count.get();
+ Object[] a = new Object[size];
+ int k = 0;
+ for (Node<E> p = head.next; p != null; p = p.next)
+ a[k++] = p.item;
+ return a;
+ } finally {
+ fullyUnlock();
}
}
@@ -467,22 +567,22 @@ public class LinkedBlockingQueue extends AbstractQueue
* <p>If this queue fits in the specified array with room to spare
* (i.e., the array has more elements than this queue), the element in
* the array immediately following the end of the queue is set to
- * <tt>null</tt>.
+ * {@code null}.
*
* <p>Like the {@link #toArray()} method, this method acts as bridge between
* array-based and collection-based APIs. Further, this method allows
* precise control over the runtime type of the output array, and may,
* under certain circumstances, be used to save allocation costs.
*
- * <p>Suppose <tt>x</tt> is a queue known to contain only strings.
+ * <p>Suppose {@code x} is a queue known to contain only strings.
* The following code can be used to dump the queue into a newly
- * allocated array of <tt>String</tt>:
+ * allocated array of {@code String}:
*
* <pre>
* String[] y = x.toArray(new String[0]);</pre>
*
- * Note that <tt>toArray(new Object[0])</tt> is identical in function to
- * <tt>toArray()</tt>.
+ * Note that {@code toArray(new Object[0])} is identical in function to
+ * {@code toArray()}.
*
* @param a the array into which the elements of the queue are to
* be stored, if it is big enough; otherwise, a new array of the
@@ -493,29 +593,32 @@ public class LinkedBlockingQueue extends AbstractQueue
* this queue
* @throws NullPointerException if the specified array is null
*/
- public Object[] toArray(Object[] a) {
- synchronized (putLock) {
- synchronized (takeLock) {
- int size = count;
- if (a.length < size)
- a = (Object[])java.lang.reflect.Array.newInstance
- (a.getClass().getComponentType(), size);
-
- int k = 0;
- for (Node p = head.next; p != null; p = p.next)
- a[k++] = (Object)p.item;
- if (a.length > k)
- a[k] = null;
- return a;
- }
+ @SuppressWarnings("unchecked")
+ public <T> T[] toArray(T[] a) {
+ fullyLock();
+ try {
+ int size = count.get();
+ if (a.length < size)
+ a = (T[])java.lang.reflect.Array.newInstance
+ (a.getClass().getComponentType(), size);
+
+ int k = 0;
+ for (Node<E> p = head.next; p != null; p = p.next)
+ a[k++] = (T)p.item;
+ if (a.length > k)
+ a[k] = null;
+ return a;
+ } finally {
+ fullyUnlock();
}
}
public String toString() {
- synchronized (putLock) {
- synchronized (takeLock) {
- return super.toString();
- }
+ fullyLock();
+ try {
+ return super.toString();
+ } finally {
+ fullyUnlock();
}
}
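
toArray(T[]) keeps the classic reflective pattern while gaining a type parameter: if the caller's array is too small, a replacement with the same runtime component type is allocated via java.lang.reflect.Array.newInstance, and the unchecked casts are confined behind @SuppressWarnings. The same pattern as a standalone, hypothetical helper:

import java.lang.reflect.Array;
import java.util.List;

public class ToArrayDemo {
    // Copies src into dst if it fits; otherwise allocates a new array
    // with dst's runtime component type, the same reflective pattern
    // used by toArray(T[]) above.
    @SuppressWarnings("unchecked")
    static <T> T[] copyInto(List<?> src, T[] dst) {
        int size = src.size();
        if (dst.length < size)
            dst = (T[]) Array.newInstance(dst.getClass().getComponentType(), size);
        int k = 0;
        for (Object item : src)
            dst[k++] = (T) item;
        if (dst.length > k)
            dst[k] = null; // null-terminate, as the Collection contract asks
        return dst;
    }
}
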
@@ -524,19 +627,18 @@ public class LinkedBlockingQueue extends AbstractQueue
* The queue will be empty after this call returns.
*/
public void clear() {
- synchronized (putLock) {
- synchronized (takeLock) {
- head.next = null;
- assert head.item == null;
- last = head;
- int c;
- synchronized (this) {
- c = count;
- count = 0;
- }
- if (c == capacity)
- putLock.notifyAll();
+ fullyLock();
+ try {
+ for (Node<E> p, h = head; (p = h.next) != null; h = p) {
+ h.next = h;
+ p.item = null;
}
+ head = last;
+ // assert head.item == null && head.next == null;
+ if (count.getAndSet(0) == capacity)
+ notFull.signal();
+ } finally {
+ fullyUnlock();
}
}
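
clear() above self-links each unlinked node (h.next = h) rather than nulling its next field, the trick described in the long comment near the top of this file: dead nodes stop retaining the live list for the GC, yet an in-flight iterator can still recognize a self-linked node and resynchronize at head.next. A compact sketch of the trick (hypothetical class, mirroring dequeue() from this patch; locking omitted):

// A node whose next points to itself is known to be off the list, and
// a weakly consistent iterator treats it as "restart at head.next".
class SelfLinkDemo<E> {
    static class Node<E> {
        E item;
        Node<E> next;
        Node(E x) { item = x; }
    }

    Node<E> head = new Node<E>(null); // sentinel: head.item == null
    Node<E> last = head;

    // Caller must ensure the list is non-empty, as in the patch.
    E dequeue() {
        Node<E> h = head;
        Node<E> first = h.next;
        h.next = h;        // self-link: frees h for GC without breaking iterators
        head = first;
        E x = first.item;
        first.item = null; // head is always the item-less sentinel
        return x;
    }

    // How an iterator detects a dequeued node and resynchronizes:
    Node<E> successorOf(Node<E> p) {
        return (p.next == p) ? head.next : p.next;
    }
}
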
@@ -546,35 +648,8 @@ public class LinkedBlockingQueue extends AbstractQueue
* @throws NullPointerException {@inheritDoc}
* @throws IllegalArgumentException {@inheritDoc}
*/
- public int drainTo(Collection c) {
- if (c == null)
- throw new NullPointerException();
- if (c == this)
- throw new IllegalArgumentException();
- Node first;
- synchronized (putLock) {
- synchronized (takeLock) {
- first = head.next;
- head.next = null;
- assert head.item == null;
- last = head;
- int cold;
- synchronized (this) {
- cold = count;
- count = 0;
- }
- if (cold == capacity)
- putLock.notifyAll();
- }
- }
- // Transfer the elements outside of locks
- int n = 0;
- for (Node p = first; p != null; p = p.next) {
- c.add(p.item);
- p.item = null;
- ++n;
- }
- return n;
+ public int drainTo(Collection<? super E> c) {
+ return drainTo(c, Integer.MAX_VALUE);
}
/**
@@ -583,70 +658,77 @@ public class LinkedBlockingQueue extends AbstractQueue
* @throws NullPointerException {@inheritDoc}
* @throws IllegalArgumentException {@inheritDoc}
*/
- public int drainTo(Collection c, int maxElements) {
+ public int drainTo(Collection<? super E> c, int maxElements) {
if (c == null)
throw new NullPointerException();
if (c == this)
throw new IllegalArgumentException();
- synchronized (putLock) {
- synchronized (takeLock) {
- int n = 0;
- Node p = head.next;
- while (p != null && n < maxElements) {
+ boolean signalNotFull = false;
+ final ReentrantLock takeLock = this.takeLock;
+ takeLock.lock();
+ try {
+ int n = Math.min(maxElements, count.get());
+ // count.get provides visibility to first n Nodes
+ Node<E> h = head;
+ int i = 0;
+ try {
+ while (i < n) {
+ Node<E> p = h.next;
c.add(p.item);
p.item = null;
- p = p.next;
- ++n;
- }
- if (n != 0) {
- head.next = p;
- assert head.item == null;
- if (p == null)
- last = head;
- int cold;
- synchronized (this) {
- cold = count;
- count -= n;
- }
- if (cold == capacity)
- putLock.notifyAll();
+ h.next = h;
+ h = p;
+ ++i;
}
return n;
+ } finally {
+ // Restore invariants even if c.add() threw
+ if (i > 0) {
+ // assert h.item == null;
+ head = h;
+ signalNotFull = (count.getAndAdd(-i) == capacity);
+ }
}
+ } finally {
+ takeLock.unlock();
+ if (signalNotFull)
+ signalNotFull();
}
}
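
A usage sketch for the generified drainTo, again shown against the java.util.concurrent original: the Collection<? super E> bound lets a queue of Strings drain into a collection of any supertype, and the bounded overload caps how many elements move:

    import java.util.ArrayList
    import java.util.concurrent.LinkedBlockingQueue

    object DrainDemo {
      def main(args: Array[String]): Unit = {
        val q = new LinkedBlockingQueue[String]()
        q.put("a"); q.put("b"); q.put("c")
        val sink  = new ArrayList[AnyRef]()   // a supertype of String is fine
        val moved = q.drainTo(sink, 2)        // bounded variant shown above
        println(moved + " drained, " + q.size + " left")   // 2 drained, 1 left
      }
    }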
/**
* Returns an iterator over the elements in this queue in proper sequence.
- * The returned <tt>Iterator</tt> is a "weakly consistent" iterator that
- * will never throw {@link java.util.ConcurrentModificationException},
+ * The returned {@code Iterator} is a "weakly consistent" iterator that
+ * will never throw {@link java.util.ConcurrentModificationException
+ * ConcurrentModificationException},
* and guarantees to traverse elements as they existed upon
* construction of the iterator, and may (but is not guaranteed to)
* reflect any modifications subsequent to construction.
*
* @return an iterator over the elements in this queue in proper sequence
*/
- public Iterator iterator() {
+ public Iterator<E> iterator() {
return new Itr();
}
- private class Itr implements Iterator {
+ private class Itr implements Iterator<E> {
/*
- * Basic weak-consistent iterator. At all times hold the next
+ * Basic weakly-consistent iterator. At all times hold the next
* item to hand out so that if hasNext() reports true, we will
* still have it to return even if lost race with a take etc.
*/
- private Node current;
- private Node lastRet;
- private Object currentElement;
+ private Node<E> current;
+ private Node<E> lastRet;
+ private E currentElement;
Itr() {
- synchronized (putLock) {
- synchronized (takeLock) {
- current = head.next;
- if (current != null)
- currentElement = current.item;
- }
+ fullyLock();
+ try {
+ current = head.next;
+ if (current != null)
+ currentElement = current.item;
+ } finally {
+ fullyUnlock();
}
}
@@ -654,45 +736,56 @@ public class LinkedBlockingQueue extends AbstractQueue
return current != null;
}
- public Object next() {
- synchronized (putLock) {
- synchronized (takeLock) {
- if (current == null)
- throw new NoSuchElementException();
- Object x = currentElement;
- lastRet = current;
- current = current.next;
- if (current != null)
- currentElement = current.item;
- return x;
- }
+ /**
+ * Returns the next live successor of p, or null if there is none.
+ *
+ * Unlike other traversal methods, iterators need to handle both:
+ * - dequeued nodes (p.next == p)
+ * - (possibly multiple) interior removed nodes (p.item == null)
+ */
+ private Node<E> nextNode(Node<E> p) {
+ for (;;) {
+ Node<E> s = p.next;
+ if (s == p)
+ return head.next;
+ if (s == null || s.item != null)
+ return s;
+ p = s;
+ }
+ }
+
+ public E next() {
+ fullyLock();
+ try {
+ if (current == null)
+ throw new NoSuchElementException();
+ E x = currentElement;
+ lastRet = current;
+ current = nextNode(current);
+ currentElement = (current == null) ? null : current.item;
+ return x;
+ } finally {
+ fullyUnlock();
}
}
public void remove() {
if (lastRet == null)
throw new IllegalStateException();
- synchronized (putLock) {
- synchronized (takeLock) {
- Node node = lastRet;
- lastRet = null;
- Node trail = head;
- Node p = head.next;
- while (p != null && p != node) {
- trail = p;
- p = p.next;
- }
+ fullyLock();
+ try {
+ Node<E> node = lastRet;
+ lastRet = null;
+ for (Node<E> trail = head, p = trail.next;
+ p != null;
+ trail = p, p = p.next) {
if (p == node) {
- p.item = null;
- trail.next = p.next;
- if (last == p)
- last = trail;
- int c;
- synchronized (this) { c = count--; }
- if (c == capacity)
- putLock.notifyAll();
+ unlink(p, trail);
+ break;
}
}
+ } finally {
+ fullyUnlock();
}
}
}
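
What "weakly consistent" buys the caller, sketched against the java.util.concurrent original: the iterator keeps working across concurrent removals instead of throwing ConcurrentModificationException:

    import java.util.concurrent.LinkedBlockingQueue

    object WeakIterDemo {
      def main(args: Array[String]): Unit = {
        val q = new LinkedBlockingQueue[Int]()
        (1 to 5).foreach(i => q.put(i))
        val it = q.iterator()
        q.take()                                  // removal mid-iteration
        while (it.hasNext) print(it.next() + " ") // still traverses, no CME
        println()
      }
    }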
@@ -701,31 +794,33 @@ public class LinkedBlockingQueue extends AbstractQueue
* Save the state to a stream (that is, serialize it).
*
* @serialData The capacity is emitted (int), followed by all of
- * its elements (each an <tt>Object</tt>) in the proper order,
+ * its elements (each an {@code Object}) in the proper order,
* followed by a null
* @param s the stream
*/
private void writeObject(java.io.ObjectOutputStream s)
throws java.io.IOException {
- synchronized (putLock) {
- synchronized (takeLock) {
- // Write out any hidden stuff, plus capacity
- s.defaultWriteObject();
+ fullyLock();
+ try {
+ // Write out any hidden stuff, plus capacity
+ s.defaultWriteObject();
- // Write out all elements in the proper order.
- for (Node p = head.next; p != null; p = p.next)
- s.writeObject(p.item);
+ // Write out all elements in the proper order.
+ for (Node<E> p = head.next; p != null; p = p.next)
+ s.writeObject(p.item);
- // Use trailing null as sentinel
- s.writeObject(null);
- }
+ // Use trailing null as sentinel
+ s.writeObject(null);
+ } finally {
+ fullyUnlock();
}
}
/**
* Reconstitute this queue instance from a stream (that is,
* deserialize it).
+ *
* @param s the stream
*/
private void readObject(java.io.ObjectInputStream s)
@@ -733,19 +828,16 @@ public class LinkedBlockingQueue extends AbstractQueue
// Read in capacity, and any hidden stuff
s.defaultReadObject();
- synchronized (this) { count = 0; }
- last = head = new Node(null);
+ count.set(0);
+ last = head = new Node<E>(null);
// Read in all elements and place in queue
for (;;) {
- Object item = (Object)s.readObject();
+ @SuppressWarnings("unchecked")
+ E item = (E)s.readObject();
if (item == null)
break;
add(item);
}
}
-
- private static class SerializableLock implements java.io.Serializable {
- private final static long serialVersionUID = -8856990691138858668L;
- }
}
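
The serialized form above writes the capacity, the elements in order, then a null sentinel; readObject consumes elements until it sees the sentinel, so no element count needs to be stored. The same convention in isolation, as a runnable sketch:

    import java.io._

    object SentinelDemo {
      def main(args: Array[String]): Unit = {
        val bytes = new ByteArrayOutputStream()
        val out   = new ObjectOutputStream(bytes)
        List("a", "b", "c").foreach(out.writeObject(_))
        out.writeObject(null)                    // trailing sentinel
        out.close()

        val in   = new ObjectInputStream(new ByteArrayInputStream(bytes.toByteArray))
        var item = in.readObject()
        while (item != null) { println(item); item = in.readObject() }
      }
    }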
diff --git a/src/actors/scala/actors/threadpool/ThreadPoolExecutor.java b/src/actors/scala/actors/threadpool/ThreadPoolExecutor.java
index f41b2790b6..11e35b034c 100644
--- a/src/actors/scala/actors/threadpool/ThreadPoolExecutor.java
+++ b/src/actors/scala/actors/threadpool/ThreadPoolExecutor.java
@@ -791,7 +791,7 @@ public class ThreadPoolExecutor extends AbstractExecutorService {
*/
private List drainQueue() {
BlockingQueue q = workQueue;
- List taskList = new ArrayList();
+ List<Runnable> taskList = new ArrayList<Runnable>();
q.drainTo(taskList);
if (!q.isEmpty()) {
Runnable[] arr = (Runnable[])q.toArray(new Runnable[0]);
diff --git a/src/actors/scala/actors/threadpool/helpers/FIFOWaitQueue.java b/src/actors/scala/actors/threadpool/helpers/FIFOWaitQueue.java
index 6306faa08f..432b851f3e 100644
--- a/src/actors/scala/actors/threadpool/helpers/FIFOWaitQueue.java
+++ b/src/actors/scala/actors/threadpool/helpers/FIFOWaitQueue.java
@@ -64,7 +64,7 @@ public class FIFOWaitQueue extends WaitQueue implements java.io.Serializable {
}
public Collection getWaitingThreads() {
- List list = new ArrayList();
+ List<Thread> list = new ArrayList<Thread>();
int count = 0;
WaitNode node = head_;
while (node != null) {
diff --git a/src/actors/scala/actors/threadpool/helpers/Utils.java b/src/actors/scala/actors/threadpool/helpers/Utils.java
index df1dbd4960..d12389215d 100644
--- a/src/actors/scala/actors/threadpool/helpers/Utils.java
+++ b/src/actors/scala/actors/threadpool/helpers/Utils.java
@@ -41,9 +41,9 @@ public final class Utils {
static {
NanoTimer timer = null;
try {
- String nanoTimerClassName = (String)
- AccessController.doPrivileged(new PrivilegedAction() {
- public Object run() {
+ String nanoTimerClassName =
+ AccessController.doPrivileged(new PrivilegedAction<String>() {
+ public String run() {
return System.getProperty(providerProp);
}
});
@@ -206,9 +206,9 @@ public final class Utils {
final Perf perf;
final long multiplier, divisor;
SunPerfProvider() {
- perf = (Perf)
- AccessController.doPrivileged(new PrivilegedAction() {
- public Object run() {
+ perf =
+ AccessController.doPrivileged(new PrivilegedAction<Perf>() {
+ public Perf run() {
return Perf.getPerf();
}
});
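
The Utils.java hunks are a pure generics cleanup: typing the PrivilegedAction removes the cast on doPrivileged's result. The same shape in Scala, with a property name chosen only for the example:

    import java.security.{ AccessController, PrivilegedAction }

    object PrivDemo {
      def main(args: Array[String]): Unit = {
        // PrivilegedAction[String] makes doPrivileged return String directly.
        val v = AccessController.doPrivileged(new PrivilegedAction[String] {
          def run(): String = System.getProperty("java.version")
        })
        println(v)
      }
    }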
diff --git a/src/actors/scala/actors/threadpool/locks/CondVar.java b/src/actors/scala/actors/threadpool/locks/CondVar.java
index 132e72fe2a..44df1c0b97 100644
--- a/src/actors/scala/actors/threadpool/locks/CondVar.java
+++ b/src/actors/scala/actors/threadpool/locks/CondVar.java
@@ -17,6 +17,7 @@ import scala.actors.threadpool.*;
import scala.actors.threadpool.helpers.*;
class CondVar implements Condition, java.io.Serializable {
+ private static final long serialVersionUID = -5009898475638427940L;
/** The lock **/
protected final ExclusiveLock lock;
diff --git a/src/actors/scala/actors/threadpool/locks/FIFOCondVar.java b/src/actors/scala/actors/threadpool/locks/FIFOCondVar.java
index 7495a8a884..144ac54d37 100644
--- a/src/actors/scala/actors/threadpool/locks/FIFOCondVar.java
+++ b/src/actors/scala/actors/threadpool/locks/FIFOCondVar.java
@@ -17,6 +17,7 @@ import scala.actors.threadpool.*;
import scala.actors.threadpool.helpers.*;
class FIFOCondVar extends CondVar implements Condition, java.io.Serializable {
+ private static final long serialVersionUID = -497497271881010475L;
private static final WaitQueue.QueuedSync sync = new WaitQueue.QueuedSync() {
public boolean recheck(WaitQueue.WaitNode node) { return false; }
diff --git a/src/actors/scala/actors/threadpool/locks/ReentrantReadWriteLock.java b/src/actors/scala/actors/threadpool/locks/ReentrantReadWriteLock.java
index 6411bbea01..437af77c7a 100644
--- a/src/actors/scala/actors/threadpool/locks/ReentrantReadWriteLock.java
+++ b/src/actors/scala/actors/threadpool/locks/ReentrantReadWriteLock.java
@@ -190,7 +190,7 @@ public class ReentrantReadWriteLock implements ReadWriteLock, java.io.Serializab
transient int writeHolds_ = 0;
/** Number of acquires on read lock by any reader thread **/
- transient HashMap readers_ = new HashMap();
+ transient HashMap<Thread, Integer> readers_ = new HashMap<Thread, Integer>();
/** cache/reuse the special Integer value one to speed up readlocks **/
static final Integer IONE = new Integer(1);
@@ -344,7 +344,7 @@ public class ReentrantReadWriteLock implements ReadWriteLock, java.io.Serializab
synchronized int getReadHoldCount() {
if (activeReaders_ == 0) return 0;
Thread t = Thread.currentThread();
- Integer i = (Integer)readers_.get(t);
+ Integer i = readers_.get(t);
return (i == null) ? 0 : i.intValue();
}
@@ -363,7 +363,7 @@ public class ReentrantReadWriteLock implements ReadWriteLock, java.io.Serializab
// and ensure visibility by synchronizing (all other accesses to
// readers_ are also synchronized on "this")
synchronized (this) {
- readers_ = new HashMap();
+ readers_ = new HashMap<Thread, Integer>();
}
}
}
@@ -372,6 +372,8 @@ public class ReentrantReadWriteLock implements ReadWriteLock, java.io.Serializab
* Nonfair version of Sync
*/
private static class NonfairSync extends Sync {
+ private static final long serialVersionUID = -2392241841540339773L;
+
NonfairSync() {}
}
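
The CondVar, FIFOCondVar and NonfairSync hunks all apply one convention: every Serializable class pins an explicit serialVersionUID so that recompilation cannot silently break deserialization of previously written instances. In Scala the same convention is usually spelled with an annotation (the class below is just an example):

    @SerialVersionUID(1L)
    class Config(val name: String) extends Serializable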
diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala
index 7efd8ad2a0..768f207968 100644
--- a/src/compiler/scala/tools/nsc/Global.scala
+++ b/src/compiler/scala/tools/nsc/Global.scala
@@ -216,8 +216,10 @@ class Global(var settings: Settings, var reporter: Reporter) extends SymbolTable
}
}
- if (settings.verbose.value || settings.Ylogcp.value)
- inform("[Classpath = " + classPath.asClasspathString + "]")
+ if (settings.verbose.value || settings.Ylogcp.value) {
+ inform("[search path for source files: " + classPath.sourcepaths.mkString(",") + "]")
+ inform("[search path for class files: " + classPath.asClasspathString + "]")
+ }
/** True if -Xscript has been set, indicating a script run.
*/
diff --git a/src/compiler/scala/tools/nsc/Interpreter.scala b/src/compiler/scala/tools/nsc/Interpreter.scala
index 128c378e23..5d11973a74 100644
--- a/src/compiler/scala/tools/nsc/Interpreter.scala
+++ b/src/compiler/scala/tools/nsc/Interpreter.scala
@@ -146,7 +146,7 @@ class Interpreter(val settings: Settings, out: PrintWriter) {
else null
}
- import compiler.{ Traverser, CompilationUnit, Symbol, Name, Type }
+ import compiler.{ Traverser, CompilationUnit, Symbol, Name, Type, TypeRef, PolyType }
import compiler.{
Tree, TermTree, ValOrDefDef, ValDef, DefDef, Assign, ClassDef,
ModuleDef, Ident, Select, TypeDef, Import, MemberDef, DocDef,
@@ -946,14 +946,19 @@ class Interpreter(val settings: Settings, out: PrintWriter) {
lazy val typeOf: Map[Name, String] = {
def getTypes(names: List[Name], nameMap: Name => Name): Map[Name, String] = {
names.foldLeft(Map.empty[Name, String]) { (map, name) =>
- val rawType = atNextPhase(resObjSym.info.member(name).tpe)
+ val tp1 = atNextPhase(resObjSym.info.nonPrivateDecl(name).tpe)
// the types are all =>T; remove the =>
- val cleanedType = rawType match {
- case compiler.PolyType(Nil, rt) => rt
- case rawType => rawType
+ val tp2 = tp1 match {
+ case PolyType(Nil, tp) => tp
+ case tp => tp
}
+ // normalize non-public types so we don't see protected aliases like Self
+ val tp3 = compiler.atPhase(objRun.typerPhase)(tp2 match {
+ case TypeRef(_, sym, _) if !sym.isPublic => tp2.normalize.toString
+ case tp => tp.toString
+ })
- map + (name -> atNextPhase(cleanedType.toString))
+ map + (name -> tp3)
}
}
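
For context on the tp1/tp2 step: the members of the result object are by-name (`=> T`), which this compiler represents as PolyType(Nil, T), so the match strips that wrapper. A stand-in sketch (these case classes are illustrative, not the compiler's actual Type hierarchy):

    object NullaryDemo {
      sealed trait Tpe
      case class PolyType(tparams: List[String], result: Tpe) extends Tpe
      case class Named(name: String) extends Tpe

      // Mirrors the tp1 -> tp2 step: unwrap `=> T`, leave other types alone.
      def dropByName(tp: Tpe): Tpe = tp match {
        case PolyType(Nil, tp) => tp
        case tp                => tp
      }

      def main(args: Array[String]): Unit =
        println(dropByName(PolyType(Nil, Named("Int"))))   // Named(Int)
    }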
diff --git a/src/compiler/scala/tools/nsc/ast/DocComments.scala b/src/compiler/scala/tools/nsc/ast/DocComments.scala
index 735416874d..d851d6ffdf 100755
--- a/src/compiler/scala/tools/nsc/ast/DocComments.scala
+++ b/src/compiler/scala/tools/nsc/ast/DocComments.scala
@@ -415,7 +415,7 @@ trait DocComments { self: SymbolTable =>
}
for (defn <- defined) yield {
- defn.cloneSymbol(defn.owner).setFlag(Flags.SYNTHETIC).setInfo(
+ defn.cloneSymbol.setFlag(Flags.SYNTHETIC).setInfo(
substAliases(defn.info).asSeenFrom(site.thisType, defn.owner))
}
}
diff --git a/src/compiler/scala/tools/nsc/ast/TreeDSL.scala b/src/compiler/scala/tools/nsc/ast/TreeDSL.scala
index 34d3423401..a24c8c01d3 100644
--- a/src/compiler/scala/tools/nsc/ast/TreeDSL.scala
+++ b/src/compiler/scala/tools/nsc/ast/TreeDSL.scala
@@ -76,20 +76,24 @@ trait TreeDSL {
else gen.mkAnd(target, other)
/** Note - calling ANY_== in the matcher caused primitives to get boxed
- * for the comparison, whereas looking up nme.EQ does not.
+ * for the comparison, whereas looking up nme.EQ does not. See #3570 for
+ * an example of how target.tpe can be non-null, yet it claims not to have
+ * a member called nme.EQ. Not sure if that should happen, but we can be
+ * robust by dragging in Any regardless.
*/
def MEMBER_== (other: Tree) = {
- if (target.tpe == null) ANY_==(other)
- else fn(target, target.tpe member nme.EQ, other)
+ val opSym = if (target.tpe == null) NoSymbol else target.tpe member nme.EQ
+ if (opSym == NoSymbol) ANY_==(other)
+ else fn(target, opSym, other)
}
- def ANY_NE (other: Tree) = fn(target, nme.ne, toAnyRef(other))
def ANY_EQ (other: Tree) = fn(target, nme.eq, toAnyRef(other))
+ def ANY_NE (other: Tree) = fn(target, nme.ne, toAnyRef(other))
def ANY_== (other: Tree) = fn(target, Any_==, other)
- def ANY_>= (other: Tree) = fn(target, nme.GE, other)
- def ANY_<= (other: Tree) = fn(target, nme.LE, other)
- def OBJ_!= (other: Tree) = fn(target, Object_ne, other)
- def OBJ_EQ (other: Tree) = fn(target, nme.eq, other)
- def OBJ_NE (other: Tree) = fn(target, nme.ne, other)
+ def ANY_!= (other: Tree) = fn(target, Any_!=, other)
+ def OBJ_== (other: Tree) = fn(target, Object_==, other)
+ def OBJ_!= (other: Tree) = fn(target, Object_!=, other)
+ def OBJ_EQ (other: Tree) = fn(target, Object_eq, other)
+ def OBJ_NE (other: Tree) = fn(target, Object_ne, other)
def INT_| (other: Tree) = fn(target, getMember(IntClass, nme.OR), other)
def INT_& (other: Tree) = fn(target, getMember(IntClass, nme.AND), other)
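
The MEMBER_== fix has a simple shape: attempt the precise lookup, and fall back to the generic comparison whenever the lookup fails, not only when the type is null. A stand-in sketch with Option playing the role of the NoSymbol check:

    object LookupFallback {
      // `lookup` stands in for `tpe member nme.EQ`; None plays NoSymbol.
      def memberEq(tpe: Option[String], lookup: String => Option[String]): String =
        tpe.flatMap(lookup).getOrElse("Any_==")   // robust fallback, per #3570

      def main(args: Array[String]): Unit =
        println(memberEq(Some("T"), _ => None))   // Any_==
    }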
diff --git a/src/compiler/scala/tools/nsc/ast/TreeGen.scala b/src/compiler/scala/tools/nsc/ast/TreeGen.scala
index 5044105684..41e93ae386 100644
--- a/src/compiler/scala/tools/nsc/ast/TreeGen.scala
+++ b/src/compiler/scala/tools/nsc/ast/TreeGen.scala
@@ -137,7 +137,7 @@ abstract class TreeGen {
assert(!tree.tpe.isInstanceOf[MethodType], tree)
assert(!pt.typeSymbol.isPackageClass)
assert(!pt.typeSymbol.isPackageObjectClass)
- assert(pt eq pt.normalize) //@MAT only called during erasure, which already takes care of that
+ assert(pt eq pt.normalize, tree +" : "+ debugString(pt) +" ~>"+ debugString(pt.normalize)) //@MAT only called during erasure, which already takes care of that
atPos(tree.pos)(mkAsInstanceOf(tree, pt, false))
}
diff --git a/src/compiler/scala/tools/nsc/ast/TreeInfo.scala b/src/compiler/scala/tools/nsc/ast/TreeInfo.scala
index f21b1b20ff..46ddf7b24b 100644
--- a/src/compiler/scala/tools/nsc/ast/TreeInfo.scala
+++ b/src/compiler/scala/tools/nsc/ast/TreeInfo.scala
@@ -151,6 +151,12 @@ abstract class TreeInfo {
case _ :: stats1 => firstConstructor(stats1)
}
+ /** The arguments to the first constructor in `stats'. */
+ def firstConstructorArgs(stats: List[Tree]): List[Tree] = firstConstructor(stats) match {
+ case DefDef(_, _, _, args :: _, _, _) => args
+ case _ => Nil
+ }
+
/** The value definitions marked PRESUPER in this statement sequence */
def preSuperFields(stats: List[Tree]): List[ValDef] =
for (vdef @ ValDef(mods, _, _, _) <- stats if mods hasFlag PRESUPER) yield vdef
diff --git a/src/compiler/scala/tools/nsc/ast/TreePrinters.scala b/src/compiler/scala/tools/nsc/ast/TreePrinters.scala
index ddc1c3169a..10b50db6d5 100644
--- a/src/compiler/scala/tools/nsc/ast/TreePrinters.scala
+++ b/src/compiler/scala/tools/nsc/ast/TreePrinters.scala
@@ -117,7 +117,7 @@ trait TreePrinters { trees: SymbolTable =>
def pw = tree.symbol.privateWithin
val args =
if (tree.symbol == NoSymbol) (mods.flags, mods.privateWithin)
- else if (pw == NoSymbol || pw == tree.symbol.owner) (tree.symbol.flags, "")
+ else if (pw == NoSymbol) (tree.symbol.flags, "")
else (tree.symbol.flags, pw.name)
printFlags(args._1, args._2.toString)
@@ -379,6 +379,9 @@ trait TreePrinters { trees: SymbolTable =>
case SelectFromArray(qualifier, name, _) =>
print(qualifier); print(".<arr>"); print(symName(tree, name))
+ case TypeTreeWithDeferredRefCheck() =>
+ print("<tree with deferred refcheck>")
+
case tree =>
print("<unknown tree of class "+tree.getClass+">")
}
@@ -575,6 +578,7 @@ trait TreePrinters { trees: SymbolTable =>
// eliminated by refchecks
case ModuleDef(mods, name, impl) =>
+ case TypeTreeWithDeferredRefCheck() =>
// eliminated by erasure
case TypeDef(mods, name, tparams, rhs) =>
diff --git a/src/compiler/scala/tools/nsc/ast/Trees.scala b/src/compiler/scala/tools/nsc/ast/Trees.scala
index 35db3c0984..dbe4a587ba 100644
--- a/src/compiler/scala/tools/nsc/ast/Trees.scala
+++ b/src/compiler/scala/tools/nsc/ast/Trees.scala
@@ -338,6 +338,9 @@ trait Trees extends reflect.generic.Trees { self: SymbolTable =>
case class Parens(args: List[Tree]) extends Tree // only used during parsing
+ /** emitted by typer, eliminated by refchecks **/
+ case class TypeTreeWithDeferredRefCheck()(val check: () => TypeTree) extends AbsTypeTree
+
// ----- subconstructors --------------------------------------------
class ApplyToImplicitArgs(fun: Tree, args: List[Tree]) extends Apply(fun, args)
@@ -383,6 +386,7 @@ trait Trees extends reflect.generic.Trees { self: SymbolTable =>
def Ident(tree: Tree, name: Name): Ident
def Literal(tree: Tree, value: Constant): Literal
def TypeTree(tree: Tree): TypeTree
+ def TypeTreeWithDeferredRefCheck(tree: Tree): TypeTreeWithDeferredRefCheck
def Annotated(tree: Tree, annot: Tree, arg: Tree): Annotated
def SingletonTypeTree(tree: Tree, ref: Tree): SingletonTypeTree
def SelectFromTypeTree(tree: Tree, qualifier: Tree, selector: Name): SelectFromTypeTree
@@ -470,6 +474,9 @@ trait Trees extends reflect.generic.Trees { self: SymbolTable =>
new Literal(value).copyAttrs(tree)
def TypeTree(tree: Tree) =
new TypeTree().copyAttrs(tree)
+ def TypeTreeWithDeferredRefCheck(tree: Tree) = tree match {
+ case dc@TypeTreeWithDeferredRefCheck() => new TypeTreeWithDeferredRefCheck()(dc.check).copyAttrs(tree)
+ }
def Annotated(tree: Tree, annot: Tree, arg: Tree) =
new Annotated(annot, arg).copyAttrs(tree)
def SingletonTypeTree(tree: Tree, ref: Tree) =
@@ -670,6 +677,10 @@ trait Trees extends reflect.generic.Trees { self: SymbolTable =>
case t @ TypeTree() => t
case _ => treeCopy.TypeTree(tree)
}
+ def TypeTreeWithDeferredRefCheck(tree: Tree) = tree match {
+ case t @ TypeTreeWithDeferredRefCheck() => t
+ case _ => treeCopy.TypeTreeWithDeferredRefCheck(tree)
+ }
def Annotated(tree: Tree, annot: Tree, arg: Tree) = tree match {
case t @ Annotated(annot0, arg0)
if (annot0==annot) => t
@@ -816,6 +827,8 @@ trait Trees extends reflect.generic.Trees { self: SymbolTable =>
treeCopy.Literal(tree, value)
case TypeTree() =>
treeCopy.TypeTree(tree)
+ case TypeTreeWithDeferredRefCheck() =>
+ treeCopy.TypeTreeWithDeferredRefCheck(tree)
case Annotated(annot, arg) =>
treeCopy.Annotated(tree, transform(annot), transform(arg))
case SingletonTypeTree(ref) =>
@@ -878,6 +891,8 @@ trait Trees extends reflect.generic.Trees { self: SymbolTable =>
traverse(definition)
case Parens(ts) =>
traverseTrees(ts)
+ case TypeTreeWithDeferredRefCheck() => // TODO: should we traverse the wrapped tree?
+ // (and rewrap the result? how to update the deferred check? would need to store wrapped tree instead of returning it from check)
case _ => super.traverse(tree)
}
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
index e28f07e840..7a28fd0538 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
@@ -1312,6 +1312,9 @@ self =>
}
def simpleExprRest(t: Tree, canApply: Boolean): Tree = {
+ // Various errors in XML literals can cause xmlLiteral to propagate
+ // EmptyTree's. Watch out for them here (see also postfixExpr).
+ if (EmptyTree == t) return EmptyTree // #3604 (mics)
if (canApply) newLineOptWhenFollowedBy(LBRACE)
in.token match {
case DOT =>
@@ -1820,7 +1823,7 @@ self =>
if (in.token != RPAREN) {
if (in.token == IMPLICIT) {
if (!contextBounds.isEmpty)
- syntaxError("cannot have both implicit parameters and context bounds `: ...' on type parameters", false)
+ syntaxError("cannot have both implicit parameters and context bounds `: ...' or view bounds `<% ...' on type parameters", false)
in.nextToken()
implicitmod = Flags.IMPLICIT
}
@@ -2328,7 +2331,7 @@ self =>
classContextBounds = contextBoundBuf.toList
val tstart = (in.offset::classContextBounds.map(_.pos.startOrPoint)).min
if (!classContextBounds.isEmpty && mods.hasFlag(Flags.TRAIT)) {
- syntaxError("traits cannot have type parameters with context bounds `: ...'", false)
+ syntaxError("traits cannot have type parameters with context bounds `: ...' nor view bounds `<% ...'", false)
classContextBounds = List()
}
val constrAnnots = annotations(false, true)
@@ -2731,10 +2734,10 @@ self =>
topstats() match {
case List(stat @ PackageDef(_, _)) => stat
case stats =>
- val start = stats match {
- case Nil => 0
- case _ => wrappingPos(stats).startOrPoint
- }
+ val start =
+ if (stats forall (_ == EmptyTree)) 0
+ else wrappingPos(stats).startOrPoint
+
makePackaging(start, atPos(start, start, start) { Ident(nme.EMPTY_PACKAGE_NAME) }, stats)
}
}
diff --git a/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala b/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala
index 668565ddf6..b86e22787b 100644
--- a/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala
+++ b/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala
@@ -7,9 +7,9 @@
package scala.tools.nsc
package backend.opt
-
import scala.util.control.Breaks._
-import scala.collection.mutable.{Map, HashMap, Set, HashSet}
+import scala.collection.{ mutable, immutable }
+import mutable.{ HashMap, HashSet }
import scala.tools.nsc.symtab._
/**
@@ -19,6 +19,11 @@ abstract class Inliners extends SubComponent {
import global._
import icodes._
import icodes.opcodes._
+ import definitions.{
+ NullClass, NothingClass, ObjectClass,
+ PredefModule, RuntimePackage, ScalaInlineClass, ScalaNoInlineClass,
+ isFunctionType
+ }
val phaseName = "inliner"
@@ -28,15 +33,24 @@ abstract class Inliners extends SubComponent {
val res = body
val t2 = System.currentTimeMillis()
val ms = (t2 - t1).toInt
- if (ms >= 2000)
+ if (ms >= MAX_INLINE_MILLIS)
println("%s: %d milliseconds".format(s, ms))
res
}
+ /* A warning threshold */
+ private final val MAX_INLINE_MILLIS = 2000
+
/** The maximum size in basic blocks of methods considered for inlining. */
final val MAX_INLINE_SIZE = 16
+ /** Maximum loop iterations. */
+ final val MAX_INLINE_RETRY = 15
+
+ /** Small method size (in blocks) */
+ val SMALL_METHOD_SIZE = 1
+
/** Create a new phase */
override def newPhase(p: Phase) = new InliningPhase(p)
@@ -47,255 +61,59 @@ abstract class Inliners extends SubComponent {
val inliner = new Inliner
override def apply(c: IClass) {
- inliner.analyzeClass(c)
+ inliner analyzeClass c
}
}
+ def isBottomType(sym: Symbol) = sym == NullClass || sym == NothingClass
+ def posToStr(pos: util.Position) = if (pos.isDefined) pos.point.toString else "<nopos>"
+
/** Is the given class a closure? */
def isClosureClass(cls: Symbol): Boolean =
cls.isFinal && cls.isSynthetic && !cls.isModuleClass && cls.isAnonymousFunction
/**
* Simple inliner.
- *
*/
class Inliner {
+ object NonPublicRefs extends Enumeration {
+ val Public, Protected, Private = Value
- val fresh = new HashMap[String, Int]
+ /** Cache whether a method calls private members. */
+ val usesNonPublics: mutable.Map[IMethod, Value] = new HashMap
+ }
+ import NonPublicRefs._
/* fresh name counter */
+ val fresh = new HashMap[String, Int]
var count = 0
-
- def freshName(s: String) = fresh.get(s) match {
- case Some(count) =>
- fresh(s) = count + 1
- s + count
- case None =>
- fresh(s) = 1
- s + "0"
+ def freshName(s: String) = {
+ val count = fresh.getOrElseUpdate(s, 0)
+ fresh(s) += 1
+ s + count
}
- private def hasInline(sym: Symbol) = sym hasAnnotation definitions.ScalaInlineClass
- private def hasNoInline(sym: Symbol) = sym hasAnnotation definitions.ScalaNoInlineClass
-
- /** Inline the 'callee' method inside the 'caller' in the given
- * basic block, at the given instruction (which has to be a CALL_METHOD).
- */
- def inline(caller: IMethod,
- block: BasicBlock,
- instr: Instruction,
- callee: IMethod) {
- def posToStr(pos: util.Position) = if (pos.isDefined) pos.point.toString else "<nopos>"
- log("Inlining " + callee + " in " + caller + " at pos: " + posToStr(instr.pos))
-
- val targetPos = instr.pos
- val a = new analysis.MethodTFA(callee)
-
- /* The exception handlers that are active at the current block. */
- val activeHandlers = caller.exh.filter(_.covered.contains(block))
-
- /* Map 'original' blocks to the ones inlined in the caller. */
- val inlinedBlock: Map[BasicBlock, BasicBlock] = new HashMap
-
- val varsInScope: Set[Local] = HashSet() ++= block.varsInScope
-
- val instrBefore = block.toList.takeWhile {
- case i @ SCOPE_ENTER(l) => varsInScope += l
- i ne instr
- case i =>
- i ne instr
- }
- val instrAfter = block.toList.drop(instrBefore.length + 1);
-
- assert(!instrAfter.isEmpty, "CALL_METHOD cannot be the last instruction in block!");
-
- // store the '$this' into the special local
- val inlinedThis = new Local(caller.symbol.newVariable(instr.pos, freshName("$inlThis")), REFERENCE(definitions.ObjectClass), false);
-
- /** buffer for the returned value */
- val retVal =
- if (callee.returnType != UNIT)
- new Local(caller.symbol.newVariable(instr.pos, freshName("$retVal")), callee.returnType, false);
- else
- null;
-
- /** Add a new block in the current context. */
- def newBlock = {
- val b = caller.code.newBlock
- activeHandlers.foreach (_.addCoveredBlock(b))
- if (retVal ne null) b.varsInScope += retVal
- b.varsInScope += inlinedThis
- b.varsInScope ++= varsInScope
- b
- }
-
- def translateExh(e: ExceptionHandler) = {
- var handler: ExceptionHandler = e.dup
- handler.covered = handler.covered.map(inlinedBlock)
- handler.setStartBlock(inlinedBlock(e.startBlock))
- handler
- }
-
- var inlinedLocals: Map[Local, Local] = new HashMap
-
- /** alfa-rename `l' in caller's context. */
- def dupLocal(l: Local): Local = {
- val sym = caller.symbol.newVariable(l.sym.pos, freshName(l.sym.name.toString()));
-// sym.setInfo(l.sym.tpe);
- val dupped = new Local(sym, l.kind, false)
- inlinedLocals(l) = dupped
- dupped
- }
-
- def addLocals(m: IMethod, ls: List[Local]) =
- m.locals = m.locals ::: ls;
- def addLocal(m: IMethod, l: Local): Unit =
- addLocals(m, List(l));
-
- val afterBlock = newBlock;
-
- /** Map from nw.init instructions to their matching NEW call */
- val pending: collection.mutable.Map[Instruction, NEW] = new collection.mutable.HashMap
-
- /** Map an instruction from the callee to one suitable for the caller. */
- def map(i: Instruction): Instruction = {
- val newInstr = i match {
- case THIS(clasz) =>
- LOAD_LOCAL(inlinedThis);
-
- case STORE_THIS(_) =>
- STORE_LOCAL(inlinedThis)
-
- case JUMP(whereto) =>
- JUMP(inlinedBlock(whereto));
-
- case CJUMP(success, failure, cond, kind) =>
- CJUMP(inlinedBlock(success), inlinedBlock(failure), cond, kind);
-
- case CZJUMP(success, failure, cond, kind) =>
- CZJUMP(inlinedBlock(success), inlinedBlock(failure), cond, kind);
-
- case SWITCH(tags, labels) =>
- SWITCH(tags, labels map inlinedBlock);
-
- case RETURN(kind) =>
- JUMP(afterBlock);
-
- case LOAD_LOCAL(l) if inlinedLocals.isDefinedAt(l) =>
- LOAD_LOCAL(inlinedLocals(l))
-
- case STORE_LOCAL(l) if inlinedLocals.isDefinedAt(l) =>
- STORE_LOCAL(inlinedLocals(l))
-
- case LOAD_LOCAL(l) =>
- assert(caller.locals contains l,
- "Could not find local '" + l + "' in locals, nor in inlinedLocals: " + inlinedLocals)
- i
- case STORE_LOCAL(l) =>
- assert(caller.locals contains l,
- "Could not find local '" + l + "' in locals, nor in inlinedLocals: " + inlinedLocals)
- i
-
- case SCOPE_ENTER(l) if inlinedLocals.isDefinedAt(l) =>
- SCOPE_ENTER(inlinedLocals(l))
-
- case SCOPE_EXIT(l) if inlinedLocals.isDefinedAt(l) =>
- SCOPE_EXIT(inlinedLocals(l))
-
- case nw @ NEW(sym) =>
- val r = NEW(sym)
- pending(nw.init) = r
- r
-
- case CALL_METHOD(meth, Static(true)) if (meth.isClassConstructor) =>
- CALL_METHOD(meth, Static(true))
-
- case _ => i.clone
- }
- // check any pending NEW's
- if (pending isDefinedAt i) {
- pending(i).init = newInstr.asInstanceOf[CALL_METHOD]
- pending -= i
- }
- newInstr
- }
-
- addLocals(caller, callee.locals map dupLocal);
- addLocal(caller, inlinedThis);
- if (retVal ne null)
- addLocal(caller, retVal);
- callee.code.blocks.foreach { b =>
- inlinedBlock += (b -> newBlock)
- inlinedBlock(b).varsInScope ++= (b.varsInScope map inlinedLocals)
- }
-
- // analyse callee
- a.run
-
- // re-emit the instructions before the call
- block.open
- block.clear
- instrBefore.foreach(i => block.emit(i, i.pos))
-
- // store the arguments into special locals
- callee.params.reverse.foreach { param =>
- block.emit(STORE_LOCAL(inlinedLocals(param)), targetPos);
- }
- block.emit(STORE_LOCAL(inlinedThis), targetPos);
-
- // jump to the start block of the callee
- block.emit(JUMP(inlinedBlock(callee.code.startBlock)), targetPos);
- block.close
-
- // duplicate the other blocks in the callee
- linearizer.linearize(callee).foreach { bb =>
- var info = a.in(bb);
- for (i <- bb) {
- i match {
- case RETURN(kind) => kind match {
- case UNIT =>
- if (!info.stack.types.isEmpty) {
- info.stack.types foreach { t => inlinedBlock(bb).emit(DROP(t), targetPos); }
- }
- case _ =>
- if (info.stack.length > 1) {
- inlinedBlock(bb).emit(STORE_LOCAL(retVal), targetPos);
- info.stack.types.drop(1) foreach { t => inlinedBlock(bb).emit(DROP(t), targetPos); }
- inlinedBlock(bb).emit(LOAD_LOCAL(retVal), targetPos);
- }
- }
- case _ => ();
- }
- inlinedBlock(bb).emit(map(i), targetPos);
- info = a.interpret(info, i);
- }
- inlinedBlock(bb).close
- }
-
- instrAfter.foreach(i => afterBlock.emit(i, i.pos));
- afterBlock.close;
- count += 1
-
- // add exception handlers of the callee
- caller.exh = (callee.exh map translateExh) ::: caller.exh;
- assert(pending.isEmpty, "Pending NEW elements: " + pending)
- }
+ private def hasInline(sym: Symbol) = sym hasAnnotation ScalaInlineClass
+ private def hasNoInline(sym: Symbol) = sym hasAnnotation ScalaNoInlineClass
/** The current iclass */
private var currentIClazz: IClass = _
+ private def warn(pos: Position, msg: String) = currentIClazz.cunit.warning(pos, msg)
- def analyzeClass(cls: IClass): Unit = if (settings.inline.value) {
- if (settings.debug.value)
- log("Analyzing " + cls);
- this.currentIClazz = cls
- cls.methods filterNot (_.symbol.isConstructor) foreach analyzeMethod
- }
+ def analyzeClass(cls: IClass): Unit =
+ if (settings.inline.value) {
+ if (settings.debug.value)
+ log("Analyzing " + cls)
- val tfa = new analysis.MethodTFA();
- tfa.stat = settings.Ystatistics.value
+ this.currentIClazz = cls
+ cls.methods filterNot (_.symbol.isConstructor) foreach analyzeMethod
+ }
+
+ val tfa = new analysis.MethodTFA()
+ tfa.stat = settings.Ystatistics.value
// how many times have we already inlined this method here?
- private val inlinedMethods: Map[Symbol, Int] = new HashMap[Symbol, Int] {
+ private val inlinedMethodCount: mutable.Map[Symbol, Int] = new HashMap[Symbol, Int] {
override def default(k: Symbol) = 0
}
@@ -303,303 +121,525 @@ abstract class Inliners extends SubComponent {
var retry = false
var count = 0
fresh.clear
- inlinedMethods.clear
+ inlinedMethodCount.clear
val caller = new IMethodInfo(m)
+ var info: tfa.lattice.Elem = null
+
+ def analyzeInc(msym: Symbol, i: Instruction, bb: BasicBlock) = {
+ def paramTypes = msym.info.paramTypes
+ val receiver = (info.stack.types drop paramTypes.length).head match {
+ case REFERENCE(s) => s
+ case _ => NoSymbol
+ }
+ val concreteMethod = lookupImplFor(msym, receiver)
+
+ def warnNoInline(reason: String) = {
+ if (hasInline(msym) && !caller.isBridge)
+ warn(i.pos, "Could not inline required method %s because %s.".format(msym.originalName.decode, reason))
+ }
+
+ if (shouldLoadImplFor(concreteMethod, receiver))
+ icodes.icode(receiver, true)
+
+ def isAvailable = icodes available receiver
+ def isCandidate = isClosureClass(receiver) || concreteMethod.isEffectivelyFinal || receiver.isFinal
+ def isApply = concreteMethod.name == nme.apply
+ def isCountable = !(isClosureClass(receiver) && isApply) // only count non-closures
+
+ if (settings.debug.value)
+ log("Treating " + i
+ + "\n\treceiver: " + receiver
+ + "\n\ticodes.available: " + isAvailable
+ + "\n\tconcreteMethod.isEffectivelyFinal: " + concreteMethod.isEffectivelyFinal)
+
+ if (isAvailable && isCandidate) {
+ lookupIMethod(concreteMethod, receiver) match {
+ case Some(callee) =>
+ val inc = new IMethodInfo(callee)
+ val pair = new CallerCalleeInfo(caller, inc)
+
+ if (pair isStampedForInlining info.stack) {
+ retry = true
+ if (isCountable)
+ count += 1
+
+ pair.doInline(bb, i)
+ inlinedMethodCount(inc.sym) += 1
+
+ /* Remove this method from the cache, as the calls-private relation
+ * might have changed after the inlining.
+ */
+ usesNonPublics -= m
+ }
+ else {
+ if (settings.debug.value)
+ pair logFailure info.stack
+
+ warnNoInline(pair failureReason info.stack)
+ }
+ case None =>
+ warnNoInline("bytecode was not available")
+ if (settings.debug.value)
+ log("could not find icode\n\treceiver: " + receiver + "\n\tmethod: " + concreteMethod)
+ }
+ }
+ else warnNoInline(
+ if (!isAvailable) "bytecode was not available"
+ else "it is not final"
+ )
+ }
do {
- retry = false;
- if (m.code ne null) {
- log("Analyzing " + m + " count " + count + " with " + m.code.blocks.length + " blocks");
- tfa.init(m)
+ retry = false
+ if (caller.inline) {
+ log("Not inlining into " + caller.sym.originalName.decode + " because it is marked @inline.")
+ }
+ else if (caller.hasCode) {
+ log("Analyzing " + m + " count " + count + " with " + caller.length + " blocks")
+ tfa init m
tfa.run
- for (bb <- linearizer.linearize(m)) {
- var info = tfa.in(bb);
+ caller.linearized foreach { bb =>
+ info = tfa in bb
+
for (i <- bb) {
if (!retry) {
i match {
- case CALL_METHOD(msym, Dynamic) =>
- val inc = new SymMethodInfo(msym)
-
- def warnNoInline(reason: String) = {
- if (caller.inline && !inc.isBridge && !inc.hasBlocker)
- currentIClazz.cunit.warning(i.pos,
- "Could not inline required method %s because %s.".format(msym.originalName.decode, reason))
- }
-
- val receiver = info.stack.types.drop(msym.info.paramTypes.length).head match {
- case REFERENCE(s) => s;
- case _ => NoSymbol;
- }
- var concreteMethod = msym;
- if (receiver != msym.owner && receiver != NoSymbol) {
- if (settings.debug.value)
- log("" + i + " has actual receiver: " + receiver);
- if (!concreteMethod.isEffectivelyFinal && receiver.isFinal) {
- concreteMethod = lookupImpl(concreteMethod, receiver)
- if (settings.debug.value)
- log("\tlooked up method: " + concreteMethod.fullName)
- }
- }
-
- if (shouldLoad(receiver, concreteMethod)) {
- icodes.icode(receiver, true)
- }
- if (settings.debug.value)
- log("Treating " + i
- + "\n\treceiver: " + receiver
- + "\n\ticodes.available: " + icodes.available(receiver)
- + "\n\tconcreteMethod.isEffectivelyFinal: " + concreteMethod.isFinal);
-
- if ( icodes.available(receiver)
- && (isClosureClass(receiver)
- || concreteMethod.isEffectivelyFinal
- || receiver.isFinal)) {
- icodes.icode(receiver).get.lookupMethod(concreteMethod) match {
- case Some(inc) =>
- if (inc.symbol != m.symbol
- && (inc.code ne null)
- && shouldInline(m, inc)
- && isSafeToInline(m, inc, info.stack)) {
- retry = true;
- if (!(isClosureClass(receiver) && (concreteMethod.name == nme.apply))) // only count non-closures
- count = count + 1;
- inline(m, bb, i, inc);
- inlinedMethods(inc.symbol) += 1
-
- /* Remove this method from the cache, as the calls-private relation
- might have changed after the inlining. */
- usesNonPublics -= m;
- }
- else {
- if (settings.debug.value)
- log("inline failed for " + inc + " because:\n\tinc.symbol != m.symbol: " + (inc.symbol != m.symbol)
- + "\n\t(inlinedMethods(inc.symbol) < 2): " + (inlinedMethods(inc.symbol) < 2)
- + "\n\tinc.code ne null: " + (inc.code ne null) + (if (inc.code ne null)
- "\n\tisSafeToInline(m, inc, info.stack): " + isSafeToInline(m, inc, info.stack)
- + "\n\tshouldInline heuristics: " + shouldInline(m, inc) else ""));
- warnNoInline(
- if (inc.code eq null) "bytecode was unavailable"
- else if (!isSafeToInline(m, inc, info.stack)) "it is unsafe (target may reference private fields)"
- else "of a bug (run with -Ylog:inline -Ydebug for more information)")
- }
- case None =>
- warnNoInline("bytecode was not available")
- if (settings.debug.value)
- log("could not find icode\n\treceiver: " + receiver + "\n\tmethod: " + concreteMethod)
- }
- } else
- warnNoInline(if (icodes.available(receiver)) "it is not final" else "bytecode was not available")
-
- case _ => ();
+ case CALL_METHOD(msym, Dynamic) => analyzeInc(msym, i, bb)
+ case _ => ()
}
info = tfa.interpret(info, i)
- }}}
- if (tfa.stat) log(m.symbol.fullName + " iterations: " + tfa.iterations + " (size: " + m.code.blocks.length + ")")
- }} while (retry && count < 15)
+ }
+ }
+ }
+
+ if (tfa.stat)
+ log(m.symbol.fullName + " iterations: " + tfa.iterations + " (size: " + caller.length + ")")
+ }
+ }
+ while (retry && count < MAX_INLINE_RETRY)
+
m.normalize
}
- /** small method size (in blocks) */
- val SMALL_METHOD_SIZE = 1
+ private def isMonadicMethod(sym: Symbol) = sym.name match {
+ case nme.foreach | nme.filter | nme.map | nme.flatMap => true
+ case _ => false
+ }
+ private def isHigherOrderMethod(sym: Symbol) =
+ sym.isMethod && atPhase(currentRun.erasurePhase.prev)(sym.info.paramTypes exists isFunctionType)
- class SymMethodInfo(val sym: Symbol) {
- val name = sym.name
- val owner = sym.owner
+ /** Should method 'sym', when called on 'receiver', be loaded from disk? */
+ def shouldLoadImplFor(sym: Symbol, receiver: Symbol): Boolean = {
+ if (settings.debug.value)
+ log("shouldLoadImplFor: " + receiver + "." + sym)
- def inline = hasInline(sym)
- def noinline = hasNoInline(sym)
- def numInlined = inlinedMethods(sym)
+ def alwaysLoad = (receiver.enclosingPackage == RuntimePackage) || (receiver == PredefModule.moduleClass)
+ def loadCondition = sym.isEffectivelyFinal && isMonadicMethod(sym) && isHigherOrderMethod(sym)
- def isBridge = sym.isBridge
- def isInClosure = isClosureClass(owner)
- def isHigherOrder = sym.isMethod && atPhase(currentRun.erasurePhase.prev)(sym.info.paramTypes exists definitions.isFunctionType)
- def isMonadic = name match {
- case nme.foreach | nme.filter | nme.map | nme.flatMap => true
- case _ => false
- }
- def isEffectivelyFinal = sym.isEffectivelyFinal
- def coMembers = sym.owner.tpe.members
- def coPrivates = coMembers filter (_.isPrivate)
- def coGetters = coMembers filter (_.isGetter)
-
- /** Does this method have a quality which blocks us from inlining it
- * until later? At present that means private members or getters exist
- * in the class alongside it.
- */
- def hasBlocker = coPrivates.nonEmpty || coGetters.nonEmpty
+ hasInline(sym) || alwaysLoad || loadCondition
}
- class IMethodInfo(m: IMethod) extends SymMethodInfo(m.symbol) {
- def length = m.code.blocks.length
- def isRecursive = m.recursive
+ /** Look up implementation of method 'sym' in 'clazz'.
+ */
+ def lookupImplFor(sym: Symbol, clazz: Symbol): Symbol = {
+ // TODO: verify that clazz.superClass is equivalent here to clazz.tpe.parents(0).typeSymbol (.tpe vs .info)
+ def needsLookup = (clazz != NoSymbol) && (clazz != sym.owner) && !sym.isEffectivelyFinal && clazz.isFinal
+
+ def lookup(clazz: Symbol): Symbol = {
+ // println("\t\tlooking up " + meth + " in " + clazz.fullName + " meth.owner = " + meth.owner)
+ if (sym.owner == clazz || isBottomType(clazz)) sym
+ else sym.overridingSymbol(clazz) match {
+ case NoSymbol => if (sym.owner.isTrait) sym else lookup(clazz.superClass)
+ case imp => imp
+ }
+ }
+ if (needsLookup) {
+ val concreteMethod = lookup(clazz)
+ if (settings.debug.value)
+ log("\tlooked up method: " + concreteMethod.fullName)
- def isSmall = length <= SMALL_METHOD_SIZE
- def isLarge = length > MAX_INLINE_SIZE
- def isLargeSum(other: IMethodInfo) = length + other.length - 1 > SMALL_METHOD_SIZE
+ concreteMethod
+ }
+ else sym
}
- /** Should the given method be loaded from disk? */
- def shouldLoad(receiver: Symbol, method: Symbol): Boolean = {
- if (settings.debug.value)
- log("shouldLoad: " + receiver + "." + method)
+ class IMethodInfo(val m: IMethod) {
+ val sym = m.symbol
+ val name = sym.name
+ def owner = sym.owner
+ def paramTypes = sym.info.paramTypes
+ def minimumStack = paramTypes.length + 1
+
+ def inline = hasInline(sym)
+ def noinline = hasNoInline(sym)
+ def numInlined = inlinedMethodCount(sym)
+
+ def isBridge = sym.isBridge
+ def isInClosure = isClosureClass(owner)
+ def isHigherOrder = isHigherOrderMethod(sym)
+ def isMonadic = isMonadicMethod(sym)
+
+ def handlers = m.exh
+ def blocks = m.code.blocks
+ def locals = m.locals
+ def length = blocks.length
+ def openBlocks = blocks filterNot (_.closed)
+ def instructions = blocks.flatten
+ def linearized = linearizer linearize m
+
+ def isSmall = length <= SMALL_METHOD_SIZE
+ def isLarge = length > MAX_INLINE_SIZE
+ def isRecursive = m.recursive
+ def hasCode = m.code != null
+ def hasSourceFile = m.sourceFile != null
+ def hasHandlers = handlers.nonEmpty
+
+ def addLocals(ls: List[Local]) = m.locals ++= ls
+ def addLocal(l: Local) = addLocals(List(l))
+ def addHandlers(exhs: List[ExceptionHandler]) = m.exh = exhs ::: m.exh
+ }
+
+ class CallerCalleeInfo(val caller: IMethodInfo, val inc: IMethodInfo) {
+ def isLargeSum = caller.length + inc.length - 1 > SMALL_METHOD_SIZE
+
- val caller = new SymMethodInfo(method)
- def alwaysLoad = (
- (receiver.enclosingPackage == definitions.RuntimePackage) ||
- (receiver == definitions.PredefModule.moduleClass) ||
- caller.inline
+ /** Inline 'inc' into 'caller' at the given block and instruction.
+ * The instruction must be a CALL_METHOD.
+ */
+ def doInline(block: BasicBlock, instr: Instruction) {
+ val targetPos = instr.pos
+ log("Inlining " + inc.m + " in " + caller.m + " at pos: " + posToStr(targetPos))
+
+ def blockEmit(i: Instruction) = block.emit(i, targetPos)
+ def newLocal(baseName: String, kind: TypeKind) =
+ new Local(caller.sym.newVariable(targetPos, freshName(baseName)), kind, false)
+
+ val a = new analysis.MethodTFA(inc.m)
+
+ /* The exception handlers that are active at the current block. */
+ val activeHandlers = caller.handlers filter (_ covered block)
+
+ /* Map 'original' blocks to the ones inlined in the caller. */
+ val inlinedBlock: mutable.Map[BasicBlock, BasicBlock] = new HashMap
+
+ val varsInScope: mutable.Set[Local] = HashSet() ++= block.varsInScope
+
+ /** Side effects varsInScope when it sees SCOPE_ENTERs. */
+ def instrBeforeFilter(i: Instruction): Boolean = {
+ i match { case SCOPE_ENTER(l) => varsInScope += l ; case _ => () }
+ i ne instr
+ }
+ val instrBefore = block.toList takeWhile instrBeforeFilter
+ val instrAfter = block.toList drop (instrBefore.length + 1)
+
+ assert(!instrAfter.isEmpty, "CALL_METHOD cannot be the last instruction in block!")
+
+ // store the '$this' into the special local
+ val inlinedThis = newLocal("$inlThis", REFERENCE(ObjectClass))
+
+ /** buffer for the returned value */
+ val retVal = inc.m.returnType match {
+ case UNIT => null
+ case x => newLocal("$retVal", x)
+ }
+
+ val inlinedLocals: mutable.Map[Local, Local] = new HashMap
+
+ /** Add a new block in the current context. */
+ def newBlock() = {
+ val b = caller.m.code.newBlock
+ activeHandlers foreach (_ addCoveredBlock b)
+ if (retVal ne null) b.varsInScope += retVal
+ b.varsInScope += inlinedThis
+ b.varsInScope ++= varsInScope
+ b
+ }
+
+ def translateExh(e: ExceptionHandler) = {
+ val handler: ExceptionHandler = e.dup
+ handler.covered = handler.covered map inlinedBlock
+ handler setStartBlock inlinedBlock(e.startBlock)
+ handler
+ }
+
+ /** alpha-rename `l' in caller's context. */
+ def dupLocal(l: Local): Local = {
+ val sym = caller.sym.newVariable(l.sym.pos, freshName(l.sym.name.toString()))
+ // sym.setInfo(l.sym.tpe)
+ val dupped = new Local(sym, l.kind, false)
+ inlinedLocals(l) = dupped
+ dupped
+ }
+
+ val afterBlock = newBlock()
+
+ /** Map from nw.init instructions to their matching NEW call */
+ val pending: mutable.Map[Instruction, NEW] = new HashMap
+
+ /** Map an instruction from the callee to one suitable for the caller. */
+ def map(i: Instruction): Instruction = {
+ def assertLocal(l: Local) = {
+ assert(caller.locals contains l, "Could not find local '" + l + "' in locals, nor in inlinedLocals: " + inlinedLocals)
+ i
+ }
+ def isInlined(l: Local) = inlinedLocals isDefinedAt l
+
+ val newInstr = i match {
+ case THIS(clasz) =>
+ LOAD_LOCAL(inlinedThis)
+
+ case STORE_THIS(_) =>
+ STORE_LOCAL(inlinedThis)
+
+ case JUMP(whereto) =>
+ JUMP(inlinedBlock(whereto))
+
+ case CJUMP(success, failure, cond, kind) =>
+ CJUMP(inlinedBlock(success), inlinedBlock(failure), cond, kind)
+
+ case CZJUMP(success, failure, cond, kind) =>
+ CZJUMP(inlinedBlock(success), inlinedBlock(failure), cond, kind)
+
+ case SWITCH(tags, labels) =>
+ SWITCH(tags, labels map inlinedBlock)
+
+ case RETURN(kind) =>
+ JUMP(afterBlock)
+
+ case LOAD_LOCAL(l) if isInlined(l) =>
+ LOAD_LOCAL(inlinedLocals(l))
+
+ case STORE_LOCAL(l) if isInlined(l) =>
+ STORE_LOCAL(inlinedLocals(l))
+
+ case LOAD_LOCAL(l) => assertLocal(l)
+ case STORE_LOCAL(l) => assertLocal(l)
+
+ case SCOPE_ENTER(l) if isInlined(l) =>
+ SCOPE_ENTER(inlinedLocals(l))
+
+ case SCOPE_EXIT(l) if isInlined(l) =>
+ SCOPE_EXIT(inlinedLocals(l))
+
+ case nw @ NEW(sym) =>
+ val r = NEW(sym)
+ pending(nw.init) = r
+ r
+
+ case CALL_METHOD(meth, Static(true)) if meth.isClassConstructor =>
+ CALL_METHOD(meth, Static(true))
+
+ case _ => i.clone()
+ }
+ // check any pending NEW's
+ pending remove i foreach (_.init = newInstr.asInstanceOf[CALL_METHOD])
+ newInstr
+ }
+
+ caller addLocals (inc.locals map dupLocal)
+ caller addLocal inlinedThis
+
+ if (retVal ne null)
+ caller addLocal retVal
+
+ inc.blocks foreach { b =>
+ inlinedBlock += (b -> newBlock())
+ inlinedBlock(b).varsInScope ++= (b.varsInScope map inlinedLocals)
+ }
+
+ // analyse callee
+ a.run
+
+ // re-emit the instructions before the call
+ block.open
+ block.clear
+ block emit instrBefore
+
+ // store the arguments into special locals
+ inc.m.params.reverse foreach (p => blockEmit(STORE_LOCAL(inlinedLocals(p))))
+ blockEmit(STORE_LOCAL(inlinedThis))
+
+ // jump to the start block of the callee
+ blockEmit(JUMP(inlinedBlock(inc.m.code.startBlock)))
+ block.close
+
+ // duplicate the other blocks in the callee
+ linearizer linearize inc.m foreach { bb =>
+ var info = a in bb
+ def emitInlined(i: Instruction) = inlinedBlock(bb).emit(i, targetPos)
+ def emitDrops(toDrop: Int) = info.stack.types drop toDrop foreach (t => emitInlined(DROP(t)))
+
+ for (i <- bb) {
+ i match {
+ case RETURN(UNIT) => emitDrops(0)
+ case RETURN(kind) =>
+ if (info.stack.length > 1) {
+ emitInlined(STORE_LOCAL(retVal))
+ emitDrops(1)
+ emitInlined(LOAD_LOCAL(retVal))
+ }
+ case _ => ()
+ }
+ emitInlined(map(i))
+ info = a.interpret(info, i)
+ }
+ inlinedBlock(bb).close
+ }
+
+ afterBlock emit instrAfter
+ afterBlock.close
+ count += 1
+
+ // add exception handlers of the callee
+ caller addHandlers (inc.handlers map translateExh)
+ assert(pending.isEmpty, "Pending NEW elements: " + pending)
+ }
+
+ def isStampedForInlining(stack: TypeStack) =
+ !sameSymbols && inc.hasCode && shouldInline && isSafeToInline(stack)
+
+ def logFailure(stack: TypeStack) = log(
+ """|inline failed for %s:
+ | pair.sameSymbols: %s
+ | inc.numInlined < 2: %s
+ | inc.hasCode: %s
+ | isSafeToInline: %s
+ | shouldInline: %s
+ """.stripMargin.format(
+ inc.m, sameSymbols, inc.numInlined < 2,
+ inc.hasCode, isSafeToInline(stack), shouldInline
+ )
)
- (caller.isEffectivelyFinal && caller.isMonadic && caller.isHigherOrder) || alwaysLoad
- }
+ def failureReason(stack: TypeStack) =
+ if (!inc.hasCode) "bytecode was unavailable"
+ else if (!isSafeToInline(stack)) "it is unsafe (target may reference private fields)"
+ else "of a bug (run with -Ylog:inline -Ydebug for more information)"
- /** Cache whether a method calls private members. */
- val usesNonPublics: Map[IMethod, NonPublicRefs.Value] = new HashMap;
+ def canAccess(level: NonPublicRefs.Value) = level match {
+ case Private => caller.owner == inc.owner
+ case Protected => caller.owner.tpe <:< inc.owner.tpe
+ case Public => true
+ }
+ private def sameSymbols = caller.sym == inc.sym
+
+ /** A method is safe to inline when:
+ * - it does not contain calls to private methods when
+ * called from another class
+ * - it is not inlined into a position with non-empty stack,
+ * while having a top-level finalizer (see liftedTry problem)
+ * - it is not recursive
+ * Note:
+ * - synthetic private members are made public in this pass.
+ */
+ def isSafeToInline(stack: TypeStack): Boolean = {
+ def makePublic(f: Symbol): Boolean =
+ inc.hasSourceFile && (f.isSynthetic || f.isParamAccessor) && {
+ if (settings.debug.value)
+ log("Making not-private symbol out of synthetic: " + f)
+
+ f setFlag Flags.notPRIVATE
+ true
+ }
- object NonPublicRefs extends Enumeration {
- val Public, Protected, Private = Value
- }
+ if (!inc.hasCode || inc.isRecursive)
+ return false
- /** A method is safe to inline when:
- * - it does not contain calls to private methods when
- * called from another class
- * - it is not inlined into a position with non-empty stack,
- * while having a top-level finalizer (see liftedTry problem)
- * - it is not recursive
- * Note:
- * - synthetic private members are made public in this pass.
- */
- def isSafeToInline(caller: IMethod, callee: IMethod, stack: TypeStack): Boolean = {
- def makePublic(f: Symbol): Boolean =
- if ((callee.sourceFile ne null)
- && (f.hasFlag(Flags.SYNTHETIC | Flags.PARAMACCESSOR))) {
- if (settings.debug.value) log("Making not-private symbol out of synthetic: " + f)
- f.setFlag(Flags.notPRIVATE)
- true
- } else false
-
- import NonPublicRefs._
- var callsNonPublic = Public
-
- if (callee.recursive) return false
-
- usesNonPublics.get(callee) match {
- case Some(b) =>
- callsNonPublic = b
- case None =>
+ val accessNeeded = usesNonPublics.getOrElseUpdate(inc.m, {
// Avoiding crashing the compiler if there are open blocks.
- callee.code.blocks filterNot (_.closed) foreach { b =>
- currentIClazz.cunit.warning(callee.symbol.pos,
- "Encountered open block in isSafeToInline: this indicates a bug in the optimizer!\n" +
- " caller = " + caller + ", callee = " + callee
- )
+ inc.openBlocks foreach { b =>
+ warn(inc.sym.pos,
+ "Encountered open block in isSafeToInline: this indicates a bug in the optimizer!\n" +
+ " caller = " + caller.m + ", callee = " + inc.m
+ )
return false
}
+ def check(sym: Symbol, cond: Boolean) =
+ if (cond) Private
+ else if (sym.isProtected) Protected
+ else Public
+
+ def checkField(f: Symbol) = check(f, f.isPrivate && !makePublic(f))
+ def checkSuper(m: Symbol) = check(m, m.isPrivate || !m.isClassConstructor)
+ def checkMethod(m: Symbol) = check(m, m.isPrivate)
+
+ def getAccess(i: Instruction) = i match {
+ case CALL_METHOD(m, SuperCall(_)) => checkSuper(m)
+ case CALL_METHOD(m, _) => checkMethod(m)
+ case LOAD_FIELD(f, _) => checkField(f)
+ case STORE_FIELD(f, _) => checkField(f)
+ case _ => Public
+ }
- breakable {
- for (b <- callee.code.blocks; i <- b)
- i match {
- case CALL_METHOD(m, style) =>
- if (m.hasFlag(Flags.PRIVATE) ||
- (style.isSuper && !m.isClassConstructor)) {
- callsNonPublic = Private
- break
- }
- if (m.hasFlag(Flags.PROTECTED)) callsNonPublic = Protected
-
- case LOAD_FIELD(f, _) =>
- if (f.hasFlag(Flags.PRIVATE) && !makePublic(f)) {
- callsNonPublic = Private;
- break
- }
- if (f.hasFlag(Flags.PROTECTED)) callsNonPublic = Protected
-
- case STORE_FIELD(f, _) =>
- if (f.hasFlag(Flags.PRIVATE) && !makePublic(f)) {
- callsNonPublic = Private;
- break
- }
- if (f.hasFlag(Flags.PROTECTED)) callsNonPublic = Protected
-
- case _ => ()
+ def iterate(): NonPublicRefs.Value = {
+ var seenProtected = false
+ inc.instructions foreach { i =>
+ getAccess(i) match {
+ case Private => return Private
+ case Protected => seenProtected = true
+ case _ => ()
}
+ }
+ if (seenProtected) Protected else Public
}
- usesNonPublics += (callee -> callsNonPublic)
- }
+ iterate()
+ })
- if ((callsNonPublic == Private && (caller.symbol.owner != callee.symbol.owner))
- || callsNonPublic == Protected && !(caller.symbol.owner.tpe <:< callee.symbol.owner.tpe))
- return false;
+ def isIllegalStack = (stack.length > inc.minimumStack && inc.hasHandlers) || {
+ if (settings.debug.value)
+ log("method " + inc.sym + " is used on a non-empty stack with finalizer.")
- if (stack.length > (1 + callee.symbol.info.paramTypes.length) &&
- callee.exh != Nil) {
- if (settings.debug.value) log("method " + callee.symbol + " is used on a non-empty stack with finalizer.");
- false
- } else
- true
- }
+ false
+ }
- private def lookupImpl(meth: Symbol, clazz: Symbol): Symbol = {
- //println("\t\tlooking up " + meth + " in " + clazz.fullName + " meth.owner = " + meth.owner)
- if (meth.owner == clazz
- || clazz == definitions.NullClass
- || clazz == definitions.NothingClass) meth
- else {
- val implementingMethod = meth.overridingSymbol(clazz)
- if (implementingMethod != NoSymbol)
- implementingMethod
- else if (meth.owner.isTrait)
- meth
- else
- lookupImpl(meth, clazz.tpe.parents(0).typeSymbol)
+ canAccess(accessNeeded) && !isIllegalStack
}
- }
-
- /** Decide whether to inline or not. Heuristics:
- * - it's bad to make the caller larger (> SMALL_METHOD_SIZE)
- * if it was small
- * - it's bad to inline large methods
- * - it's good to inline higher order functions
- * - it's good to inline closures functions.
- * - it's bad (useless) to inline inside bridge methods
- */
- def shouldInline(mcaller: IMethod, mcallee: IMethod): Boolean = {
- val caller = new IMethodInfo(mcaller)
- val inc = new IMethodInfo(mcallee)
- if (caller.isBridge || inc.noinline || inc.hasBlocker)
- return false
-
- if (inc.inline)
- return true
+ /** Decide whether to inline or not. Heuristics:
+ * - it's bad to make the caller larger (> SMALL_METHOD_SIZE) if it was small
+ * - it's bad to inline large methods
+ * - it's good to inline higher order functions
+   *  - it's good to inline closure functions.
+ * - it's bad (useless) to inline inside bridge methods
+ */
+ private def neverInline = caller.isBridge || !inc.hasCode || inc.noinline
+ private def alwaysInline = inc.inline
- if (settings.debug.value)
- log("shouldInline: " + mcaller + " with " + mcallee)
+ def shouldInline: Boolean = !neverInline && (alwaysInline || {
+ if (settings.debug.value)
+ log("shouldInline: " + caller.m + " with " + inc.m)
+
+ var score = 0
+ if (inc.isSmall)
+ score += 1
+ if (caller.isSmall && isLargeSum) {
+ score -= 1
+ if (settings.debug.value)
+ log("shouldInline: score decreased to " + score + " because small " + caller + " would become large")
+ }
+ if (inc.isLarge)
+ score -= 1
+
+ if (inc.isMonadic)
+ score += 2
+ else if (inc.isHigherOrder)
+ score += 1
+ if (inc.isInClosure)
+ score += 2
+ if (inc.numInlined > 2)
+ score -= 2
- var score = 0
- if (inc.isSmall)
- score += 1
- if (caller.isSmall && (caller isLargeSum inc)) {
- score -= 1
if (settings.debug.value)
- log("shouldInline: score decreased to " + score + " because small " + caller + " would become large")
- }
- if (inc.isLarge)
- score -= 1
-
- if (inc.isMonadic)
- score += 2
- else if (inc.isHigherOrder)
- score += 1
- if (inc.isInClosure)
- score += 2
- if (inc.numInlined > 2)
- score -= 2
+ log("shouldInline(" + inc.m + ") score: " + score)
- if (settings.debug.value)
- log("shouldInline(" + mcallee + ") score: " + score)
+ score > 0
+ })
+ }
+
+ def lookupIMethod(meth: Symbol, receiver: Symbol): Option[IMethod] = {
+ def tryParent(sym: Symbol) = icodes icode sym flatMap (_ lookupMethod meth)
- score > 0
+ receiver.info.baseClasses.iterator map tryParent find (_.isDefined) getOrElse None
}
} /* class Inliner */
} /* class Inliners */
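
For intuition, the rewritten shouldInline reduces to a small additive score over properties of the caller/callee pair. A self-contained sketch of that scoring scheme (CalleeInfo and its flags are illustrative stand-ins for IMethodInfo's accessors, not compiler API):

  // Hypothetical model of the score-based decision in shouldInline.
  case class CalleeInfo(isSmall: Boolean, isLarge: Boolean, isMonadic: Boolean,
                        isHigherOrder: Boolean, isInClosure: Boolean, numInlined: Int)

  def score(callerIsSmall: Boolean, callerWouldBecomeLarge: Boolean, c: CalleeInfo): Int = {
    var s = 0
    if (c.isSmall) s += 1                                // small bodies are cheap to copy
    if (callerIsSmall && callerWouldBecomeLarge) s -= 1  // don't bloat a small caller
    if (c.isLarge) s -= 1
    if (c.isMonadic) s += 2                              // map/flatMap/foreach-like callees
    else if (c.isHigherOrder) s += 1
    if (c.isInClosure) s += 2
    if (c.numInlined > 2) s -= 2                         // diminishing returns
    s
  }

  // Mirrors `score > 0` above: inline only on a strictly positive score.
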
diff --git a/src/compiler/scala/tools/nsc/doc/DocFactory.scala b/src/compiler/scala/tools/nsc/doc/DocFactory.scala
index 42aab918f7..066f800f79 100644
--- a/src/compiler/scala/tools/nsc/doc/DocFactory.scala
+++ b/src/compiler/scala/tools/nsc/doc/DocFactory.scala
@@ -54,7 +54,7 @@ class DocFactory(val reporter: Reporter, val settings: doc.Settings) { processor
val modelFactory = (new model.ModelFactory(compiler, settings) with model.comment.CommentFactory)
val docModel = modelFactory.makeModel
println("model contains " + modelFactory.templatesCount + " documentable templates")
- (new html.HtmlFactory(docModel)) generate docModel
+ (new html.HtmlFactory(docModel)).generate
}
}
diff --git a/src/compiler/scala/tools/nsc/doc/html/HtmlFactory.scala b/src/compiler/scala/tools/nsc/doc/html/HtmlFactory.scala
index 348dc4b26c..94b1f38b65 100644
--- a/src/compiler/scala/tools/nsc/doc/html/HtmlFactory.scala
+++ b/src/compiler/scala/tools/nsc/doc/html/HtmlFactory.scala
@@ -26,7 +26,7 @@ class HtmlFactory(val universe: Universe) {
/** Generates the Scaladoc site for a model into the site root. A scaladoc site is a set of HTML and related files
* that document a model extracted from a compiler run.
    * The model generated is the one provided when the factory was constructed. */
- def generate(universe: Universe): Unit = {
+  def generate: Unit = {
def copyResource(subPath: String) {
val bytes = new Streamable.Bytes {
diff --git a/src/compiler/scala/tools/nsc/doc/html/HtmlPage.scala b/src/compiler/scala/tools/nsc/doc/html/HtmlPage.scala
index 66e2ba2260..014dee3b20 100644
--- a/src/compiler/scala/tools/nsc/doc/html/HtmlPage.scala
+++ b/src/compiler/scala/tools/nsc/doc/html/HtmlPage.scala
@@ -47,7 +47,6 @@ abstract class HtmlPage { thisPage =>
<head>
<title>{ title }</title>
<meta http-equiv="content-type" content={ "text/html; charset=" + site.encoding }/>
- <script type="text/javascript" src={ relativeLinkTo{List("jquery.js", "lib")} }></script>
{ headers }
</head>
{ body }
@@ -201,6 +200,12 @@ abstract class HtmlPage { thisPage =>
xml.Text(string)
}
+ def typesToHtml(tpess: List[model.TypeEntity], hasLinks: Boolean, sep: NodeSeq): NodeSeq = tpess match {
+ case Nil => NodeSeq.Empty
+ case tpe :: Nil => typeToHtml(tpe, hasLinks)
+ case tpe :: tpes => typeToHtml(tpe, hasLinks) ++ sep ++ typesToHtml(tpes, hasLinks, sep)
+ }
+
/** Returns the HTML code that represents the template in `tpl` as a hyperlinked name. */
def templateToHtml(tpl: TemplateEntity) = tpl match {
case dTpl: DocTemplateEntity =>
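
The new typesToHtml threads the separator between consecutive renderings by structural recursion on the list; the same pattern over plain strings (interleave is an illustrative stand-in):

  // Sketch of the separator-interleaving recursion used by typesToHtml.
  def interleave(xs: List[String], sep: String): String = xs match {
    case Nil       => ""
    case x :: Nil  => x
    case x :: rest => x + sep + interleave(rest, sep)
  }

  // interleave(List("A", "B", "C"), ", ")  ==  "A, B, C"
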
diff --git a/src/compiler/scala/tools/nsc/doc/html/page/Index.scala b/src/compiler/scala/tools/nsc/doc/html/page/Index.scala
index 784a92f1ff..8675058c24 100644
--- a/src/compiler/scala/tools/nsc/doc/html/page/Index.scala
+++ b/src/compiler/scala/tools/nsc/doc/html/page/Index.scala
@@ -26,17 +26,18 @@ class Index(universe: Universe) extends HtmlPage {
def headers =
<xml:group>
<link href={ relativeLinkTo(List("index.css", "lib")) } media="screen" type="text/css" rel="stylesheet"/>
- <script type="text/javascript" src={ relativeLinkTo{List("index.js", "lib")} }></script>
+ <script type="text/javascript" src={ relativeLinkTo{List("jquery.js", "lib")} }></script>
<script type="text/javascript" src={ relativeLinkTo{List("scheduler.js", "lib")} }></script>
+ <script type="text/javascript" src={ relativeLinkTo{List("index.js", "lib")} }></script>
</xml:group>
def body =
<body>
<div id="library">
- <img class='class icon' src='lib/class.png'/>
- <img class='trait icon' src='lib/trait.png'/>
- <img class='object icon' src='lib/object.png'/>
- <img class='package icon' src='lib/package.png'/>
+ <img class='class icon' width="13" height="13" src='lib/class.png'/>
+ <img class='trait icon' width="13" height="13" src='lib/trait.png'/>
+ <img class='object icon' width="13" height="13" src='lib/object.png'/>
+ <img class='package icon' width="13" height="13" src='lib/package.png'/>
</div>
<div id="browser">
<div id="filter"></div>
diff --git a/src/compiler/scala/tools/nsc/doc/html/page/Template.scala b/src/compiler/scala/tools/nsc/doc/html/page/Template.scala
index 72cfd9c662..06a2aaee6b 100644
--- a/src/compiler/scala/tools/nsc/doc/html/page/Template.scala
+++ b/src/compiler/scala/tools/nsc/doc/html/page/Template.scala
@@ -23,8 +23,9 @@ class Template(tpl: DocTemplateEntity) extends HtmlPage {
val headers =
<xml:group>
<link href={ relativeLinkTo(List("template.css", "lib")) } media="screen" type="text/css" rel="stylesheet"/>
- <script type="text/javascript" src={ relativeLinkTo{List("template.js", "lib")} }></script>
+ <script type="text/javascript" src={ relativeLinkTo{List("jquery.js", "lib")} }></script>
<script type="text/javascript" src={ relativeLinkTo{List("tools.tooltip.js", "lib")} }></script>
+ <script type="text/javascript" src={ relativeLinkTo{List("template.js", "lib")} }></script>
</xml:group>
val valueMembers =
@@ -58,17 +59,32 @@ class Template(tpl: DocTemplateEntity) extends HtmlPage {
<div id="template">
<div id="mbrsel">
+ <div id='textfilter'><span class='pre'/><input type='text' accesskey='/'/><span class='post'/></div>
+ { if (tpl.linearization.isEmpty) NodeSeq.Empty else
+ <div id="order">
+ <span class="filtertype">Ordering</span>
+ <ol><li class="alpha in">Alphabetic</li><li class="inherit out">By inheritance</li></ol>
+ </div>
+ }
{ if (tpl.linearization.isEmpty) NodeSeq.Empty else
<div id="ancestors">
<span class="filtertype">Inherited</span>
<ol><li class="hideall">Hide All</li><li class="showall">Show all</li></ol>
- <ol id="linearization">{ tpl.linearization map { wte => <li class="in" name={ wte.qualifiedName }>{ wte.name }</li> } }</ol>
+ <ol id="linearization">{ (tpl :: tpl.linearization) map { wte => <li class="in" name={ wte.qualifiedName }>{ wte.name }</li> } }</ol>
</div>
}
- <div id="visbl">
- <span class="filtertype">Visibility</span>
- <ol><li class="public in">Public</li><li class="all out">All</li></ol>
- </div>
+ {
+ <div id="visbl">
+ <span class="filtertype">Visibility</span>
+ <ol><li class="public in">Public</li><li class="all out">All</li></ol>
+ </div>
+ }
+ {
+ <div id="impl">
+ <span class="filtertype">Impl.</span>
+ <ol><li class="concrete in">Concrete</li><li class="abstract in">Abstract</li></ol>
+ </div>
+ }
</div>
{ if (constructors.isEmpty) NodeSeq.Empty else
@@ -79,28 +95,66 @@ class Template(tpl: DocTemplateEntity) extends HtmlPage {
}
{ if (typeMembers.isEmpty) NodeSeq.Empty else
- <div id="types" class="members">
+ <div id="types" class="types members">
<h3>Type Members</h3>
<ol>{ typeMembers map (memberToHtml(_)) }</ol>
</div>
}
{ if (valueMembers.isEmpty) NodeSeq.Empty else
- <div id="values" class="members">
+ <div id="values" class="values members">
<h3>Value Members</h3>
<ol>{ valueMembers map (memberToHtml(_)) }</ol>
</div>
}
+ {
+ NodeSeq fromSeq (for (parent <- tpl.linearization) yield
+ <div class="parent" name={ parent.qualifiedName }>
+ <h3>Inherited from { templateToHtml(parent) }</h3>
+ </div>
+ )
+ }
+
</div>
<div id="tooltip" ></div>
</body>
+ def boundsToString(hi: Option[TypeEntity], lo: Option[TypeEntity]): String = {
+ def bound0(bnd: Option[TypeEntity], pre: String): String = bnd match {
+ case None => ""
+ case Some(tpe) => pre ++ tpe.toString
+ }
+ bound0(hi, "<:") ++ bound0(lo, ">:")
+ }
+
+ def tparamsToString(tpss: List[TypeParam]): String =
+ if (tpss.isEmpty) "" else {
+ def tparam0(tp: TypeParam): String =
+ tp.variance + tp.name + boundsToString(tp.hi, tp.lo)
+ def tparams0(tpss: List[TypeParam]): String = (tpss: @unchecked) match {
+ case tp :: Nil => tparam0(tp)
+ case tp :: tps => tparam0(tp) ++ ", " ++ tparams0(tps)
+ }
+ "[" + tparams0(tpss) + "]"
+ }
+
+  def defParamsToString(d: MemberEntity with Def): String = {
+    val namess = for (ps <- d.valueParams) yield
+      for (p <- ps) yield p.resultType.name
+    tparamsToString(d.typeParams) + namess.foldLeft("") { (s, names) => s + names.mkString("(", ",", ")") }
+ }
+
def memberToHtml(mbr: MemberEntity): NodeSeq = {
- val attributes: List[comment.Body] = Nil
- <li name={ mbr.definitionName } visbl={ if (mbr.visibility.isProtected) "prt" else "pub" }>
+ val defParamsString = mbr match {
+      case d: MemberEntity with Def => defParamsToString(d)
+ case _ => ""
+ }
+ <li name={ mbr.definitionName } visbl={ if (mbr.visibility.isProtected) "prt" else "pub" }
+ data-isabs={ mbr.isAbstract.toString }>
+      <a id={ mbr.name + defParamsString + ":" + mbr.resultType.name }/>
{ signature(mbr, false) }
{ memberToCommentHtml(mbr, false) }
</li>
@@ -205,6 +259,14 @@ class Template(tpl: DocTemplateEntity) extends HtmlPage {
}
} ++
{ mbr match {
+ case dtpl: DocTemplateEntity if (isSelf && !dtpl.linearizationTypes.isEmpty) =>
+ <div class="block">
+ linear super types: { typesToHtml(dtpl.linearizationTypes, hasLinks = true, sep = xml.Text(", ")) }
+ </div>
+ case _ => NodeSeq.Empty
+ }
+ } ++
+ { mbr match {
case dtpl: DocTemplateEntity if (isSelf && !dtpl.subClasses.isEmpty) =>
<div class="block">
known subclasses: { templatesToHtml(dtpl.subClasses, xml.Text(", ")) }
@@ -213,6 +275,14 @@ class Template(tpl: DocTemplateEntity) extends HtmlPage {
}
} ++
{ mbr match {
+ case dtpl: DocTemplateEntity if (isSelf && !dtpl.selfType.isEmpty) =>
+ <div class="block">
+ self type: { typeToHtml(dtpl.selfType.get, hasLinks = true) }
+ </div>
+ case _ => NodeSeq.Empty
+ }
+ } ++
+ { mbr match {
case dtpl: DocTemplateEntity if (isSelf && dtpl.sourceUrl.isDefined) =>
val sourceUrl = tpl.sourceUrl.get
<div class="block">
@@ -279,7 +349,7 @@ class Template(tpl: DocTemplateEntity) extends HtmlPage {
case None => NodeSeq.Empty
case Some(tpe) => xml.Text(pre) ++ typeToHtml(tpe, hasLinks)
}
- bound0(hi, " <: ") ++ bound0(lo, " >: ")
+ bound0(lo, " >: ") ++ bound0(hi, " <: ")
}
def visibility(mbr: MemberEntity): Option[comment.Paragraph] = {
@@ -309,23 +379,25 @@ class Template(tpl: DocTemplateEntity) extends HtmlPage {
<xml:group>
<span class="kind">{ kindToString(mbr) }</span>
<span class="symbol">
- <span class={"name" + (if (mbr.deprecation.isDefined) " deprecated" else "") }>{ if (mbr.isConstructor) tpl.name else mbr.name }</span>{
- def tparamsToHtml(tpss: List[TypeParam]): NodeSeq =
- if (tpss.isEmpty) NodeSeq.Empty else {
- def tparam0(tp: TypeParam): NodeSeq =
- <span name={ tp.name }>{ tp.variance + tp.name }{ boundsToHtml(tp.hi, tp.lo, hasLinks)}</span>
- def tparams0(tpss: List[TypeParam]): NodeSeq = (tpss: @unchecked) match {
- case tp :: Nil => tparam0(tp)
- case tp :: tps => tparam0(tp) ++ Text(", ") ++ tparams0(tps)
+ <span class={"name" + (if (mbr.deprecation.isDefined) " deprecated" else "") }>{ if (mbr.isConstructor) tpl.name else mbr.name }</span>
+ {
+ def tparamsToHtml(mbr: Entity): NodeSeq = mbr match {
+ case hk: HigherKinded =>
+ val tpss = hk.typeParams
+ if (tpss.isEmpty) NodeSeq.Empty else {
+ def tparam0(tp: TypeParam): NodeSeq =
+ <span name={ tp.name }>{ tp.variance + tp.name }{ tparamsToHtml(tp) }{ boundsToHtml(tp.hi, tp.lo, hasLinks)}</span>
+ def tparams0(tpss: List[TypeParam]): NodeSeq = (tpss: @unchecked) match {
+ case tp :: Nil => tparam0(tp)
+ case tp :: tps => tparam0(tp) ++ Text(", ") ++ tparams0(tps)
+ }
+ <span class="tparams">[{ tparams0(tpss) }]</span>
}
- <span class="tparams">[{ tparams0(tpss) }]</span>
- }
- mbr match {
- case trt: Trait => tparamsToHtml(trt.typeParams)
- case dfe: Def => tparamsToHtml(dfe.typeParams)
- case _ => NodeSeq.Empty
+ case _ => NodeSeq.Empty
}
- }{
+ tparamsToHtml(mbr)
+ }
+ {
def paramsToHtml(vlsss: List[List[ValueParam]]): NodeSeq = {
def param0(vl: ValueParam): NodeSeq =
      // note the }{ on the following lines: they are necessary to avoid undesired whitespace in the output
@@ -353,7 +425,8 @@ class Template(tpl: DocTemplateEntity) extends HtmlPage {
case dfe: Def => paramsToHtml(dfe.valueParams)
case _ => NodeSeq.Empty
}
- }{
+ }
+ {
mbr match {
case tpl: DocTemplateEntity if (!tpl.isPackage) =>
tpl.parentType match {
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_left.png b/src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_left.png
index f4cca45dc0..4127dbf3c2 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_left.png
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_left.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_left.psd b/src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_left.psd
index 9fb3991b14..4d740f3b17 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_left.psd
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_left.psd
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_right.png b/src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_right.png
index 1fda869beb..942736e44d 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_right.png
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_right.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_right.psd b/src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_right.psd
index 860833d2b5..63a1ae8349 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_right.psd
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_right.psd
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/index.css b/src/compiler/scala/tools/nsc/doc/html/resource/lib/index.css
index fc3f6d4c29..0d30662da6 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/index.css
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/index.css
@@ -47,30 +47,39 @@ h1 {
#textfilter {
position: relative;
display: block;
+ height: 20px;
+ margin-bottom: 5px;
}
-#textfilter:before {
+#textfilter > .pre {
display: block;
- content: url("filter_box_left.png");
+ position: absolute;
+ top: 0;
+ left: 0;
+ height: 20px;
+ width: 20px;
+ background: url("filter_box_left.png");
}
-#textfilter:after {
+#textfilter > .post {
display: block;
- position: absolute;
- top: 0;
- right: 0;
- content: url("filter_box_right.png");
+ position: absolute;
+ top: 0;
+ right: 0;
+ height: 20px;
+ width: 20px;
+ background: url("filter_box_right.png");
}
#textfilter input {
display: block;
- position: absolute;
- top: 0;
- left: 32px;
- right: 16px;
- height: 22px;
- width: 232px;
- padding: 5px;
+ position: absolute;
+ top: 0;
+ right: 20px;
+ left: 20px;
+ height: 16px;
+ width: 246px;
+ padding: 2px;
font-weight: bold;
color: #993300;
background-color: white;
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/index.js b/src/compiler/scala/tools/nsc/doc/html/resource/lib/index.js
index e7cf484fa2..4d361a5c5a 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/index.js
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/index.js
@@ -95,7 +95,7 @@ function prepareEntityList() {
/* Configures the text filter */
function configureTextFilter() {
scheduler.add("init", function() {
- $("#filter").append("<div id='textfilter'><input type='text' accesskey='/'/></div>");
+ $("#filter").append("<div id='textfilter'><span class='pre'/><input type='text' accesskey='/'/><span class='post'/></div>");
var input = $("#textfilter > input");
resizeFilterBlock();
input.bind("keyup", function(event) {
@@ -106,6 +106,12 @@ function configureTextFilter() {
});
input.focus(function(event) { input.select(); });
});
+ scheduler.add("init", function() {
+ $("#textfilter > .post").click(function(){
+ $("#textfilter > input").attr("value", "");
+ textFilter();
+ });
+ });
}
// Filters all focused templates and packages. This function should be made less-blocking.
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/template.css b/src/compiler/scala/tools/nsc/doc/html/resource/lib/template.css
index 0c17d9fa2a..8399a6abe8 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/template.css
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/template.css
@@ -15,7 +15,6 @@ body {
}
a {
- cursor: pointer;
text-decoration: underline;
color: #69481D; /* brown */
}
@@ -85,7 +84,7 @@ a:hover {
padding-left: 8px;
}
-#values > h3 {
+#template .values > h3 {
color: white;
padding: 4px;
background-color: #7996AC;
@@ -93,7 +92,7 @@ a:hover {
font-weight: bold;
}
-#types > h3 {
+#template .types > h3 {
padding: 4px;
color: white;
font-weight: bold;
@@ -109,6 +108,18 @@ a:hover {
background-color: #333;
}
+#template > div.parent > h3 {
+ color: white;
+ padding: 4px;
+ background-color: #385E1A;
+ font-size: 12pt;
+ font-weight: bold;
+}
+
+#template > div.parent > h3 > a {
+ color: white;
+}
+
/* Member cells */
div.members > ol {
@@ -181,11 +192,11 @@ div.members > ol > li {
font-style: italic;
}
-#values .signature .name {
+#template .values .signature .name {
color: blue;
}
-#types .signature .name {
+#template .types .signature .name {
color: green;
}
@@ -299,14 +310,17 @@ p.comment {
p.shortcomment {
display: block;
margin-left: 8.7em;
- cursor: help;
+ cursor: pointer;
}
div.fullcomment {
- display: block;
margin: 10px 0 10px 0;
}
+#types div.fullcomment, #values div.fullcomment {
+  display: none;
+}
+
#template div.fullcomment {
margin: 6px 0 6px 8.7em;
}
@@ -364,6 +378,46 @@ div.fullcomment dl.paramcmts > dd + dt + dd {
/* Members filter tool */
+#textfilter {
+ position: relative;
+ display: block;
+ height: 20px;
+ margin-bottom: 5px;
+}
+
+#textfilter > .pre {
+ display: block;
+ position: absolute;
+ top: 0;
+ left: 0;
+ height: 20px;
+ width: 20px;
+ background: url("filter_box_left.png");
+}
+
+#textfilter > .post {
+ display: block;
+ position: absolute;
+ top: 0;
+ right: 0;
+ height: 20px;
+ width: 20px;
+ background: url("filter_box_right.png");
+}
+
+#textfilter input {
+ display: block;
+ position: absolute;
+ top: 0;
+ right: 20px;
+ left: 20px;
+ height: 16px;
+ padding: 2px;
+ font-weight: bold;
+ color: #993300;
+ background-color: white;
+}
+
#mbrsel {
padding: 4px;
background-color: #B78E99; /* grayish pink */
@@ -392,7 +446,7 @@ div.fullcomment dl.paramcmts > dd + dt + dd {
padding: 4px 8px 4px 8px;
background-color: white;
display: inline-block;
- cursor: crosshair;
+ cursor: pointer;
}
#mbrsel > div > ol > li.in {
@@ -413,4 +467,4 @@ div.fullcomment dl.paramcmts > dd + dt + dd {
#mbrsel .showall {
color: #4C4C4C;
font-weight: bold;
-} \ No newline at end of file
+}
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/template.js b/src/compiler/scala/tools/nsc/doc/html/resource/lib/template.js
index a13c502ba5..6f01e56ddc 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/template.js
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/template.js
@@ -9,6 +9,20 @@ $(document).ready(function(){
prefilters.removeClass("in");
prefilters.addClass("out");
filter();
+
+ var input = $("#textfilter > input");
+ input.bind("keyup", function(event) {
+ if (event.keyCode == 27) { // escape
+ input.attr("value", "");
+ }
+ filter();
+ });
+ input.focus(function(event) { input.select(); });
+ $("#textfilter > .post").click(function(){
+ $("#textfilter > input").attr("value", "");
+ filter();
+ });
+
$("#ancestors > ol > li").click(function(){
if ($(this).hasClass("in")) {
$(this).removeClass("in");
@@ -47,6 +61,38 @@ $(document).ready(function(){
filter();
};
});
+ $("#impl > ol > li.concrete").click(function() {
+ if ($(this).hasClass("out")) {
+ $(this).removeClass("out").addClass("in");
+ $("li[data-isabs='false']").show();
+ } else {
+ $(this).removeClass("in").addClass("out");
+ $("li[data-isabs='false']").hide();
+ }
+ });
+ $("#impl > ol > li.abstract").click(function() {
+ if ($(this).hasClass("out")) {
+ $(this).removeClass("out").addClass("in");
+ $("li[data-isabs='true']").show();
+ } else {
+ $(this).removeClass("in").addClass("out");
+ $("li[data-isabs='true']").hide();
+ }
+ });
+ $("#order > ol > li.alpha").click(function() {
+ if ($(this).hasClass("out")) {
+ $(this).removeClass("out").addClass("in");
+ $("#order > ol > li.inherit").removeClass("in").addClass("out");
+ orderAlpha();
+ };
+  });
+ $("#order > ol > li.inherit").click(function() {
+ if ($(this).hasClass("out")) {
+ $(this).removeClass("out").addClass("in");
+ $("#order > ol > li.alpha").removeClass("in").addClass("out");
+ orderInherit();
+ };
+ });
//http://flowplayer.org/tools/tooltip.html
$(".extype").tooltip({
tip: "#tooltip",
@@ -55,7 +101,6 @@ $(document).ready(function(){
$(this.getTip()).text(this.getTrigger().attr("name"));
}
});
- $("#template div.fullcomment").hide();
var docAllSigs = $("#template .signature");
function commentShowFct(fullComment){
var vis = $(":visible", fullComment);
@@ -69,7 +114,7 @@ $(document).ready(function(){
var docShowSigs = docAllSigs.filter(function(){
return $("+ div.fullcomment", $(this)).length > 0;
});
- docShowSigs.css("cursor", "help");
+ docShowSigs.css("cursor", "pointer");
docShowSigs.click(function(){
commentShowFct($("+ div.fullcomment", $(this)));
});
@@ -88,18 +133,89 @@ $(document).ready(function(){
var docToggleSigs = docAllSigs.filter(function(){
return $("+ p.shortcomment", $(this)).length > 0;
});
- docToggleSigs.css("cursor", "help");
+ docToggleSigs.css("cursor", "pointer");
docToggleSigs.click(function(){
commentToggleFct($("+ p.shortcomment", $(this)));
});
$("p.shortcomment").click(function(){
commentToggleFct($(this));
});
+ initInherit();
});
+function orderAlpha() {
+ $("#template > div.parent").hide();
+ $("#ancestors").show();
+ filter();
+};
+
+function orderInherit() {
+ $("#template > div.parent").show();
+ $("#ancestors").hide();
+ filter();
+};
+
+/** Prepares the DOM for inheritance-based display. To do so it will:
+ *  - hide all statically-generated parent headings;
+ *  - copy all members from the value and type member lists (flat members) to corresponding lists nested below the
+ *    parent headings (inheritance-grouped members);
+ *  - initialise a control variable used by the filter method to decide whether filtering happens on flat members
+ *    or on inheritance-grouped members. */
+function initInherit() {
+ // parents is a map from fully-qualified names to the DOM node of parent headings.
+ var parents = new Object();
+ $("#template > div.parent").each(function(){
+ parents[$(this).attr("name")] = $(this);
+ });
+ //
+ $("#types > ol > li").each(function(){
+ var qualName = $(this).attr("name");
+ var owner = qualName.slice(0, qualName.indexOf("#"));
+ var name = qualName.slice(qualName.indexOf("#") + 1);
+ var parent = parents[owner];
+ if (parent != undefined) {
+ var types = $("> .types > ol", parent);
+ if (types.length == 0) {
+ parent.append("<div class='types members'><h3>Type Members</h3><ol></ol></div>");
+ types = $("> .types > ol", parent);
+ }
+ types.append($(this).clone());
+ }
+ });
+ $("#values > ol > li").each(function(){
+ var qualName = $(this).attr("name");
+ var owner = qualName.slice(0, qualName.indexOf("#"));
+ var name = qualName.slice(qualName.indexOf("#") + 1);
+ var parent = parents[owner];
+ if (parent != undefined) {
+ var values = $("> .values > ol", parent);
+ if (values.length == 0) {
+ parent.append("<div class='values members'><h3>Value Members</h3><ol></ol></div>");
+ values = $("> .values > ol", parent);
+ }
+ values.append($(this).clone());
+ }
+ });
+ $("#template > div.parent").each(function(){
+ if ($("> div.members", this).length == 0) { $(this).remove(); };
+ });
+ $("#template > div.parent").each(function(){
+ $(this).hide();
+ });
+};
+
function filter() {
+ var query = $("#textfilter > input").attr("value").toLowerCase();
+ var queryRegExp = new RegExp(query, "i");
+  var inheritHides = null;
+ if ($("#order > ol > li.inherit").hasClass("in")) {
+ inheritHides = $("#linearization > li:gt(0)");
+ }
+ else {
+ inheritHides = $("#linearization > li.out");
+ }
var outOwners =
- $("#mbrsel ol#linearization > li.out").map(function(){
+ inheritHides.map(function(){
var r = $(this).attr("name");
return r
}).get();
@@ -107,24 +223,36 @@ function filter() {
$(".members > ol > li").each(function(){
var vis1 = $(this).attr("visbl");
var qualName1 = $(this).attr("name");
- var owner1 = qualName1.slice(0, qualName1.indexOf("#"));
//var name1 = qualName1.slice(qualName1.indexOf("#") + 1);
var showByOwned = true;
- for (out in outOwners) {
- if (outOwners[out] == owner1) {
- showByOwned = false;
+ if ($(this).parents(".parent").length == 0) {
+ // owner filtering must not happen in "inherited from" member lists
+ var owner1 = qualName1.slice(0, qualName1.indexOf("#"));
+ for (out in outOwners) {
+ if (outOwners[out] == owner1) {
+ showByOwned = false;
+ };
};
};
- var showByVis = true
+ var showByVis = true;
if (vis1 == "prt") {
showByVis = prtVisbl;
};
- if (showByOwned && showByVis) {
+ var showByName = true;
+ if (query != "") {
+ var content = $(this).attr("name") + $("> .fullcomment .cmt", this).text();
+ showByName = queryRegExp.test(content);
+ };
+ if (showByOwned && showByVis && showByName) {
$(this).show();
}
else {
$(this).hide();
};
});
+ $(".members").each(function(){
+ $(this).show();
+ if ($(" > ol > li:visible", this).length == 0) { $(this).hide(); }
+ });
return false
};
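
The reworked filter() now ANDs three independent show-tests per member: owner (the "Inherited" list), visibility, and the text query matched against the member name and comment text. The conjunction in miniature, sketched in Scala with an illustrative Member stand-in:

  case class Member(owner: String, isProtected: Boolean, text: String)

  def visible(m: Member, hiddenOwners: Set[String],
              showProtected: Boolean, query: String): Boolean =
    !hiddenOwners(m.owner) &&                      // owner filter
    (showProtected || !m.isProtected) &&           // visibility filter
    (query.isEmpty || (m.text.toLowerCase contains query.toLowerCase))  // text filter
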
diff --git a/src/compiler/scala/tools/nsc/doc/model/Entity.scala b/src/compiler/scala/tools/nsc/doc/model/Entity.scala
index b5f12d312d..8c47fd7fbb 100644
--- a/src/compiler/scala/tools/nsc/doc/model/Entity.scala
+++ b/src/compiler/scala/tools/nsc/doc/model/Entity.scala
@@ -19,11 +19,12 @@ trait Entity {
override def toString = qualifiedName
}
-
-/** A class, trait, object or package. A package is represented as an instance of the `Package` subclass. A class,
- * trait, object or package may be directly an instance of `WeakTemplateEntity` if it is not ''documentable'' (that
- * is, if there is no documentation page for it in the current site), otherwise, it will be represented as an instance
- * of the `TemplateEntity` subclass. */
+/** A class, trait, object or package. A package is represented as an instance
+ * of the `Package` subclass. A class, trait, object or package may be
+ * directly an instance of `WeakTemplateEntity` if it is not ''documentable''
+ * (that is, if there is no documentation page for it in the current site),
+ * otherwise, it will be represented as an instance of the `TemplateEntity`
+ * subclass. */
trait TemplateEntity extends Entity {
def isPackage: Boolean
def isRootPackage: Boolean
@@ -31,6 +32,7 @@ trait TemplateEntity extends Entity {
def isClass: Boolean
def isObject: Boolean
def isDocTemplate: Boolean
+  def selfType: Option[TypeEntity]
}
trait NoDocTemplate extends TemplateEntity
@@ -51,22 +53,27 @@ trait MemberEntity extends Entity {
def isLazyVal: Boolean
def isVar: Boolean
def isImplicit: Boolean
+ def isAbstract: Boolean
def isConstructor: Boolean
def isAliasType: Boolean
def isAbstractType: Boolean
def isTemplate: Boolean
}
-/** A ''documentable'' class, trait or object (that is, a documentation page will be generated for it in the current
- * site). */
+trait HigherKinded extends Entity {
+ def typeParams: List[TypeParam]
+}
+
+/** A ''documentable'' class, trait or object (that is, a documentation page
+ * will be generated for it in the current site). */
trait DocTemplateEntity extends TemplateEntity with MemberEntity {
def toRoot: List[DocTemplateEntity]
def inSource: Option[(io.AbstractFile, Int)]
def sourceUrl: Option[java.net.URL]
- def typeParams: List[TypeParam]
def parentType: Option[TypeEntity]
def parentTemplates: List[TemplateEntity]
def linearization: List[TemplateEntity]
+ def linearizationTypes: List[TypeEntity]
def subClasses: List[DocTemplateEntity]
def members: List[MemberEntity]
def templates: List[DocTemplateEntity]
@@ -93,12 +100,12 @@ trait DocTemplateEntity extends TemplateEntity with MemberEntity {
}
/** A ''documentable'' trait. */
-trait Trait extends DocTemplateEntity {
+trait Trait extends DocTemplateEntity with HigherKinded {
def valueParams : List[List[ValueParam]]
}
/** A ''documentable'' class. */
-trait Class extends Trait {
+trait Class extends Trait with HigherKinded {
def primaryConstructor: Option[Constructor]
def constructors: List[Constructor]
def isCaseClass: Boolean
@@ -107,7 +114,8 @@ trait Class extends Trait {
/** A ''documentable'' object. */
trait Object extends DocTemplateEntity
-/** A package that contains at least one ''documentable'' class, trait, object or package. */
+/** A package that contains at least one ''documentable'' class, trait,
+ * object or package. */
trait Package extends Object {
def inTemplate: Package
def toRoot: List[Package]
@@ -122,8 +130,7 @@ trait NonTemplateMemberEntity extends MemberEntity {
}
/** A method (`def`) of a ''documentable'' class, trait or object. */
-trait Def extends NonTemplateMemberEntity {
- def typeParams: List[TypeParam]
+trait Def extends NonTemplateMemberEntity with HigherKinded {
def valueParams : List[List[ValueParam]]
}
@@ -132,30 +139,28 @@ trait Constructor extends NonTemplateMemberEntity {
def valueParams : List[List[ValueParam]]
}
-/** A value (`val`), lazy val (`lazy val`) or variable (`var`) of a ''documentable'' class, trait or object. */
+/** A value (`val`), lazy val (`lazy val`) or variable (`var`) of a
+ * ''documentable'' class, trait or object. */
trait Val extends NonTemplateMemberEntity
/** An abstract type of a ''documentable'' class, trait or object. */
-trait AbstractType extends NonTemplateMemberEntity {
- // TODO: typeParams
+trait AbstractType extends NonTemplateMemberEntity with HigherKinded {
def lo: Option[TypeEntity]
def hi: Option[TypeEntity]
}
/** An abstract type of a ''documentable'' class, trait or object. */
-trait AliasType extends NonTemplateMemberEntity {
- // TODO: typeParams
+trait AliasType extends NonTemplateMemberEntity with HigherKinded {
def alias: TypeEntity
}
trait ParameterEntity extends Entity {
- def inTemplate: DocTemplateEntity
def isTypeParam: Boolean
def isValueParam: Boolean
}
/** A type parameter to a class or trait or to a method. */
-trait TypeParam extends ParameterEntity {
+trait TypeParam extends ParameterEntity with HigherKinded {
def variance: String
def lo: Option[TypeEntity]
def hi: Option[TypeEntity]
@@ -175,26 +180,26 @@ sealed trait Visibility {
}
/** The visibility of `private[this]` members. */
-case class PrivateInInstance extends Visibility
+case class PrivateInInstance() extends Visibility
/** The visibility of `protected[this]` members. */
-case class ProtectedInInstance extends Visibility {
+case class ProtectedInInstance() extends Visibility {
override def isProtected = true
}
-/** The visibility of `private[owner]` members. An unqualified private members is encoded with `owner` equal to the
- * members's `inTemplate`. */
+/** The visibility of `private[owner]` members. An unqualified private member
+ * is encoded with `owner` equal to the member's `inTemplate`. */
case class PrivateInTemplate(owner: TemplateEntity) extends Visibility
-/** The visibility of `protected[owner]` members. An unqualified protected members is encoded with `owner` equal to the
- * members's `inTemplate`.
- * Note that whilst the member is visible in any template owned by `owner`, it is only visible in subclasses of the
- * member's `inTemplate`. */
+/** The visibility of `protected[owner]` members. An unqualified protected
+ * member is encoded with `owner` equal to the member's `inTemplate`.
+ * Note that whilst the member is visible in any template owned by `owner`,
+ * it is only visible in subclasses of the member's `inTemplate`. */
case class ProtectedInTemplate(owner: TemplateEntity) extends Visibility {
override def isProtected = true
}
/** The visibility of public members. */
-case class Public extends Visibility {
+case class Public() extends Visibility {
override def isPublic = true
}
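
The Visibility changes also add explicit empty parameter lists (`Public()` and friends), so the case classes are constructed and matched with ordinary `C()` syntax. A simplified stand-in for how such values are consumed (mirroring the "pub"/"prt" attribute in Template.scala; not the model API):

  sealed trait Vis { def isProtected: Boolean = false }
  case class PublicVis() extends Vis
  case class ProtectedVis() extends Vis { override def isProtected = true }

  def visblAttr(v: Vis): String = v match {
    case ProtectedVis() => "prt"
    case PublicVis()    => "pub"
  }
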
diff --git a/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala b/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala
index e2a25d7ea4..218d3158f9 100644
--- a/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala
+++ b/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala
@@ -49,7 +49,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { thisFactory
/** Provides a default implementation for instances of the `Entity` type. */
abstract class EntityImpl(val sym: Symbol, inTpl: => TemplateImpl) extends Entity {
val name = optimize(sym.nameString)
- def inTemplate = inTpl
+ def inTemplate: TemplateImpl = inTpl
def toRoot: List[EntityImpl] = this :: inTpl.toRoot
def qualifiedName = name
}
@@ -57,12 +57,14 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { thisFactory
/** Provides a default implementation for instances of the `WeakTemplateEntity` type. It must be instantiated as a
* `SymbolicEntity` to access the compiler symbol that underlies the entity. */
trait TemplateImpl extends EntityImpl with TemplateEntity {
- override def qualifiedName = if (inTemplate.isRootPackage) name else optimize(inTemplate.qualifiedName + "." + name)
+ override def qualifiedName: String =
+ if (inTemplate.isRootPackage) name else optimize(inTemplate.qualifiedName + "." + name)
def isPackage = sym.isPackage
def isTrait = sym.isTrait
def isClass = sym.isClass && !sym.isTrait
def isObject = sym.isModule && !sym.isPackage
def isRootPackage = false
+ def selfType = if (sym.thisSym eq sym) None else Some(makeType(sym.thisSym.typeOfThis, this))
}
/** Provides a default implementation for instances of the `WeakTemplateEntity` type. It must be instantiated as a
@@ -126,6 +128,9 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { thisFactory
def isConstructor = false
def isAliasType = false
def isAbstractType = false
+ def isAbstract =
+ ((!sym.isTrait && ((sym hasFlag Flags.ABSTRACT) || (sym hasFlag Flags.DEFERRED))) ||
+ sym.isAbstractClass || sym.isAbstractType) && !sym.isSynthetic
def isTemplate = false
}
@@ -137,7 +142,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { thisFactory
* * The owner of the template (as a full template);
* * All ancestors of the template (as weak templates);
* * All non-package members (including other templates, as full templates). */
- abstract class DocTemplateImpl(sym: Symbol, inTpl: => DocTemplateImpl) extends MemberImpl(sym, inTpl) with TemplateImpl with DocTemplateEntity {
+ abstract class DocTemplateImpl(sym: Symbol, inTpl: => DocTemplateImpl) extends MemberImpl(sym, inTpl) with TemplateImpl with HigherKindedImpl with DocTemplateEntity {
//if (inTpl != null) println("mbr " + sym + " in " + (inTpl.toRoot map (_.sym)).mkString(" > "))
templatesCache += (sym -> this)
lazy val definitionName = optimize(inDefinitionTemplates.head.qualifiedName + "." + name)
@@ -155,22 +160,24 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { thisFactory
}
else None
}
- def typeParams = if (sym.isClass) sym.typeParams map (makeTypeParam(_, this)) else Nil
def parentTemplates = sym.info.parents map { x: Type => makeTemplate(x.typeSymbol) }
- def parentType =
+ def parentType = {
if (sym.isPackage) None else
- Some(makeType(RefinedType(sym.tpe.parents filter (_ != ScalaObjectClass.tpe), EmptyScope)))
+ Some(makeType(RefinedType((sym.tpe.parents filter (_ != ScalaObjectClass.tpe)) map { _.asSeenFrom(sym.thisType, sym) }, EmptyScope), inTpl))
+ }
val linearization = {
- sym.info.parents map { prt =>
- makeTemplate(prt.typeSymbol) match {
+ val tpls = sym.ancestors filter { _ != ScalaObjectClass } map { makeTemplate(_) }
+ tpls map {
case dtpl: DocTemplateImpl => dtpl.registerSubClass(this)
case _ =>
- }
}
- sym.ancestors filter (_ != ScalaObjectClass) map { makeTemplate(_) }
+ tpls
+ }
+ def linearizationTypes = {
+ ((sym.info.baseClasses filter (_ != ScalaObjectClass)) map { cls => makeType(sym.info.baseType(cls), this) }).tail
}
private lazy val subClassesCache = mutable.Buffer.empty[DocTemplateEntity]
- def registerSubClass(sc: DocTemplateEntity) = {
+ def registerSubClass(sc: DocTemplateEntity): Unit = {
assert(subClassesCache != null)
subClassesCache += sc
}
@@ -213,21 +220,28 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { thisFactory
}}
}
- abstract class ParameterImpl(sym: Symbol, inTpl: => DocTemplateImpl) extends EntityImpl(sym, inTpl) with ParameterEntity {
+ abstract class ParameterImpl(sym: Symbol, inTpl: => TemplateImpl) extends EntityImpl(sym, inTpl) with ParameterEntity {
override def inTemplate = inTpl
}
private trait TypeBoundsImpl extends EntityImpl {
- def lo = sym.info.normalize match {
- case TypeBounds(lo, hi) if lo.typeSymbol != NothingClass => Some(makeType(lo, inTemplate, sym))
+ def lo = sym.info.bounds match {
+ case TypeBounds(lo, hi) if lo.typeSymbol != NothingClass =>
+ Some(makeType(appliedType(lo, sym.info.typeParams map {_.tpe}), inTemplate))
case _ => None
}
- def hi = sym.info.normalize match {
- case TypeBounds(lo, hi) if hi.typeSymbol != AnyClass => Some(makeType(hi, inTemplate, sym))
+ def hi = sym.info.bounds match {
+ case TypeBounds(lo, hi) if hi.typeSymbol != AnyClass =>
+ Some(makeType(appliedType(hi, sym.info.typeParams map {_.tpe}), inTemplate))
case _ => None
}
}
+ trait HigherKindedImpl extends EntityImpl with HigherKinded {
+ def typeParams =
+ sym.typeParams map (makeTypeParam(_, inTemplate))
+ }
+
/* ============== MAKER METHODS ============== */
/** */
@@ -342,10 +356,8 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { thisFactory
override def isVar = true
})
else if (bSym.isMethod && !bSym.isGetterOrSetter && !bSym.isConstructor && !bSym.isModule)
- Some(new NonTemplateParamMemberImpl(bSym, inTpl) with Def {
+ Some(new NonTemplateParamMemberImpl(bSym, inTpl) with HigherKindedImpl with Def {
override def isDef = true
- def typeParams =
- sym.tpe.typeParams map (makeTypeParam(_, inTpl))
})
else if (bSym.isConstructor)
Some(new NonTemplateParamMemberImpl(bSym, inTpl) with Constructor {
@@ -357,13 +369,13 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { thisFactory
override def isVal = true
})
else if (bSym.isAbstractType)
- Some(new NonTemplateMemberImpl(bSym, inTpl) with TypeBoundsImpl with AbstractType {
+ Some(new NonTemplateMemberImpl(bSym, inTpl) with TypeBoundsImpl with HigherKindedImpl with AbstractType {
override def isAbstractType = true
})
else if (bSym.isAliasType)
- Some(new NonTemplateMemberImpl(bSym, inTpl) with AliasType {
+ Some(new NonTemplateMemberImpl(bSym, inTpl) with HigherKindedImpl with AliasType {
override def isAliasType = true
- def alias = makeType(sym.tpe, inTpl, sym)
+ def alias = makeType(appliedType(sym.tpe, sym.info.typeParams map {_.tpe}).normalize, inTpl, sym)
})
else if (bSym.isPackage)
inTpl match { case inPkg: PackageImpl => makePackage(bSym, inPkg) }
@@ -385,8 +397,8 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { thisFactory
}
/** */
- def makeTypeParam(aSym: Symbol, inTpl: => DocTemplateImpl): TypeParam = {
- new ParameterImpl(aSym, inTpl) with TypeBoundsImpl with TypeParam {
+ def makeTypeParam(aSym: Symbol, inTpl: => TemplateImpl): TypeParam =
+ new ParameterImpl(aSym, inTpl) with TypeBoundsImpl with HigherKindedImpl with TypeParam {
def isTypeParam = true
def isValueParam = false
def variance: String = {
@@ -395,7 +407,6 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { thisFactory
else ""
}
}
- }
/** */
def makeValueParam(aSym: Symbol, inTpl: => DocTemplateImpl): ValueParam = {
@@ -426,14 +437,14 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { thisFactory
}
/** */
- def makeType(aType: Type, seeInTpl: => TemplateImpl, dclSym: Symbol): TypeEntity = {
+ def makeType(aType: Type, inTpl: => TemplateImpl, dclSym: Symbol): TypeEntity = {
def ownerTpl(sym: Symbol): Symbol =
if (sym.isClass || sym.isModule || sym == NoSymbol) sym else ownerTpl(sym.owner)
- makeType(aType.asSeenFrom(seeInTpl.sym.thisType, ownerTpl(dclSym)))
+ makeType(aType.asSeenFrom(inTpl.sym.thisType, ownerTpl(dclSym)), inTpl)
}
/** */
- def makeType(aType: Type): TypeEntity =
+ def makeType(aType: Type, inTpl: => TemplateImpl): TypeEntity =
new TypeEntity {
private val nameBuffer = new StringBuilder
private var refBuffer = new immutable.TreeMap[Int, (TemplateEntity, Int)]
@@ -446,9 +457,14 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { thisFactory
nameBuffer append sep
appendTypes0(tps, sep)
}
- private def appendType0(tpe: Type): Unit = tpe.normalize match {
+ private def checkFunctionType(tpe: TypeRef): Boolean = {
+ val TypeRef(_, sym, args) = tpe
+ (args.length > 0) && (args.length - 1 <= definitions.MaxFunctionArity) &&
+ (sym == definitions.FunctionClass(args.length - 1))
+ }
+ private def appendType0(tpe: Type): Unit = tpe match {
/* Type refs */
- case tp: TypeRef if (definitions.isFunctionType(tp)) =>
+ case tp: TypeRef if (checkFunctionType(tp)) =>
nameBuffer append '('
appendTypes0(tp.args.init, ", ")
nameBuffer append ") ⇒ "
@@ -486,11 +502,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { thisFactory
}
/* Polymorphic types */
case PolyType(tparams, result) if (!tparams.isEmpty) =>
- appendType0(result)
- nameBuffer append '['
- appendTypes0(tparams map (_.tpe), ", ") // TODO: actually print the polytype's symbols (not just types)
- nameBuffer append ']'
- /* Eval-by-name types */
+ throw new Error("Polymorphic type '" + tpe + "' cannot be printed as a type")
case PolyType(tparams, result) if (tparams.isEmpty) =>
nameBuffer append '⇒'
appendType0(result)
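
checkFunctionType replaces definitions.isFunctionType with an explicit shape test: a TypeRef prints as (A, ...) ⇒ R only when its symbol is FunctionN for N = args − 1, within the supported arity. The same test sketched standalone (string symbols and the arity bound stand in for the compiler's definitions):

  val MaxFunctionArity = 22  // assumed bound, standing in for definitions.MaxFunctionArity

  def looksLikeFunctionType(sym: String, args: List[String]): Boolean =
    args.nonEmpty && (args.length - 1 <= MaxFunctionArity) &&
      sym == ("Function" + (args.length - 1))

  // looksLikeFunctionType("Function1", List("Int", "String"))  ==  true   // Int ⇒ String
  // looksLikeFunctionType("List",      List("Int"))            ==  false
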
diff --git a/src/compiler/scala/tools/nsc/doc/model/comment/Comment.scala b/src/compiler/scala/tools/nsc/doc/model/comment/Comment.scala
index 7fe2e58991..2a463959e5 100644
--- a/src/compiler/scala/tools/nsc/doc/model/comment/Comment.scala
+++ b/src/compiler/scala/tools/nsc/doc/model/comment/Comment.scala
@@ -69,4 +69,3 @@ abstract class Comment {
(version map ("@version " + _.toString)).mkString
}
-
diff --git a/src/compiler/scala/tools/nsc/doc/model/comment/CommentFactory.scala b/src/compiler/scala/tools/nsc/doc/model/comment/CommentFactory.scala
index 475fbf584e..c7cf146894 100644
--- a/src/compiler/scala/tools/nsc/doc/model/comment/CommentFactory.scala
+++ b/src/compiler/scala/tools/nsc/doc/model/comment/CommentFactory.scala
@@ -71,14 +71,13 @@ trait CommentFactory { thisFactory: ModelFactory with CommentFactory =>
case "/h3" => " ===\n"
case "h4" | "h5" | "h6" => "\n==== "
case "/h4" | "/h5" | "/h6" => " ====\n"
- case "code" | "/code" => "`"
case "li" => "\n * - "
case _ => ""
}
/** Safe HTML tags that can be kept. */
protected val SafeTags =
- new Regex("""(</?(abbr|acronym|address|area|a|bdo|big|blockquote|br|button|b|caption|code|cite|col|colgroup|dd|del|dfn|em|fieldset|form|hr|img|input|ins|i|kbd|label|legend|link|map|object|optgroup|option|param|pre|q|samp|select|small|span|strong|sub|sup|table|tbody|td|textarea|tfoot|th|thead|tr|tt|var)( [^>]*)?/?>)""")
+ new Regex("""((<code( [^>]*)?>.*</code>)|(</?(abbr|acronym|address|area|a|bdo|big|blockquote|br|button|b|caption|cite|col|colgroup|dd|del|dfn|em|fieldset|form|hr|img|input|ins|i|kbd|label|legend|link|map|object|optgroup|option|param|pre|q|samp|select|small|span|strong|sub|sup|table|tbody|td|textarea|tfoot|th|thead|tr|tt|var)( [^>]*)?/?>))""")
protected val safeTagMarker = '\u000E'
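
With the `code`/`/code` wiki replacement dropped and SafeTags widened, an entire <code>…</code> run is consumed as a single safe match, so its contents are protected from further wiki processing. The code-tag alternative on its own, as a sketch:

  // Just the <code>...</code> alternative from the widened SafeTags regex.
  val CodeSpan = """<code( [^>]*)?>.*</code>""".r
  val sample   = "before <code>List[Int]</code> after"
  CodeSpan findFirstIn sample   // Some(<code>List[Int]</code>)
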
diff --git a/src/compiler/scala/tools/nsc/interpreter/Completion.scala b/src/compiler/scala/tools/nsc/interpreter/Completion.scala
index c163960f86..22a95a4bf8 100644
--- a/src/compiler/scala/tools/nsc/interpreter/Completion.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/Completion.scala
@@ -100,10 +100,12 @@ class Completion(val repl: Interpreter) extends CompletionOutput {
def imported(tp: Type) = new ImportCompletion(tp)
}
- class TypeMemberCompletion(val tp: Type) extends CompletionAware with CompilerCompletion {
+ class TypeMemberCompletion(val tp: Type) extends CompletionAware
+ with CompilerCompletion {
def excludeEndsWith: List[String] = Nil
def excludeStartsWith: List[String] = List("<") // <byname>, <repeated>, etc.
- def excludeNames: List[String] = anyref.methodNames -- anyRefMethodsToShow ++ List("_root_")
+ def excludeNames: List[String] =
+ anyref.methodNames.filterNot(anyRefMethodsToShow contains) ++ List("_root_")
def methodSignatureString(sym: Symbol) = {
def asString = new MethodSymbolOutput(sym).methodString()
@@ -298,7 +300,8 @@ class Completion(val repl: Interpreter) extends CompletionOutput {
private var lastCursor: Int = -1
// Does this represent two consecutive tabs?
- def isConsecutiveTabs(buf: String, cursor: Int) = cursor == lastCursor && buf == lastBuf
+ def isConsecutiveTabs(buf: String, cursor: Int) =
+ cursor == lastCursor && buf == lastBuf
// Longest common prefix
def commonPrefix(xs: List[String]) =
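
The excludeNames change swaps the old `--` list difference for an explicit filterNot over a containment test, with the same result; in miniature:

  val names = List("toString", "hashCode", "equals", "wait")
  val keep  = List("toString", "equals")
  names filterNot (keep contains)   // List(hashCode, wait)
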
diff --git a/src/compiler/scala/tools/nsc/io/AbstractFile.scala b/src/compiler/scala/tools/nsc/io/AbstractFile.scala
index 5d4f7b8464..4d86dbf114 100644
--- a/src/compiler/scala/tools/nsc/io/AbstractFile.scala
+++ b/src/compiler/scala/tools/nsc/io/AbstractFile.scala
@@ -17,8 +17,7 @@ import scala.collection.mutable.ArrayBuffer
* @author Philippe Altherr
* @version 1.0, 23/03/2004
*/
-object AbstractFile
-{
+object AbstractFile {
/** Returns "getFile(new File(path))". */
def getFile(path: String): AbstractFile = getFile(Path(path))
def getFile(path: Path): AbstractFile = getFile(path.toFile)
diff --git a/src/compiler/scala/tools/nsc/io/NullPrintStream.scala b/src/compiler/scala/tools/nsc/io/NullPrintStream.scala
new file mode 100644
index 0000000000..9340796a83
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/io/NullPrintStream.scala
@@ -0,0 +1,19 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2010 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.tools.nsc
+package io
+
+import java.io.{ PrintStream, ByteArrayOutputStream }
+
+/** A sink for when you want to discard all output.
+ */
+class NullPrintStream extends PrintStream(new ByteArrayOutputStream()) { }
+
+object NullPrintStream extends NullPrintStream {
+ def setOut() = Console setOut this
+ def setErr() = Console setErr this
+ def setOutAndErr() = { setOut() ; setErr() }
+}
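
A usage sketch for the new NullPrintStream, silencing console output around a noisy block and then restoring the previous stream (noisyOperation is a placeholder):

  def noisyOperation(): Unit = println("chatter")   // placeholder for code whose output we discard
  val saved = Console.out
  NullPrintStream.setOut()
  try noisyOperation()                              // output goes to the in-memory sink
  finally Console setOut saved                      // restore the original stream
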
diff --git a/src/compiler/scala/tools/nsc/io/Path.scala b/src/compiler/scala/tools/nsc/io/Path.scala
index afef38be3c..4373cc64a5 100644
--- a/src/compiler/scala/tools/nsc/io/Path.scala
+++ b/src/compiler/scala/tools/nsc/io/Path.scala
@@ -26,15 +26,14 @@ import scala.util.Random.alphanumeric
* @since 2.8
*/
-object Path
-{
+object Path {
private val ZipMagicNumber = List[Byte](80, 75, 3, 4)
+ private def magicNumberIsZip(f: Path) = f.isFile && (f.toFile.bytes().take(4).toList == ZipMagicNumber)
/** If examineFile is true, it will look at the first four bytes of the file
* and see if the magic number indicates it may be a jar or zip.
*/
- private def magicNumberIsZip(f: Path) = f.isFile && (f.toFile.bytes().take(4).toList == ZipMagicNumber)
- def isJarOrZip(f: Path): Boolean = isJarOrZip(f, false)
+ def isJarOrZip(f: Path): Boolean = isJarOrZip(f, true)
def isJarOrZip(f: Path, examineFile: Boolean): Boolean =
f.hasExtension("zip", "jar") || (examineFile && magicNumberIsZip(f))
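
With the default flipped to examineFile = true, a jar or zip with a nonstandard extension is still recognized: the first four bytes are compared against the zip magic number (80, 75, 3, 4, i.e. "PK\x03\x04"). The byte test sketched on its own:

  val ZipMagic = List[Byte](80, 75, 3, 4)
  def startsWithZipMagic(bytes: Array[Byte]): Boolean =
    bytes.take(4).toList == ZipMagic

  // startsWithZipMagic(Array[Byte](80, 75, 3, 4, 99))  ==  true
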
diff --git a/src/compiler/scala/tools/nsc/io/ZipArchive.scala b/src/compiler/scala/tools/nsc/io/ZipArchive.scala
index 4be11fc9a8..e65e0040c0 100644
--- a/src/compiler/scala/tools/nsc/io/ZipArchive.scala
+++ b/src/compiler/scala/tools/nsc/io/ZipArchive.scala
@@ -15,13 +15,13 @@ import PartialFunction._
import scala.collection.mutable.{ Map, HashMap }
import scala.collection.JavaConversions.asIterator
+import annotation.tailrec
/**
* @author Philippe Altherr
* @version 1.0, 23/03/2004
*/
-object ZipArchive
-{
+object ZipArchive {
def fromPath(path: Path): ZipArchive = fromFile(path.toFile)
/**
@@ -48,15 +48,24 @@ object ZipArchive
def fromURL(url: URL): AbstractFile = new URLZipArchive(url)
private[io] class ZipEntryTraversableClass(in: InputStream) extends Traversable[ZipEntry] {
- val zis = new ZipInputStream(in)
+ val zis = () => new ZipInputStream(in)
def foreach[U](f: ZipEntry => U) = {
- def loop(x: ZipEntry): Unit = if (x != null) {
- f(x)
- zis.closeEntry()
- loop(zis.getNextEntry())
+ var in: ZipInputStream = null
+ @tailrec def loop(): Unit = {
+ val entry = in.getNextEntry()
+ if (entry != null) {
+ f(entry)
+ in.closeEntry()
+ loop()
+ }
+ }
+
+ try {
+ in = zis()
+ loop()
}
- loop(zis.getNextEntry())
+ finally in.close()
}
}
}
@@ -70,7 +79,7 @@ private[io] trait ZipContainer extends AbstractFile
/** Abstract types */
type SourceType // InputStream or AbstractFile
type CreationType // InputStream or ZipFile
- type ZipTrav = Traversable[ZipEntry] { def zis: ZipInputStream }
+ type ZipTrav = Traversable[ZipEntry] { def zis: () => ZipInputStream }
/** Abstract values */
protected val creationSource: CreationType
@@ -185,8 +194,7 @@ private[io] trait ZipContainer extends AbstractFile
* @author Philippe Altherr
* @version 1.0, 23/03/2004
*/
-final class ZipArchive(file: File, val archive: ZipFile) extends PlainFile(file) with ZipContainer
-{
+final class ZipArchive(file: File, val archive: ZipFile) extends PlainFile(file) with ZipContainer {
self =>
type SourceType = AbstractFile
@@ -236,7 +244,7 @@ final class ZipArchive(file: File, val archive: ZipFile) extends PlainFile(file)
private def zipTraversableFromZipFile(z: ZipFile): ZipTrav =
new Iterable[ZipEntry] {
- def zis: ZipInputStream = null // not valid for this type
+ def zis: () => ZipInputStream = null // not valid for this type
def iterator = asIterator(z.entries())
}
}
@@ -254,7 +262,7 @@ final class URLZipArchive(url: URL) extends AbstractFile with ZipContainer
type CreationType = InputStream
protected lazy val creationSource = input
- protected lazy val root = new ZipRootCreator(x => byteInputStream(x.traverser.zis))()
+ protected lazy val root = new ZipRootCreator(x => byteInputStream(x.traverser.zis()))()
protected def DirEntryConstructor = (_, name, path) => new DirEntry(name, path)
protected def FileEntryConstructor = new FileEntry(_, _, _, _)
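
Making zis a () => ZipInputStream hands traversal code a factory rather than a shared, already-consumed stream, so each traversal can ask for its own instance. The intent of the pattern, sketched with iterators:

  // A one-shot resource behind a factory: every call yields a fresh instance.
  val mkInts: () => Iterator[Int] = () => Iterator(1, 2, 3)
  assert(mkInts().sum == 6)
  assert(mkInts().sum == 6)   // still 6: the second traversal starts from a fresh iterator
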
diff --git a/src/compiler/scala/tools/nsc/matching/ParallelMatching.scala b/src/compiler/scala/tools/nsc/matching/ParallelMatching.scala
index 77997c4565..f9e7a1bdcf 100644
--- a/src/compiler/scala/tools/nsc/matching/ParallelMatching.scala
+++ b/src/compiler/scala/tools/nsc/matching/ParallelMatching.scala
@@ -886,6 +886,7 @@ trait ParallelMatching extends ast.TreeDSL
}
case _: SingletonType if useEqTest =>
val eqTest = REF(tpe.termSymbol) MEMBER_== scrutTree
+
// See ticket #1503 for the motivation behind checking for a binding.
// The upshot is that it is unsound to assume equality means the right
// type, but if the value doesn't appear on the right hand side of the
diff --git a/src/compiler/scala/tools/nsc/plugins/Plugin.scala b/src/compiler/scala/tools/nsc/plugins/Plugin.scala
index 1b7e208334..49bbb12a9a 100644
--- a/src/compiler/scala/tools/nsc/plugins/Plugin.scala
+++ b/src/compiler/scala/tools/nsc/plugins/Plugin.scala
@@ -133,7 +133,7 @@ object Plugin {
val alljars = (jars ::: (for {
dir <- dirs if dir.isDirectory
entry <- dir.toDirectory.files.toList sortBy (_.name)
- if entry.extension == "jar"
+ if Path.isJarOrZip(entry)
pdesc <- loadDescription(entry)
if !(ignoring contains pdesc.name)
} yield entry)).distinct
diff --git a/src/compiler/scala/tools/nsc/settings/StandardScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/StandardScalaSettings.scala
index 12ae5c9d0e..dcefd8bb16 100644
--- a/src/compiler/scala/tools/nsc/settings/StandardScalaSettings.scala
+++ b/src/compiler/scala/tools/nsc/settings/StandardScalaSettings.scala
@@ -24,7 +24,7 @@ trait StandardScalaSettings {
val extdirs = PathSetting ("-extdirs", "dirs", "Override location of installed extensions", Defaults.scalaExtDirs)
val javabootclasspath = PathSetting ("-javabootclasspath", "path", "Override java boot classpath.", Defaults.javaBootClassPath)
val javaextdirs = PathSetting ("-javaextdirs", "path", "Override java extdirs classpath.", Defaults.javaExtDirs)
- val sourcepath = StringSetting ("-sourcepath", "path", "Specify where to find input source files", "")
+ val sourcepath = PathSetting ("-sourcepath", "path", "Specify where to find input source files", "") // Defaults.scalaSourcePath
/** Other settings.
*/
diff --git a/src/compiler/scala/tools/nsc/symtab/Definitions.scala b/src/compiler/scala/tools/nsc/symtab/Definitions.scala
index 4f8386f335..1684b5f071 100644
--- a/src/compiler/scala/tools/nsc/symtab/Definitions.scala
+++ b/src/compiler/scala/tools/nsc/symtab/Definitions.scala
@@ -224,6 +224,7 @@ trait Definitions extends reflect.generic.StandardDefinitions {
lazy val SoftReferenceClass = getClass("java.lang.ref.SoftReference")
lazy val WeakReferenceClass = getClass("java.lang.ref.WeakReference")
lazy val MethodClass = getClass(sn.MethodAsObject)
+ def methodClass_setAccessible = getMember(MethodClass, nme.setAccessible)
lazy val EmptyMethodCacheClass = getClass("scala.runtime.EmptyMethodCache")
lazy val MethodCacheClass = getClass("scala.runtime.MethodCache")
def methodCache_find = getMember(MethodCacheClass, nme.find_)
diff --git a/src/compiler/scala/tools/nsc/symtab/StdNames.scala b/src/compiler/scala/tools/nsc/symtab/StdNames.scala
index ed72fc16fa..8c4078e91e 100644
--- a/src/compiler/scala/tools/nsc/symtab/StdNames.scala
+++ b/src/compiler/scala/tools/nsc/symtab/StdNames.scala
@@ -335,6 +335,7 @@ trait StdNames extends reflect.generic.StdNames { self: SymbolTable =>
val sameElements = newTermName("sameElements")
val scala_ = newTermName("scala")
val self = newTermName("self")
+ val setAccessible = newTermName("setAccessible")
val synchronized_ = newTermName("synchronized")
val tail = newTermName("tail")
val toArray = newTermName("toArray")
diff --git a/src/compiler/scala/tools/nsc/symtab/Symbols.scala b/src/compiler/scala/tools/nsc/symtab/Symbols.scala
index 3157e5cc20..e386508bdd 100644
--- a/src/compiler/scala/tools/nsc/symtab/Symbols.scala
+++ b/src/compiler/scala/tools/nsc/symtab/Symbols.scala
@@ -776,6 +776,7 @@ trait Symbols extends reflect.generic.Symbols { self: SymbolTable =>
assert(phaseId(infos.validFrom) <= phase.id)
if (phaseId(infos.validFrom) == phase.id) infos = infos.prev
infos = TypeHistory(currentPeriod, info, infos)
+ validTo = if (info.isComplete) currentPeriod else NoPeriod
this
}
@@ -1961,7 +1962,7 @@ trait Symbols extends reflect.generic.Symbols { self: SymbolTable =>
newTypeName(rawname+"$trait") // (part of DEVIRTUALIZE)
} else if (phase.flatClasses && rawowner != NoSymbol && !rawowner.isPackageClass) {
if (flatname == nme.EMPTY) {
- assert(rawowner.isClass, "fatal: %s has owner %s, but a class owner is required".format(rawname, rawowner))
+ assert(rawowner.isClass, "fatal: %s has owner %s, but a class owner is required".format(rawname+idString, rawowner))
flatname = newTypeName(compactify(rawowner.name.toString() + "$" + rawname))
}
flatname
diff --git a/src/compiler/scala/tools/nsc/symtab/Types.scala b/src/compiler/scala/tools/nsc/symtab/Types.scala
index 2eca101aab..9e10d3a408 100644
--- a/src/compiler/scala/tools/nsc/symtab/Types.scala
+++ b/src/compiler/scala/tools/nsc/symtab/Types.scala
@@ -394,9 +394,10 @@ trait Types extends reflect.generic.Types { self: SymbolTable =>
/** Replace formal type parameter symbols with actual type arguments.
*
- * Amounts to substitution except for higher-kinded types. (See overridden method in TypeRef) -- @M (contact adriaan.moors at cs.kuleuven.be)
+ * Amounts to substitution except for higher-kinded types. (See overridden method in TypeRef) -- @M
*/
- def instantiateTypeParams(formals: List[Symbol], actuals: List[Type]): Type = this.subst(formals, actuals)
+ def instantiateTypeParams(formals: List[Symbol], actuals: List[Type]): Type =
+ if(formals.length == actuals.length) this.subst(formals, actuals) else ErrorType
/** If this type is an existential, turn all existentially bound variables to type skolems.
* @param owner The owner of the created type skolems
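The arity guard is the substance of this change: subst pairs formals with actuals positionally, so a length mismatch could silently drop parameters, whereas returning ErrorType fails loudly. A toy model of the hazard in plain Scala (compiler types replaced with strings):

    val formals = List("A", "B")
    val actuals = List("Int")
    // zip truncates to the shorter list, losing B entirely:
    val unguarded = (formals zip actuals).toMap          // Map(A -> Int)
    val guarded =
      if (formals.length == actuals.length) Some(unguarded)
      else None                                          // stands in for ErrorType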
@@ -1327,7 +1328,7 @@ trait Types extends reflect.generic.Types { self: SymbolTable =>
* to take the intersection of their bounds
*/
override def normalize = {
- if (isHigherKinded)
+ if (isHigherKinded) {
PolyType(
typeParams,
RefinedType(
@@ -1337,6 +1338,7 @@ trait Types extends reflect.generic.Types { self: SymbolTable =>
},
decls,
typeSymbol))
+ }
else super.normalize
}
@@ -1705,8 +1707,9 @@ A type's typeSymbol should never be inspected directly.
if (substTps.length == typeParams.length)
typeRef(pre, sym, actuals)
- else // partial application (needed in infer when bunching type arguments from classes and methods together)
+ else if(formals.length == actuals.length) // partial application (needed in infer when bunching type arguments from classes and methods together)
typeRef(pre, sym, dummyArgs).subst(formals, actuals)
+ else ErrorType
}
else
super.instantiateTypeParams(formals, actuals)
@@ -1725,21 +1728,15 @@ A type's typeSymbol should never be inspected directly.
if (sym == clazz && !args.isEmpty) args.head else this
def normalize0: Type =
- if (sym.isAliasType) { // beta-reduce
- if (sym.info.typeParams.length == args.length || !isHigherKinded) {
- /* !isHigherKinded && sym.info.typeParams.length != args.length only happens when compiling e.g.,
- `val x: Class' with -Xgenerics, while `type Class = java.lang.Class' had already been compiled without -Xgenerics */
- val xform = transform(sym.info.resultType)
- assert(xform ne this, this)
- xform.normalize // cycles have been checked in typeRef
- } else { // should rarely happen, if at all
- PolyType(sym.info.typeParams, transform(sym.info.resultType).normalize) // eta-expand -- for regularity, go through sym.info for typeParams
- // @M TODO: should not use PolyType, as that's the type of a polymorphic value -- we really want a type *function*
- }
- } else if (isHigherKinded) {
+ if (isHigherKinded) {
// @M TODO: should not use PolyType, as that's the type of a polymorphic value -- we really want a type *function*
// @M: initialize (by sym.info call) needed (see test/files/pos/ticket0137.scala)
PolyType(sym.info.typeParams, typeRef(pre, sym, dummyArgs)) // must go through sym.info for typeParams
+ } else if (sym.isAliasType) { // beta-reduce
+ if(sym.info.typeParams.length == args.length) // don't do partial application
+ transform(sym.info.resultType).normalize // cycles have been checked in typeRef
+ else
+ ErrorType
} else if (sym.isRefinementClass) {
sym.info.normalize // @MO to AM: OK?
//@M I think this is okay, but changeset 12414 (which fixed #1241) re-introduced another bug (#2208)
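The reordering makes the higher-kinded check win: an unapplied alias is eta-expanded to a type function over dummy arguments, a fully applied alias is beta-reduced, and a partially applied alias is now an error rather than a rare eta-expansion. In source terms, roughly:

    type Alias[T] = List[T]
    val ok: Alias[Int] = List(1)   // fully applied: beta-reduces to List[Int]
    def hk[F[_]](f: F[Int]) = f
    hk[Alias](List(1))             // unapplied (higher-kinded): eta-expanded instead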
@@ -5086,37 +5083,41 @@ A type's typeSymbol should never be inspected directly.
case List(tp) =>
Some(tp)
case TypeRef(_, sym, _) :: rest =>
- val pres = tps map (_.prefix)
+ val pres = tps map (_.prefix) // prefix normalizes automatically
val pre = if (variance == 1) lub(pres, depth) else glb(pres, depth)
- val argss = tps map (_.typeArgs)
+ val argss = tps map (_.normalize.typeArgs) // symbol equality (of the tp in tps) was checked using typeSymbol, which normalizes, so we should normalize before retrieving arguments
val capturedParams = new ListBuffer[Symbol]
- val args = (sym.typeParams, argss.transpose).zipped map {
- (tparam, as) =>
- if (depth == 0)
- if (tparam.variance == variance) AnyClass.tpe
- else if (tparam.variance == -variance) NothingClass.tpe
- else NoType
- else
- if (tparam.variance == variance) lub(as, decr(depth))
- else if (tparam.variance == -variance) glb(as, decr(depth))
- else {
- val l = lub(as, decr(depth))
- val g = glb(as, decr(depth))
- if (l <:< g) l
- else { // Martin: I removed this, because incomplete. Not sure there is a good way to fix it. For the moment we
- // just err on the conservative side, i.e. with a bound that is too high.
- // if(!(tparam.info.bounds contains tparam)){ //@M can't deal with f-bounds, see #2251
- val qvar = commonOwner(as) freshExistential "" setInfo TypeBounds(g, l)
- capturedParams += qvar
- qvar.tpe
- }
- }
- }
try {
+ val args = (sym.typeParams, argss.transpose).zipped map {
+ (tparam, as) =>
+ if (depth == 0)
+ if (tparam.variance == variance) AnyClass.tpe
+ else if (tparam.variance == -variance) NothingClass.tpe
+ else NoType
+ else
+ if (tparam.variance == variance) lub(as, decr(depth))
+ else if (tparam.variance == -variance) glb(as, decr(depth))
+ else {
+ val l = lub(as, decr(depth))
+ val g = glb(as, decr(depth))
+ if (l <:< g) l
+ else { // Martin: I removed this, because incomplete. Not sure there is a good way to fix it. For the moment we
+ // just err on the conservative side, i.e. with a bound that is too high.
+ // if(!(tparam.info.bounds contains tparam)){ //@M can't deal with f-bounds, see #2251
+ val qvar = commonOwner(as) freshExistential "" setInfo TypeBounds(g, l)
+ capturedParams += qvar
+ qvar.tpe
+ }
+ }
+ }
if (args contains NoType) None
else Some(existentialAbstraction(capturedParams.toList, typeRef(pre, sym, args)))
} catch {
case ex: MalformedType => None
+ case ex: IndexOutOfBoundsException => // transpose freaked out because of irregular argss
+ // catching just in case (shouldn't happen, but also doesn't cost us)
+ if (settings.debug.value) log("transposed irregular matrix!?"+ (tps, argss))
+ None
}
case SingleType(_, sym) :: rest =>
val pres = tps map (_.prefix)
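Moving the args computation inside the try matters because transpose assumes a regular matrix; with irregular argss it throws, and the throw is now converted to None (and logged when debug output is enabled). A minimal reproduction of the failure mode, assuming the collections behaviour this patch is written against:

    val ragged = List(List(1, 2), List(3))
    try { ragged.transpose; () }
    catch { case _: IndexOutOfBoundsException => println("irregular matrix") }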
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
index 9c382439bc..b7110b66df 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
@@ -386,7 +386,7 @@ abstract class ClassfileParser {
val start = starts(index)
if (in.buf(start).toInt != CONSTANT_UTF8) errorBadTag(start)
val len = in.getChar(start + 1)
- bytesBuffer ++= (in.buf, start + 3, len)
+ bytesBuffer ++= in.buf.view(start + 3, len)
}
val bytes = bytesBuffer.toArray
val decodedLength = reflect.generic.ByteCodecs.decode(bytes)
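Worth noting when reading the new right-hand side: view(from, until) takes an exclusive end index, not a length, so if len here is a byte count, the slice equivalent to the old (buf, start, len) form would be view(start + 3, start + 3 + len). For illustration:

    val buf = Array[Byte](10, 20, 30, 40, 50)
    buf.view(1, 3).toList   // List(20, 30): indices 1 until 3, not 3 elements from 1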
diff --git a/src/compiler/scala/tools/nsc/transform/CleanUp.scala b/src/compiler/scala/tools/nsc/transform/CleanUp.scala
index c3c60253b9..9b569fa45e 100644
--- a/src/compiler/scala/tools/nsc/transform/CleanUp.scala
+++ b/src/compiler/scala/tools/nsc/transform/CleanUp.scala
@@ -8,7 +8,7 @@ package transform
import symtab._
import Flags._
-import scala.collection.mutable.{ListBuffer, HashMap}
+import scala.collection._
abstract class CleanUp extends Transform with ast.TreeDSL {
import global._
@@ -22,11 +22,12 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
new CleanUpTransformer(unit)
class CleanUpTransformer(unit: CompilationUnit) extends Transformer {
- private val newDefs = new ListBuffer[Tree]
- private val newInits = new ListBuffer[Tree]
+ private val newStaticMembers = mutable.Buffer.empty[Tree]
+ private val newStaticInits = mutable.Buffer.empty[Tree]
+ private val symbolsStoredAsStatic = mutable.Map.empty[String, Symbol]
- private val classConstantMeth = new HashMap[String, Symbol]
- private val symbolStaticFields = new HashMap[String, (Symbol, Tree, Tree)]
+ //private val classConstantMeth = new HashMap[String, Symbol]
+ //private val symbolStaticFields = new HashMap[String, (Symbol, Tree, Tree)]
private var localTyper: analyzer.Typer = null
@@ -116,10 +117,10 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
currentClass.info.decls enter varSym
val varDef = typedPos( VAL(varSym) === forInit )
- newDefs append transform(varDef)
+ newStaticMembers append transform(varDef)
val varInit = typedPos( REF(varSym) === forInit )
- newInits append transform(varInit)
+ newStaticInits append transform(varInit)
varSym
}
@@ -133,7 +134,7 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
currentClass.info.decls enter methSym
val methDef = typedPos( DefDef(methSym, { forBody(Pair(methSym, methSym.paramss(0))) }) )
- newDefs append transform(methDef)
+ newStaticMembers append transform(methDef)
methSym
}
@@ -215,13 +216,12 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
case POLY_CACHE =>
/* Implementation of the cache is as follows for method "def xyz(a: A, b: B)"
- (but with the addition of a SoftReference wrapped around the MethodCache holder
- so that it does not interfere with classloader garbage collection, see ticket
+ (SoftReference so that it does not interfere with classloader garbage collection, see ticket
#2365 for details):
var reflParams$Cache: Array[Class[_]] = Array[JClass](classOf[A], classOf[B])
- var reflPoly$Cache: scala.runtime.MethodCache = new EmptyMethodCache()
+ var reflPoly$Cache: SoftReference[scala.runtime.MethodCache] = new SoftReference(new EmptyMethodCache())
def reflMethod$Method(forReceiver: JClass[_]): JMethod = {
var method: JMethod = reflPoly$Cache.find(forReceiver)
@@ -229,7 +229,8 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
return method
else {
method = forReceiver.getMethod("xyz", reflParams$Cache)
- reflPoly$Cache = reflPoly$Cache.add(forReceiver, method)
+ method.setAccessible(true) // issue #2381
+ reflPoly$Cache = new SoftReference(reflPoly$Cache.get.add(forReceiver, method))
return method
}
}
@@ -257,6 +258,7 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
def cacheRHS = ((getPolyCache DOT methodCache_add)(REF(forReceiverSym), REF(methodSym)))
BLOCK(
REF(methodSym) === methodSymRHS,
+ (REF(methodSym) DOT methodClass_setAccessible)(LIT(true)),
REF(reflPolyCacheSym) === gen.mkSoftRef(cacheRHS),
Return(REF(methodSym))
)
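The tree-DSL form above is dense; here is a hand-written analogue of the cache the transformer generates (names illustrative), showing both the SoftReference wrapping (#2365) and the new setAccessible call (#2381):

    import java.lang.ref.SoftReference
    import java.lang.reflect.Method

    object ReflCache {
      private var cache = new SoftReference(Map.empty[Class[_], Method])
      def reflMethod(forReceiver: Class[_], name: String, params: Array[Class[_]]): Method = {
        val known = Option(cache.get) getOrElse Map.empty[Class[_], Method]
        known.getOrElse(forReceiver, {
          val m = forReceiver.getMethod(name, params: _*)
          m.setAccessible(true)  // bypass Java access checks on inaccessible subclasses (#2381)
          cache = new SoftReference(known + (forReceiver -> m))  // soft ref keeps classloaders collectable (#2365)
          m
        })
      }
    }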
@@ -516,39 +518,25 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
/* Some cleanup transformations add members to templates (classes, traits, etc).
* When inside a template (i.e. the body of one of its members), two maps
- * (newDefs and newInits) are available in the tree transformer. Any mapping from
- * a symbol to a MemberDef (DefDef, ValDef, etc.) that is in newDefs once the
+ * (newStaticMembers and newStaticInits) are available in the tree transformer. Any mapping from
+ * a symbol to a MemberDef (DefDef, ValDef, etc.) that is in newStaticMembers once the
* transformation of the template is finished will be added as a member to the
- * template. Any mapping from a symbol to a tree that is in newInits, will be added
+ * template. Any mapping from a symbol to a tree that is in newStaticInits, will be added
* as a statement of the form "symbol = tree" to the beginning of the default
* constructor. */
case Template(parents, self, body) =>
localTyper = typer.atOwner(tree, currentClass)
- val transformedTemplate = if (!forMSIL) {
- classConstantMeth.clear
- newDefs.clear
- newInits.clear
- var newBody =
- transformTrees(body)
- val firstConstructor =
- treeInfo.firstConstructor(newBody)
- newBody =
- transformTrees(newDefs.toList) ::: (
- for (member <- newBody) yield member match {
- case thePrimaryConstructor@DefDef(mods, name, tparams, vparamss, tpt, rhs) if (thePrimaryConstructor == firstConstructor) =>
- val newRhs = rhs match {
- case theRhs@Block(stats, expr) =>
- treeCopy.Block(theRhs, transformTrees(newInits.toList) ::: stats, expr)
- }
- treeCopy.DefDef(thePrimaryConstructor, mods, name, tparams, vparamss, tpt, newRhs)
- case notThePrimaryConstructor =>
- notThePrimaryConstructor
- }
- )
- treeCopy.Template(tree, parents, self, newBody)
- }
- else super.transform(tree)
- applySymbolFieldInitsToStaticCtor(transformedTemplate.asInstanceOf[Template]) // postprocess to include static ctors
+ newStaticMembers.clear
+ newStaticInits.clear
+ symbolsStoredAsStatic.clear
+ val transformedTemplate: Template =
+ if (!forMSIL) {
+ var newBody =
+ transformTrees(body)
+ treeCopy.Template(tree, parents, self, transformTrees(newStaticMembers.toList) ::: newBody)
+ }
+ else super.transform(tree).asInstanceOf[Template]
+ addStaticInits(transformedTemplate) // postprocess to include static ctors
case Literal(c) if (c.tag == ClassTag) && !forMSIL =>
val tpe = c.typeValue
@@ -628,7 +616,7 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
List(Literal(Constant(symname: String)))) =>
// add the symbol name to a map if it's not there already
val rhs = gen.mkCast(Apply(gen.scalaDot(nme.Symbol), List(Literal(Constant(symname)))), symbolType)
- val (staticFieldSym, sfdef, sfinit) = getSymbolStaticField(symapp.pos, symname, rhs, symapp)
+ val staticFieldSym = getSymbolStaticField(symapp.pos, symname, rhs, symapp)
// create a reference to a static field
val ntree = typedWithPos(symapp.pos)(REF(staticFieldSym))
@@ -642,8 +630,8 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
* If it doesn't exist, i.e. the symbol is encountered the first time,
* it creates a new static field definition and initialization and returns it.
*/
- private def getSymbolStaticField(pos: Position, symname: String, rhs: Tree, tree: Tree): (Symbol, Tree, Tree) =
- symbolStaticFields.getOrElseUpdate(symname, {
+ private def getSymbolStaticField(pos: Position, symname: String, rhs: Tree, tree: Tree): Symbol =
+ symbolsStoredAsStatic.getOrElseUpdate(symname, {
val freshname = unit.fresh.newName(pos, "symbol$")
val theTyper = typer.atOwner(tree, currentClass)
@@ -658,20 +646,14 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
val stfieldInit = theTyper.typed { atPos(pos)(REF(stfieldSym) === rhs) }
// add field definition to new defs
- newDefs append stfieldDef
+ newStaticMembers append stfieldDef
+ newStaticInits append stfieldInit
- (stfieldSym, stfieldDef, stfieldInit)
+ stfieldSym
})
- /* returns a list of all trees for symbol static fields, and clear the list */
- private def flushSymbolFieldsInitializations: List[Tree] = {
- val fields = (symbolStaticFields.valuesIterator map (_._3)).toList
- symbolStaticFields.clear
- fields
- }
-
/* finds the static ctor DefDef tree within the template if it exists. */
- def findStaticCtor(template: Template): Option[Tree] =
+ private def findStaticCtor(template: Template): Option[Tree] =
template.body find {
case defdef @ DefDef(mods, nme.CONSTRUCTOR, tparam, vparam, tp, rhs) => defdef.symbol hasFlag STATIC
case _ => false
@@ -680,11 +662,10 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
/* changes the template for the class so that it contains a static constructor with symbol fields inits,
* augments an existing static ctor if one already existed.
*/
- def applySymbolFieldInitsToStaticCtor(template: Template): Template = {
- val symbolInitTrees = flushSymbolFieldsInitializations
- if (symbolInitTrees.isEmpty) template
+ private def addStaticInits(template: Template): Template =
+ if (newStaticInits.isEmpty)
+ template
else {
- val theTyper = typer.atOwner(template, currentClass)
val newCtor = findStaticCtor(template) match {
// in case there already were static ctors - augment existing ones
// currently, however, static ctors aren't being generated anywhere else
@@ -693,10 +674,10 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
val newBlock = rhs match {
case block @ Block(stats, expr) =>
// need to add inits to existing block
- treeCopy.Block(block, symbolInitTrees ::: stats, expr)
+ treeCopy.Block(block, newStaticInits.toList ::: stats, expr)
case term: TermTree =>
// need to create a new block with inits and the old term
- treeCopy.Block(term, symbolInitTrees, term)
+ treeCopy.Block(term, newStaticInits.toList, term)
}
treeCopy.DefDef(ctor, mods, name, tparams, vparamss, tpt, newBlock)
case None =>
@@ -704,13 +685,13 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
val staticCtorSym = currentClass.newConstructor(template.pos)
.setFlag(STATIC)
.setInfo(UnitClass.tpe)
- val rhs = Block(symbolInitTrees, Literal(()))
+ val rhs = Block(newStaticInits.toList, Literal(()))
val staticCtorTree = DefDef(staticCtorSym, rhs)
- theTyper.typed { atPos(template.pos)(staticCtorTree) }
+ localTyper.typed { atPos(template.pos)(staticCtorTree) }
}
treeCopy.Template(template, template.parents, template.self, newCtor :: template.body)
}
- }
+
} // CleanUpTransformer
}
diff --git a/src/compiler/scala/tools/nsc/transform/Erasure.scala b/src/compiler/scala/tools/nsc/transform/Erasure.scala
index 4c000ce3f7..8c2f79374c 100644
--- a/src/compiler/scala/tools/nsc/transform/Erasure.scala
+++ b/src/compiler/scala/tools/nsc/transform/Erasure.scala
@@ -228,7 +228,7 @@ abstract class Erasure extends AddInterfaces with typechecker.Analyzer with ast.
/** The Java signature of type 'info', for symbol sym. The symbol is used to give the right return
* type for constructors.
*/
- def javaSig(sym: Symbol, info: Type): Option[String] = atPhase(currentRun.erasurePhase) {
+ def javaSig(sym0: Symbol, info: Type): Option[String] = atPhase(currentRun.erasurePhase) {
def jsig(tp: Type): String = jsig2(false, List(), tp)
@@ -260,7 +260,11 @@ abstract class Erasure extends AddInterfaces with typechecker.Analyzer with ast.
"."+sym.name
if (sym == ArrayClass)
ARRAY_TAG.toString+(args map jsig).mkString
- else if (sym.isTypeParameterOrSkolem && !sym.owner.isTypeParameterOrSkolem /*not a higher-order type parameter, as these are suppressed*/)
+ else if (sym.isTypeParameterOrSkolem &&
+ // only refer to type params that will actually make it into the sig; this excludes:
+ !sym.owner.isTypeParameterOrSkolem && // higher-order type parameters (!sym.owner.isTypeParameterOrSkolem), and parameters of methods
+ (!sym0.isClass || sym.owner.isClass) // if we're generating the sig for a class, type params must be owned by a class (not a method -- #3249)
+ )
TVAR_TAG.toString+sym.name+";"
else if (sym == AnyClass || sym == AnyValClass || sym == SingletonClass)
jsig(ObjectClass.tpe)
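The extra conjunct is the #3249 fix: when the signature being emitted is for a class, a type parameter owned by an enclosing method must not appear in it as a free type variable. The problematic shape is roughly:

    class C {
      def m[T](x: T) = new AnyRef { def get: T = x }
      // the anonymous class's Java signature must not reference method-owned T
    }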
@@ -302,7 +306,7 @@ abstract class Erasure extends AddInterfaces with typechecker.Analyzer with ast.
(if (toplevel) "<"+(tparams map paramSig).mkString+">" else "")+jsig(restpe)
case MethodType(params, restpe) =>
"("+(params map (_.tpe) map jsig).mkString+")"+
- (if (restpe.typeSymbol == UnitClass || sym.isConstructor) VOID_TAG.toString else jsig(restpe))
+ (if (restpe.typeSymbol == UnitClass || sym0.isConstructor) VOID_TAG.toString else jsig(restpe))
case RefinedType(parents, decls) if (!parents.isEmpty) =>
jsig(parents.head)
case ClassInfoType(parents, _, _) =>
@@ -310,7 +314,7 @@ abstract class Erasure extends AddInterfaces with typechecker.Analyzer with ast.
case AnnotatedType(_, atp, _) =>
jsig(atp)
case BoundedWildcardType(bounds) =>
- println("something's wrong: "+sym+":"+sym.tpe+" has a bounded wildcard type")
+ println("something's wrong: "+sym0+":"+sym0.tpe+" has a bounded wildcard type")
jsig(bounds.hi)
case _ =>
val etp = erasure(tp)
@@ -320,7 +324,7 @@ abstract class Erasure extends AddInterfaces with typechecker.Analyzer with ast.
}
if (needsJavaSig(info)) {
try {
- //println("Java sig of "+sym+" is "+jsig2(true, List(), sym.info))//DEBUG
+ //println("Java sig of "+sym0+" is "+jsig2(true, List(), sym0.info))//DEBUG
Some(jsig2(true, List(), info))
} catch {
case ex: UnknownSig => None
@@ -495,6 +499,7 @@ abstract class Erasure extends AddInterfaces with typechecker.Analyzer with ast.
}
/** Generate a synthetic cast operation from <code>tree.tpe</code> to <code>pt</code>.
+ * @pre pt eq pt.normalize
*/
private def cast(tree: Tree, pt: Type): Tree =
tree AS_ATTR pt
@@ -505,7 +510,7 @@ abstract class Erasure extends AddInterfaces with typechecker.Analyzer with ast.
/** Adapt <code>tree</code> to expected type <code>pt</code>.
*
* @param tree the given tree
- * @param pt the expected type.
+ * @param pt the expected type
* @return the adapted tree
*/
private def adaptToType(tree: Tree, pt: Type): Tree = {
@@ -921,154 +926,154 @@ abstract class Erasure extends AddInterfaces with typechecker.Analyzer with ast.
* </ul>
*/
private val preTransformer = new Transformer {
- override def transform(tree: Tree): Tree = {
- if (tree.symbol == ArrayClass && !tree.isType) return tree // !!! needed?
- val tree1 = tree match {
- case ClassDef(mods, name, tparams, impl) =>
- if (settings.debug.value)
- log("defs of " + tree.symbol + " = " + tree.symbol.info.decls)
- treeCopy.ClassDef(tree, mods, name, List(), impl)
- case DefDef(mods, name, tparams, vparamss, tpt, rhs) =>
- treeCopy.DefDef(tree, mods, name, List(), vparamss, tpt, rhs)
- case TypeDef(_, _, _, _) =>
- EmptyTree
- case Apply(instanceOf @ TypeApply(fun @ Select(qual, name), args @ List(arg)), List()) // !!! todo: simplify by having GenericArray also extract trees
- if ((fun.symbol == Any_isInstanceOf || fun.symbol == Object_isInstanceOf) &&
- unboundedGenericArrayLevel(arg.tpe) > 0) =>
- val level = unboundedGenericArrayLevel(arg.tpe)
- def isArrayTest(arg: Tree) =
- gen.mkRuntimeCall("isArray", List(arg, Literal(Constant(level))))
- typedPos(tree.pos) {
- if (level == 1) isArrayTest(qual)
- else
- gen.evalOnce(qual, currentOwner, unit) { qual1 =>
- gen.mkAnd(
- Apply(TypeApply(Select(qual1(), fun.symbol),
- List(TypeTree(erasure(arg.tpe)))),
- List()),
- isArrayTest(qual1()))
- }
- }
- case TypeApply(fun, args) if (fun.symbol.owner != AnyClass &&
- fun.symbol != Object_asInstanceOf &&
- fun.symbol != Object_isInstanceOf) =>
- // leave all other type tests/type casts, remove all other type applications
- fun
- case Apply(fn @ Select(qual, name), args) if (fn.symbol.owner == ArrayClass) =>
- if (unboundedGenericArrayLevel(qual.tpe.widen) == 1)
- // convert calls to apply/update/length on generic arrays to
- // calls of ScalaRunTime.array_xxx method calls
- typedPos(tree.pos) { gen.mkRuntimeCall("array_"+name, qual :: args) }
+ def preErase(tree: Tree): Tree = tree match {
+ case ClassDef(mods, name, tparams, impl) =>
+ if (settings.debug.value)
+ log("defs of " + tree.symbol + " = " + tree.symbol.info.decls)
+ treeCopy.ClassDef(tree, mods, name, List(), impl)
+ case DefDef(mods, name, tparams, vparamss, tpt, rhs) =>
+ treeCopy.DefDef(tree, mods, name, List(), vparamss, tpt, rhs)
+ case TypeDef(_, _, _, _) =>
+ EmptyTree
+ case Apply(instanceOf @ TypeApply(fun @ Select(qual, name), args @ List(arg)), List()) // !!! todo: simplify by having GenericArray also extract trees
+ if ((fun.symbol == Any_isInstanceOf || fun.symbol == Object_isInstanceOf) &&
+ unboundedGenericArrayLevel(arg.tpe) > 0) =>
+ val level = unboundedGenericArrayLevel(arg.tpe)
+ def isArrayTest(arg: Tree) =
+ gen.mkRuntimeCall("isArray", List(arg, Literal(Constant(level))))
+ typedPos(tree.pos) {
+ if (level == 1) isArrayTest(qual)
else
- // store exact array erasure in map to be retrieved later when we might
- // need to do the cast in adaptMember
- treeCopy.Apply(
- tree,
- SelectFromArray(qual, name, erasure(qual.tpe)).copyAttrs(fn),
- args)
+ gen.evalOnce(qual, currentOwner, unit) { qual1 =>
+ gen.mkAnd(
+ Apply(TypeApply(Select(qual1(), fun.symbol),
+ List(TypeTree(erasure(arg.tpe)))),
+ List()),
+ isArrayTest(qual1()))
+ }
+ }
+ case TypeApply(fun, args) if (fun.symbol.owner != AnyClass &&
+ fun.symbol != Object_asInstanceOf &&
+ fun.symbol != Object_isInstanceOf) =>
+ // leave all other type tests/type casts, remove all other type applications
+ preErase(fun)
+ case Apply(fn @ Select(qual, name), args) if (fn.symbol.owner == ArrayClass) =>
+ if (unboundedGenericArrayLevel(qual.tpe.widen) == 1)
+ // convert calls to apply/update/length on generic arrays to
+ // calls of ScalaRunTime.array_xxx method calls
+ typedPos(tree.pos) { gen.mkRuntimeCall("array_"+name, qual :: args) }
+ else
+ // store exact array erasure in map to be retrieved later when we might
+ // need to do the cast in adaptMember
+ treeCopy.Apply(
+ tree,
+ SelectFromArray(qual, name, erasure(qual.tpe)).copyAttrs(fn),
+ args)
- case Apply(fn @ Select(qual, _), Nil) if (fn.symbol == Any_## || fn.symbol == Object_##) =>
- Apply(gen.mkAttributedRef(scalaRuntimeHash), List(qual))
+ case Apply(fn @ Select(qual, _), Nil) if (fn.symbol == Any_## || fn.symbol == Object_##) =>
+ Apply(gen.mkAttributedRef(scalaRuntimeHash), List(qual))
- case Apply(fn, args) =>
- if (fn.symbol == Any_asInstanceOf)
- fn match {
- case TypeApply(Select(qual, _), List(targ)) =>
- if (qual.tpe <:< targ.tpe) {
- atPos(tree.pos) { Typed(qual, TypeTree(targ.tpe)) }
- } else if (isNumericValueClass(qual.tpe.typeSymbol) &&
- isNumericValueClass(targ.tpe.typeSymbol)) {
- // convert numeric type casts
- val cname = newTermName("to" + targ.tpe.typeSymbol.name)
- val csym = qual.tpe.member(cname)
- assert(csym != NoSymbol)
- atPos(tree.pos) { Apply(Select(qual, csym), List()) }
- } else
- tree
- }
- // todo: also handle the case where the singleton type is buried in a compound
- else if (fn.symbol == Any_isInstanceOf)
- fn match {
- case TypeApply(sel @ Select(qual, name), List(targ)) =>
- def mkIsInstanceOf(q: () => Tree)(tp: Type): Tree =
- Apply(
- TypeApply(
- Select(q(), Object_isInstanceOf) setPos sel.pos,
- List(TypeTree(tp) setPos targ.pos)) setPos fn.pos,
- List()) setPos tree.pos
- targ.tpe match {
- case SingleType(_, _) | ThisType(_) | SuperType(_, _) =>
- val cmpOp = if (targ.tpe <:< AnyValClass.tpe) Any_equals else Object_eq
+ case Apply(fn, args) =>
+ if (fn.symbol == Any_asInstanceOf)
+ fn match {
+ case TypeApply(Select(qual, _), List(targ)) =>
+ if (qual.tpe <:< targ.tpe) {
+ atPos(tree.pos) { Typed(qual, TypeTree(targ.tpe)) }
+ } else if (isNumericValueClass(qual.tpe.typeSymbol) &&
+ isNumericValueClass(targ.tpe.typeSymbol)) {
+ // convert numeric type casts
+ val cname = newTermName("to" + targ.tpe.typeSymbol.name)
+ val csym = qual.tpe.member(cname)
+ assert(csym != NoSymbol)
+ atPos(tree.pos) { Apply(Select(qual, csym), List()) }
+ } else
+ tree
+ }
+ // todo: also handle the case where the singleton type is buried in a compound
+ else if (fn.symbol == Any_isInstanceOf)
+ fn match {
+ case TypeApply(sel @ Select(qual, name), List(targ)) =>
+ def mkIsInstanceOf(q: () => Tree)(tp: Type): Tree =
+ Apply(
+ TypeApply(
+ Select(q(), Object_isInstanceOf) setPos sel.pos,
+ List(TypeTree(tp) setPos targ.pos)) setPos fn.pos,
+ List()) setPos tree.pos
+ targ.tpe match {
+ case SingleType(_, _) | ThisType(_) | SuperType(_, _) =>
+ val cmpOp = if (targ.tpe <:< AnyValClass.tpe) Any_equals else Object_eq
+ atPos(tree.pos) {
+ Apply(Select(qual, cmpOp), List(gen.mkAttributedQualifier(targ.tpe)))
+ }
+ case RefinedType(parents, decls) if (parents.length >= 2) =>
+ gen.evalOnce(qual, currentOwner, unit) { q =>
atPos(tree.pos) {
- Apply(Select(qual, cmpOp), List(gen.mkAttributedQualifier(targ.tpe)))
- }
- case RefinedType(parents, decls) if (parents.length >= 2) =>
- gen.evalOnce(qual, currentOwner, unit) { q =>
- atPos(tree.pos) {
- parents map mkIsInstanceOf(q) reduceRight gen.mkAnd
- }
+ parents map mkIsInstanceOf(q) reduceRight gen.mkAnd
}
- case _ =>
- tree
- }
- case _ => tree
- }
- else {
- def doDynamic(fn: Tree, qual: Tree): Tree = {
- if (fn.symbol.owner.isRefinementClass && fn.symbol.allOverriddenSymbols.isEmpty)
- ApplyDynamic(qual, args) setSymbol fn.symbol setPos tree.pos
- else tree
- }
- fn match {
- case Select(qual, _) => doDynamic(fn, qual)
- case TypeApply(fni@Select(qual, _), _) => doDynamic(fni, qual)// type parameters are irrelevant in case of dynamic call
- case _ =>
- tree
- }
+ }
+ case _ =>
+ tree
+ }
+ case _ => tree
}
-
- case Select(_, _) =>
- if (tree.symbol.owner.isRefinementClass) {
- val overridden = tree.symbol.allOverriddenSymbols
- assert(!overridden.isEmpty, tree.symbol)
- tree.symbol = overridden.head
+ else {
+ def doDynamic(fn: Tree, qual: Tree): Tree = {
+ if (fn.symbol.owner.isRefinementClass && fn.symbol.allOverriddenSymbols.isEmpty)
+ ApplyDynamic(qual, args) setSymbol fn.symbol setPos tree.pos
+ else tree
}
- tree
+ fn match {
+ case Select(qual, _) => doDynamic(fn, qual)
+ case TypeApply(fni@Select(qual, _), _) => doDynamic(fni, qual) // type parameters are irrelevant in case of dynamic call
+ case _ =>
+ tree
+ }
+ }
- case Template(parents, self, body) =>
- assert(!currentOwner.isImplClass)
- //Console.println("checking no dble defs " + tree)//DEBUG
- checkNoDoubleDefs(tree.symbol.owner)
- treeCopy.Template(tree, parents, emptyValDef, addBridges(body, currentOwner))
+ case Select(_, _) =>
+ // println("preXform: "+ (tree, tree.symbol, tree.symbol.owner, tree.symbol.owner.isRefinementClass))
+ if (tree.symbol.owner.isRefinementClass) {
+ val overridden = tree.symbol.allOverriddenSymbols
+ assert(!overridden.isEmpty, tree.symbol)
+ tree.symbol = overridden.head
+ }
+ tree
- case Match(selector, cases) =>
- Match(Typed(selector, TypeTree(selector.tpe)), cases)
+ case Template(parents, self, body) =>
+ assert(!currentOwner.isImplClass)
+ //Console.println("checking no dble defs " + tree)//DEBUG
+ checkNoDoubleDefs(tree.symbol.owner)
+ treeCopy.Template(tree, parents, emptyValDef, addBridges(body, currentOwner))
- case Literal(ct) if ct.tag == ClassTag
- && ct.typeValue.typeSymbol != definitions.UnitClass =>
- treeCopy.Literal(tree, Constant(erasure(ct.typeValue)))
+ case Match(selector, cases) =>
+ Match(Typed(selector, TypeTree(selector.tpe)), cases)
- case _ =>
- tree
- }
- tree1 match {
- case EmptyTree | TypeTree() =>
- tree1 setType erasure(tree1.tpe)
- case DefDef(mods, name, tparams, vparamss, tpt, rhs) =>
- val result = super.transform(tree1) setType null
- tpt.tpe = erasure(tree.symbol.tpe).resultType
- result
- case _ =>
- case class LoopControl(count: Int, ex : AssertionError) extends Throwable(ex.getMessage) with ControlThrowable
+ case Literal(ct) if ct.tag == ClassTag
+ && ct.typeValue.typeSymbol != definitions.UnitClass =>
+ treeCopy.Literal(tree, Constant(erasure(ct.typeValue)))
- try super.transform(tree1) setType null
- catch {
- case LoopControl(n, ex) if n <= 5 =>
- Console.println(tree1)
- throw LoopControl(n + 1, ex)
- }
- }
+ case _ =>
+ tree
}
+
+ override def transform(tree: Tree): Tree =
+ if (tree.symbol == ArrayClass && !tree.isType) tree // !!! needed?
+ else {
+ val tree1 = preErase(tree)
+ // println("preErase: "+ tree +" = "+ tree1)
+ val res = tree1 match {
+ case EmptyTree | TypeTree() =>
+ tree1 setType erasure(tree1.tpe)
+ case DefDef(mods, name, tparams, vparamss, tpt, rhs) =>
+ val result = super.transform(tree1) setType null
+ tpt.tpe = erasure(tree1.symbol.tpe).resultType
+ result
+ case _ =>
+ super.transform(tree1) setType null
+ }
+ // println("xform: "+ res)
+ res
+ }
}
/** The main transform function: Pretransform the tree, and then
@@ -1083,4 +1088,4 @@ abstract class Erasure extends AddInterfaces with typechecker.Analyzer with ast.
}
}
}
-}
+}
\ No newline at end of file
diff --git a/src/compiler/scala/tools/nsc/transform/Mixin.scala b/src/compiler/scala/tools/nsc/transform/Mixin.scala
index c228ee0e46..d1b3142c8a 100644
--- a/src/compiler/scala/tools/nsc/transform/Mixin.scala
+++ b/src/compiler/scala/tools/nsc/transform/Mixin.scala
@@ -231,12 +231,27 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
for (member <- impl.info.decls.toList) {
if (isForwarded(member)) {
val imember = member.overriddenSymbol(iface)
- //Console.println("mixin member "+member+":"+member.tpe+member.locationString+" "+imember+" "+imember.overridingSymbol(clazz)+" to "+clazz+" with scope "+clazz.info.decls)//DEBUG
+ // atPhase(currentRun.erasurePhase){
+ // println(""+(clazz, iface, clazz.typeParams, iface.typeParams, imember, clazz.thisType.baseType(iface), clazz.thisType.baseType(iface).memberInfo(imember), imember.info substSym(iface.typeParams, clazz.typeParams) ))
+ // }
+ // Console.println("mixin member "+member+":"+member.tpe+member.locationString+" "+imember+" "+imember.overridingSymbol(clazz)+" to "+clazz+" with scope "+clazz.info.decls)//DEBUG
if (imember.overridingSymbol(clazz) == NoSymbol &&
clazz.info.findMember(member.name, 0, lateDEFERRED, false).alternatives.contains(imember)) {
+ val newSym = atPhase(currentRun.erasurePhase){
+ val res = imember.cloneSymbol(clazz)
+ // since we used the member (imember) from the interface that represents the trait that's being mixed in,
+ // we have to instantiate the interface type params (which may occur in imember's info) as they are seen from the class.
+ // we can't use the member that we get from the implementation class, as it's a clone that was made after erasure,
+ // and thus it no longer knows its info at the beginning of erasure
+ // optimize: no need if iface has no typeparams
+ if(iface.typeParams nonEmpty) res.setInfo(clazz.thisType.baseType(iface).memberInfo(imember))
+ res
+ } // clone before erasure has discarded the type info we'll need to generate a javaSig
+ // now we'll have the type info at (the beginning of) erasure in our history,
+ newSym.updateInfo(imember.info.cloneInfo(newSym)) // and now newSym has the info that's been transformed to fit this period (no need for asSeenFrom as phase.erasedTypes)
val member1 = addMember(
clazz,
- member.cloneSymbol(clazz) setPos clazz.pos resetFlag (DEFERRED | lateDEFERRED))
+ newSym setPos clazz.pos resetFlag (DEFERRED | lateDEFERRED))
member1.asInstanceOf[TermSymbol] setAlias member;
}
}
diff --git a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
index d9b60b9ca1..1350ab3bb4 100644
--- a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
+++ b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
@@ -612,12 +612,12 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
if (settings.debug.value) log("normalizeMember: " + sym.fullName)
if (sym.isMethod && !atPhase(currentRun.typerPhase)(sym.typeParams.isEmpty)) {
var (stps, tps) = splitParams(sym.info.typeParams)
- val unusedStvars = stps -- specializedTypeVars(sym.info).toList
+ val unusedStvars = stps filterNot (specializedTypeVars(sym.info).toList contains)
if (unusedStvars.nonEmpty && currentRun.compiles(sym) && !sym.isSynthetic) {
reporter.warning(sym.pos, "%s %s unused or used in non-specializable positions."
.format(unusedStvars.mkString("", ", ", ""), if (unusedStvars.length == 1) "is" else "are"))
unusedStvars foreach (_.removeAnnotation(SpecializedClass))
- stps = stps -- unusedStvars
+ stps = stps filterNot (unusedStvars contains)
tps = tps ::: unusedStvars
}
val res = sym :: (for (env <- specializations(stps) if needsSpecialization(env, sym)) yield {
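List.-- was deprecated in 2.8, hence the rewrite to filterNot; the two forms compute the same difference:

    val xs = List(1, 2, 3, 4)
    val ys = List(2, 4)
    xs filterNot (ys contains)   // List(1, 3)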
@@ -644,8 +644,8 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
} else List(sym)
}
- /** Specialize member `m' w.r.t. to the outer environment and the type parameters of
- * the innermost enclosing class.
+ /** Specialize member `m' w.r.t. to the outer environment and the type
+ * parameters of the innermost enclosing class.
*
* Turns 'private' into 'protected' for members that need specialization.
*
@@ -714,7 +714,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
def checkOverriddenTParams(overridden: Symbol) {
if (currentRun.compiles(overriding))
- for (val (baseTvar, derivedTvar) <- overridden.info.typeParams.zip(overriding.info.typeParams);
+ for ((baseTvar, derivedTvar) <- overridden.info.typeParams.zip(overriding.info.typeParams);
val missing = missingSpecializations(baseTvar, derivedTvar)
if missing.nonEmpty)
reporter.error(derivedTvar.pos,
diff --git a/src/compiler/scala/tools/nsc/transform/TailCalls.scala b/src/compiler/scala/tools/nsc/transform/TailCalls.scala
index 9b54dd9428..c7a3e6a778 100644
--- a/src/compiler/scala/tools/nsc/transform/TailCalls.scala
+++ b/src/compiler/scala/tools/nsc/transform/TailCalls.scala
@@ -102,7 +102,7 @@ abstract class TailCalls extends Transform
var tailPos = false
/** The reason this method could not be optimized. */
- var tailrecFailReason = "it contains a recursive call not in tail position"
+ var tailrecFailReason = "reason indeterminate"
/** Is the label accessed? */
var accessed = false
@@ -153,6 +153,13 @@ abstract class TailCalls extends Transform
/** A possibly polymorphic apply to be considered for tail call transformation.
*/
def rewriteApply(target: Tree, fun: Tree, targs: List[Tree], args: List[Tree]) = {
+ def receiver = fun match {
+ case Select(qual, _) => Some(qual)
+ case _ => None
+ }
+
+ def receiverIsSame = receiver exists (enclosingType.widen =:= _.tpe.widen)
+ def receiverIsSuper = receiver exists (enclosingType.widen <:< _.tpe.widen)
def isRecursiveCall = ctx.currentMethod eq fun.symbol
def isMandatory = ctx.currentMethod hasAnnotation TailrecClass
def isEligible = ctx.currentMethod.isEffectivelyFinal
@@ -160,9 +167,6 @@ abstract class TailCalls extends Transform
def matchesTypeArgs = ctx.tparams sameElements (targs map (_.tpe.typeSymbol))
def defaultTree = treeCopy.Apply(tree, target, transformArgs)
- def sameTypeOfThis(receiver: Tree) =
- receiver.tpe.widen =:= enclosingType.widen
-
/** Records failure reason in Context for reporting.
*/
def cannotRewrite(reason: String) = {
@@ -171,6 +175,10 @@ abstract class TailCalls extends Transform
defaultTree
}
+ def notRecursiveReason() =
+ if (receiverIsSuper) "it contains a recursive call targeting a supertype"
+ else "it contains a recursive call not in tail position"
+
def rewriteTailCall(receiver: Tree, otherArgs: List[Tree]): Tree = {
log("Rewriting tail recursive method call at: " + fun.pos)
@@ -178,15 +186,16 @@ abstract class TailCalls extends Transform
typed { atPos(fun.pos)(Apply(Ident(ctx.label), receiver :: otherArgs)) }
}
- if (!isRecursiveCall) defaultTree
+ if (!isRecursiveCall) cannotRewrite(notRecursiveReason())
else if (!isEligible) cannotRewrite("it is neither private nor final so can be overridden")
else if (!ctx.tailPos) cannotRewrite("it contains a recursive call not in tail position")
else if (!matchesTypeArgs) cannotRewrite("it is called recursively with different type arguments")
- else fun match {
- case Select(_, _) if forMSIL => cannotRewrite("it cannot be optimized on MSIL")
- case Select(qual, _) if !sameTypeOfThis(qual) => cannotRewrite("it changes type of 'this' on a polymorphic recursive call")
- case Select(qual, _) => rewriteTailCall(qual, transformArgs)
- case _ => rewriteTailCall(This(currentClass), transformArgs)
+ else receiver match {
+ case Some(qual) =>
+ if (forMSIL) cannotRewrite("it cannot be optimized on MSIL")
+ else if (!receiverIsSame) cannotRewrite("it changes type of 'this' on a polymorphic recursive call")
+ else rewriteTailCall(qual, transformArgs)
+ case _ => rewriteTailCall(This(currentClass), transformArgs)
}
}
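With the receiver factored out, the failure message can now distinguish a call that is recursive in name but dispatches through a supertype (and so resolves to a different symbol). A sketch of code that should hit the new diagnostic, assuming the logic as written:

    trait Base { def loop(n: Int): Int }
    class Impl extends Base {
      @annotation.tailrec final def loop(n: Int): Int =
        if (n == 0) 0
        else (this: Base).loop(n - 1)  // resolves to Base.loop: call targeting a supertype
    }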
diff --git a/src/compiler/scala/tools/nsc/transform/UnCurry.scala b/src/compiler/scala/tools/nsc/transform/UnCurry.scala
index 0270323133..8e3722dd99 100644
--- a/src/compiler/scala/tools/nsc/transform/UnCurry.scala
+++ b/src/compiler/scala/tools/nsc/transform/UnCurry.scala
@@ -62,6 +62,8 @@ abstract class UnCurry extends InfoTransform with TypingTransformers {
case MethodType(params, ExistentialType(tparams, restpe @ MethodType(_, _))) =>
assert(false, "unexpected curried method types with intervening existential")
tp0
+ case MethodType(h :: t, restpe) if h.isImplicit =>
+ apply(MethodType(h.cloneSymbol.resetFlag(IMPLICIT) :: t, restpe))
case PolyType(List(), restpe) => // nullary method type
apply(MethodType(List(), restpe))
case PolyType(tparams, restpe) => // polymorphic nullary method type, since it didn't occur in a higher-kinded position
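The new case means implicitness is erased from method types during uncurry: the parameter symbol is cloned with its IMPLICIT flag reset, so the two declarations below end up with the same (Int)Int shape afterwards:

    def f(implicit x: Int): Int = x
    def g(x: Int): Int = x   // identical to f after uncurry; only the typer cares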
@@ -398,7 +400,7 @@ abstract class UnCurry extends InfoTransform with TypingTransformers {
val predef = gen.mkAttributedRef(PredefModule)
val meth =
if ((elemtp <:< AnyRefClass.tpe) && !isPhantomClass(elemtp.typeSymbol))
- Select(predef, "wrapRefArray")
+ TypeApply(Select(predef, "wrapRefArray"), List(TypeTree(elemtp)))
else if (isValueClass(elemtp.typeSymbol))
Select(predef, "wrap"+elemtp.typeSymbol.name+"Array")
else
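Spelled out in source form, the tree now built supplies the element type explicitly instead of leaving it to be inferred at the synthetic call site:

    val wrapped = Predef.wrapRefArray[String](Array("a", "b"))  // WrappedArray[String]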
diff --git a/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala b/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala
index 63e5a9fb25..80f833f03d 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala
@@ -74,7 +74,7 @@ trait Analyzer extends AnyRef
val runsRightAfter = Some("packageobjects")
def newPhase(_prev: Phase): StdPhase = new StdPhase(_prev) {
override def keepsTypeParams = false
- resetTyper()
+ resetTyper() // this does not in fact do the reset for each compilation run!
override def run {
val start = startTimer(typerNanos)
currentRun.units foreach applyPhase
diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
index 3abaf4f337..7452ac678b 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
@@ -478,7 +478,7 @@ self: Analyzer =>
// filter out failures from type inference, don't want to remove them from undetParams!
// we must be conservative in leaving type params in undetparams
- val (okParams, okArgs, _) = adjustTypeArgs(undetParams, targs) // prototype == WildcardType: want to remove all inferred Nothing's
+ val AdjustedTypeArgs(okParams, okArgs) = adjustTypeArgs(undetParams, targs) // prototype == WildcardType: want to remove all inferred Nothing's
val subst = new TreeTypeSubstituter(okParams, okArgs)
subst traverse itree2
@@ -810,7 +810,7 @@ self: Analyzer =>
def mot(tp0: Type): Tree = {
val tp1 = tp0.normalize
tp1 match {
- case ThisType(_) | SingleType(_, _) =>
+ case ThisType(_) | SingleType(_, _) if !(tp1 exists {tp => tp.typeSymbol.isExistentiallyBound}) => // can't generate a reference to a value that's abstracted over by an existential
manifestFactoryCall("singleType", tp, gen.mkAttributedQualifier(tp1))
case ConstantType(value) =>
manifestOfType(tp1.deconst, full)
diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala
index a2594a060b..ed5f3b0e9a 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala
@@ -246,6 +246,9 @@ trait Infer {
/** Check that <code>sym</code> is defined and accessible as a member of
* tree <code>site</code> with type <code>pre</code> in current context.
+ *
+ * Note: pre is not refchecked -- moreover, refchecking the resulting tree may not refcheck pre,
+ * since pre may not occur in its type (callers should wrap the result in a TypeTreeWithDeferredRefCheck)
*/
def checkAccessible(tree: Tree, sym: Symbol, pre: Type, site: Tree): Tree =
if (sym.isError) {
@@ -519,48 +522,64 @@ trait Infer {
tvars map (tvar => WildcardType)
}
+ object AdjustedTypeArgs {
+ type Result = collection.mutable.LinkedHashMap[Symbol, Option[Type]]
+
+ def unapply(m: Result): Some[(List[Symbol], List[Type])] = Some(toLists(
+ m collect {case (p, Some(a)) => (p, a)} unzip ))
+
+ object Undets {
+ def unapply(m: Result): Some[(List[Symbol], List[Type], List[Symbol])] = Some(toLists{
+ val (ok, nok) = m.map{case (p, a) => (p, a.getOrElse(null))}.partition(_._2 ne null)
+ val (okArgs, okTparams) = ok.unzip
+ (okArgs, okTparams, nok.keys)
+ })
+ }
+
+ object AllArgsAndUndets {
+ def unapply(m: Result): Some[(List[Symbol], List[Type], List[Type], List[Symbol])] = Some(toLists{
+ val (ok, nok) = m.map{case (p, a) => (p, a.getOrElse(null))}.partition(_._2 ne null)
+ val (okArgs, okTparams) = ok.unzip
+ (okArgs, okTparams, m.values.map(_.getOrElse(NothingClass.tpe)), nok.keys)
+ })
+ }
+
+ @inline private def toLists[A1, A2](pxs: (Iterable[A1], Iterable[A2])) = (pxs._1.toList, pxs._2.toList)
+ @inline private def toLists[A1, A2, A3](pxs: (Iterable[A1], Iterable[A2], Iterable[A3])) = (pxs._1.toList, pxs._2.toList, pxs._3.toList)
+ @inline private def toLists[A1, A2, A3, A4](pxs: (Iterable[A1], Iterable[A2], Iterable[A3], Iterable[A4])) = (pxs._1.toList, pxs._2.toList, pxs._3.toList, pxs._4.toList)
+ }
+
/** Retract arguments that were inferred to Nothing because inference failed. Correct types for repeated params.
*
* We detect Nothing-due-to-failure by only retracting a parameter if either:
* - it occurs in an invariant/contravariant position in `restpe`
* - `restpe == WildcardType`
*
- * Retracted parameters are collected in `uninstantiated`.
+ * Retracted parameters are mapped to None.
+ * TODO:
+ * - make sure the performance hit of storing these in a map is acceptable (it's going to be a small map in 90% of the cases, I think)
+ * - refactor further up the callstack so that we don't have to do this post-factum adjustment?
*
* Rewrite for repeated param types: Map T* entries to Seq[T].
- * @return (okTparams, okArgs, leftUndet)
- * * okTparams, okArgs: lists of tparam symbols and their inferred types
- * * leftUndet a list of remaining uninstantiated type parameters after inference
- * (type parameters mapped by the constraint solver to `scala.Nothing'
- * and not covariant in <code>restpe</code> are taken to be
- * uninstantiated. Maps all those type arguments to their
- * corresponding type parameters).
+ * @return map from each tparam to its inferred arg, if inference was successful; tparams that map to None are considered left undetermined
+ * type parameters that are inferred as `scala.Nothing' and that are not covariant in <code>restpe</code> are taken to be undetermined
*/
- def adjustTypeArgs(tparams: List[Symbol], targs: List[Type], restpe: Type = WildcardType): (List[Symbol], List[Type], List[Symbol]) = {
+ def adjustTypeArgs(tparams: List[Symbol], targs: List[Type], restpe: Type = WildcardType): AdjustedTypeArgs.Result = {
@inline def notCovariantIn(tparam: Symbol, restpe: Type) =
(varianceInType(restpe)(tparam) & COVARIANT) == 0 // tparam occurred non-covariantly (in invariant or contravariant position)
- val leftUndet = new ListBuffer[Symbol]
- val okParams = new ListBuffer[Symbol]
- val okArgs = new ListBuffer[Type]
-
- (tparams, targs).zipped foreach { (tparam, targ) =>
+ (tparams, targs).zipped.map{ (tparam, targ) =>
if (targ.typeSymbol == NothingClass &&
(isWildcard(restpe) || notCovariantIn(tparam, restpe))) {
- leftUndet += tparam
- // don't add anything to okArgs, it'll be filtered out later anyway
- // used `tparam.tpeHK` as dummy before
+ tparam -> None
} else {
- okParams += tparam
- okArgs += (
+ tparam -> Some(
if (targ.typeSymbol == RepeatedParamClass) targ.baseType(SeqClass)
else if (targ.typeSymbol == JavaRepeatedParamClass) targ.baseType(ArrayClass)
else targ.widen
)
}
- }
-
- (okParams.toList, okArgs.toList, leftUndet.toList)
+ }(collection.breakOut)
}
/** Return inferred type arguments, given type parameters, formal parameters,
@@ -576,18 +595,12 @@ trait Infer {
* @param restp the result type of the method
* @param argtpes the argument types of the application
* @param pt the expected return type of the application
- * @return (okTparams, okArgs, leftUndet)
- * * okTparams, okArgs: lists of tparam symbols and their inferred types
- * * leftUndet a list of remaining uninstantiated type parameters after inference
- * (type parameters mapped by the constraint solver to `scala.Nothing'
- * and not covariant in <code>restpe</code> are taken to be
- * uninstantiated. Maps all those type arguments to their
- * corresponding type parameters).
+ * @return @see adjustTypeArgs
* @throws NoInstance
*/
def methTypeArgs(tparams: List[Symbol], formals: List[Type], restpe: Type,
- argtpes: List[Type], pt: Type): (List[Symbol], List[Type], List[Symbol]) = {
+ argtpes: List[Type], pt: Type): AdjustedTypeArgs.Result = {
val tvars = tparams map freshVar
if (inferInfo)
println("methTypeArgs tparams = "+tparams+
@@ -761,7 +774,7 @@ trait Infer {
isCompatibleArgs(argtpes, formals) && isWeaklyCompatible(restpe, pt)
} else {
try {
- val (okparams, okargs, leftUndet) = methTypeArgs(undetparams, formals, restpe, argtpes, pt)
+ val AdjustedTypeArgs.Undets(okparams, okargs, leftUndet) = methTypeArgs(undetparams, formals, restpe, argtpes, pt)
// #2665: must use weak conformance, not regular one (follow the monomorphic case above)
(exprTypeArgs(leftUndet, restpe.instantiateTypeParams(okparams, okargs), pt, isWeaklyCompatible) ne null) &&
isWithinBounds(NoPrefix, NoSymbol, okparams, okargs)
@@ -1037,12 +1050,20 @@ trait Infer {
*/
def checkKindBounds(tparams: List[Symbol], targs: List[Type], pre: Type, owner: Symbol): List[String] = {
def transform(tp: Type, clazz: Symbol): Type = tp.asSeenFrom(pre, clazz) // instantiate type params that come from outside the abstract type we're currently checking
+ def transformedBounds(p: Symbol, o: Symbol) = transform(p.info.instantiateTypeParams(tparams, targs).bounds, o)
// check that the type parameters <arg>hkargs</arg> to a higher-kinded type conform to the expected params <arg>hkparams</arg>
- def checkKindBoundsHK(hkargs: List[Symbol], arg: Symbol, param: Symbol, paramowner: Symbol): (List[(Symbol, Symbol)], List[(Symbol, Symbol)], List[(Symbol, Symbol)]) = {
+ def checkKindBoundsHK(hkargs: List[Symbol], arg: Symbol, param: Symbol, paramowner: Symbol, underHKParams: List[Symbol], withHKArgs: List[Symbol]): (List[(Symbol, Symbol)], List[(Symbol, Symbol)], List[(Symbol, Symbol)]) = {
+ def bindHKParams(tp: Type) = tp.substSym(underHKParams, withHKArgs)
// @M sometimes hkargs != arg.typeParams, the symbol and the type may have very different type parameters
val hkparams = param.typeParams
+ if(printTypings) {
+ println("checkKindBoundsHK expected: "+ param +" with params "+ hkparams +" by definition in "+ paramowner)
+ println("checkKindBoundsHK supplied: "+ arg +" with params "+ hkargs +" from "+ owner)
+ println("checkKindBoundsHK under params: "+ underHKParams +" with args "+ withHKArgs)
+ }
+
if(hkargs.length != hkparams.length) {
if(arg == AnyClass || arg == NothingClass) (Nil, Nil, Nil) // Any and Nothing are kind-overloaded
else (List((arg, param)), Nil, Nil)
@@ -1065,10 +1086,16 @@ trait Infer {
// substSym(hkparams, hkargs) --> these types are going to be compared as types of kind *
// --> their arguments use different symbols, but are conceptually the same
// (could also replace the types by polytypes, but can't just strip the symbols, as ordering is lost then)
- if (!(transform(hkparam.info.instantiateTypeParams(tparams, targs).bounds.substSym(hkparams, hkargs), paramowner) <:< transform(hkarg.info.bounds, owner)))
+ if (!(bindHKParams(transformedBounds(hkparam, paramowner)) <:< transform(hkarg.info.bounds, owner)))
stricterBound(hkarg, hkparam)
+
+ if(printTypings) {
+ println("checkKindBoundsHK base case: "+ hkparam +" declared bounds: "+ transformedBounds(hkparam, paramowner) +" after instantiating earlier hkparams: "+ bindHKParams(transformedBounds(hkparam, paramowner)))
+ println("checkKindBoundsHK base case: "+ hkarg +" has bounds: "+ transform(hkarg.info.bounds, owner))
+ }
} else {
- val (am, vm, sb) = checkKindBoundsHK(hkarg.typeParams, hkarg, hkparam, paramowner)
+ if(printTypings) println("checkKindBoundsHK recursing to compare params of "+ hkparam +" with "+ hkarg)
+ val (am, vm, sb) = checkKindBoundsHK(hkarg.typeParams, hkarg, hkparam, paramowner, underHKParams ++ hkparam.typeParams, withHKArgs ++ hkarg.typeParams)
arityMismatches(am)
varianceMismatches(vm)
stricterBounds(sb)
@@ -1096,11 +1123,11 @@ trait Infer {
val errors = new ListBuffer[String]
(tparams zip targs).foreach{ case (tparam, targ) if (targ.isHigherKinded || !tparam.typeParams.isEmpty) =>
- // @M must use the typeParams of the type targ, not the typeParams of the symbol of targ!!
- val tparamsHO = targ.typeParams
+ // @M must use the typeParams of the type targ, not the typeParams of the symbol of targ!!
+ val tparamsHO = targ.typeParams
val (arityMismatches, varianceMismatches, stricterBounds) =
- checkKindBoundsHK(tparamsHO, targ.typeSymbolDirect, tparam, tparam.owner) // NOTE: *not* targ.typeSymbol, which normalizes
+ checkKindBoundsHK(tparamsHO, targ.typeSymbolDirect, tparam, tparam.owner, tparam.typeParams, tparamsHO) // NOTE: *not* targ.typeSymbol, which normalizes
if (!(arityMismatches.isEmpty && varianceMismatches.isEmpty && stricterBounds.isEmpty)){
errors += (targ+"'s type parameters do not match "+tparam+"'s expected parameters: "+
(for ((a, p) <- arityMismatches)
@@ -1155,14 +1182,16 @@ trait Infer {
" tparams = "+tparams+"\n"+
" pt = "+pt)
val targs = exprTypeArgs(tparams, tree.tpe, pt)
- val (okParams, okArgs, leftUndet) = // TODO AM: is this pattern match too expensive? should we push it down into the else of the if below?
- if (keepNothings || (targs eq null)) (tparams, targs, List()) //@M: adjustTypeArgs fails if targs==null, neg/t0226
- else adjustTypeArgs(tparams, targs)
-
- if (inferInfo) println("inferred expr instance for "+ tree +" --> (okParams, okArgs, leftUndet)= "+(okParams, okArgs, leftUndet))
- substExpr(tree, okParams, okArgs, pt)
- leftUndet
+ if (keepNothings || (targs eq null)) { //@M: adjustTypeArgs fails if targs==null, neg/t0226
+ substExpr(tree, tparams, targs, pt)
+ List()
+ } else {
+ val AdjustedTypeArgs.Undets(okParams, okArgs, leftUndet) = adjustTypeArgs(tparams, targs)
+ if (inferInfo) println("inferred expr instance for "+ tree +" --> (okParams, okArgs, leftUndet)= "+(okParams, okArgs, leftUndet))
+ substExpr(tree, okParams, okArgs, pt)
+ leftUndet
+ }
}
/** Substitute free type variables `undetparams' of polymorphic argument
@@ -1184,14 +1213,15 @@ trait Infer {
}
}
- /** Substitite free type variables <code>undetparams</code> of application
+ /** Substitute free type variables <code>undetparams</code> of application
* <code>fn(args)</code>, given prototype <code>pt</code>.
*
* @param fn ...
* @param undetparams ...
* @param args ...
* @param pt ...
- * @return Return the list of type parameters that remain uninstantiated.
+ * @return The type parameters that remain uninstantiated,
+ * and thus have not been substituted.
*/
def inferMethodInstance(fn: Tree, undetparams: List[Symbol],
args: List[Tree], pt0: Type): List[Symbol] = fn.tpe match {
@@ -1206,8 +1236,8 @@ trait Infer {
val formals = formalTypes(params0 map (_.tpe), args.length)
val argtpes = actualTypes(args map (_.tpe.deconst), formals.length)
val restpe = fn.tpe.resultType(argtpes)
- val (okparams, okargs, leftUndet) = methTypeArgs(undetparams, formals, restpe, argtpes, pt)
- checkBounds(fn.pos, NoPrefix, NoSymbol, okparams, okargs, "inferred ")
+ val AdjustedTypeArgs.AllArgsAndUndets(okparams, okargs, allargs, leftUndet) = methTypeArgs(undetparams, formals, restpe, argtpes, pt)
+ checkBounds(fn.pos, NoPrefix, NoSymbol, undetparams, allargs, "inferred ")
val treeSubst = new TreeTypeSubstituter(okparams, okargs)
treeSubst.traverse(fn)
treeSubst.traverseTrees(args)
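A note on the AdjustedTypeArgs.Result type introduced above: it is a LinkedHashMap so that parameter order survives the round trip through adjustTypeArgs, and the extractors' collect/partition keep only the relevant entries, in order. A small standalone illustration:

    import scala.collection.mutable.LinkedHashMap
    val m = LinkedHashMap("A" -> Some(1), "B" -> None, "C" -> Some(3))
    m collect { case (p, Some(a)) => (p, a) }   // A and C survive, in declaration order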
diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala
index d56b8ed944..756863f8f9 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala
@@ -287,13 +287,11 @@ trait Namers { self: Analyzer =>
* class definition tree.
* @return the companion object symbol.
*/
- def ensureCompanionObject(tree: ClassDef, creator: => Tree): Symbol = {
- val m: Symbol = context.scope.lookup(tree.name.toTermName).filter(! _.isSourceMethod)
- if (m.isModule && inCurrentScope(m) && currentRun.compiles(m)) m
- else
- /*util.trace("enter synthetic companion object for "+currentRun.compiles(m)+":")*/(
- enterSyntheticSym(creator))
- }
+ def ensureCompanionObject(tree: ClassDef, creator: => Tree): Symbol = {
+ val m: Symbol = context.scope.lookup(tree.name.toTermName).filter(! _.isSourceMethod)
+ if (m.isModule && inCurrentScope(m) && currentRun.compiles(m)) m
+ else enterSyntheticSym(creator)
+ }
private def enterSymFinishWith(tree: Tree, tparams: List[TypeDef]) {
val sym = tree.symbol
@@ -350,6 +348,9 @@ trait Namers { self: Analyzer =>
tree.symbol = enterClassSymbol(tree)
finishWith(tparams)
if (mods.isCase) {
+ if (treeInfo.firstConstructorArgs(impl.body).size > MaxFunctionArity)
+ context.error(tree.pos, "Implementation restriction: case classes cannot have more than " + MaxFunctionArity + " parameters.")
+
val m = ensureCompanionObject(tree, caseModuleDef(tree))
caseClassOfModuleClass(m.moduleClass) = tree
}
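As a quick illustration of the new restriction (editor's sketch, not part of the patch; MaxFunctionArity is 22 in this codebase):

case class Small(a: Int, b: Int)   // fine, well under the limit
// a case class with 23 parameters now reports:
// "Implementation restriction: case classes cannot have more than 22 parameters."
// instead of crashing later in the compiler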
@@ -989,6 +990,8 @@ trait Namers { self: Analyzer =>
val module = companionModuleOf(meth.owner, context)
module.initialize // call type completer (typedTemplate), adds the
// module's templateNamer to classAndNamerOfModule
+ if (!classAndNamerOfModule.contains(module))
+ return // fix #3649 (prevent crash in erroneous source code)
val (cdef, nmr) = classAndNamerOfModule(module)
moduleNamer = Some(cdef, nmr)
(cdef, nmr)
@@ -1064,24 +1067,7 @@ trait Namers { self: Analyzer =>
case tp =>
tp
}
-
- def verifyOverriding(other: Symbol): Boolean = {
- if(other.unsafeTypeParams.length != tparamSyms.length) {
- context.error(tpsym.pos,
- "The kind of "+tpsym.keyString+" "+tpsym.varianceString + tpsym.nameString+
- " does not conform to the expected kind of " + other.defString + other.locationString + ".")
- false
- } else true
- }
-
- // @M: make sure overriding in refinements respects rudimentary kinding
- // have to do this early, as otherwise we might get crashes: (see neg/bug1275.scala)
- // suppose some parameterized type member is overridden by a type member w/o params,
- // then appliedType will be called on a type that does not expect type args --> crash
- if (tpsym.owner.isRefinementClass && // only needed in refinements
- !tpsym.allOverriddenSymbols.forall{verifyOverriding(_)})
- ErrorType
- else polyType(tparamSyms, tp)
+ polyType(tparamSyms, tp)
}
/** Given a case class
diff --git a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala
index f1d0537f46..91fe113019 100644
--- a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala
@@ -341,7 +341,7 @@ trait NamesDefaults { self: Analyzer =>
*
* Example: given
* def foo(x: Int = 2, y: String = "def")
- * foo(1)
+ * foo(y = "lt")
* the argument list (y = "lt") is transformed to (y = "lt", x = foo$default$1())
*/
def addDefaults(givenArgs: List[Tree], qual: Option[Tree], targs: List[Tree],
diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
index 4b5de16a31..ca642d3931 100644
--- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
@@ -66,6 +66,15 @@ abstract class RefChecks extends InfoTransform {
}
}
+ val toScalaRepeatedParam = new TypeMap {
+ def apply(tp: Type): Type = tp match {
+ case tp @ TypeRef(pre, JavaRepeatedParamClass, args) =>
+ typeRef(pre, RepeatedParamClass, args)
+ case _ =>
+ mapOver(tp)
+ }
+ }
+
class RefCheckTransformer(unit: CompilationUnit) extends Transformer {
var localTyper: analyzer.Typer = typer;
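Sketch of the scenario behind #2413 (hypothetical classes J and S, the Java side shown as a comment). The super accessor generated for a protected Java varargs method must carry Scala's repeated-parameter type, which is exactly what toScalaRepeatedParam rewrites:

// public class J { protected String m(String... xs) { return "" + xs.length; } }
// class S extends J {
//   def callIt = super.m("a", "b") // the synthetic accessor needs type (xs: String*)String,
// }                                // i.e. the Java repeated type T[] mapped to Scala's T*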
@@ -390,9 +399,8 @@ abstract class RefChecks extends InfoTransform {
}
printMixinOverrideErrors()
- // 2. Check that only abstract classes have deferred members
- if (clazz.isClass && !clazz.isTrait) {
- def isClazzAbstract = clazz hasFlag ABSTRACT
+ // Verifying a concrete class has nothing unimplemented.
+ if (clazz.isClass && !clazz.isTrait && !(clazz hasFlag ABSTRACT)) {
val abstractErrors = new ListBuffer[String]
def abstractErrorMessage =
// a little formatting polish
@@ -412,30 +420,56 @@ abstract class RefChecks extends InfoTransform {
def javaErasedOverridingSym(sym: Symbol): Symbol =
clazz.tpe.nonPrivateMemberAdmitting(sym.name, BRIDGE).filter(other =>
- !other.isDeferred &&
- (other hasFlag JAVA) && {
- val tp1 = erasure.erasure(clazz.thisType.memberType(sym))
- val tp2 = erasure.erasure(clazz.thisType.memberType(other))
+ !other.isDeferred && other.isJavaDefined && {
+ def uncurryAndErase(tp: Type) = erasure.erasure(uncurry.transformInfo(sym, tp)) // #3622: erasure operates on uncurried types -- note on passing sym in both cases: only sym.isType is relevant for uncurry.transformInfo
+ val tp1 = uncurryAndErase(clazz.thisType.memberType(sym))
+ val tp2 = uncurryAndErase(clazz.thisType.memberType(other))
atPhase(currentRun.erasurePhase.next)(tp1 matches tp2)
})
def ignoreDeferred(member: Symbol) =
isAbstractTypeWithoutFBound(member) ||
- ((member hasFlag JAVA) && javaErasedOverridingSym(member) != NoSymbol)
-
- for (member <- clazz.tpe.nonPrivateMembersAdmitting(VBRIDGE))
- if (member.isDeferred && !isClazzAbstract && !ignoreDeferred(member)) {
- abstractClassError(
- false, infoString(member) + " is not defined" + analyzer.varNotice(member))
- } else if ((member hasFlag ABSOVERRIDE) && member.isIncompleteIn(clazz)) {
- val other = member.superSymbol(clazz);
- abstractClassError(true,
- infoString(member) + " is marked `abstract' and `override'" +
- (if (other != NoSymbol)
- " and overrides incomplete superclass member " + infoString(other)
- else ", but no concrete implementation could be found in a base class"))
+ (member.isJavaDefined && javaErasedOverridingSym(member) != NoSymbol)
+
+ // 2. Check that only abstract classes have deferred members
+ def checkNoAbstractMembers() = {
+ // Avoid spurious duplicates: first gather any missing members.
+ def memberList = clazz.tpe.nonPrivateMembersAdmitting(VBRIDGE)
+ val (missing, rest) = memberList partition (m => m.isDeferred && !ignoreDeferred(m))
+ // Group missing members by the underlying symbol.
+ val grouped = missing groupBy (sym => analyzer.underlying(sym).name)
+
+ for (member <- missing) {
+ def undefined(msg: String) = abstractClassError(false, infoString(member) + " is not defined" + msg)
+ val underlying = analyzer.underlying(member)
+
+ // Give a specific error message for abstract vars based on why it fails:
+ // It could be unimplemented, have only one accessor, or be uninitialized.
+ if (underlying.isVariable) {
+ // If both getter and setter are missing, squelch the setter error.
+ val isMultiple = grouped(underlying.name).size > 1
+ // TODO: messages shouldn't be spread over two files, and varNotice is not a clear name
+ if (member.isSetter && isMultiple) ()
+ else undefined(
+ if (member.isSetter) "\n(Note that an abstract var requires a setter in addition to the getter)"
+ else if (member.isGetter && !isMultiple) "\n(Note that an abstract var requires a getter in addition to the setter)"
+ else analyzer.varNotice(member)
+ )
+ }
+ else undefined("")
}
+ // Check the remainder for invalid absoverride.
+ for (member <- rest ; if ((member hasFlag ABSOVERRIDE) && member.isIncompleteIn(clazz))) {
+ val other = member.superSymbol(clazz)
+ val explanation =
+ if (other != NoSymbol) " and overrides incomplete superclass member " + infoString(other)
+ else ", but no concrete implementation could be found in a base class"
+
+ abstractClassError(true, infoString(member) + " is marked `abstract' and `override'" + explanation)
+ }
+ }
+
// 3. Check that concrete classes do not have deferred definitions
// that are not implemented in a subclass.
// Note that this is not the same as (2); In a situation like
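Sketch of the source shapes the new messages distinguish (hypothetical names):

trait T { var x: Int }                        // abstract var: getter and setter deferred
class OnlyGetter extends T { def x: Int = 1 } // the error now adds:
// "(Note that an abstract var requires a setter in addition to the getter)"
// if neither accessor is implemented, the setter error is squelched and a
// single message with varNotice is emitted instead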
@@ -458,7 +492,9 @@ abstract class RefChecks extends InfoTransform {
if (!parents.isEmpty && parents.head.typeSymbol.hasFlag(ABSTRACT))
checkNoAbstractDecls(parents.head.typeSymbol)
}
- if (abstractErrors.isEmpty && !isClazzAbstract)
+
+ checkNoAbstractMembers()
+ if (abstractErrors.isEmpty)
checkNoAbstractDecls(clazz)
if (abstractErrors.nonEmpty)
@@ -985,7 +1021,7 @@ abstract class RefChecks extends InfoTransform {
private def checkAnnotations(tpes: List[Type], pos: Position) = tpes foreach (tp => checkTypeRef(tp, pos))
private def doTypeTraversal(tree: Tree)(f: Type => Unit) = if (!inPattern) tree.tpe foreach f
- private def applyRefchecksToAnnotations(tree: Tree) = {
+ private def applyRefchecksToAnnotations(tree: Tree): Unit = {
def applyChecks(annots: List[AnnotationInfo]) = {
checkAnnotations(annots map (_.atp), tree.pos)
transformTrees(annots flatMap (_.args))
@@ -993,10 +1029,18 @@ abstract class RefChecks extends InfoTransform {
tree match {
case m: MemberDef => applyChecks(m.symbol.annotations)
- case TypeTree() => doTypeTraversal(tree) {
- case AnnotatedType(annots, _, _) => applyChecks(annots)
- case _ =>
- }
+ case tpt@TypeTree() =>
+ if(tpt.original != null) {
+ tpt.original foreach {
+ case dc@TypeTreeWithDeferredRefCheck() => applyRefchecksToAnnotations(dc.check()) // #2416
+ case _ =>
+ }
+ }
+
+ doTypeTraversal(tree) {
+ case AnnotatedType(annots, _, _) => applyChecks(annots)
+ case _ =>
+ }
case _ =>
}
}
@@ -1106,7 +1150,6 @@ abstract class RefChecks extends InfoTransform {
// type bounds (bug #935), issues deprecation warnings for symbols used
// inside annotations.
applyRefchecksToAnnotations(tree)
-
var result: Tree = tree match {
case DefDef(mods, name, tparams, vparams, tpt, EmptyTree) if tree.symbol.hasAnnotation(NativeAttr) =>
tree.symbol.resetFlag(DEFERRED)
@@ -1127,7 +1170,17 @@ abstract class RefChecks extends InfoTransform {
if (bridges.nonEmpty) treeCopy.Template(tree, parents, self, body ::: bridges)
else tree
- case TypeTree() =>
+ case dc@TypeTreeWithDeferredRefCheck() => assert(false, "adapt should have turned dc: TypeTreeWithDeferredRefCheck into tpt: TypeTree, with tpt.original == dc"); dc
+ case tpt@TypeTree() =>
+ if(tpt.original != null) {
+ tpt.original foreach {
+ case dc@TypeTreeWithDeferredRefCheck() =>
+ transform(dc.check()) // #2416 -- only call transform to do refchecks, but discard results
+ // tpt has the right type if the deferred checks are ok
+ case _ =>
+ }
+ }
+
val existentialParams = new ListBuffer[Symbol]
doTypeTraversal(tree) { // check all bounds, except those that are
// existential type parameters
diff --git a/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala b/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala
index ea2cfd6204..541ef35b4b 100644
--- a/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala
@@ -272,12 +272,13 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
case _ => Nil
}
+
assert(clazz != NoSymbol, sym)
if (settings.debug.value) log("Decided for host class: " + clazz)
val accName = nme.protName(sym.originalName)
val hasArgs = sym.tpe.paramTypes != Nil
- val memberType = sym.tpe // transform(sym.tpe)
+ val memberType = refchecks.toScalaRepeatedParam(sym.tpe) // fix for #2413
// if the result type depends on the this type of an enclosing class, the accessor
// has to take an object of exactly this type, otherwise it's more general
diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
index a045a41132..043c41fc10 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
@@ -555,6 +555,8 @@ trait Typers { self: Analyzer =>
* If symbol refers to package object, insert `.package` as second to last selector.
* (exception for some symbols in scala package which are dealiased immediately)
* Call checkAccessible, which sets tree's attributes.
+ * Also note that checkAccessible looks up sym on pre without checking that pre is well-formed
+ * (illegal type applications in pre will be skipped -- that's why typedSelect wraps the resulting tree in a TypeTreeWithDeferredRefCheck)
* @return modified tree and new prefix type
*/
private def makeAccessible(tree: Tree, sym: Symbol, pre: Type, site: Tree): (Tree, Type) =
@@ -2181,6 +2183,17 @@ trait Typers { self: Analyzer =>
def isNamedApplyBlock(tree: Tree) =
context.namedApplyBlockInfo exists (_._1 == tree)
+ def callToCompanionConstr(context: Context, calledFun: Symbol) = {
+ if (calledFun.isConstructor) {
+ val methCtx = context.enclMethod
+ if (methCtx != NoContext) {
+ val contextFun = methCtx.tree.symbol
+ contextFun.isPrimaryConstructor && contextFun.owner.isModuleClass &&
+ companionModuleOf(calledFun.owner, context).moduleClass == contextFun.owner
+ } else false
+ } else false
+ }
+
def doTypedApply(tree: Tree, fun0: Tree, args: List[Tree], mode: Int, pt: Type): Tree = {
var fun = fun0
if (fun.hasSymbol && (fun.symbol hasFlag OVERLOADED)) {
@@ -2318,7 +2331,10 @@ trait Typers { self: Analyzer =>
case _ => false
}
val (allArgs, missing) = addDefaults(args, qual, targs, previousArgss, params, fun.pos.focus, context)
- if (allArgs.length == formals.length) {
+ val funSym = fun1 match { case Block(_, expr) => expr.symbol }
+ if (allArgs.length != args.length && callToCompanionConstr(context, funSym)) {
+ errorTree(tree, "module extending its companion class cannot use default constructor arguments")
+ } else if (allArgs.length == formals.length) {
// useful when a default doesn't match parameter type, e.g. def f[T](x:T="a"); f[Int]()
val note = "Error occurred in an application involving default arguments."
if (!(context.diagnostic contains note)) context.diagnostic = note :: context.diagnostic
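Sketch of the case the new error guards against (hypothetical class Knot):

class Knot(x: Int = 5)
// object Knot extends Knot()   // now rejected: "module extending its companion
//                              // class cannot use default constructor arguments"
object Knot extends Knot(5)     // passing the argument explicitly remains fine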
@@ -2564,6 +2580,10 @@ trait Typers { self: Analyzer =>
* an error message is reported and None is returned.
*/
def tree2ConstArg(tree: Tree, pt: Type): Option[ClassfileAnnotArg] = tree match {
+ case Apply(Select(New(tpt), nme.CONSTRUCTOR), args) if (pt.typeSymbol == ArrayClass) =>
+ error(tree.pos, "Array constants have to be specified using the `Array(...)' factory method")
+ None
+
case ann @ Apply(Select(New(tpt), nme.CONSTRUCTOR), args) =>
val annInfo = typedAnnotation(ann, mode, NoSymbol, pt.typeSymbol, true)
if (annInfo.atp.isErroneous) {
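Sketch of what the new case rejects (hypothetical annotation class ann):

class ann(value: Array[Int]) extends StaticAnnotation
@ann(Array(1, 2, 3)) class Ok
// @ann(new Array[Int](3)) class Bad   // error: Array constants have to be
//                                     // specified using the `Array(...)' factory method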
@@ -2785,16 +2805,6 @@ trait Typers { self: Analyzer =>
res
}
- class SymInstance(val sym: Symbol, val tp: Type) {
- override def equals(other: Any): Boolean = other match {
- case that: SymInstance =>
- this.sym == that.sym && this.tp =:= that.tp
- case _ =>
- false
- }
- override def hashCode: Int = sym.hashCode * 41 + tp.hashCode
- }
-
/** convert skolems to existentials */
def packedType(tree: Tree, owner: Symbol): Type = {
def defines(tree: Tree, sym: Symbol) =
@@ -2830,7 +2840,7 @@ trait Typers { self: Analyzer =>
}
}
// add all local symbols of `tp' to `localSyms'
- // expanding higher-kinded types into individual copies for each instance.
+ // TODO: expand higher-kinded types into individual copies for each instance.
def addLocals(tp: Type) {
val remainingSyms = new ListBuffer[Symbol]
def addIfLocal(sym: Symbol, tp: Type) {
@@ -2939,6 +2949,11 @@ trait Typers { self: Analyzer =>
errorTree(tree, treeSymTypeMsg(fun)+" does not take type parameters.")
}
+ private[this] var typingIndent: String = ""
+ @inline final def deindentTyping() = if (printTypings) typingIndent = typingIndent.substring(0, typingIndent.length() - 2)
+ @inline final def indentTyping() = if (printTypings) typingIndent += " "
+ @inline final def printTyping(s: => String) = if (printTypings) println(typingIndent+s)
+
/**
* @param tree ...
* @param mode ...
@@ -3256,7 +3271,7 @@ trait Typers { self: Analyzer =>
def errorInResult(tree: Tree) = treesInResult(tree) exists (_.pos == ex.pos)
if (fun :: tree :: args exists errorInResult) {
- if (printTypings) println("second try for: "+fun+" and "+args)
+ printTyping("second try for: "+fun+" and "+args)
val Select(qual, name) = fun
val args1 = tryTypedArgs(args, argMode(fun, mode), ex)
val qual1 =
@@ -3266,9 +3281,8 @@ trait Typers { self: Analyzer =>
val tree1 = Apply(Select(qual1, name) setPos fun.pos, args1) setPos tree.pos
return typed1(tree1, mode | SNDTRYmode, pt)
}
- } else if (printTypings) {
- println("no second try for "+fun+" and "+args+" because error not in result:"+ex.pos+"!="+tree.pos)
- }
+ } else printTyping("no second try for "+fun+" and "+args+" because error not in result:"+ex.pos+"!="+tree.pos)
+
reportTypeError(tree.pos, ex)
setError(tree)
}
@@ -3507,9 +3521,9 @@ trait Typers { self: Analyzer =>
case Select(_, _) => treeCopy.Select(tree, qual, name)
case SelectFromTypeTree(_, _) => treeCopy.SelectFromTypeTree(tree, qual, name)
}
- //if (name.toString == "Elem") println("typedSelect "+qual+":"+qual.tpe+" "+sym+"/"+tree1+":"+tree1.tpe)
val (tree2, pre2) = makeAccessible(tree1, sym, qual.tpe, qual)
val result = stabilize(tree2, pre2, mode, pt)
+
def isPotentialNullDeference() = {
phase.id <= currentRun.typerPhase.id &&
!sym.isConstructor &&
@@ -3520,7 +3534,20 @@ trait Typers { self: Analyzer =>
if (settings.Xchecknull.value && isPotentialNullDeference && unit != null)
unit.warning(tree.pos, "potential null pointer dereference: "+tree)
- result
+ result match {
+ // could checkAccessible (called by makeAccessible) potentially have skipped checking a type application in qual?
+ case SelectFromTypeTree(qual@TypeTree(), name) if qual.tpe.typeArgs nonEmpty => // TODO: somehow the new qual is not checked in refchecks
+ treeCopy.SelectFromTypeTree(
+ result,
+ (TypeTreeWithDeferredRefCheck(){ () => val tp = qual.tpe; val sym = tp.typeSymbolDirect
+ // will execute during refchecks -- TODO: make private checkTypeRef in refchecks public and call that one?
+ checkBounds(qual.pos, tp.prefix, sym.owner, sym.typeParams, tp.typeArgs, "")
+ qual // you only get to see the wrapped tree after running this check :-p
+ }) setType qual.tpe,
+ name)
+ case _ =>
+ result
+ }
}
}
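Sketch of the kind of ill-formed prefix this defers and later catches (hypothetical types):

class Bound
class Box[T <: Bound] { type Inner = T }
// type Bad = Box[String]#Inner   // Box[String] violates T's bound, but only as a
//                                // prefix; the deferred check reports it at refchecks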
@@ -3653,6 +3680,7 @@ trait Typers { self: Analyzer =>
else atPos(tree.pos)(Select(qual, name))
// atPos necessary because qualifier might come from startContext
val (tree2, pre2) = makeAccessible(tree1, defSym, pre, qual)
+ // assert(pre.typeArgs isEmpty) // no need to add #2416-style check here, right?
stabilize(tree2, pre2, mode, pt)
}
}
@@ -3678,7 +3706,7 @@ trait Typers { self: Analyzer =>
} else {
val tparams = tpt1.symbol.typeParams
if (tparams.length == args.length) {
- // @M: kind-arity checking is done here and in adapt, full kind-checking is in checkKindBounds (in Infer)
+ // @M: kind-arity checking is done here and in adapt, full kind-checking is in checkKindBounds (in Infer)
val args1 =
if(!tpt1.symbol.rawInfo.isComplete)
args mapConserve (typedHigherKindedType(_, mode))
@@ -3689,11 +3717,8 @@ trait Typers { self: Analyzer =>
//@M! the polytype denotes the expected kind
}
val argtypes = args1 map (_.tpe)
- val owntype = if (tpt1.symbol.isClass || tpt1.symbol.isNonClassType)
- // @M! added the latter condition
- appliedType(tpt1.tpe, argtypes)
- else tpt1.tpe.instantiateTypeParams(tparams, argtypes)
- (args, tparams).zipped map { (arg, tparam) => arg match {
+
+ (args, tparams).zipped foreach { (arg, tparam) => arg match {
// note: can't use args1 in selector, because Bind's got replaced
case Bind(_, _) =>
if (arg.symbol.isAbstractType)
@@ -3703,7 +3728,17 @@ trait Typers { self: Analyzer =>
glb(List(arg.symbol.info.bounds.hi, tparam.info.bounds.hi.subst(tparams, argtypes))))
case _ =>
}}
- TypeTree(owntype) setOriginal(tree) // setPos tree.pos
+
+ val result = TypeTree(appliedType(tpt1.tpe, argtypes)) setOriginal(tree) // setPos tree.pos (done by setOriginal)
+ if(tpt1.tpe.isInstanceOf[PolyType]) // did the type application (performed by appliedType) involve an unchecked beta-reduction?
+ (TypeTreeWithDeferredRefCheck(){ () =>
+ // wrap the tree and include the bounds check -- refchecks will perform this check (that the beta reduction was indeed allowed) and unwrap
+ // we can't simply use original in refchecks because it does not contain types
+ // (and the only typed trees we have have been mangled so they're not quite the original tree anymore)
+ checkBounds(result.pos, tpt1.tpe.prefix, tpt1.symbol.owner, tpt1.symbol.typeParams, argtypes, "")
+ result // you only get to see the wrapped tree after running this check :-p
+ }).setType(result.tpe)
+ else result
} else if (tparams.length == 0) {
errorTree(tree, tpt1.tpe+" does not take type parameters")
} else {
@@ -3754,12 +3789,17 @@ trait Typers { self: Analyzer =>
docComments(sym) = comment
comment.defineVariables(sym)
val typer1 = newTyper(context.makeNewScope(tree, context.owner))
- for (useCase <- comment.useCases)
+ for (useCase <- comment.useCases) {
typer1.silent(_.typedUseCase(useCase)) match {
case ex: TypeError =>
unit.warning(useCase.pos, ex.msg)
case _ =>
}
+ for (useCaseSym <- useCase.defined) {
+ if (sym.name != useCaseSym.name)
+ unit.warning(useCase.pos, "@usecase " + useCaseSym.name.decode + " does not match commented symbol: " + sym.name.decode)
+ }
+ }
}
typed(defn, mode, pt)
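Sketch of the doc-comment mismatch the new warning catches (hypothetical trait):

trait Coll[A] {
  /** Applies f to each element and concatenates the results.
   *  @usecase def map[B](f: A => B): Coll[B]
   */
  def flatMap[B](f: A => Coll[B]): Coll[B]
  // scaladoc now warns: @usecase map does not match commented symbol: flatMap
}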
@@ -4010,7 +4050,7 @@ trait Typers { self: Analyzer =>
case SelectFromTypeTree(qual, selector) =>
val qual1 = typedType(qual, mode)
if (qual1.tpe.isVolatile) error(tree.pos, "illegal type selection from volatile type "+qual.tpe)
- typedSelect(typedType(qual, mode), selector)
+ typedSelect(qual1, selector)
case CompoundTypeTree(templ) =>
typedCompoundTypeTree(templ)
@@ -4026,6 +4066,7 @@ trait Typers { self: Analyzer =>
case etpt @ ExistentialTypeTree(_, _) =>
newTyper(context.makeNewScope(tree, context.owner)).typedExistentialTypeTree(etpt, mode)
+ case dc@TypeTreeWithDeferredRefCheck() => dc // TODO: should we re-type the wrapped tree? then we need to change TypeTreeWithDeferredRefCheck's representation to include the wrapped tree explicitly (instead of in its closure)
case tpt @ TypeTree() =>
if (tpt.original != null)
tree setType typedType(tpt.original, mode).tpe
@@ -4048,8 +4089,7 @@ trait Typers { self: Analyzer =>
* @param pt ...
* @return ...
*/
- def typed(tree: Tree, mode: Int, pt: Type): Tree = {
-
+ def typed(tree: Tree, mode: Int, pt: Type): Tree = { indentTyping()
def dropExistential(tp: Type): Type = tp match {
case ExistentialType(tparams, tpe) =>
if (settings.debug.value) println("drop ex "+tree+" "+tp)
@@ -4075,15 +4115,15 @@ trait Typers { self: Analyzer =>
tree.tpe = null
if (tree.hasSymbol) tree.symbol = NoSymbol
}
- if (printTypings) println("typing "+tree+", pt = "+pt+", undetparams = "+context.undetparams+", implicits-enabled = "+context.implicitsEnabled+", silent = "+context.reportGeneralErrors) //DEBUG
+ printTyping("typing "+tree+", pt = "+pt+", undetparams = "+context.undetparams+", implicits-enabled = "+context.implicitsEnabled+", silent = "+context.reportGeneralErrors) //DEBUG
var tree1 = if (tree.tpe ne null) tree else typed1(tree, mode, dropExistential(pt))
- if (printTypings) println("typed "+tree1+":"+tree1.tpe+(if (isSingleType(tree1.tpe)) " with underlying "+tree1.tpe.widen else "")+", undetparams = "+context.undetparams+", pt = "+pt) //DEBUG
+ printTyping("typed "+tree1+":"+tree1.tpe+(if (isSingleType(tree1.tpe)) " with underlying "+tree1.tpe.widen else "")+", undetparams = "+context.undetparams+", pt = "+pt) //DEBUG
tree1.tpe = addAnnotations(tree1, tree1.tpe)
val result = if (tree1.isEmpty) tree1 else adapt(tree1, mode, pt, tree)
- if (printTypings) println("adapted "+tree1+":"+tree1.tpe.widen+" to "+pt+", "+context.undetparams) //DEBUG
+ printTyping("adapted "+tree1+":"+tree1.tpe.widen+" to "+pt+", "+context.undetparams) //DEBUG
// for (t <- tree1.tpe) assert(t != WildcardType)
// if ((mode & TYPEmode) != 0) println("type: "+tree1+" has type "+tree1.tpe)
if (phase.id <= currentRun.typerPhase.id) signalDone(context.asInstanceOf[analyzer.Context], tree, result)
@@ -4091,7 +4131,7 @@ trait Typers { self: Analyzer =>
} catch {
case ex: TypeError =>
tree.tpe = null
- if (printTypings) println("caught "+ex+" in typed: "+tree) //DEBUG
+ printTyping("caught "+ex+" in typed: "+tree) //DEBUG
reportTypeError(tree.pos, ex)
setError(tree)
case ex: Exception =>
@@ -4103,6 +4143,7 @@ trait Typers { self: Analyzer =>
throw ex
}
finally {
+ deindentTyping()
if (Statistics.enabled) {
val t = currentTime()
microsByType(pendingTreeTypes.head) += ((t - typerTime) / 1000).toInt
diff --git a/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala b/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala
index 5bbda13acd..735cb4a3c4 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala
@@ -130,21 +130,31 @@ trait Unapplies extends ast.TreeDSL
}
}
- /** The module corresponding to a case class; without any member definitions
+ /** The module corresponding to a case class; overrides toString to show the module's name
*/
def caseModuleDef(cdef: ClassDef): ModuleDef = {
- def inheritFromFun = !(cdef.mods hasFlag ABSTRACT) && cdef.tparams.isEmpty && constrParamss(cdef).length == 1
- def createFun = gen.scalaFunctionConstr(constrParamss(cdef).head map (_.tpt), toIdent(cdef), abstractFun = true)
+ // > MaxFunctionArity is caught in Namers, but for nice error reporting instead of
+ // an abrupt crash we trim the list here.
+ def primaries = constrParamss(cdef).head take MaxFunctionArity map (_.tpt)
+ def inheritFromFun = !cdef.mods.isAbstract && cdef.tparams.isEmpty && constrParamss(cdef).length == 1
+ def createFun = gen.scalaFunctionConstr(primaries, toIdent(cdef), abstractFun = true)
def parents = if (inheritFromFun) List(createFun) else Nil
-
- companionModuleDef(cdef, parents ::: List(gen.scalaScalaObjectConstr))
+ def toString = DefDef(
+ Modifiers(OVERRIDE | FINAL),
+ nme.toString_,
+ Nil,
+ List(Nil),
+ TypeTree(),
+ Literal(Constant(cdef.name.decode)))
+
+ companionModuleDef(cdef, parents ::: List(gen.scalaScalaObjectConstr), List(toString))
}
- def companionModuleDef(cdef: ClassDef, parents: List[Tree]): ModuleDef = atPos(cdef.pos.focus) {
+ def companionModuleDef(cdef: ClassDef, parents: List[Tree], body: List[Tree] = Nil): ModuleDef = atPos(cdef.pos.focus) {
ModuleDef(
Modifiers(cdef.mods.flags & AccessFlags | SYNTHETIC, cdef.mods.privateWithin),
cdef.name.toTermName,
- Template(parents, emptyValDef, NoMods, Nil, List(Nil), Nil, cdef.impl.pos.focus))
+ Template(parents, emptyValDef, NoMods, Nil, List(Nil), body, cdef.impl.pos.focus))
}
private val caseMods = Modifiers(SYNTHETIC | CASE)
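The effect, sketched: the companion of a one-parameter case class extends Function1, so it previously fell back to Function1's toString.

case class Foo(x: Int)
Foo.toString   // now "Foo" rather than "<function1>"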
diff --git a/src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala b/src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala
index 0c124c9c19..b9afcda3ec 100644
--- a/src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala
+++ b/src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala
@@ -14,6 +14,7 @@ abstract class CPSAnnotationChecker extends CPSUtils {
import definitions._
//override val verbose = true
+ @inline override final def vprintln(x: =>Any): Unit = if (verbose) println(x)
/**
* Checks whether @cps annotations conform
diff --git a/src/continuations/plugin/scala/tools/selectivecps/CPSUtils.scala b/src/continuations/plugin/scala/tools/selectivecps/CPSUtils.scala
index 57cba6e829..d1a35df04b 100644
--- a/src/continuations/plugin/scala/tools/selectivecps/CPSUtils.scala
+++ b/src/continuations/plugin/scala/tools/selectivecps/CPSUtils.scala
@@ -11,8 +11,7 @@ trait CPSUtils {
var cpsEnabled = false
val verbose: Boolean = System.getProperty("cpsVerbose", "false") == "true"
- @inline final def vprintln(x: =>Any): Unit = if (verbose) println(x)
-
+ def vprintln(x: =>Any): Unit = if (verbose) println(x)
lazy val MarkerCPSSym = definitions.getClass("scala.util.continuations.cpsSym")
lazy val MarkerCPSTypes = definitions.getClass("scala.util.continuations.cpsParam")
diff --git a/src/library/scala/Enumeration.scala b/src/library/scala/Enumeration.scala
index 5d1a0997ed..871de3714d 100644
--- a/src/library/scala/Enumeration.scala
+++ b/src/library/scala/Enumeration.scala
@@ -6,23 +6,11 @@
** |/ **
\* */
-
-
package scala
import scala.collection.SetLike
-import scala.collection.mutable.{Builder, AddingBuilder, Map, HashMap}
-import scala.collection.immutable.{Set, BitSet}
-import scala.collection.generic.CanBuildFrom
-
-private object Enumeration {
-
- /* This map is used to cache enumeration instances for
- resolving enumeration _values_ to equal objects (by-reference)
- when values are deserialized. */
- private val emap: Map[Class[_], Enumeration] = new HashMap
-
-}
+import scala.collection.{ mutable, immutable, generic }
+import java.lang.reflect.{ Modifier, Method => JMethod, Field => JField }
/** <p>
* Defines a finite set of values specific to the enumeration. Typically
@@ -52,7 +40,7 @@ private object Enumeration {
*
* <b>def</b> isWorkingDay(d: WeekDay) = ! (d == Sat || d == Sun)
*
- * WeekDay.iterator filter isWorkingDay foreach println
+ * WeekDay.values filter isWorkingDay foreach println
* }</pre>
*
* @param initial The initial value from which to count the integers that
@@ -64,48 +52,23 @@ private object Enumeration {
*/
@serializable
@SerialVersionUID(8476000850333817230L)
-abstract class Enumeration(initial: Int, names: String*) { thisenum =>
+abstract class Enumeration(initial: Int, names: String*) {
+ thisenum =>
def this() = this(0, null)
def this(names: String*) = this(0, names: _*)
- Enumeration.synchronized {
- Enumeration.emap.get(getClass) match {
- case None =>
- Enumeration.emap += (getClass -> this)
- case Some(_) =>
- /* do nothing */
- }
- }
-
/* Note that `readResolve` cannot be private, since otherwise
the JVM does not invoke it when deserializing subclasses. */
- protected def readResolve(): AnyRef = Enumeration.synchronized {
- Enumeration.emap.get(getClass) match {
- case None =>
- Enumeration.emap += (getClass -> this)
- this
- case Some(existing) =>
- existing
- }
- }
+ protected def readResolve(): AnyRef = thisenum.getClass.getField("MODULE$").get(null) // static field, null receiver
/** The name of this enumeration.
*/
- override def toString = {
- val name = this.getClass.getName
- var string =
- if (name endsWith "$") name.substring(0, name.length - 1) else name
- val idx1 = string.lastIndexOf('.' : Int)
- if (idx1 != -1) string = string.substring(idx1 + 1)
- val idx2 = string.indexOf('$')
- if (idx2 != -1) string = string.substring(idx2 + 1)
- string
- }
+ override def toString = (getClass.getName stripSuffix "$" split '.' last) split '$' last
/** The mapping from the integer used to identify values to the actual
* values. */
- private val vmap: Map[Int, Value] = new HashMap
+ private val vmap: mutable.Map[Int, Value] = new mutable.HashMap
/** The cache listing all values of this enumeration. */
@transient private var vset: ValueSet = null
@@ -113,13 +76,13 @@ abstract class Enumeration(initial: Int, names: String*) { thisenum =>
/** The mapping from the integer used to identify values to their
* names. */
- private val nmap: Map[Int, String] = new HashMap
+ private val nmap: mutable.Map[Int, String] = new mutable.HashMap
/** The values of this enumeration as a set.
*/
def values: ValueSet = {
if (!vsetDefined) {
- vset = new ValueSet(BitSet.empty ++ (vmap.values map (_.id)))
+ vset = new ValueSet(immutable.BitSet.empty ++ (vmap.values map (_.id)))
vsetDefined = true
}
vset
@@ -130,6 +93,8 @@ abstract class Enumeration(initial: Int, names: String*) { thisenum =>
/** The string to use to name the next created value. */
protected var nextName = names.iterator
+ private def nextNameOrElse(orElse: => String) =
+ if (nextName.hasNext) nextName.next else orElse
/** The highest integer amongst those used to identify values in this
* enumeration. */
@@ -171,8 +136,7 @@ abstract class Enumeration(initial: Int, names: String*) { thisenum =>
* unique amongst all values of the enumeration.
* @return ..
*/
- protected final def Value(i: Int): Value =
- Value(i, if (nextName.hasNext) nextName.next else null)
+ protected final def Value(i: Int): Value = Value(i, nextNameOrElse(null))
/** Creates a fresh value, part of this enumeration, called <code>name</code>.
*
@@ -190,32 +154,27 @@ abstract class Enumeration(initial: Int, names: String*) { thisenum =>
*/
protected final def Value(i: Int, name: String): Value = new Val(i, name)
+ private def populateNameMap() {
+ // The list of possible Value methods: 0-args which return a conforming type
+ val methods = getClass.getMethods filter (m => m.getParameterTypes.isEmpty && classOf[Value].isAssignableFrom(m.getReturnType))
+
+ methods foreach { m =>
+ val name = m.getName
+ // invoke method to obtain actual `Value` instance
+ val value = m.invoke(this).asInstanceOf[Value]
+ // verify that outer points to the correct Enumeration: ticket #3616.
+ if (value.outerEnum eq thisenum) {
+ val id = Int.unbox(classOf[Val] getMethod "id" invoke value)
+ nmap += ((id, name))
+ }
+ }
+ }
+
/* Obtains the name for the value with id `i`. If no name is cached
* in `nmap`, it populates `nmap` using reflection.
*/
private def nameOf(i: Int): String = synchronized {
- def isValDef(m: java.lang.reflect.Method) =
- getClass.getDeclaredFields.exists(fd => fd.getName == m.getName &&
- fd.getType == m.getReturnType)
- nmap.get(i) match {
- case Some(name) => name
- case None =>
- val methods = getClass.getMethods
- for (m <- methods
- if (classOf[Value].isAssignableFrom(m.getReturnType) &&
- !java.lang.reflect.Modifier.isFinal(m.getModifiers) &&
- m.getParameterTypes.isEmpty &&
- isValDef(m))) {
- val name = m.getName
- // invoke method to obtain actual `Value` instance
- val value = m.invoke(this)
- // invoke `id` method
- val idMeth = classOf[Val].getMethod("id")
- val id: Int = idMeth.invoke(value).asInstanceOf[java.lang.Integer].intValue()
- nmap += (id -> name)
- }
- nmap(i)
- }
+ nmap.getOrElse(i, { populateNameMap() ; nmap(i) })
}
/** The type of the enumerated values. */
@@ -224,12 +183,14 @@ abstract class Enumeration(initial: Int, names: String*) { thisenum =>
abstract class Value extends Ordered[Value] {
/** the id and bit location of this enumeration value */
def id: Int
+ /** a marker so we can tell whose values belong to whom come reflective-naming time */
+ private[Enumeration] val outerEnum = thisenum
+
override def compare(that: Value): Int = this.id - that.id
- override def equals(other: Any): Boolean =
- other match {
- case that: thisenum.Value => compare(that) == 0
- case _ => false
- }
+ override def equals(other: Any) = other match {
+ case that: Enumeration#Value => (outerEnum eq that.outerEnum) && (id == that.id)
+ case _ => false
+ }
override def hashCode: Int = id.##
/** this enumeration value as an <code>Int</code> bit mask.
@@ -258,29 +219,25 @@ abstract class Enumeration(initial: Int, names: String*) { thisenum =>
@serializable
@SerialVersionUID(0 - 3501153230598116017L)
protected class Val(i: Int, name: String) extends Value {
- def this(i: Int) =
- this(i, if (nextName.hasNext) nextName.next else i.toString())
- def this(name: String) = this(nextId, name)
- def this() =
- this(nextId, if (nextName.hasNext) nextName.next else nextId.toString())
- assert(!vmap.isDefinedAt(i))
+ def this(i: Int) = this(i, nextNameOrElse(i.toString))
+ def this(name: String) = this(nextId, name)
+ def this() = this(nextId)
+
+ assert(!vmap.isDefinedAt(i), "Duplicate id: " + i)
vmap(i) = this
vsetDefined = false
nextId = i + 1
if (nextId > topId) topId = nextId
def id = i
override def toString() =
- if (name eq null) Enumeration.this.nameOf(i)
- else name
+ if (name != null) name
+ else try thisenum.nameOf(i)
+ catch { case _: NoSuchElementException => "<Invalid enum: no field for #" + i + ">" }
+
protected def readResolve(): AnyRef = {
- val enum = Enumeration.synchronized {
- Enumeration.emap.get(Enumeration.this.getClass) match {
- case None => Enumeration.this
- case Some(existing) => existing
- }
- }
- if (enum.vmap ne null) enum.vmap(i)
- else this
+ val enum = thisenum.readResolve().asInstanceOf[Enumeration]
+ if (enum.vmap == null) this
+ else enum.vmap(i)
}
}
@@ -288,21 +245,24 @@ abstract class Enumeration(initial: Int, names: String*) { thisenum =>
* Iterating through this set will yield values in increasing order of their ids.
* @param ids The set of ids of values, organized as a BitSet.
*/
- class ValueSet private[Enumeration] (val ids: BitSet) extends Set[Value] with SetLike[Value, ValueSet] {
+ class ValueSet private[Enumeration] (val ids: immutable.BitSet) extends Set[Value] with SetLike[Value, ValueSet] {
override def empty = ValueSet.empty
def contains(v: Value) = ids contains (v.id)
def + (value: Value) = new ValueSet(ids + value.id)
def - (value: Value) = new ValueSet(ids - value.id)
- def iterator = ids.iterator map Enumeration.this.apply
- override def stringPrefix = Enumeration.this + ".ValueSet"
+ def iterator = ids.iterator map thisenum.apply
+ override def stringPrefix = thisenum + ".ValueSet"
}
/** A factory object for value sets */
object ValueSet {
+ import mutable.{ Builder, AddingBuilder }
+ import generic.CanBuildFrom
+
/** The empty value set */
- val empty = new ValueSet(BitSet.empty)
+ val empty = new ValueSet(immutable.BitSet.empty)
/** A value set consisting of given elements */
- def apply(elems: Value*): ValueSet = elems.foldLeft(empty)(_ + _)
+ def apply(elems: Value*): ValueSet = empty ++ elems
/** A builder object for value sets */
def newBuilder: Builder[Value, ValueSet] = new AddingBuilder(empty)
/** The implicit builder for value sets */
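Behavior sketch for the #3616 equality fix (hypothetical enumerations):

object WeekDay extends Enumeration { val Mon, Tue = Value }
object Color   extends Enumeration { val Red, Blue = Value }
WeekDay.Mon == WeekDay.Mon   // true
WeekDay.Mon == Color.Red     // false: same id 0, but a different outerEnum
// under the old erased pattern match, values of different enumerations with
// equal ids could compare equal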
diff --git a/src/library/scala/Option.scala b/src/library/scala/Option.scala
index 8992024353..c3cfb14b73 100644
--- a/src/library/scala/Option.scala
+++ b/src/library/scala/Option.scala
@@ -6,12 +6,9 @@
** |/ **
\* */
-
-
package scala
-object Option
-{
+object Option {
/** An implicit conversion that converts an option to an iterable value
*/
implicit def option2Iterable[A](xo: Option[A]): Iterable[A] = xo.toList
@@ -23,6 +20,11 @@ object Option
* @return Some(value) if value != null, None if value == null
*/
def apply[A](x: A): Option[A] = if (x == null) None else Some(x)
+
+ /** An Option factory which returns None in a manner consistent with
+ * the collections hierarchy.
+ */
+ def empty[A] : Option[A] = None
}
/** This class represents optional values. Instances of <code>Option</code>
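Usage sketch for Option.empty, which mirrors the collection companions' empty factories:

val seed = Option.empty[Int]   // Option[Int]; None alone would infer Option[Nothing]
List(1, 2, 3).foldLeft(seed)((_, x) => Some(x))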
diff --git a/src/parallel-collections/scala/collection/Parallel.scala b/src/library/scala/collection/Parallel.scala
index e500817745..e500817745 100644
--- a/src/parallel-collections/scala/collection/Parallel.scala
+++ b/src/library/scala/collection/Parallel.scala
diff --git a/src/library/scala/collection/Parallelizable.scala b/src/library/scala/collection/Parallelizable.scala
new file mode 100644
index 0000000000..405c005c55
--- /dev/null
+++ b/src/library/scala/collection/Parallelizable.scala
@@ -0,0 +1,38 @@
+package scala.collection
+
+
+
+import parallel.ParallelIterableLike
+
+
+
+/** This trait describes collections which can be turned into parallel collections
+ * by invoking the method `par`. Parallelizable collections may be parametrized with
+ * a target type different than their own.
+ */
+trait Parallelizable[+ParRepr <: Parallel] {
+
+ /** Returns a parallel implementation of a collection.
+ */
+ def par: ParRepr
+
+}
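Usage sketch, relying on the immutable.HashMap wiring added later in this patch:

import scala.collection.immutable.HashMap
val hm  = HashMap(1 -> "a", 2 -> "b")
val phm = hm.par   // a parallel.immutable.ParallelHashTrie[Int, String]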
diff --git a/src/parallel-collections/scala/collection/Sequentializable.scala b/src/library/scala/collection/Sequentializable.scala
index 61fb24571a..61fb24571a 100644
--- a/src/parallel-collections/scala/collection/Sequentializable.scala
+++ b/src/library/scala/collection/Sequentializable.scala
diff --git a/src/library/scala/collection/SetLike.scala b/src/library/scala/collection/SetLike.scala
index 2e9a1ec2a2..fbbd77d8aa 100644
--- a/src/library/scala/collection/SetLike.scala
+++ b/src/library/scala/collection/SetLike.scala
@@ -11,7 +11,6 @@ package scala.collection
import generic._
import mutable.{Builder, AddingBuilder}
-import PartialFunction._
/** A template trait for sets.
*
diff --git a/src/library/scala/collection/TraversableLike.scala b/src/library/scala/collection/TraversableLike.scala
index 2169dcdd02..fd5d8ba72c 100644
--- a/src/library/scala/collection/TraversableLike.scala
+++ b/src/library/scala/collection/TraversableLike.scala
@@ -330,18 +330,17 @@ trait TraversableLike[+A, +Repr] extends HasNewBuilder[A, Repr]
* for which `f(x)` equals `k`.
*
*/
- def groupBy[K](f: A => K): immutable.Map[K, Repr] = {
- val m = mutable.Map.empty[K, Builder[A, Repr]]
+ def groupBy[K](f: A => K): Map[K, Repr] = {
+ var m = Map[K, Builder[A, Repr]]()
for (elem <- this) {
val key = f(elem)
- val bldr = m.getOrElseUpdate(key, newBuilder)
+ val bldr = m get key match {
+ case None => val b = newBuilder; m = m updated (key, b); b
+ case Some(b) => b
+ }
bldr += elem
}
- val b = immutable.Map.newBuilder[K, Repr]
- for ((k, v) <- m)
- b += ((k, v.result))
-
- b.result
+ m map { case (k, b) => (k, b.result) }
}
/** Tests whether a predicate holds for all elements of this $coll.
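Behavior sketch: the semantics are unchanged, only the result type is loosened to collection.Map:

List(1, 2, 3, 4) groupBy (_ % 2)   // Map(1 -> List(1, 3), 0 -> List(2, 4))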
@@ -698,7 +697,7 @@ trait TraversableLike[+A, +Repr] extends HasNewBuilder[A, Repr]
def toTraversable: Traversable[A] = thisCollection
def toIterator: Iterator[A] = toStream.iterator
- def toStream: Stream[A] = Stream.empty[A] ++ thisCollection
+ def toStream: Stream[A] = toBuffer.toStream
/** Converts this $coll to a string.
* @return a string representation of this collection. By default this
diff --git a/src/library/scala/collection/TraversableProxyLike.scala b/src/library/scala/collection/TraversableProxyLike.scala
index f2d91ded0c..05c4c44f12 100644
--- a/src/library/scala/collection/TraversableProxyLike.scala
+++ b/src/library/scala/collection/TraversableProxyLike.scala
@@ -37,7 +37,7 @@ trait TraversableProxyLike[+A, +Repr <: TraversableLike[A, Repr] with Traversabl
override def filterNot(p: A => Boolean): Repr = self.filterNot(p)
override def collect[B, That](pf: PartialFunction[A, B])(implicit bf: CanBuildFrom[Repr, B, That]): That = self.collect(pf)(bf)
override def partition(p: A => Boolean): (Repr, Repr) = self.partition(p)
- override def groupBy[K](f: A => K): immutable.Map[K, Repr] = self.groupBy(f)
+ override def groupBy[K](f: A => K): Map[K, Repr] = self.groupBy(f)
override def forall(p: A => Boolean): Boolean = self.forall(p)
override def exists(p: A => Boolean): Boolean = self.exists(p)
override def count(p: A => Boolean): Int = self.count(p)
diff --git a/src/library/scala/collection/TraversableViewLike.scala b/src/library/scala/collection/TraversableViewLike.scala
index 9b5be82dd6..7b443e54b8 100644
--- a/src/library/scala/collection/TraversableViewLike.scala
+++ b/src/library/scala/collection/TraversableViewLike.scala
@@ -216,7 +216,7 @@ self =>
override def scanRight[B, That](z: B)(op: (A, B) => B)(implicit bf: CanBuildFrom[This, B, That]): That =
newForced(thisSeq.scanRight(z)(op)).asInstanceOf[That]
- override def groupBy[K](f: A => K): immutable.Map[K, This] =
+ override def groupBy[K](f: A => K): Map[K, This] =
thisSeq.groupBy(f).mapValues(xs => newForced(xs).asInstanceOf[This])
override def stringPrefix = "TraversableView"
diff --git a/src/parallel-collections/scala/collection/generic/CanBuildFromParallel.scala b/src/library/scala/collection/generic/CanCombineFrom.scala
index 404201b1c2..b56dab9794 100644
--- a/src/parallel-collections/scala/collection/generic/CanBuildFromParallel.scala
+++ b/src/library/scala/collection/generic/CanCombineFrom.scala
@@ -1,13 +1,8 @@
package scala.collection
package generic
-
-
import scala.collection.parallel._
-
-
-
/**
* A base trait for parallel builder factories.
*
@@ -15,7 +10,7 @@ import scala.collection.parallel._
* @tparam Elem the element type of the collection to be created
* @tparam To the type of the collection to be created
*/
-trait CanBuildFromParallel[-From, -Elem, +To] extends CanBuildFrom[From, Elem, To] with Parallel {
+trait CanCombineFrom[-From, -Elem, +To] extends CanBuildFrom[From, Elem, To] with Parallel {
def apply(from: From): Combiner[Elem, To]
def apply(): Combiner[Elem, To]
}
diff --git a/src/library/scala/collection/generic/GenericParallelCompanion.scala b/src/library/scala/collection/generic/GenericParallelCompanion.scala
new file mode 100644
index 0000000000..e5ba36f846
--- /dev/null
+++ b/src/library/scala/collection/generic/GenericParallelCompanion.scala
@@ -0,0 +1,29 @@
+package scala.collection.generic
+
+
+import scala.collection.parallel.Combiner
+import scala.collection.parallel.ParallelIterable
+import scala.collection.parallel.ParallelMap
+
+
+
+/** A template class for companion objects of parallel collection classes.
+ * They should be mixed in together with `GenericCompanion` type.
+ * @tparam CC the type constructor representing the collection class
+ * @since 2.8
+ */
+trait GenericParallelCompanion[+CC[X] <: ParallelIterable[X]] {
+ /** The default builder for $Coll objects.
+ */
+ def newBuilder[A]: Combiner[A, CC[A]]
+
+ /** The parallel builder for $Coll objects.
+ */
+ def newCombiner[A]: Combiner[A, CC[A]]
+}
+
+trait GenericParallelMapCompanion[+CC[P, Q] <: ParallelMap[P, Q]] {
+ def newCombiner[P, Q]: Combiner[(P, Q), CC[P, Q]]
+}
+
+
diff --git a/src/parallel-collections/scala/collection/generic/GenericParallelTemplate.scala b/src/library/scala/collection/generic/GenericParallelTemplate.scala
index 58454be04e..e98c13fa36 100644
--- a/src/parallel-collections/scala/collection/generic/GenericParallelTemplate.scala
+++ b/src/library/scala/collection/generic/GenericParallelTemplate.scala
@@ -4,6 +4,7 @@ package scala.collection.generic
import scala.collection.parallel.Combiner
import scala.collection.parallel.ParallelIterable
+import scala.collection.parallel.ParallelMap
import scala.collection.parallel.TaskSupport
@@ -47,7 +48,17 @@ extends GenericTraversableTemplate[A, CC]
}
+trait GenericParallelMapTemplate[K, +V, +CC[X, Y] <: ParallelMap[X, Y]]
+extends TaskSupport
+{
+ def mapCompanion: GenericParallelMapCompanion[CC]
+ def genericMapCombiner[P, Q]: Combiner[(P, Q), CC[P, Q]] = {
+ val cb = mapCompanion.newCombiner[P, Q]
+ cb.environment = environment
+ cb
+ }
+}
diff --git a/src/parallel-collections/scala/collection/generic/HasNewCombiner.scala b/src/library/scala/collection/generic/HasNewCombiner.scala
index 2c24b437d8..2c24b437d8 100644
--- a/src/parallel-collections/scala/collection/generic/HasNewCombiner.scala
+++ b/src/library/scala/collection/generic/HasNewCombiner.scala
diff --git a/src/parallel-collections/scala/collection/generic/ParallelFactory.scala b/src/library/scala/collection/generic/ParallelFactory.scala
index 86a5fdf822..fd33631640 100644
--- a/src/parallel-collections/scala/collection/generic/ParallelFactory.scala
+++ b/src/library/scala/collection/generic/ParallelFactory.scala
@@ -19,11 +19,11 @@ extends TraversableFactory[CC]
type EPC[T, C] = collection.parallel.EnvironmentPassingCombiner[T, C]
/**
- * A generic implementation of the `CanBuildFromParallel` trait, which forwards all calls to
+ * A generic implementation of the `CanCombineFrom` trait, which forwards all calls to
* `apply(from)` to the `genericParallelBuilder` method of the $coll `from`, and calls to `apply()`
* to this factory.
*/
- class GenericCanBuildFromParallel[A] extends GenericCanBuildFrom[A] with CanBuildFromParallel[CC[_], A, CC[A]] {
+ class GenericCanCombineFrom[A] extends GenericCanBuildFrom[A] with CanCombineFrom[CC[_], A, CC[A]] {
override def apply(from: Coll) = from.genericCombiner
override def apply() = newBuilder[A]
}
diff --git a/src/parallel-collections/scala/collection/generic/ParallelMapFactory.scala b/src/library/scala/collection/generic/ParallelMapFactory.scala
index ceda9d1155..8f779b4029 100644
--- a/src/parallel-collections/scala/collection/generic/ParallelMapFactory.scala
+++ b/src/library/scala/collection/generic/ParallelMapFactory.scala
@@ -17,7 +17,10 @@ import scala.collection.mutable.Builder
* @define $Coll ParallelMap
*/
abstract class ParallelMapFactory[CC[X, Y] <: ParallelMap[X, Y] with ParallelMapLike[X, Y, CC[X, Y], _]]
-extends MapFactory[CC] {
+extends MapFactory[CC]
+ with GenericParallelMapCompanion[CC] {
+
+ type MapColl = CC[_, _]
/** The default builder for $Coll objects.
* @tparam K the type of the keys
@@ -29,10 +32,10 @@ extends MapFactory[CC] {
* @tparam K the type of the keys
* @tparam V the type of the associated values
*/
- def newCombiner[K, V]: Combiner[(K, V), CC[K, V]] = null // TODO
+ def newCombiner[K, V]: Combiner[(K, V), CC[K, V]]
- class ParallelMapCanBuildFrom[K, V] extends CanBuildFromParallel[CC[_, _], (K, V), CC[K, V]] {
- def apply(from: CC[_, _]) = newCombiner[K, V]
+ class CanCombineFromMap[K, V] extends CanCombineFrom[CC[_, _], (K, V), CC[K, V]] {
+ def apply(from: MapColl) = from.genericMapCombiner[K, V].asInstanceOf[Combiner[(K, V), CC[K, V]]]
def apply() = newCombiner[K, V]
}
diff --git a/src/parallel-collections/scala/collection/generic/Signalling.scala b/src/library/scala/collection/generic/Signalling.scala
index 1dac4297b7..1dac4297b7 100644
--- a/src/parallel-collections/scala/collection/generic/Signalling.scala
+++ b/src/library/scala/collection/generic/Signalling.scala
diff --git a/src/parallel-collections/scala/collection/generic/Sizing.scala b/src/library/scala/collection/generic/Sizing.scala
index bf801302ae..bf801302ae 100644
--- a/src/parallel-collections/scala/collection/generic/Sizing.scala
+++ b/src/library/scala/collection/generic/Sizing.scala
diff --git a/src/library/scala/collection/immutable/HashMap.scala b/src/library/scala/collection/immutable/HashMap.scala
index 11292bdf0c..f40905428e 100644
--- a/src/library/scala/collection/immutable/HashMap.scala
+++ b/src/library/scala/collection/immutable/HashMap.scala
@@ -14,6 +14,10 @@ package immutable
import generic._
import annotation.unchecked.uncheckedVariance
+
+import parallel.immutable.ParallelHashTrie
+
+
/** This class implements immutable maps using a hash trie.
*
* '''Note:''' the builder of a hash map returns specialized representations EmptyMap,Map1,..., Map4
@@ -32,7 +36,7 @@ import annotation.unchecked.uncheckedVariance
* @define willNotTerminateInf
*/
@serializable @SerialVersionUID(2L)
-class HashMap[A, +B] extends Map[A,B] with MapLike[A, B, HashMap[A, B]] {
+class HashMap[A, +B] extends Map[A,B] with MapLike[A, B, HashMap[A, B]] with Parallelizable[ParallelHashTrie[A, B]] {
override def size: Int = 0
@@ -71,7 +75,7 @@ class HashMap[A, +B] extends Map[A,B] with MapLike[A, B, HashMap[A, B]] {
protected def get0(key: A, hash: Int, level: Int): Option[B] = None
- protected def updated0[B1 >: B](key: A, hash: Int, level: Int, value: B1, kv: (A, B1)): HashMap[A, B1] =
+ def updated0[B1 >: B](key: A, hash: Int, level: Int, value: B1, kv: (A, B1)): HashMap[A, B1] =
new HashMap.HashMap1(key, hash, value, kv)
protected def removed0(key: A, hash: Int, level: Int): HashMap[A, B] = this
@@ -80,9 +84,11 @@ class HashMap[A, +B] extends Map[A,B] with MapLike[A, B, HashMap[A, B]] {
def split: Seq[HashMap[A, B]] = Seq(this)
- def combine[B1 >: B](that: HashMap[A, B1]): HashMap[A, B1] = combine0(that, 0)
+ def merge[B1 >: B](that: HashMap[A, B1]): HashMap[A, B1] = merge0(that, 0)
- protected def combine0[B1 >: B](that: HashMap[A, B1], level: Int): HashMap[A, B1] = that
+ protected def merge0[B1 >: B](that: HashMap[A, B1], level: Int): HashMap[A, B1] = that
+
+ def par = ParallelHashTrie.fromTrie(this)
}
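Usage sketch for the renamed merge (on colliding keys the receiver's binding appears to win, going by merge0's updated0 calls; that reading is the editor's, not a documented guarantee):

import scala.collection.immutable.HashMap
val a = HashMap(1 -> "a", 2 -> "b")
val b = HashMap(2 -> "B", 3 -> "c")
a merge b   // contains keys 1, 2, 3, built structurally instead of by repeated +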
@@ -105,10 +111,9 @@ object HashMap extends ImmutableMapFactory[HashMap] {
// TODO: add HashMap2, HashMap3, ...
// statistics - will remove in future
- var dives = 0
- var colls = 0
- var two_colls = 0
- var two_nocolls = 0
+ var bothsingle = 0
+ var bothtries = 0
+ var onetrie = 0
class HashMap1[A,+B](private[HashMap] var key: A, private[HashMap] var hash: Int, private[HashMap] var value: (B @uncheckedVariance), private[HashMap] var kv: (A,B @uncheckedVariance)) extends HashMap[A,B] {
@@ -171,7 +176,11 @@ object HashMap extends ImmutableMapFactory[HashMap] {
override def iterator: Iterator[(A,B)] = Iterator(ensurePair)
override def foreach[U](f: ((A, B)) => U): Unit = f(ensurePair)
private[HashMap] def ensurePair: (A,B) = if (kv ne null) kv else { kv = (key, value); kv }
- protected override def combine0[B1 >: B](that: HashMap[A, B1], level: Int): HashMap[A, B1] = that.updated0(key, hash, level, value, kv)
+ protected override def merge0[B1 >: B](that: HashMap[A, B1], level: Int): HashMap[A, B1] = {
+ // if (that.isInstanceOf[HashMap1[_, _]]) bothsingle += 1
+ // else onetrie += 1
+ that.updated0(key, hash, level, value, kv)
+ }
}
private class HashMapCollision1[A,+B](private[HashMap] var hash: Int, var kvs: ListMap[A,B @uncheckedVariance]) extends HashMap[A,B] {
@@ -206,7 +215,7 @@ object HashMap extends ImmutableMapFactory[HashMap] {
def newhm(lm: ListMap[A, B @uncheckedVariance]) = new HashMapCollision1(hash, lm)
List(newhm(x), newhm(y))
}
- protected override def combine0[B1 >: B](that: HashMap[A, B1], level: Int): HashMap[A, B1] = {
+ protected override def merge0[B1 >: B](that: HashMap[A, B1], level: Int): HashMap[A, B1] = {
// this can be made more efficient by passing the entire ListMap at once
var m = that
for (p <- kvs) m = m.updated0(p._1, this.hash, level, p._2, p)
@@ -266,8 +275,7 @@ object HashMap extends ImmutableMapFactory[HashMap] {
Array.copy(elems, 0, elemsNew, 0, offset)
elemsNew(offset) = new HashMap1(key, hash, value, kv)
Array.copy(elems, offset, elemsNew, offset + 1, elems.length - offset)
- val bitmapNew = bitmap | mask
- new HashTrieMap(bitmapNew, elemsNew, size + 1)
+ new HashTrieMap(bitmap | mask, elemsNew, size + 1)
}
}
@@ -427,34 +435,36 @@ time { mNew.iterator.foreach( p => ()) }
i
}
- override def split: Seq[HashMap[A, B]] = {
- // printBitmap(bitmap)
- // println(elems.toList)
-
- // println("subtrees: " + Integer.bitCount(bitmap))
- // println("will split at: " + posOf(Integer.bitCount(bitmap) / 2, bitmap))
- val splitpoint = posOf(Integer.bitCount(bitmap) / 2, bitmap)
- val bm1 = bitmap & (-1 << splitpoint)
- val bm2 = bitmap & (-1 >>> (32 - splitpoint))
- // printBitmap(bm1)
- // printBitmap(bm2)
- val (e1, e2) = elems.splitAt(splitpoint)
- // println(e1.toList)
- // println(e2.toList)
- val hm1 = new HashTrieMap(bm1, e1, e1.foldLeft(0)(_ + _.size))
- val hm2 = new HashTrieMap(bm2, e2, e2.foldLeft(0)(_ + _.size))
-
- List(hm1, hm2)
+ override def split: Seq[HashMap[A, B]] = if (size == 1) Seq(this) else {
+ val nodesize = Integer.bitCount(bitmap)
+ if (nodesize > 1) {
+ // printBitmap(bitmap)
+ // println(elems.toList)
+
+ // println("subtrees: " + nodesize)
+ // println("will split at: " + (nodesize / 2))
+ val splitpoint = nodesize / 2
+ val bitsplitpoint = posOf(nodesize / 2, bitmap)
+ val bm1 = bitmap & (-1 << bitsplitpoint)
+ val bm2 = bitmap & (-1 >>> (32 - bitsplitpoint))
+ // printBitmap(bm1)
+ // printBitmap(bm2)
+ val (e1, e2) = elems.splitAt(splitpoint)
+ // println(e1.toList)
+ // println(e2.toList)
+ val hm1 = new HashTrieMap(bm1, e1, e1.foldLeft(0)(_ + _.size))
+ val hm2 = new HashTrieMap(bm2, e2, e2.foldLeft(0)(_ + _.size))
+
+ List(hm1, hm2)
+ } else elems(0).split
}
- protected override def combine0[B1 >: B](that: HashMap[A, B1], level: Int): HashMap[A, B1] = that match {
+ protected override def merge0[B1 >: B](that: HashMap[A, B1], level: Int): HashMap[A, B1] = that match {
case hm: HashMap1[_, _] =>
+ // onetrie += 1
this.updated0(hm.key, hm.hash, level, hm.value.asInstanceOf[B1], hm.kv)
- case hm: HashMapCollision1[_, _] =>
- var m: HashMap[A, B1] = this
- for (p <- that) m = m.updated0(p._1, computeHash(p._1), level, p._2, p)
- m
case hm: HashTrieMap[_, _] =>
+ // bothtries += 1
val that = hm.asInstanceOf[HashTrieMap[A, B1]]
val thiselems = this.elems
val thatelems = that.elems
@@ -465,7 +475,7 @@ time { mNew.iterator.foreach( p => ()) }
val subcount = Integer.bitCount(thisbm | thatbm)
// construct a new array of appropriate size
- val combined = new Array[HashMap[A, B1 @uncheckedVariance]](subcount)
+ val merged = new Array[HashMap[A, B1]](subcount)
// run through both bitmaps and add elements to it
var i = 0
@@ -482,9 +492,9 @@ time { mNew.iterator.foreach( p => ()) }
// }
if (thislsb == thatlsb) {
// println("a collision")
- val m = thiselems(thisi).combine0(thatelems(thati), level + 5)
+ val m = thiselems(thisi).merge0(thatelems(thati), level + 5)
totalelems += m.size
- combined(i) = m
+ merged(i) = m
thisbm = thisbm & ~thislsb
thatbm = thatbm & ~thatlsb
thati += 1
@@ -497,20 +507,20 @@ time { mNew.iterator.foreach( p => ()) }
// and compare a and b defined as below:
val a = thislsb - 1
val b = thatlsb - 1
- // ! our case indeed is more specific, but this didn't help:
+ // ! our case indeed is more specific, but this didn't help:
// if ((thislsb > 0 && thislsb < thatlsb) || thatlsb == 0 || (thatlsb < 0 && thislsb != 0)) {
if ((a < b) ^ (a < 0) ^ (b < 0)) {
// println("an element from this trie")
val m = thiselems(thisi)
totalelems += m.size
- combined(i) = m
+ merged(i) = m
thisbm = thisbm & ~thislsb
thisi += 1
} else {
// println("an element from that trie")
val m = thatelems(thati)
totalelems += m.size
- combined(i) = m
+ merged(i) = m
thatbm = thatbm & ~thatlsb
thati += 1
}
@@ -518,16 +528,8 @@ time { mNew.iterator.foreach( p => ()) }
i += 1
}
- val res = new HashTrieMap[A, B1](this.bitmap | that.bitmap, combined, totalelems)
- // if (!check(this, that, res)) { TODO remove
- // printBitmap(this.bitmap)
- // printBitmap(that.bitmap)
- // printBitmap(res.bitmap)
- // println(this.bitmap)
- // System.exit(1)
- // }
- res
- case empty: HashMap[_, _] => this
+ new HashTrieMap[A, B1](this.bitmap | that.bitmap, merged, totalelems)
+ case hm: HashMapCollision1[_, _] => that.merge0(this, level)
case _ => error("section supposed to be unreachable.")
}
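The `split` above halves a trie node by masking its bitmap: `-1 << k` keeps the occupied slots at bit positions `k` and above, while `-1 >>> (32 - k)` keeps those below `k`. A standalone illustration of the arithmetic (a sketch only, assuming `0 < k < 32`, since a JVM shift by 32 is a no-op):

    val bitmap = Integer.parseInt("101101", 2)  // slots 0, 2, 3 and 5 occupied
    val k = 3
    val high = bitmap & (-1 << k)               // 101000: slots at positions >= k
    val low  = bitmap & (-1 >>> (32 - k))       // 000101: slots at positions <  k
    assert((high | low) == bitmap && (high & low) == 0)

Relatedly, the `(a < b) ^ (a < 0) ^ (b < 0)` test in `merge0` is the standard branch-free unsigned comparison of `a = thislsb - 1` and `b = thatlsb - 1`.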
diff --git a/src/library/scala/collection/immutable/IntMap.scala b/src/library/scala/collection/immutable/IntMap.scala
index ba5cd896ac..d4605d3e1f 100644
--- a/src/library/scala/collection/immutable/IntMap.scala
+++ b/src/library/scala/collection/immutable/IntMap.scala
@@ -11,6 +11,14 @@
package scala.collection
package immutable;
+
+
+import scala.collection.generic.CanBuildFrom
+import scala.collection.mutable.Builder
+import scala.collection.mutable.MapBuilder
+
+
+
/** Utility class for integer maps.
* @author David MacIver
*/
@@ -53,6 +61,12 @@ import IntMapUtils._
* @since 2.7
*/
object IntMap {
+ /** $mapCanBuildFromInfo */
+ implicit def canBuildFrom[A, B] = new CanBuildFrom[IntMap[A], (Int, B), IntMap[B]] {
+ def apply(from: IntMap[A]): Builder[(Int, B), IntMap[B]] = apply()
+ def apply(): Builder[(Int, B), IntMap[B]] = new MapBuilder[Int, B, IntMap[B]](empty[B])
+ }
+
def empty[T] : IntMap[T] = IntMap.Nil;
def singleton[T](key : Int, value : T) : IntMap[T] = IntMap.Tip(key, value);
def apply[T](elems : (Int, T)*) : IntMap[T] =
@@ -147,7 +161,7 @@ import IntMap._
/** Specialised immutable map structure for integer keys, based on
* <a href="http://citeseer.ist.psu.edu/okasaki98fast.html">Fast Mergeable Integer Maps</a>
- * by Okasaki and Gill. Essentially a trie based on binary digits of the the integers.
+ * by Okasaki and Gill. Essentially a trie based on binary digits of the integers.
*
* Note: This class is as of 2.8 largely superseded by HashMap.
*
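With this `canBuildFrom` in scope, transformations on an `IntMap` can keep their specialised representation instead of widening to a generic immutable `Map` (the `LongMap` companion below gains the analogous instance). A small usage sketch:

    import scala.collection.immutable.IntMap

    val m  = IntMap(1 -> "a", 2 -> "bb")
    val m2 = m.map { case (k, v) => (k, v.length) }
    // m2 should now be an IntMap[Int] rather than a plain Map[Int, Int]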
diff --git a/src/library/scala/collection/immutable/LongMap.scala b/src/library/scala/collection/immutable/LongMap.scala
index 691a81d9f0..dcdc6e948f 100644
--- a/src/library/scala/collection/immutable/LongMap.scala
+++ b/src/library/scala/collection/immutable/LongMap.scala
@@ -1,6 +1,23 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+
+
package scala.collection
package immutable
+
+import scala.collection.generic.CanBuildFrom
+import scala.collection.mutable.Builder
+import scala.collection.mutable.MapBuilder
+
+
+
/** Utility class for long maps.
* @author David MacIver
*/
@@ -44,6 +61,12 @@ import LongMapUtils._
* @since 2.7
*/
object LongMap{
+ /** $mapCanBuildFromInfo */
+ implicit def canBuildFrom[A, B] = new CanBuildFrom[LongMap[A], (Long, B), LongMap[B]] {
+ def apply(from: LongMap[A]): Builder[(Long, B), LongMap[B]] = apply()
+ def apply(): Builder[(Long, B), LongMap[B]] = new MapBuilder[Long, B, LongMap[B]](empty[B])
+ }
+
def empty[T] : LongMap[T] = LongMap.Nil;
def singleton[T](key : Long, value : T) : LongMap[T] = LongMap.Tip(key, value);
def apply[T](elems : (Long, T)*) : LongMap[T] =
@@ -136,7 +159,7 @@ import LongMap._;
/**
* Specialised immutable map structure for long keys, based on
* <a href="http://citeseer.ist.psu.edu/okasaki98fast.html">Fast Mergeable Long Maps</a>
- * by Okasaki and Gill. Essentially a trie based on binary digits of the the integers.
+ * by Okasaki and Gill. Essentially a trie based on binary digits of the integers.
*
* Note: This class is as of 2.8 largely superseded by HashMap.
*
diff --git a/src/library/scala/collection/immutable/StringOps.scala b/src/library/scala/collection/immutable/StringOps.scala
index 8a27a4ad4b..0d8f5f6b83 100644
--- a/src/library/scala/collection/immutable/StringOps.scala
+++ b/src/library/scala/collection/immutable/StringOps.scala
@@ -36,8 +36,16 @@ final class StringOps(override val repr: String) extends StringLike[String] {
/** Creates a string builder buffer as builder for this class */
override protected[this] def newBuilder = new StringBuilder
- override def slice(from: Int, until: Int): String =
- repr.substring(from max 0, until min repr.length)
+ override def slice(from: Int, until: Int): String = {
+ /** Slice must be forgiving about all out-of-bounds indices,
+ * whereas substring is not.
+ */
+ val start = from max 0
+ val end = until min repr.length
+
+ if (start >= end) ""
+ else repr.substring(start, end)
+ }
override def toString = repr
}
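A quick check of the intended forgiving behaviour (hypothetical REPL-style examples):

    "abcde".slice(-2, 3)   // "abc" - negative start is clamped to 0
    "abcde".slice(3, 99)   // "de"  - end is clamped to the length
    "abcde".slice(4, 2)    // ""    - crossed indices yield the empty string

Each of these would throw a StringIndexOutOfBoundsException if passed to `substring` directly.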
diff --git a/src/parallel-collections/scala/collection/immutable/package.scala b/src/library/scala/collection/immutable/package.scala
index 5ff9fa223d..5ff9fa223d 100644
--- a/src/parallel-collections/scala/collection/immutable/package.scala
+++ b/src/library/scala/collection/immutable/package.scala
diff --git a/src/library/scala/collection/mutable/ArrayBuffer.scala b/src/library/scala/collection/mutable/ArrayBuffer.scala
index 6412a21531..a59a0db2e1 100644
--- a/src/library/scala/collection/mutable/ArrayBuffer.scala
+++ b/src/library/scala/collection/mutable/ArrayBuffer.scala
@@ -12,6 +12,7 @@ package scala.collection
package mutable
import generic._
+import parallel.mutable.ParallelArray
/** An implementation of the `Buffer` class using an array to
* represent the assembled sequence internally. Append, update and random
@@ -46,7 +47,8 @@ class ArrayBuffer[A](override protected val initialSize: Int)
with BufferLike[A, ArrayBuffer[A]]
with IndexedSeqOptimized[A, ArrayBuffer[A]]
with Builder[A, ArrayBuffer[A]]
- with ResizableArray[A] {
+ with ResizableArray[A]
+ with Parallelizable[ParallelArray[A]] {
override def companion: GenericCompanion[ArrayBuffer] = ArrayBuffer
@@ -64,6 +66,8 @@ class ArrayBuffer[A](override protected val initialSize: Int)
}
}
+ def par = ParallelArray.handoff[A](array.asInstanceOf[Array[A]], size)
+
/** Appends a single element to this buffer and returns
* the identity of the buffer. It takes constant amortized time.
*
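Both `ArrayBuffer` and, through `ArrayOps` below, plain arrays now expose a `par` method that hands the backing array off to a `ParallelArray` without copying. A minimal sketch, assuming the sharing semantics suggested by the name `handoff`:

    import scala.collection.mutable.ArrayBuffer

    val buf = ArrayBuffer(1, 2, 3, 4)
    val pb  = buf.par                // ParallelArray over buf's backing array
    val pa  = Array(1, 2, 3, 4).par  // the same idea via ArrayOps

Because the array is shared rather than copied, mutating the buffer after calling `par` would be visible to the parallel collection.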
diff --git a/src/library/scala/collection/mutable/ArrayOps.scala b/src/library/scala/collection/mutable/ArrayOps.scala
index 00e8697b53..3cf6a642d2 100644
--- a/src/library/scala/collection/mutable/ArrayOps.scala
+++ b/src/library/scala/collection/mutable/ArrayOps.scala
@@ -14,6 +14,9 @@ import compat.Platform.arraycopy
import scala.reflect.ClassManifest
+import parallel.mutable.ParallelArray
+
+
/** This class serves as a wrapper for `Array`s with all the operations found in
* indexed sequences. Where needed, instances of arrays are implicitly converted
* into this class.
@@ -32,7 +35,7 @@ import scala.reflect.ClassManifest
* @define mayNotTerminateInf
* @define willNotTerminateInf
*/
-abstract class ArrayOps[T] extends ArrayLike[T, Array[T]] {
+abstract class ArrayOps[T] extends ArrayLike[T, Array[T]] with Parallelizable[ParallelArray[T]] {
private def rowBuilder[U]: Builder[U, Array[U]] =
Array.newBuilder(
@@ -52,6 +55,8 @@ abstract class ArrayOps[T] extends ArrayLike[T, Array[T]] {
else
super.toArray[U]
+ def par = ParallelArray.handoff(repr)
+
/** Flattens a two-dimensional array by concatenating all its rows
* into a single array.
*
diff --git a/src/library/scala/collection/mutable/BufferLike.scala b/src/library/scala/collection/mutable/BufferLike.scala
index 80a8824a3b..dd4b5f303f 100644
--- a/src/library/scala/collection/mutable/BufferLike.scala
+++ b/src/library/scala/collection/mutable/BufferLike.scala
@@ -64,16 +64,13 @@ trait BufferLike[A, +This <: BufferLike[A, This] with Buffer[A]]
with Shrinkable[A]
with Scriptable[A]
with Subtractable[A, This]
- with Cloneable[This]
with SeqLike[A, This]
{ self : This =>
// Note this does not extend Addable because `+` is being phased out of
// all Seq-derived classes.
- import scala.collection.{Iterable, Traversable}
-
- // Abstract methods from IndexedSeq:
+ // Abstract methods from Seq:
def apply(n: Int): A
def update(n: Int, newelem: A)
@@ -99,7 +96,7 @@ trait BufferLike[A, +This <: BufferLike[A, This] with Buffer[A]]
   * @throws IndexOutOfBoundsException if the index `n` is not in the valid range
* `0 <= n <= length`.
*/
- def insertAll(n: Int, elems: Traversable[A])
+ def insertAll(n: Int, elems: collection.Traversable[A])
/** Removes the element at a given index from this buffer.
*
diff --git a/src/library/scala/collection/mutable/BufferProxy.scala b/src/library/scala/collection/mutable/BufferProxy.scala
index 7adbb8ee3f..5d2e7fd86d 100644
--- a/src/library/scala/collection/mutable/BufferProxy.scala
+++ b/src/library/scala/collection/mutable/BufferProxy.scala
@@ -125,16 +125,20 @@ trait BufferProxy[A] extends Buffer[A] with Proxy {
* @param n the index where a new element will be inserted.
* @param iter the iterable object providing all elements to insert.
*/
- def insertAll(n: Int, iter: scala.collection.Iterable[A]): Unit = self.insertAll(n, iter)
+ def insertAll(n: Int, iter: scala.collection.Iterable[A]) {
+ self.insertAll(n, iter)
+ }
- override def insertAll(n: Int, iter: scala.collection.Traversable[A]): Unit = self.insertAll(n, iter)
+ override def insertAll(n: Int, iter: scala.collection.Traversable[A]) {
+ self.insertAll(n, iter)
+ }
/** Replace element at index `n` with the new element `newelem`.
*
* @param n the index of the element to replace.
* @param newelem the new element.
*/
- def update(n: Int, newelem: A): Unit = self.update(n, newelem)
+ def update(n: Int, newelem: A) { self.update(n, newelem) }
/** Removes the element on a given index position.
*
diff --git a/src/library/scala/collection/mutable/PriorityQueue.scala b/src/library/scala/collection/mutable/PriorityQueue.scala
index acdfc03597..b1ca5fde3c 100644
--- a/src/library/scala/collection/mutable/PriorityQueue.scala
+++ b/src/library/scala/collection/mutable/PriorityQueue.scala
@@ -37,7 +37,6 @@ class PriorityQueue[A](implicit ord: Ordering[A])
extends Seq[A]
with SeqLike[A, PriorityQueue[A]]
with Growable[A]
- with Cloneable[PriorityQueue[A]]
with Builder[A, PriorityQueue[A]]
{
import ord._
diff --git a/src/library/scala/collection/mutable/Seq.scala b/src/library/scala/collection/mutable/Seq.scala
index eff387353e..c318dd34cf 100644
--- a/src/library/scala/collection/mutable/Seq.scala
+++ b/src/library/scala/collection/mutable/Seq.scala
@@ -29,14 +29,6 @@ trait Seq[A] extends Iterable[A]
with GenericTraversableTemplate[A, Seq]
with SeqLike[A, Seq[A]] {
override def companion: GenericCompanion[Seq] = Seq
-
- /** Replaces element at given index with a new value.
- *
- * @param n the index of the element to replace.
- * @param lem the new value.
- * @throws IndexOutofBoundsException if the index is not valid.
- */
- def update(idx: Int, elem: A)
}
/** $factoryInfo
diff --git a/src/library/scala/collection/mutable/SeqLike.scala b/src/library/scala/collection/mutable/SeqLike.scala
new file mode 100644
index 0000000000..e16aa37fe2
--- /dev/null
+++ b/src/library/scala/collection/mutable/SeqLike.scala
@@ -0,0 +1,31 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.collection
+package mutable
+
+import generic._
+
+/** A template trait for mutable sequences of type `mutable.Seq[A]`.
+ * @tparam A the type of the elements of the sequence
+ * @tparam This the type of the sequence itself.
+ *
+ */
+trait SeqLike[A, +This <: SeqLike[A, This] with Seq[A]]
+ extends scala.collection.SeqLike[A, This]
+ with Cloneable[This] {
+ self =>
+
+ /** Replaces element at given index with a new value.
+ *
+ * @param idx the index of the element to replace.
+ * @param elem the new value.
+ * @throws IndexOutOfBoundsException if the index is not valid.
+ */
+ def update(idx: Int, elem: A)
+}
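Moving `update` into the new `mutable.SeqLike` keeps the indexed-assignment sugar available on every mutable sequence:

    import scala.collection.mutable

    val s = mutable.Seq(1, 2, 3)
    s(1) = 42   // sugar for s.update(1, 42)
    // s now contains 1, 42, 3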
diff --git a/src/library/scala/collection/mutable/Stack.scala b/src/library/scala/collection/mutable/Stack.scala
index e678f7fa5c..c791066398 100644
--- a/src/library/scala/collection/mutable/Stack.scala
+++ b/src/library/scala/collection/mutable/Stack.scala
@@ -62,7 +62,8 @@ class Stack[A] private (var elems: List[A]) extends scala.collection.Seq[A] with
* @param elems the element sequence.
* @return the stack with the new elements on top.
*/
- def push(elem1: A, elem2: A, elems: A*): this.type = this.push(elem1).push(elem2).pushAll(elems)
+ def push(elem1: A, elem2: A, elems: A*): this.type =
+ this.push(elem1).push(elem2).pushAll(elems)
/** Push all elements in the given traversable object onto
* the stack. The last element in the traversable object
@@ -134,5 +135,5 @@ class Stack[A] private (var elems: List[A]) extends scala.collection.Seq[A] with
// !!! TODO - integrate
object Stack {
- def apply[A](xs: A*): Stack[A] = new Stack[A] ++= xs
+ def apply[A](xs: A*): Stack[A] = new Stack[A] pushAll xs
}
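Under the new definition each argument is pushed in turn, so the last one ends up on top:

    val s = Stack(1, 2, 3)
    s.top     // 3, since 3 was pushed last
    s.pop()   // 3; the stack now holds 2 and 1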
diff --git a/src/library/scala/collection/parallel/Combiner.scala b/src/library/scala/collection/parallel/Combiner.scala
new file mode 100644
index 0000000000..a37f642d42
--- /dev/null
+++ b/src/library/scala/collection/parallel/Combiner.scala
@@ -0,0 +1,66 @@
+package scala.collection.parallel
+
+
+import scala.collection.Parallel
+import scala.collection.mutable.Builder
+import scala.collection.generic.Sizing
+
+
+
+/** The base trait for all combiners.
+ * A combiner lets one construct collections incrementally just like
+ * a regular builder, but also implements an efficient merge operation of two builders
+ * via the `combine` method. Once the collection is constructed, it may be obtained by invoking
+ * the `result` method.
+ *
+ * @tparam Elem the type of the elements added to the builder
+ * @tparam To the type of the collection the builder produces
+ *
+ * @author prokopec
+ */
+trait Combiner[-Elem, +To] extends Builder[Elem, To] with Sizing with Parallel with TaskSupport {
+ self: EnvironmentPassingCombiner[Elem, To] =>
+
+ type EPC = EnvironmentPassingCombiner[Elem, To]
+
+ /** Combines the contents of the receiver builder and the `other` builder,
+ * producing a new builder containing both their elements.
+ *
+ * This method may combine the two builders by copying them into a larger collection,
+ * by producing a lazy view that gets evaluated once `result` is invoked, or by using
+ * a merge operation specific to the data structure in question.
+ *
+ * Note that both the receiver builder and `other` builder become invalidated
+ * after the invocation of this method, and should be cleared (see `clear`)
+ * if they are to be used again.
+ *
+ * Also, combining two combiners `c1` and `c2` for which `c1 eq c2` is `true`, that is,
+ * they are the same object in memory, always does nothing and returns the first combiner.
+ *
+ * @tparam N the type of elements contained by the `other` builder
+ * @tparam NewTo the type of collection produced by the `other` builder
+ * @param other the other builder
+ * @return the parallel builder containing both the elements of this and the `other` builder
+ */
+ def combine[N <: Elem, NewTo >: To](other: Combiner[N, NewTo]): Combiner[N, NewTo]
+
+}
+
+
+trait EnvironmentPassingCombiner[-Elem, +To] extends Combiner[Elem, To] {
+ abstract override def result = {
+ val res = super.result
+// res.environment = environment
+ res
+ }
+}
+
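To make the `combine` contract concrete, here is a self-contained toy combiner over lists. It is only a sketch (the real trait also mixes in `Sizing`, `Parallel` and `TaskSupport`), but it shows why combining can be cheap: chunks are linked together instead of copying elements.

    import scala.collection.mutable.ListBuffer

    class ListCombiner[A] {
      private var chunks = ListBuffer[ListBuffer[A]](ListBuffer())
      def +=(elem: A): this.type = { chunks.last += elem; this }
      def combine(other: ListCombiner[A]): ListCombiner[A] =
        if (this eq other) this                  // same object: do nothing
        else { chunks ++= other.chunks; this }   // O(#chunks), not O(#elements)
      def result: List[A] = chunks.toList.flatMap(_.toList)
    }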
diff --git a/src/parallel-collections/scala/collection/parallel/Combiners.scala b/src/library/scala/collection/parallel/Combiners.scala
index 80966f3435..a37f642d42 100644
--- a/src/parallel-collections/scala/collection/parallel/Combiners.scala
+++ b/src/library/scala/collection/parallel/Combiners.scala
@@ -34,6 +34,9 @@ trait Combiner[-Elem, +To] extends Builder[Elem, To] with Sizing with Parallel w
* after the invocation of this method, and should be cleared (see `clear`)
* if they are to be used again.
*
+ * Also, combining two combiners `c1` and `c2` for which `c1 eq c2` is `true`, that is,
+ * they are the same object in memory, always does nothing and returns the first combiner.
+ *
* @tparam N the type of elements contained by the `other` builder
* @tparam NewTo the type of collection produced by the `other` builder
* @param other the other builder
diff --git a/src/parallel-collections/scala/collection/parallel/Iterators.scala b/src/library/scala/collection/parallel/Iterators.scala
index 30aca2965b..bfebff994c 100644
--- a/src/parallel-collections/scala/collection/parallel/Iterators.scala
+++ b/src/library/scala/collection/parallel/Iterators.scala
@@ -5,7 +5,7 @@ package scala.collection.parallel
import scala.collection.Parallel
import scala.collection.generic.Signalling
import scala.collection.generic.DelegatedSignalling
-import scala.collection.generic.CanBuildFromParallel
+import scala.collection.generic.CanCombineFrom
import scala.collection.mutable.Builder
import scala.collection.Iterator.empty
@@ -93,14 +93,14 @@ trait AugmentedIterableIterator[+T, +Repr <: Parallel] extends RemainsIterator[T
/* transformers to combiners */
- def map2combiner[S, That](f: T => S, pbf: CanBuildFromParallel[Repr, S, That]): Combiner[S, That] = {
- val cb = pbf(repr)
+ def map2combiner[S, That](f: T => S, cb: Combiner[S, That]): Combiner[S, That] = {
+ //val cb = pbf(repr)
cb.sizeHint(remaining)
while (hasNext) cb += f(next)
cb
}
- def collect2combiner[S, That](pf: PartialFunction[T, S], pbf: CanBuildFromParallel[Repr, S, That]): Combiner[S, That] = {
+ def collect2combiner[S, That](pf: PartialFunction[T, S], pbf: CanCombineFrom[Repr, S, That]): Combiner[S, That] = {
val cb = pbf(repr)
while (hasNext) {
val curr = next
@@ -109,7 +109,7 @@ trait AugmentedIterableIterator[+T, +Repr <: Parallel] extends RemainsIterator[T
cb
}
- def flatmap2combiner[S, That](f: T => Traversable[S], pbf: CanBuildFromParallel[Repr, S, That]): Combiner[S, That] = {
+ def flatmap2combiner[S, That](f: T => Traversable[S], pbf: CanCombineFrom[Repr, S, That]): Combiner[S, That] = {
val cb = pbf(repr)
while (hasNext) {
val traversable = f(next)
@@ -276,7 +276,7 @@ trait AugmentedSeqIterator[+T, +Repr <: Parallel] extends AugmentedIterableItera
cb
}
- def reverseMap2combiner[S, That](f: T => S, cbf: CanBuildFromParallel[Repr, S, That]): Combiner[S, That] = {
+ def reverseMap2combiner[S, That](f: T => S, cbf: CanCombineFrom[Repr, S, That]): Combiner[S, That] = {
val cb = cbf(repr)
cb.sizeHint(remaining)
var lst = List[S]()
@@ -288,7 +288,7 @@ trait AugmentedSeqIterator[+T, +Repr <: Parallel] extends AugmentedIterableItera
cb
}
- def updated2combiner[U >: T, That](index: Int, elem: U, cbf: CanBuildFromParallel[Repr, U, That]): Combiner[U, That] = {
+ def updated2combiner[U >: T, That](index: Int, elem: U, cbf: CanCombineFrom[Repr, U, That]): Combiner[U, That] = {
val cb = cbf(repr)
cb.sizeHint(remaining)
var j = 0
diff --git a/src/parallel-collections/scala/collection/parallel/ParallelIterable.scala b/src/library/scala/collection/parallel/ParallelIterable.scala
index 83cb37f9c8..4882dc19ee 100644
--- a/src/parallel-collections/scala/collection/parallel/ParallelIterable.scala
+++ b/src/library/scala/collection/parallel/ParallelIterable.scala
@@ -26,8 +26,8 @@ trait ParallelIterable[+T] extends Iterable[T]
/** $factoryinfo
*/
object ParallelIterable extends ParallelFactory[ParallelIterable] {
- implicit def canBuildFrom[T]: CanBuildFromParallel[Coll, T, ParallelIterable[T]] =
- new GenericCanBuildFromParallel[T]
+ implicit def canBuildFrom[T]: CanCombineFrom[Coll, T, ParallelIterable[T]] =
+ new GenericCanCombineFrom[T]
def newBuilder[T]: Combiner[T, ParallelIterable[T]] = ParallelArrayCombiner[T]
diff --git a/src/parallel-collections/scala/collection/parallel/ParallelIterableLike.scala b/src/library/scala/collection/parallel/ParallelIterableLike.scala
index 5ed6d10195..7ac2713b55 100644
--- a/src/parallel-collections/scala/collection/parallel/ParallelIterableLike.scala
+++ b/src/library/scala/collection/parallel/ParallelIterableLike.scala
@@ -102,7 +102,7 @@ import scala.collection.generic._
* The order in which the operations on elements are performed is unspecified and may be nondeterministic.
*
* @define pbfinfo
- * An implicit value of class `CanBuildFromParallel` which determines the
+ * An implicit value of class `CanCombineFrom` which determines the
   * result class `That` from the current representation type `Repr`
* and the new element type `B`. This builder factory can provide a parallel
* builder for the resulting collection.
@@ -117,7 +117,7 @@ import scala.collection.generic._
*/
trait ParallelIterableLike[+T, +Repr <: Parallel, +SequentialView <: Iterable[T]]
extends IterableLike[T, Repr]
- with Parallelizable[T, Repr]
+ with Parallelizable[Repr]
with Sequentializable[T, SequentialView]
with Parallel
with HasNewCombiner[T, Repr]
@@ -199,6 +199,16 @@ extends IterableLike[T, Repr]
*/
protected[this] override def newBuilder: collection.mutable.Builder[T, Repr] = newCombiner
+ /** Optionally reuses an existing combiner for better performance. By default it doesn't; subclasses may override this behaviour.
+ * The combiner `oldc` that can potentially be reused is either a combiner from the previous computational task, or `None` if there
+ * was no previous phase (in which case this method must return `newc`).
+ *
+ * @param oldc The combiner that is the result of the previous task, or `None` if there was no previous task.
+ * @param newc The new, empty combiner that can be used.
+ * @return Either `newc` or `oldc`.
+ */
+ protected def reuse[S, That](oldc: Option[Combiner[S, That]], newc: Combiner[S, That]): Combiner[S, That] = newc
+
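A hypothetical subclass whose combiners can be recycled might override it along these lines (a sketch only; `clear()` comes from `Builder`):

    protected override def reuse[S, That](oldc: Option[Combiner[S, That]],
                                          newc: Combiner[S, That]): Combiner[S, That] =
      oldc match {
        case Some(c) => c.clear(); c   // recycle the previous task's combiner
        case None    => newc
      }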
/* convenience task operations wrapper */
protected implicit def task2ops[R, Tp](tsk: Task[R, Tp]) = new {
def mapResult[R1](mapping: R => R1): ResultMapping[R, Tp, R1] = new ResultMapping[R, Tp, R1](tsk) {
@@ -219,7 +229,7 @@ extends IterableLike[T, Repr]
}
protected def wrap[R](body: => R) = new NonDivisible[R] {
- def leaf = result = body
+ def leaf(prevr: Option[R]) = result = body
var result: R = null.asInstanceOf[R]
}
@@ -347,7 +357,7 @@ extends IterableLike[T, Repr]
}
override def product[U >: T](implicit num: Numeric[U]): U = {
- executeAndWaitResult(new Product[U](num, parallelIterator))
+ executeAndWaitResult(new Product[U](num, parallelIterator))
}
override def min[U >: T](implicit ord: Ordering[U]): T = {
@@ -612,7 +622,7 @@ extends IterableLike[T, Repr]
/** Sequentially performs one task after another. */
protected[this] trait SeqComposite[FR, SR, R, First <: super.Task[FR, _], Second <: super.Task[SR, _]]
extends Composite[FR, SR, R, First, Second] {
- def leaf = {
+ def leaf(prevr: Option[R]) = {
ft.compute
st.compute
result = combineResults(ft.result, st.result)
@@ -622,7 +632,7 @@ extends IterableLike[T, Repr]
/** Performs two tasks in parallel, and waits for both to finish. */
protected[this] trait ParComposite[FR, SR, R, First <: super.Task[FR, _], Second <: super.Task[SR, _]]
extends Composite[FR, SR, R, First, Second] {
- def leaf = {
+ def leaf(prevr: Option[R]) = {
st.start
ft.compute
st.sync
@@ -634,7 +644,7 @@ extends IterableLike[T, Repr]
extends NonDivisibleTask[R1, ResultMapping[R, Tp, R1]] {
var result: R1 = null.asInstanceOf[R1]
def map(r: R): R1
- def leaf = {
+ def leaf(prevr: Option[R1]) = {
inner.compute
result = map(inner.result)
}
@@ -644,27 +654,27 @@ extends IterableLike[T, Repr]
protected[this] class Foreach[S](op: T => S, val pit: ParallelIterator) extends Accessor[Unit, Foreach[S]] {
var result: Unit = ()
- def leaf = pit.foreach(op)
+ def leaf(prevr: Option[Unit]) = pit.foreach(op)
def newSubtask(p: ParallelIterator) = new Foreach[S](op, p)
}
protected[this] class Count(pred: T => Boolean, val pit: ParallelIterator) extends Accessor[Int, Count] {
var result: Int = 0
- def leaf = result = pit.count(pred)
+ def leaf(prevr: Option[Int]) = result = pit.count(pred)
def newSubtask(p: ParallelIterator) = new Count(pred, p)
override def merge(that: Count) = result = result + that.result
}
protected[this] class Reduce[U >: T](op: (U, U) => U, val pit: ParallelIterator) extends Accessor[U, Reduce[U]] {
var result: U = null.asInstanceOf[U]
- def leaf = result = pit.reduce(op)
+ def leaf(prevr: Option[U]) = result = pit.reduce(op)
def newSubtask(p: ParallelIterator) = new Reduce(op, p)
override def merge(that: Reduce[U]) = result = op(result, that.result)
}
protected[this] class Fold[U >: T](z: U, op: (U, U) => U, val pit: ParallelIterator) extends Accessor[U, Fold[U]] {
var result: U = null.asInstanceOf[U]
- def leaf = result = pit.fold(z)(op)
+ def leaf(prevr: Option[U]) = result = pit.fold(z)(op)
def newSubtask(p: ParallelIterator) = new Fold(z, op, p)
override def merge(that: Fold[U]) = result = op(result, that.result)
}
@@ -672,81 +682,81 @@ extends IterableLike[T, Repr]
protected[this] class Aggregate[S](z: S, seqop: (S, T) => S, combop: (S, S) => S, val pit: ParallelIterator)
extends Accessor[S, Aggregate[S]] {
var result: S = null.asInstanceOf[S]
- def leaf = result = pit.foldLeft(z)(seqop)
+ def leaf(prevr: Option[S]) = result = pit.foldLeft(z)(seqop)
def newSubtask(p: ParallelIterator) = new Aggregate(z, seqop, combop, p)
override def merge(that: Aggregate[S]) = result = combop(result, that.result)
}
protected[this] class Sum[U >: T](num: Numeric[U], val pit: ParallelIterator) extends Accessor[U, Sum[U]] {
var result: U = null.asInstanceOf[U]
- def leaf = result = pit.sum(num)
+ def leaf(prevr: Option[U]) = result = pit.sum(num)
def newSubtask(p: ParallelIterator) = new Sum(num, p)
override def merge(that: Sum[U]) = result = num.plus(result, that.result)
}
protected[this] class Product[U >: T](num: Numeric[U], val pit: ParallelIterator) extends Accessor[U, Product[U]] {
var result: U = null.asInstanceOf[U]
- def leaf = result = pit.product(num)
+ def leaf(prevr: Option[U]) = result = pit.product(num)
def newSubtask(p: ParallelIterator) = new Product(num, p)
override def merge(that: Product[U]) = result = num.times(result, that.result)
}
protected[this] class Min[U >: T](ord: Ordering[U], val pit: ParallelIterator) extends Accessor[U, Min[U]] {
var result: U = null.asInstanceOf[U]
- def leaf = result = pit.min(ord)
+ def leaf(prevr: Option[U]) = result = pit.min(ord)
def newSubtask(p: ParallelIterator) = new Min(ord, p)
override def merge(that: Min[U]) = result = if (ord.lteq(result, that.result)) result else that.result
}
protected[this] class Max[U >: T](ord: Ordering[U], val pit: ParallelIterator) extends Accessor[U, Max[U]] {
var result: U = null.asInstanceOf[U]
- def leaf = result = pit.max(ord)
+ def leaf(prevr: Option[U]) = result = pit.max(ord)
def newSubtask(p: ParallelIterator) = new Max(ord, p)
override def merge(that: Max[U]) = result = if (ord.gteq(result, that.result)) result else that.result
}
- protected[this] class Map[S, That](f: T => S, pbf: CanBuildFromParallel[Repr, S, That], val pit: ParallelIterator)
+ protected[this] class Map[S, That](f: T => S, pbf: CanCombineFrom[Repr, S, That], val pit: ParallelIterator)
extends Transformer[Combiner[S, That], Map[S, That]] {
var result: Combiner[S, That] = null
- def leaf = result = pit.map2combiner(f, pbf)
+ def leaf(prev: Option[Combiner[S, That]]) = result = pit.map2combiner(f, reuse(prev, pbf(self.repr)))
def newSubtask(p: ParallelIterator) = new Map(f, pbf, p)
override def merge(that: Map[S, That]) = result = result combine that.result
}
protected[this] class Collect[S, That]
- (pf: PartialFunction[T, S], pbf: CanBuildFromParallel[Repr, S, That], val pit: ParallelIterator)
+ (pf: PartialFunction[T, S], pbf: CanCombineFrom[Repr, S, That], val pit: ParallelIterator)
extends Transformer[Combiner[S, That], Collect[S, That]] {
var result: Combiner[S, That] = null
- def leaf = result = pit.collect2combiner[S, That](pf, pbf)
+ def leaf(prev: Option[Combiner[S, That]]) = result = pit.collect2combiner[S, That](pf, pbf) // TODO
def newSubtask(p: ParallelIterator) = new Collect(pf, pbf, p)
override def merge(that: Collect[S, That]) = result = result combine that.result
}
- protected[this] class FlatMap[S, That](f: T => Traversable[S], pbf: CanBuildFromParallel[Repr, S, That], val pit: ParallelIterator)
+ protected[this] class FlatMap[S, That](f: T => Traversable[S], pbf: CanCombineFrom[Repr, S, That], val pit: ParallelIterator)
extends Transformer[Combiner[S, That], FlatMap[S, That]] {
var result: Combiner[S, That] = null
- def leaf = result = pit.flatmap2combiner(f, pbf)
+ def leaf(prev: Option[Combiner[S, That]]) = result = pit.flatmap2combiner(f, pbf) // TODO
def newSubtask(p: ParallelIterator) = new FlatMap(f, pbf, p)
override def merge(that: FlatMap[S, That]) = result = result combine that.result
}
protected[this] class Forall(pred: T => Boolean, val pit: ParallelIterator) extends Accessor[Boolean, Forall] {
var result: Boolean = true
- def leaf = { if (!pit.isAborted) result = pit.forall(pred); if (result == false) pit.abort }
+ def leaf(prev: Option[Boolean]) = { if (!pit.isAborted) result = pit.forall(pred); if (result == false) pit.abort }
def newSubtask(p: ParallelIterator) = new Forall(pred, p)
override def merge(that: Forall) = result = result && that.result
}
protected[this] class Exists(pred: T => Boolean, val pit: ParallelIterator) extends Accessor[Boolean, Exists] {
var result: Boolean = false
- def leaf = { if (!pit.isAborted) result = pit.exists(pred); if (result == true) pit.abort }
+ def leaf(prev: Option[Boolean]) = { if (!pit.isAborted) result = pit.exists(pred); if (result == true) pit.abort }
def newSubtask(p: ParallelIterator) = new Exists(pred, p)
override def merge(that: Exists) = result = result || that.result
}
protected[this] class Find[U >: T](pred: T => Boolean, val pit: ParallelIterator) extends Accessor[Option[U], Find[U]] {
var result: Option[U] = None
- def leaf = { if (!pit.isAborted) result = pit.find(pred); if (result != None) pit.abort }
+ def leaf(prev: Option[Option[U]]) = { if (!pit.isAborted) result = pit.find(pred); if (result != None) pit.abort }
def newSubtask(p: ParallelIterator) = new Find(pred, p)
override def merge(that: Find[U]) = if (this.result == None) result = that.result
}
@@ -754,7 +764,7 @@ extends IterableLike[T, Repr]
protected[this] class Filter[U >: T, This >: Repr](pred: T => Boolean, cbf: () => Combiner[U, This], val pit: ParallelIterator)
extends Transformer[Combiner[U, This], Filter[U, This]] {
var result: Combiner[U, This] = null
- def leaf = result = pit.filter2combiner(pred, cbf())
+ def leaf(prev: Option[Combiner[U, This]]) = result = pit.filter2combiner(pred, reuse(prev, cbf()))
def newSubtask(p: ParallelIterator) = new Filter(pred, cbf, p)
override def merge(that: Filter[U, This]) = result = result combine that.result
}
@@ -762,7 +772,7 @@ extends IterableLike[T, Repr]
protected[this] class FilterNot[U >: T, This >: Repr](pred: T => Boolean, cbf: () => Combiner[U, This], val pit: ParallelIterator)
extends Transformer[Combiner[U, This], FilterNot[U, This]] {
var result: Combiner[U, This] = null
- def leaf = result = pit.filterNot2combiner(pred, cbf())
+ def leaf(prev: Option[Combiner[U, This]]) = result = pit.filterNot2combiner(pred, reuse(prev, cbf()))
def newSubtask(p: ParallelIterator) = new FilterNot(pred, cbf, p)
override def merge(that: FilterNot[U, This]) = result = result combine that.result
}
@@ -770,7 +780,7 @@ extends IterableLike[T, Repr]
protected class Copy[U >: T, That](cfactory: () => Combiner[U, That], val pit: ParallelIterator)
extends Transformer[Combiner[U, That], Copy[U, That]] {
var result: Combiner[U, That] = null
- def leaf = result = pit.copy2builder[U, That, Combiner[U, That]](cfactory())
+ def leaf(prev: Option[Combiner[U, That]]) = result = pit.copy2builder[U, That, Combiner[U, That]](reuse(prev, cfactory()))
def newSubtask(p: ParallelIterator) = new Copy[U, That](cfactory, p)
override def merge(that: Copy[U, That]) = result = result combine that.result
}
@@ -778,7 +788,7 @@ extends IterableLike[T, Repr]
protected[this] class Partition[U >: T, This >: Repr](pred: T => Boolean, cbf: () => Combiner[U, This], val pit: ParallelIterator)
extends Transformer[(Combiner[U, This], Combiner[U, This]), Partition[U, This]] {
var result: (Combiner[U, This], Combiner[U, This]) = null
- def leaf = result = pit.partition2combiners(pred, cbf(), cbf())
+ def leaf(prev: Option[(Combiner[U, This], Combiner[U, This])]) = result = pit.partition2combiners(pred, reuse(prev.map(_._1), cbf()), reuse(prev.map(_._2), cbf()))
def newSubtask(p: ParallelIterator) = new Partition(pred, cbf, p)
override def merge(that: Partition[U, This]) = result = (result._1 combine that.result._1, result._2 combine that.result._2)
}
@@ -786,7 +796,7 @@ extends IterableLike[T, Repr]
protected[this] class Take[U >: T, This >: Repr](n: Int, cbf: () => Combiner[U, This], val pit: ParallelIterator)
extends Transformer[Combiner[U, This], Take[U, This]] {
var result: Combiner[U, This] = null
- def leaf = result = pit.take2combiner(n, cbf())
+ def leaf(prev: Option[Combiner[U, This]]) = result = pit.take2combiner(n, reuse(prev, cbf()))
def newSubtask(p: ParallelIterator) = throw new UnsupportedOperationException
override def split = {
val pits = pit.split
@@ -802,7 +812,7 @@ extends IterableLike[T, Repr]
protected[this] class Drop[U >: T, This >: Repr](n: Int, cbf: () => Combiner[U, This], val pit: ParallelIterator)
extends Transformer[Combiner[U, This], Drop[U, This]] {
var result: Combiner[U, This] = null
- def leaf = result = pit.drop2combiner(n, cbf())
+ def leaf(prev: Option[Combiner[U, This]]) = result = pit.drop2combiner(n, reuse(prev, cbf()))
def newSubtask(p: ParallelIterator) = throw new UnsupportedOperationException
override def split = {
val pits = pit.split
@@ -818,7 +828,7 @@ extends IterableLike[T, Repr]
protected[this] class Slice[U >: T, This >: Repr](from: Int, until: Int, cbf: () => Combiner[U, This], val pit: ParallelIterator)
extends Transformer[Combiner[U, This], Slice[U, This]] {
var result: Combiner[U, This] = null
- def leaf = result = pit.slice2combiner(from, until, cbf())
+ def leaf(prev: Option[Combiner[U, This]]) = result = pit.slice2combiner(from, until, reuse(prev, cbf()))
def newSubtask(p: ParallelIterator) = throw new UnsupportedOperationException
override def split = {
val pits = pit.split
@@ -835,7 +845,7 @@ extends IterableLike[T, Repr]
protected[this] class SplitAt[U >: T, This >: Repr](at: Int, cbf: () => Combiner[U, This], val pit: ParallelIterator)
extends Transformer[(Combiner[U, This], Combiner[U, This]), SplitAt[U, This]] {
var result: (Combiner[U, This], Combiner[U, This]) = null
- def leaf = result = pit.splitAt2combiners(at, cbf(), cbf())
+ def leaf(prev: Option[(Combiner[U, This], Combiner[U, This])]) = result = pit.splitAt2combiners(at, reuse(prev.map(_._1), cbf()), reuse(prev.map(_._2), cbf()))
def newSubtask(p: ParallelIterator) = throw new UnsupportedOperationException
override def split = {
val pits = pit.split
@@ -849,10 +859,10 @@ extends IterableLike[T, Repr]
(pos: Int, pred: T => Boolean, cbf: () => Combiner[U, This], val pit: ParallelIterator)
extends Transformer[(Combiner[U, This], Boolean), TakeWhile[U, This]] {
var result: (Combiner[U, This], Boolean) = null
- def leaf = if (pos < pit.indexFlag) {
- result = pit.takeWhile2combiner(pred, cbf())
+ def leaf(prev: Option[(Combiner[U, This], Boolean)]) = if (pos < pit.indexFlag) {
+ result = pit.takeWhile2combiner(pred, reuse(prev.map(_._1), cbf()))
if (!result._2) pit.setIndexFlagIfLesser(pos)
- } else result = (cbf(), false)
+ } else result = (reuse(prev.map(_._1), cbf()), false)
def newSubtask(p: ParallelIterator) = throw new UnsupportedOperationException
override def split = {
val pits = pit.split
@@ -867,11 +877,11 @@ extends IterableLike[T, Repr]
(pos: Int, pred: T => Boolean, cbf: () => Combiner[U, This], val pit: ParallelIterator)
extends Transformer[(Combiner[U, This], Combiner[U, This]), Span[U, This]] {
var result: (Combiner[U, This], Combiner[U, This]) = null
- def leaf = if (pos < pit.indexFlag) {
- result = pit.span2combiners(pred, cbf(), cbf())
+ def leaf(prev: Option[(Combiner[U, This], Combiner[U, This])]) = if (pos < pit.indexFlag) {
+ result = pit.span2combiners(pred, reuse(prev.map(_._1), cbf()), reuse(prev.map(_._2), cbf()))
if (result._2.size > 0) pit.setIndexFlagIfLesser(pos)
} else {
- result = (cbf(), pit.copy2builder[U, This, Combiner[U, This]](cbf()))
+ result = (reuse(prev.map(_._2), cbf()), pit.copy2builder[U, This, Combiner[U, This]](reuse(prev.map(_._2), cbf())))
}
def newSubtask(p: ParallelIterator) = throw new UnsupportedOperationException
override def split = {
@@ -888,7 +898,7 @@ extends IterableLike[T, Repr]
protected[this] class CopyToArray[U >: T, This >: Repr](from: Int, len: Int, array: Array[U], val pit: ParallelIterator)
extends Accessor[Unit, CopyToArray[U, This]] {
var result: Unit = ()
- def leaf = pit.copyToArray(array, from, len)
+ def leaf(prev: Option[Unit]) = pit.copyToArray(array, from, len)
def newSubtask(p: ParallelIterator) = throw new UnsupportedOperationException
override def split = {
val pits = pit.split
diff --git a/src/parallel-collections/scala/collection/parallel/ParallelIterableView.scala b/src/library/scala/collection/parallel/ParallelIterableView.scala
index f40f02eb3b..f40f02eb3b 100644
--- a/src/parallel-collections/scala/collection/parallel/ParallelIterableView.scala
+++ b/src/library/scala/collection/parallel/ParallelIterableView.scala
diff --git a/src/parallel-collections/scala/collection/parallel/ParallelIterableViewLike.scala b/src/library/scala/collection/parallel/ParallelIterableViewLike.scala
index 024eb48d25..024eb48d25 100644
--- a/src/parallel-collections/scala/collection/parallel/ParallelIterableViewLike.scala
+++ b/src/library/scala/collection/parallel/ParallelIterableViewLike.scala
diff --git a/src/library/scala/collection/parallel/ParallelMap.scala b/src/library/scala/collection/parallel/ParallelMap.scala
new file mode 100644
index 0000000000..5ce61469bc
--- /dev/null
+++ b/src/library/scala/collection/parallel/ParallelMap.scala
@@ -0,0 +1,71 @@
+package scala.collection.parallel
+
+
+
+
+
+import scala.collection.Map
+import scala.collection.mutable.Builder
+import scala.collection.generic.ParallelMapFactory
+import scala.collection.generic.GenericParallelMapTemplate
+import scala.collection.generic.GenericParallelMapCompanion
+import scala.collection.generic.CanCombineFrom
+
+
+
+
+
+
+trait ParallelMap[K, +V]
+extends Map[K, V]
+ with GenericParallelMapTemplate[K, V, ParallelMap]
+ with ParallelIterable[(K, V)]
+ with ParallelMapLike[K, V, ParallelMap[K, V], Map[K, V]]
+{
+self =>
+
+ def mapCompanion: GenericParallelMapCompanion[ParallelMap] = ParallelMap
+
+ override def empty: ParallelMap[K, V] = new immutable.ParallelHashTrie[K, V]
+
+ override def stringPrefix = "ParallelMap"
+}
+
+
+
+object ParallelMap extends ParallelMapFactory[ParallelMap] {
+ def empty[K, V]: ParallelMap[K, V] = new immutable.ParallelHashTrie[K, V]
+
+ def newCombiner[K, V]: Combiner[(K, V), ParallelMap[K, V]] = immutable.HashTrieCombiner[K, V]
+
+ implicit def canBuildFrom[K, V]: CanCombineFrom[Coll, (K, V), ParallelMap[K, V]] = new CanCombineFromMap[K, V]
+
+}
+
+
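Until concrete map classes implement `newCombiner` (see `ParallelMapLike` below), a map can still be assembled through the factory's combiner. A minimal sketch using only the members defined above:

    val cb = ParallelMap.newCombiner[Int, String]
    cb += ((1, "one"))
    cb += ((2, "two"))
    val pm: ParallelMap[Int, String] = cb.result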
diff --git a/src/parallel-collections/scala/collection/parallel/ParallelMapLike.scala b/src/library/scala/collection/parallel/ParallelMapLike.scala
index eddc2963fa..8a0b54525f 100644
--- a/src/parallel-collections/scala/collection/parallel/ParallelMapLike.scala
+++ b/src/library/scala/collection/parallel/ParallelMapLike.scala
@@ -22,9 +22,9 @@ extends MapLike[K, V, Repr]
with ParallelIterableLike[(K, V), Repr, SequentialView]
{ self =>
- protected[this] override def newBuilder: Builder[(K, V), Repr] = null // TODO
+ protected[this] override def newBuilder: Builder[(K, V), Repr] = newCombiner
- protected[this] override def newCombiner: Combiner[(K, V), Repr] = null // TODO
+ protected[this] override def newCombiner: Combiner[(K, V), Repr] = error("Must be implemented in concrete classes.")
override def empty: Repr
diff --git a/src/parallel-collections/scala/collection/parallel/ParallelSeq.scala b/src/library/scala/collection/parallel/ParallelSeq.scala
index 3e85b8dff6..71b802cd11 100644
--- a/src/parallel-collections/scala/collection/parallel/ParallelSeq.scala
+++ b/src/library/scala/collection/parallel/ParallelSeq.scala
@@ -6,7 +6,7 @@ import scala.collection.generic.GenericCompanion
import scala.collection.generic.GenericParallelCompanion
import scala.collection.generic.GenericParallelTemplate
import scala.collection.generic.ParallelFactory
-import scala.collection.generic.CanBuildFromParallel
+import scala.collection.generic.CanCombineFrom
import scala.collection.parallel.mutable.ParallelArrayCombiner
import scala.collection.parallel.mutable.ParallelArray
@@ -30,7 +30,7 @@ trait ParallelSeq[+T] extends Seq[T]
object ParallelSeq extends ParallelFactory[ParallelSeq] {
- implicit def canBuildFrom[T]: CanBuildFromParallel[Coll, T, ParallelSeq[T]] = new GenericCanBuildFromParallel[T]
+ implicit def canBuildFrom[T]: CanCombineFrom[Coll, T, ParallelSeq[T]] = new GenericCanCombineFrom[T]
def newBuilder[T]: Combiner[T, ParallelSeq[T]] = ParallelArrayCombiner[T]
diff --git a/src/parallel-collections/scala/collection/parallel/ParallelSeqLike.scala b/src/library/scala/collection/parallel/ParallelSeqLike.scala
index fedc9f56ac..18b0c83f23 100644
--- a/src/parallel-collections/scala/collection/parallel/ParallelSeqLike.scala
+++ b/src/library/scala/collection/parallel/ParallelSeqLike.scala
@@ -6,7 +6,7 @@ import scala.collection.SeqLike
import scala.collection.generic.DefaultSignalling
import scala.collection.generic.AtomicIndexFlag
import scala.collection.generic.CanBuildFrom
-import scala.collection.generic.CanBuildFromParallel
+import scala.collection.generic.CanCombineFrom
import scala.collection.generic.VolatileAbort
@@ -320,7 +320,7 @@ extends scala.collection.SeqLike[T, Repr]
protected[this] class SegmentLength(pred: T => Boolean, from: Int, val pit: ParallelIterator)
extends Accessor[(Int, Boolean), SegmentLength] {
var result: (Int, Boolean) = null
- def leaf = if (from < pit.indexFlag) {
+ def leaf(prev: Option[(Int, Boolean)]) = if (from < pit.indexFlag) {
val itsize = pit.remaining
val seglen = pit.prefixLength(pred)
result = (seglen, itsize == seglen)
@@ -337,7 +337,7 @@ extends scala.collection.SeqLike[T, Repr]
protected[this] class IndexWhere(pred: T => Boolean, from: Int, val pit: ParallelIterator)
extends Accessor[Int, IndexWhere] {
var result: Int = -1
- def leaf = if (from < pit.indexFlag) {
+ def leaf(prev: Option[Int]) = if (from < pit.indexFlag) {
val r = pit.indexWhere(pred)
if (r != -1) {
result = from + r
@@ -357,7 +357,7 @@ extends scala.collection.SeqLike[T, Repr]
protected[this] class LastIndexWhere(pred: T => Boolean, pos: Int, val pit: ParallelIterator)
extends Accessor[Int, LastIndexWhere] {
var result: Int = -1
- def leaf = if (pos > pit.indexFlag) {
+ def leaf(prev: Option[Int]) = if (pos > pit.indexFlag) {
val r = pit.lastIndexWhere(pred)
if (r != -1) {
result = pos + r
@@ -377,15 +377,15 @@ extends scala.collection.SeqLike[T, Repr]
protected[this] class Reverse[U >: T, This >: Repr](cbf: () => Combiner[U, This], val pit: ParallelIterator)
extends Transformer[Combiner[U, This], Reverse[U, This]] {
var result: Combiner[U, This] = null
- def leaf = result = pit.reverse2combiner(cbf())
+ def leaf(prev: Option[Combiner[U, This]]) = result = pit.reverse2combiner(reuse(prev, cbf()))
def newSubtask(p: SuperParallelIterator) = new Reverse(cbf, down(p))
override def merge(that: Reverse[U, This]) = result = that.result combine result
}
- protected[this] class ReverseMap[S, That](f: T => S, pbf: CanBuildFromParallel[Repr, S, That], val pit: ParallelIterator)
+ protected[this] class ReverseMap[S, That](f: T => S, pbf: CanCombineFrom[Repr, S, That], val pit: ParallelIterator)
extends Transformer[Combiner[S, That], ReverseMap[S, That]] {
var result: Combiner[S, That] = null
- def leaf = result = pit.reverseMap2combiner(f, pbf)
+ def leaf(prev: Option[Combiner[S, That]]) = result = pit.reverseMap2combiner(f, pbf) // TODO
def newSubtask(p: SuperParallelIterator) = new ReverseMap(f, pbf, down(p))
override def merge(that: ReverseMap[S, That]) = result = that.result combine result
}
@@ -393,7 +393,7 @@ extends scala.collection.SeqLike[T, Repr]
protected[this] class SameElements[U >: T](val pit: ParallelIterator, val otherpit: PreciseSplitter[U])
extends Accessor[Boolean, SameElements[U]] {
var result: Boolean = true
- def leaf = if (!pit.isAborted) {
+ def leaf(prev: Option[Boolean]) = if (!pit.isAborted) {
result = pit.sameElements(otherpit)
if (!result) pit.abort
}
@@ -406,10 +406,10 @@ extends scala.collection.SeqLike[T, Repr]
override def merge(that: SameElements[U]) = result = result && that.result
}
- protected[this] class Updated[U >: T, That](pos: Int, elem: U, pbf: CanBuildFromParallel[Repr, U, That], val pit: ParallelIterator)
+ protected[this] class Updated[U >: T, That](pos: Int, elem: U, pbf: CanCombineFrom[Repr, U, That], val pit: ParallelIterator)
extends Transformer[Combiner[U, That], Updated[U, That]] {
var result: Combiner[U, That] = null
- def leaf = result = pit.updated2combiner(pos, elem, pbf)
+ def leaf(prev: Option[Combiner[U, That]]) = result = pit.updated2combiner(pos, elem, pbf) // TODO
def newSubtask(p: SuperParallelIterator) = throw new UnsupportedOperationException
override def split = {
val pits = pit.split
@@ -421,7 +421,7 @@ extends scala.collection.SeqLike[T, Repr]
protected[this] class Corresponds[S](corr: (T, S) => Boolean, val pit: ParallelIterator, val otherpit: PreciseSplitter[S])
extends Accessor[Boolean, Corresponds[S]] {
var result: Boolean = true
- def leaf = if (!pit.isAborted) {
+ def leaf(prev: Option[Boolean]) = if (!pit.isAborted) {
result = pit.corresponds(corr)(otherpit)
if (!result) pit.abort
}
diff --git a/src/parallel-collections/scala/collection/parallel/ParallelSeqView.scala b/src/library/scala/collection/parallel/ParallelSeqView.scala
index d0faa942ef..7862e99f44 100644
--- a/src/parallel-collections/scala/collection/parallel/ParallelSeqView.scala
+++ b/src/library/scala/collection/parallel/ParallelSeqView.scala
@@ -6,7 +6,7 @@ package scala.collection.parallel
import scala.collection.TraversableView
import scala.collection.SeqView
import scala.collection.Parallel
-import scala.collection.generic.CanBuildFromParallel
+import scala.collection.generic.CanCombineFrom
@@ -38,8 +38,8 @@ object ParallelSeqView {
type Coll = ParallelSeqView[_, C, _] forSome { type C <: ParallelSeq[_] }
- implicit def canBuildFrom[T]: CanBuildFromParallel[Coll, T, ParallelSeqView[T, ParallelSeq[T], Seq[T]]] =
- new CanBuildFromParallel[Coll, T, ParallelSeqView[T, ParallelSeq[T], Seq[T]]] {
+ implicit def canBuildFrom[T]: CanCombineFrom[Coll, T, ParallelSeqView[T, ParallelSeq[T], Seq[T]]] =
+ new CanCombineFrom[Coll, T, ParallelSeqView[T, ParallelSeq[T], Seq[T]]] {
def apply(from: Coll) = new NoCombiner[T] with EnvironmentPassingCombiner[T, Nothing]
def apply() = new NoCombiner[T] with EnvironmentPassingCombiner[T, Nothing]
}
diff --git a/src/parallel-collections/scala/collection/parallel/ParallelSeqViewLike.scala b/src/library/scala/collection/parallel/ParallelSeqViewLike.scala
index 2e9ebb1df3..eab4d7ad5f 100644
--- a/src/parallel-collections/scala/collection/parallel/ParallelSeqViewLike.scala
+++ b/src/library/scala/collection/parallel/ParallelSeqViewLike.scala
@@ -8,7 +8,7 @@ import scala.collection.SeqView
import scala.collection.SeqViewLike
import scala.collection.Parallel
import scala.collection.generic.CanBuildFrom
-import scala.collection.generic.CanBuildFromParallel
+import scala.collection.generic.CanCombineFrom
@@ -161,10 +161,10 @@ extends SeqView[T, Coll]
/* tasks */
- protected[this] class Force[U >: T, That](cbf: CanBuildFromParallel[Coll, U, That], val pit: ParallelIterator)
+ protected[this] class Force[U >: T, That](cbf: CanCombineFrom[Coll, U, That], val pit: ParallelIterator)
extends Transformer[Combiner[U, That], Force[U, That]] {
var result: Combiner[U, That] = null
- def leaf = result = pit.copy2builder[U, That, Combiner[U, That]](cbf(self.underlying))
+ def leaf(prev: Option[Combiner[U, That]]) = result = pit.copy2builder[U, That, Combiner[U, That]](reuse(prev, cbf(self.underlying)))
def newSubtask(p: SuperParallelIterator) = new Force(cbf, down(p))
override def merge(that: Force[U, That]) = result = result combine that.result
}
diff --git a/src/library/scala/collection/parallel/RemainsIterator.scala b/src/library/scala/collection/parallel/RemainsIterator.scala
new file mode 100644
index 0000000000..6ed6d235d2
--- /dev/null
+++ b/src/library/scala/collection/parallel/RemainsIterator.scala
@@ -0,0 +1,438 @@
+package scala.collection.parallel
+
+
+
+import scala.collection.Parallel
+import scala.collection.generic.Signalling
+import scala.collection.generic.DelegatedSignalling
+import scala.collection.generic.CanCombineFrom
+import scala.collection.mutable.Builder
+import scala.collection.Iterator.empty
+
+trait RemainsIterator[+T] extends Iterator[T] {
+ /** The number of elements this iterator has yet to iterate.
+ * This method doesn't change the state of the iterator.
+ */
+ def remaining: Int
+}
+
+
+/** Augments iterators with additional methods, mostly transformers,
+ * assuming they iterate an iterable collection.
+ *
+ * @tparam T type of the elements iterated.
+ * @tparam Repr type of the collection this iterator iterates.
+ */
+trait AugmentedIterableIterator[+T, +Repr <: Parallel] extends RemainsIterator[T] {
+
+ def repr: Repr
+
+ /* accessors */
+
+ override def count(p: T => Boolean): Int = {
+ var i = 0
+ while (hasNext) if (p(next)) i += 1
+ i
+ }
+
+ def reduce[U >: T](op: (U, U) => U): U = {
+ var r: U = next
+ while (hasNext) r = op(r, next)
+ r
+ }
+
+ def fold[U >: T](z: U)(op: (U, U) => U): U = {
+ var r = z
+ while (hasNext) r = op(r, next)
+ r
+ }
+
+ override def sum[U >: T](implicit num: Numeric[U]): U = {
+ var r: U = num.zero
+ while (hasNext) r = num.plus(r, next)
+ r
+ }
+
+ override def product[U >: T](implicit num: Numeric[U]): U = {
+ var r: U = num.one
+ while (hasNext) r = num.times(r, next)
+ r
+ }
+
+ override def min[U >: T](implicit ord: Ordering[U]): T = {
+ var r = next
+ while (hasNext) {
+ val curr = next
+ if (ord.lteq(curr, r)) r = curr
+ }
+ r
+ }
+
+ override def max[U >: T](implicit ord: Ordering[U]): T = {
+ var r = next
+ while (hasNext) {
+ val curr = next
+ if (ord.gteq(curr, r)) r = curr
+ }
+ r
+ }
+
+ override def copyToArray[U >: T](array: Array[U], from: Int, len: Int) {
+ var i = from
+ val until = from + len
+ while (i < until && hasNext) {
+ array(i) = next
+ i += 1
+ }
+ }
+
+ /* transformers to combiners */
+
+ def map2combiner[S, That](f: T => S, cb: Combiner[S, That]): Combiner[S, That] = {
+ //val cb = pbf(repr)
+ cb.sizeHint(remaining)
+ while (hasNext) cb += f(next)
+ cb
+ }
+
+ def collect2combiner[S, That](pf: PartialFunction[T, S], pbf: CanCombineFrom[Repr, S, That]): Combiner[S, That] = {
+ val cb = pbf(repr)
+ while (hasNext) {
+ val curr = next
+ if (pf.isDefinedAt(curr)) cb += pf(curr)
+ }
+ cb
+ }
+
+ def flatmap2combiner[S, That](f: T => Traversable[S], pbf: CanCombineFrom[Repr, S, That]): Combiner[S, That] = {
+ val cb = pbf(repr)
+ while (hasNext) {
+ val traversable = f(next)
+ if (traversable.isInstanceOf[Iterable[_]]) cb ++= traversable.asInstanceOf[Iterable[S]].iterator
+ else cb ++= traversable
+ }
+ cb
+ }
+
+ def copy2builder[U >: T, Coll, Bld <: Builder[U, Coll]](b: Bld): Bld = {
+ b.sizeHint(remaining)
+ while (hasNext) b += next
+ b
+ }
+
+ def filter2combiner[U >: T, This >: Repr](pred: T => Boolean, cb: Combiner[U, This]): Combiner[U, This] = {
+ while (hasNext) {
+ val curr = next
+ if (pred(curr)) cb += curr
+ }
+ cb
+ }
+
+ def filterNot2combiner[U >: T, This >: Repr](pred: T => Boolean, cb: Combiner[U, This]): Combiner[U, This] = {
+ while (hasNext) {
+ val curr = next
+ if (!pred(curr)) cb += curr
+ }
+ cb
+ }
+
+ def partition2combiners[U >: T, This >: Repr](pred: T => Boolean, btrue: Combiner[U, This], bfalse: Combiner[U, This]) = {
+ while (hasNext) {
+ val curr = next
+ if (pred(curr)) btrue += curr
+ else bfalse += curr
+ }
+ (btrue, bfalse)
+ }
+
+ def take2combiner[U >: T, This >: Repr](n: Int, cb: Combiner[U, This]): Combiner[U, This] = {
+ cb.sizeHint(n)
+ var left = n
+ while (left > 0) {
+ cb += next
+ left -= 1
+ }
+ cb
+ }
+
+ def drop2combiner[U >: T, This >: Repr](n: Int, cb: Combiner[U, This]): Combiner[U, This] = {
+ drop(n)
+ cb.sizeHint(remaining)
+ while (hasNext) cb += next
+ cb
+ }
+
+ def slice2combiner[U >: T, This >: Repr](from: Int, until: Int, cb: Combiner[U, This]): Combiner[U, This] = {
+ drop(from)
+ var left = until - from
+ cb.sizeHint(left)
+ while (left > 0) {
+ cb += next
+ left -= 1
+ }
+ cb
+ }
+
+ def splitAt2combiners[U >: T, This >: Repr](at: Int, before: Combiner[U, This], after: Combiner[U, This]) = {
+ before.sizeHint(at)
+ after.sizeHint(remaining - at)
+ var left = at
+ while (left > 0) {
+ before += next
+ left -= 1
+ }
+ while (hasNext) after += next
+ (before, after)
+ }
+
+ def takeWhile2combiner[U >: T, This >: Repr](p: T => Boolean, cb: Combiner[U, This]) = {
+ var loop = true
+ while (hasNext && loop) {
+ val curr = next
+ if (p(curr)) cb += curr
+ else loop = false
+ }
+ (cb, loop)
+ }
+
+ def span2combiners[U >: T, This >: Repr](p: T => Boolean, before: Combiner[U, This], after: Combiner[U, This]) = {
+ var isBefore = true
+ while (hasNext && isBefore) {
+ val curr = next
+ if (p(curr)) before += curr
+ else {
+ after.sizeHint(remaining + 1)
+ after += curr
+ isBefore = false
+ }
+ }
+ while (hasNext) after += next
+ (before, after)
+ }
+}
+
+
+trait AugmentedSeqIterator[+T, +Repr <: Parallel] extends AugmentedIterableIterator[T, Repr] {
+
+ /** The exact number of elements this iterator has yet to iterate.
+ * This method doesn't change the state of the iterator.
+ */
+ def remaining: Int
+
+ /* accessors */
+
+ def prefixLength(pred: T => Boolean): Int = {
+ var total = 0
+ var loop = true
+ while (hasNext && loop) {
+ if (pred(next)) total += 1
+ else loop = false
+ }
+ total
+ }
+
+ override def indexWhere(pred: T => Boolean): Int = {
+ var i = 0
+ var loop = true
+ while (hasNext && loop) {
+ if (pred(next)) loop = false
+ else i += 1
+ }
+ if (loop) -1 else i
+ }
+
+ def lastIndexWhere(pred: T => Boolean): Int = {
+ var pos = -1
+ var i = 0
+ while (hasNext) {
+ if (pred(next)) pos = i
+ i += 1
+ }
+ pos
+ }
+
+ def corresponds[S](corr: (T, S) => Boolean)(that: Iterator[S]): Boolean = {
+ while (hasNext && that.hasNext) {
+ if (!corr(next, that.next)) return false
+ }
+ hasNext == that.hasNext
+ }
+
+ /* transformers */
+
+ def reverse2combiner[U >: T, This >: Repr](cb: Combiner[U, This]): Combiner[U, This] = {
+ cb.sizeHint(remaining)
+ var lst = List[T]()
+ while (hasNext) lst ::= next
+ while (lst != Nil) {
+ cb += lst.head
+ lst = lst.tail
+ }
+ cb
+ }
+
+ def reverseMap2combiner[S, That](f: T => S, cbf: CanCombineFrom[Repr, S, That]): Combiner[S, That] = {
+ val cb = cbf(repr)
+ cb.sizeHint(remaining)
+ var lst = List[S]()
+ while (hasNext) lst ::= f(next)
+ while (lst != Nil) {
+ cb += lst.head
+ lst = lst.tail
+ }
+ cb
+ }
+
+ def updated2combiner[U >: T, That](index: Int, elem: U, cbf: CanCombineFrom[Repr, U, That]): Combiner[U, That] = {
+ val cb = cbf(repr)
+ cb.sizeHint(remaining)
+ var j = 0
+ while (hasNext) {
+ if (j == index) {
+ cb += elem
+ next
+ } else cb += next
+ j += 1
+ }
+ cb
+ }
+
+}
+
+
+
+trait ParallelIterableIterator[+T, +Repr <: Parallel]
+extends AugmentedIterableIterator[T, Repr]
+ with Splitter[T]
+ with Signalling
+ with DelegatedSignalling
+{
+ def split: Seq[ParallelIterableIterator[T, Repr]]
+
+ /** The number of elements this iterator has yet to traverse. This method
+ * doesn't change the state of the iterator.
+ *
+ * This method is used to provide size hints to builders and combiners, and
+ * to approximate positions of iterators within a data structure.
+ *
+ * '''Note''': This method may be implemented to return an upper bound on the number of elements
+ * in the iterator, instead of the exact number of elements to iterate.
+ *
+ * In that case, two considerations must be taken into account:
+ *
+ * 1) classes that inherit `ParallelIterable` must reimplement methods `take`, `drop`, `slice`, `splitAt` and `copyToArray`.
+ *
+ * 2) if an iterator provides an upper bound on the number of elements, then after splitting the sum
+ * of `remaining` values of split iterators must be less than or equal to this upper bound.
+ */
+ def remaining: Int
+}
+
+
+trait ParallelSeqIterator[+T, +Repr <: Parallel]
+extends ParallelIterableIterator[T, Repr]
+ with AugmentedSeqIterator[T, Repr]
+ with PreciseSplitter[T]
+{
+ def split: Seq[ParallelSeqIterator[T, Repr]]
+ def psplit(sizes: Int*): Seq[ParallelSeqIterator[T, Repr]]
+
+ /** The number of elements this iterator has yet to traverse. This method
+ * doesn't change the state of the iterator. Unlike the version of this method in the supertrait,
+ * method `remaining` in `ParallelSeqLike.this.ParallelIterator` must return an exact number
+ * of elements remaining in the iterator.
+ *
+ * @return an exact number of elements this iterator has yet to iterate
+ */
+ def remaining: Int
+}
+
+
+trait DelegatedIterator[+T, +Delegate <: Iterator[T]] extends RemainsIterator[T] {
+ val delegate: Delegate
+ def next = delegate.next
+ def hasNext = delegate.hasNext
+}
+
+
+trait Counting[+T] extends RemainsIterator[T] {
+ val initialSize: Int
+ def remaining = initialSize - traversed
+ var traversed = 0
+ abstract override def next = {
+ val n = super.next
+ traversed += 1
+ n
+ }
+}
+
+
+/** A mixin for iterators that traverse only filtered elements of a delegate.
+ */
+trait FilteredIterator[+T, +Delegate <: Iterator[T]] extends DelegatedIterator[T, Delegate] {
+ protected[this] val pred: T => Boolean
+
+ private[this] var hd: T = _
+ private var hdDefined = false
+
+ override def hasNext: Boolean = hdDefined || {
+ do {
+ if (!delegate.hasNext) return false
+ hd = delegate.next
+ } while (!pred(hd))
+ hdDefined = true
+ true
+ }
+
+ override def next = if (hasNext) { hdDefined = false; hd } else empty.next
+}
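`FilteredIterator` uses the standard one-element lookahead idiom: `hasNext` pre-fetches the next matching element into `hd`, and `next` hands it out and clears the flag. The same idiom on a plain iterator, as an illustrative sketch:

    // Hypothetical standalone filter with one-element lookahead.
    class FilteredSketch[T](underlying: Iterator[T], pred: T => Boolean) extends Iterator[T] {
      private var hd: T = _
      private var hdDefined = false
      def hasNext: Boolean = hdDefined || {
        while (underlying.hasNext && !hdDefined) {
          val e = underlying.next
          if (pred(e)) { hd = e; hdDefined = true }   // buffer the first match
        }
        hdDefined
      }
      def next: T =
        if (hasNext) { hdDefined = false; hd }        // hand out the buffered element
        else throw new NoSuchElementException("next on empty iterator")
    }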
+
+
+/** A mixin for iterators that traverse the elements of the delegate iterator, followed by the elements of another iterator.
+ */
+trait AppendedIterator[+T, +Delegate <: Iterator[T]] extends DelegatedIterator[T, Delegate] {
+ // `rest` should never alias `delegate`
+ protected[this] val rest: Iterator[T]
+
+ private[this] var current: Iterator[T] = delegate
+
+ override def hasNext = (current.hasNext) || (current == delegate && rest.hasNext)
+
+ override def next = {
+ if (!current.hasNext) current = rest
+ current.next
+ }
+
+}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/src/parallel-collections/scala/collection/parallel/Splitters.scala b/src/library/scala/collection/parallel/Splitter.scala
index b3cad6d67a..b3cad6d67a 100644
--- a/src/parallel-collections/scala/collection/parallel/Splitters.scala
+++ b/src/library/scala/collection/parallel/Splitter.scala
diff --git a/src/library/scala/collection/parallel/Splitters.scala b/src/library/scala/collection/parallel/Splitters.scala
new file mode 100644
index 0000000000..b3cad6d67a
--- /dev/null
+++ b/src/library/scala/collection/parallel/Splitters.scala
@@ -0,0 +1,86 @@
+package scala.collection.parallel
+
+
+import scala.collection.Seq
+
+
+/** A splitter (or a split iterator) can be split into more splitters that traverse
+ * disjoint subsets of elements.
+ *
+ * @tparam T type of the elements this parallel iterator traverses
+ *
+ * @since 2.8.1
+ * @author prokopec
+ */
+trait Splitter[+T] extends Iterator[T] {
+
+ /** Splits the iterator into a sequence of disjoint views.
+ *
+ * Returns a sequence of split iterators, each iterating over some subset of the
+ * elements in the collection. The subsets are disjoint and should be approximately
+ * equal in size. They are never empty, unless the original iterator is empty, in
+ * which case this method returns a sequence containing a single empty iterator. If
+ * the iterator has more than two elements, this method returns two or more iterators.
+ *
+ * Implementors are advised to keep the number of splits relatively small: two iterators
+ * are usually enough when partitioning the collection, although a few more are acceptable.
+ *
+ * '''Note:''' this method actually invalidates the current iterator.
+ *
+ * @return a sequence of disjoint iterators of the collection
+ */
+ def split: Seq[Splitter[T]]
+
+}
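The intended use of `split` is recursive: keep splitting until the pieces are small enough, then traverse each piece sequentially. A minimal sequential driver, assuming a splitter that also reports `remaining` as the parallel iterators above do (the threshold is hypothetical and should be at least 1 so singletons are processed sequentially):

    // Illustrative recursive driver over a parallel iterator.
    def traverseSketch[T](s: ParallelIterableIterator[T, _], threshold: Int)(f: T => Unit) {
      if (s.remaining > threshold) {
        val children = s.split                              // invalidates `s`
        children foreach (traverseSketch(_, threshold)(f))
      } else s foreach f
    }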
+
+
+/** A precise splitter (or a precise split iterator) can be split into an arbitrary number of splitters
+ * that traverse disjoint subsets of arbitrary sizes.
+ *
+ * Implementors might want to override the parameterless `split` method for efficiency.
+ *
+ * @tparam T type of the elements this parallel iterator traverses
+ *
+ * @since 2.8.1
+ * @author prokopec
+ */
+trait PreciseSplitter[+T] extends Splitter[T] {
+
+ /** Splits the iterator into disjoint views.
+ *
+ * This sized variant of the `split` method is specific to precise parallel iterators.
+ * It returns a sequence of parallel iterators, each iterating over some subset of the
+ * elements in this iterator. The size of each subiterator in the partition equals the
+ * size given in the corresponding argument, as long as there are enough elements in this
+ * iterator to split it that way.
+ *
+ * If there aren't enough elements, an empty iterator is appended for each additional argument.
+ * If there are elements left over, an additional iterator holding them is appended at the end.
+ *
+ * For example, say we have a parallel iterator `ps` with 100 elements. Invoking:
+ * {{{
+ * ps.psplit(50, 25, 25, 10, 5)
+ * }}}
+ * will return a sequence of five iterators, the last two of which are empty. On the other hand, calling:
+ * {{{
+ * ps.psplit(50, 40)
+ * }}}
+ * will return a sequence of three iterators, the last of which contains ten elements.
+ *
+ * '''Note:''' this method actually invalidates the current iterator.
+ *
+ * Unlike the parameterless `split` method of parallel iterable iterators, the views returned by this method can be empty.
+ *
+ * @param sizes the sizes used to split this split iterator into iterators that traverse disjoint subsets
+ * @return a sequence of disjoint subsequence iterators of this parallel iterator
+ */
+ def psplit(sizes: Int*): Seq[PreciseSplitter[T]]
+
+ def split: Seq[PreciseSplitter[T]]
+
+}
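A minimal sketch of driving `psplit`, with hypothetical names and weights; the padding and compensation behaviour described above make the call safe even when the sizes don't add up to the element count:

    // Carve `total` elements into chunks proportional to the given weights.
    def carveSketch[T](ps: PreciseSplitter[T], total: Int, weights: Seq[Int]): Seq[PreciseSplitter[T]] = {
      val wsum  = weights.sum
      val sizes = weights map (w => w * total / wsum)
      ps.psplit(sizes: _*)   // invalidates `ps`
    }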
+
+
+
+
+
diff --git a/src/parallel-collections/scala/collection/parallel/TaskSupport.scala b/src/library/scala/collection/parallel/TaskSupport.scala
index 8a072b22aa..8a072b22aa 100644
--- a/src/parallel-collections/scala/collection/parallel/TaskSupport.scala
+++ b/src/library/scala/collection/parallel/TaskSupport.scala
diff --git a/src/parallel-collections/scala/collection/parallel/Tasks.scala b/src/library/scala/collection/parallel/Tasks.scala
index d21113cf64..3ef60f8c7a 100644
--- a/src/parallel-collections/scala/collection/parallel/Tasks.scala
+++ b/src/library/scala/collection/parallel/Tasks.scala
@@ -38,8 +38,10 @@ trait Tasks {
def repr = this.asInstanceOf[Tp]
/** Code that gets called after the task gets started - it may spawn other tasks instead of calling `leaf`. */
def compute
- /** Body of the task - non-divisible unit of work done by this task. */
- def leaf
+ /** Body of the task: the non-divisible unit of work done by this task. It is optionally provided with the result of the previous task,
+ * or `None` if there was no previous task.
+ */
+ def leaf(result: Option[R])
/** Start task. */
def start
/** Wait for task to finish. */
@@ -88,19 +90,20 @@ trait AdaptiveWorkStealingTasks extends Tasks {
def split: Seq[Task[R, Tp]]
/** The actual leaf computation. */
- def leaf: Unit
+ def leaf(result: Option[R]): Unit
- def compute = if (shouldSplitFurther) internal else leaf
+ def compute = if (shouldSplitFurther) internal else leaf(None)
def internal = {
var last = spawnSubtasks
- last.leaf
+ last.leaf(None)
result = last.result
while (last.next != null) {
+ val lastresult = Option(last.result)
last = last.next
- if (last.tryCancel) last.leaf else last.sync
+ if (last.tryCancel) last.leaf(lastresult) else last.sync
merge(last.repr)
}
}
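Threading the previous result into `leaf` lets a task that is cancelled and executed inline continue an ongoing accumulation instead of restarting it. A hedged sketch of a fold-style body that exploits this (the task class is hypothetical, not the committed `Task` type):

    // Hypothetical leaf body: continue from the previous task's partial sum.
    class SumLeaf(xs: Array[Int], from: Int, until: Int) {
      @volatile var result: Int = 0
      def leaf(prev: Option[Int]) {
        var acc = prev getOrElse 0          // pick up where the last task stopped
        var i = from
        while (i < until) { acc += xs(i); i += 1 }
        result = acc
      }
    }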
diff --git a/src/library/scala/collection/parallel/immutable/ParallelHashTrie.scala b/src/library/scala/collection/parallel/immutable/ParallelHashTrie.scala
new file mode 100644
index 0000000000..a9e08913ea
--- /dev/null
+++ b/src/library/scala/collection/parallel/immutable/ParallelHashTrie.scala
@@ -0,0 +1,248 @@
+package scala.collection.parallel.immutable
+
+
+
+
+
+
+
+import scala.collection.parallel.ParallelMap
+import scala.collection.parallel.ParallelMapLike
+import scala.collection.parallel.Combiner
+import scala.collection.parallel.EnvironmentPassingCombiner
+import scala.collection.generic.ParallelMapFactory
+import scala.collection.generic.CanCombineFrom
+import scala.collection.generic.GenericParallelMapTemplate
+import scala.collection.generic.GenericParallelMapCompanion
+import scala.collection.immutable.HashMap
+
+
+
+
+
+
+/** Parallel hash trie map.
+ *
+ * @author prokopec
+ */
+class ParallelHashTrie[K, +V] private[immutable] (private[this] val trie: HashMap[K, V])
+extends ParallelMap[K, V]
+ with GenericParallelMapTemplate[K, V, ParallelHashTrie]
+ with ParallelMapLike[K, V, ParallelHashTrie[K, V], HashMap[K, V]]
+{
+self =>
+
+ def this() = this(HashMap.empty[K, V])
+
+ override def mapCompanion: GenericParallelMapCompanion[ParallelHashTrie] = ParallelHashTrie
+
+ override def empty: ParallelHashTrie[K, V] = new ParallelHashTrie[K, V]
+
+ def parallelIterator = new ParallelHashTrieIterator(trie) with SCPI
+
+ def seq = trie
+
+ def -(k: K) = new ParallelHashTrie(trie - k)
+
+ def +[U >: V](kv: (K, U)) = new ParallelHashTrie(trie + kv)
+
+ def get(k: K) = trie.get(k)
+
+ override def size = trie.size
+
+ protected override def reuse[S, That](oldc: Option[Combiner[S, That]], newc: Combiner[S, That]) = oldc match {
+ case Some(old) => old
+ case None => newc
+ }
+
+ type SCPI = SignalContextPassingIterator[ParallelHashTrieIterator]
+
+ class ParallelHashTrieIterator(val ht: HashMap[K, V])
+ extends super.ParallelIterator {
+ self: SignalContextPassingIterator[ParallelHashTrieIterator] =>
+ // println("created iterator " + ht)
+ var i = 0
+ lazy val triter = ht.iterator
+ def split: Seq[ParallelIterator] = {
+ // println("splitting " + ht + " into " + ht.split.map(new ParallelHashTrieIterator(_) with SCPI).map(_.toList))
+ ht.split.map(new ParallelHashTrieIterator(_) with SCPI)
+ }
+ def next: (K, V) = {
+ // println("taking next after " + i + ", in " + ht)
+ i += 1
+ triter.next
+ }
+ def hasNext: Boolean = {
+ // println("hasNext: " + i + ", " + ht.size + ", " + ht)
+ i < ht.size
+ }
+ def remaining = ht.size - i
+ }
+
+}
+
+
+object ParallelHashTrie extends ParallelMapFactory[ParallelHashTrie] {
+ def empty[K, V]: ParallelHashTrie[K, V] = new ParallelHashTrie[K, V]
+
+ def newCombiner[K, V]: Combiner[(K, V), ParallelHashTrie[K, V]] = HashTrieCombiner[K, V]
+
+ implicit def canBuildFrom[K, V]: CanCombineFrom[Coll, (K, V), ParallelHashTrie[K, V]] = {
+ new CanCombineFromMap[K, V]
+ }
+
+ def fromTrie[K, V](t: HashMap[K, V]) = new ParallelHashTrie(t)
+
+ var totalcombines = new java.util.concurrent.atomic.AtomicInteger(0)
+}
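A minimal usage sketch for the new class, assuming the surrounding parallel collection traits compile as committed:

    // Wrap an existing immutable HashMap and split its parallel iterator.
    import scala.collection.immutable.HashMap
    import scala.collection.parallel.immutable.ParallelHashTrie

    object ParallelHashTrieSketch {
      def main(args: Array[String]) {
        var hm = new HashMap[Int, Int]
        for (i <- 0 until 1000) hm += ((i, 2 * i))
        val pht = ParallelHashTrie.fromTrie(hm)   // reuses the trie, no copying
        println(pht.size)                         // 1000, delegated to the trie
        val parts = pht.parallelIterator.split    // delegates to HashMap.split
        println(parts.map(_.remaining).sum)       // 1000; remaining is exact here
      }
    }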
+
+
+trait HashTrieCombiner[K, V]
+extends Combiner[(K, V), ParallelHashTrie[K, V]] {
+self: EnvironmentPassingCombiner[(K, V), ParallelHashTrie[K, V]] =>
+ import HashTrieCombiner._
+ var heads = new Array[Unrolled[K, V]](rootsize)
+ var lasts = new Array[Unrolled[K, V]](rootsize)
+ var size: Int = 0
+
+ def clear = {
+ heads = new Array[Unrolled[K, V]](rootsize)
+ lasts = new Array[Unrolled[K, V]](rootsize)
+ }
+
+ def +=(elem: (K, V)) = {
+ size += 1
+ val hc = elem._1.##
+ val pos = hc & 0x1f
+ if (lasts(pos) eq null) {
+ // initialize bucket
+ heads(pos) = new Unrolled[K, V]
+ lasts(pos) = heads(pos)
+ }
+ // add to bucket
+ lasts(pos) = lasts(pos).add(elem)
+ this
+ }
+
+ def combine[N <: (K, V), NewTo >: ParallelHashTrie[K, V]](other: Combiner[N, NewTo]): Combiner[N, NewTo] = if (this ne other) {
+ // ParallelHashTrie.totalcombines.incrementAndGet
+ if (other.isInstanceOf[HashTrieCombiner[_, _]]) {
+ val that = other.asInstanceOf[HashTrieCombiner[K, V]]
+ var i = 0
+ while (i < rootsize) {
+ if (lasts(i) eq null) {
+ heads(i) = that.heads(i)
+ lasts(i) = that.lasts(i)
+ } else {
+ lasts(i).next = that.heads(i)
+ if (that.lasts(i) ne null) lasts(i) = that.lasts(i)
+ }
+ i += 1
+ }
+ size = size + that.size
+ this
+ } else error("Unexpected combiner type.")
+ } else this
+
+ def result = {
+ val buckets = heads.filter(_ != null)
+ val root = new Array[HashMap[K, V]](buckets.length)
+
+ executeAndWait(new CreateTrie(buckets, root, 0, buckets.length))
+
+ var bitmap = 0
+ var i = 0
+ while (i < rootsize) {
+ if (heads(i) ne null) bitmap |= 1 << i
+ i += 1
+ }
+ val sz = root.foldLeft(0)(_ + _.size)
+
+ if (sz == 0) new ParallelHashTrie[K, V]
+ else if (sz == 1) new ParallelHashTrie[K, V](root(0))
+ else {
+ val trie = new HashMap.HashTrieMap(bitmap, root, sz)
+ new ParallelHashTrie[K, V](trie)
+ }
+ }
+
+ /* tasks */
+
+ class CreateTrie(buckets: Array[Unrolled[K, V]], root: Array[HashMap[K, V]], offset: Int, howmany: Int) extends super.Task[Unit, CreateTrie] {
+ var result = ()
+ def leaf(prev: Option[Unit]) = {
+ var i = offset
+ val until = offset + howmany
+ while (i < until) {
+ root(i) = createTrie(buckets(i))
+ i += 1
+ }
+ }
+ private def createTrie(elems: Unrolled[K, V]): HashMap[K, V] = {
+ var trie = new HashMap[K, V]
+
+ var unrolled = elems
+ var i = 0
+ while (unrolled ne null) {
+ val chunkarr = unrolled.array
+ val chunksz = unrolled.size
+ while (i < chunksz) {
+ val kv = chunkarr(i)
+ val hc = kv._1.##
+ trie = trie.updated0(kv._1, hc, rootbits, kv._2, kv)
+ i += 1
+ }
+ i = 0
+ unrolled = unrolled.next
+ }
+
+ trie
+ }
+ def split = {
+ val fp = howmany / 2
+ List(new CreateTrie(buckets, root, offset, fp), new CreateTrie(buckets, root, offset + fp, howmany - fp))
+ }
+ def shouldSplitFurther = howmany > collection.parallel.thresholdFromSize(root.length, parallelismLevel)
+ }
+
+}
+
+
+object HashTrieCombiner {
+ def apply[K, V] = new HashTrieCombiner[K, V] with EnvironmentPassingCombiner[(K, V), ParallelHashTrie[K, V]] {}
+
+ private[immutable] val rootbits = 5
+ private[immutable] val rootsize = 1 << 5
+ private[immutable] val unrolledsize = 16
+
+ private[immutable] class Unrolled[K, V] {
+ var size = 0
+ var array = new Array[(K, V)](unrolledsize)
+ var next: Unrolled[K, V] = null
+ // adds and returns itself or the new unrolled if full
+ def add(elem: (K, V)): Unrolled[K, V] = if (size < unrolledsize) {
+ array(size) = elem
+ size += 1
+ this
+ } else {
+ next = new Unrolled[K, V]
+ next.add(elem)
+ }
+ override def toString = "Unrolled(" + array.mkString(", ") + ")"
+ }
+}
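The combiner buckets every element by the low five bits of its hash code, so each bucket maps to one slot of the 32-way root array and the subtries can be built independently in parallel by the `CreateTrie` task. The bucket selection in isolation:

    // Root-bucket selection used by `+=` above (rootsize = 1 << 5 = 32).
    val hc  = "someKey".##    // element hash code
    val pos = hc & 0x1f       // root bucket index in the range 0..31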
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/src/parallel-collections/scala/collection/parallel/immutable/ParallelIterable.scala b/src/library/scala/collection/parallel/immutable/ParallelIterable.scala
index 92bf5ab706..92bf5ab706 100644
--- a/src/parallel-collections/scala/collection/parallel/immutable/ParallelIterable.scala
+++ b/src/library/scala/collection/parallel/immutable/ParallelIterable.scala
diff --git a/src/library/scala/collection/parallel/immutable/ParallelIterable.scala.disabled b/src/library/scala/collection/parallel/immutable/ParallelIterable.scala.disabled
new file mode 100644
index 0000000000..25a3217258
--- /dev/null
+++ b/src/library/scala/collection/parallel/immutable/ParallelIterable.scala.disabled
@@ -0,0 +1,53 @@
+package scala.collection.parallel.immutable
+
+
+import scala.collection.generic._
+
+import scala.collection.parallel.ParallelIterableLike
+import scala.collection.parallel.Combiner
+
+
+// TODO uncomment when we add parallel vectors
+
+///** A template trait for immutable parallel iterable collections.
+// *
+// * $paralleliterableinfo
+// *
+// * $sideeffects
+// *
+// * @tparam A the element type of the collection
+// *
+// * @author prokopec
+// * @since 2.8
+// */
+//trait ParallelIterable[A] extends collection.immutable.Iterable[A]
+// with collection.parallel.ParallelIterable[A]
+// with GenericParallelTemplate[A, ParallelIterable]
+// with ParallelIterableLike[A, ParallelIterable[A], Iterable[A]] {
+// override def companion: GenericCompanion[ParallelIterable] with GenericParallelCompanion[ParallelIterable] = ParallelIterable
+//}
+//
+///** $factoryinfo
+// */
+//object ParallelIterable extends ParallelFactory[ParallelIterable] {
+// implicit def canBuildFrom[A]: CanBuildFromParallel[Coll, A, ParallelIterable[A]] =
+// new GenericCanBuildFromParallel[A]
+//
+// def newBuilder[A]: Combiner[A, ParallelIterable[A]] = null // TODO
+//
+// def newCombiner[A]: Combiner[A, ParallelIterable[A]] = null // TODO
+//}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/src/parallel-collections/scala/collection/parallel/immutable/ParallelRange.scala b/src/library/scala/collection/parallel/immutable/ParallelRange.scala
index a07db9c39c..85a33c7431 100644
--- a/src/parallel-collections/scala/collection/parallel/immutable/ParallelRange.scala
+++ b/src/library/scala/collection/parallel/immutable/ParallelRange.scala
@@ -6,7 +6,7 @@ import scala.collection.immutable.Range
import scala.collection.immutable.RangeUtils
import scala.collection.parallel.ParallelSeq
import scala.collection.parallel.Combiner
-import scala.collection.generic.CanBuildFromParallel
+import scala.collection.generic.CanCombineFrom
@@ -62,8 +62,8 @@ extends ParallelSeq[Int]
/* transformers */
- override def map2combiner[S, That](f: Int => S, pbf: CanBuildFromParallel[ParallelSeq[Int], S, That]): Combiner[S, That] = {
- val cb = pbf(self.repr)
+ override def map2combiner[S, That](f: Int => S, cb: Combiner[S, That]): Combiner[S, That] = {
+ //val cb = pbf(self.repr)
val sz = remaining
cb.sizeHint(sz)
if (sz > 0) {
diff --git a/src/parallel-collections/scala/collection/parallel/immutable/ParallelSeq.scala b/src/library/scala/collection/parallel/immutable/ParallelSeq.scala
index f3a26e8682..ceb0dcc13d 100644
--- a/src/parallel-collections/scala/collection/parallel/immutable/ParallelSeq.scala
+++ b/src/library/scala/collection/parallel/immutable/ParallelSeq.scala
@@ -4,7 +4,7 @@ package scala.collection.parallel.immutable
import scala.collection.generic.GenericParallelTemplate
import scala.collection.generic.GenericCompanion
import scala.collection.generic.GenericParallelCompanion
-import scala.collection.generic.CanBuildFromParallel
+import scala.collection.generic.CanCombineFrom
import scala.collection.generic.ParallelFactory
import scala.collection.parallel.ParallelSeqLike
import scala.collection.parallel.Combiner
diff --git a/src/library/scala/collection/parallel/immutable/ParallelSeq.scala.disabled b/src/library/scala/collection/parallel/immutable/ParallelSeq.scala.disabled
new file mode 100644
index 0000000000..ddae095f0d
--- /dev/null
+++ b/src/library/scala/collection/parallel/immutable/ParallelSeq.scala.disabled
@@ -0,0 +1,44 @@
+package scala.collection.parallel.immutable
+
+
+import scala.collection.generic.GenericParallelTemplate
+import scala.collection.generic.GenericCompanion
+import scala.collection.generic.GenericParallelCompanion
+import scala.collection.generic.CanCombineFrom
+import scala.collection.generic.ParallelFactory
+import scala.collection.parallel.ParallelSeqLike
+import scala.collection.parallel.Combiner
+
+
+
+// TODO uncomment when we add parallel vectors
+
+///** An immutable variant of `ParallelSeq`.
+// *
+// * @define Coll mutable.ParallelSeq
+// * @define coll mutable parallel sequence
+// */
+//trait ParallelSeq[A] extends collection.immutable.IndexedSeq[A]
+// with ParallelIterable[A]
+// with collection.parallel.ParallelSeq[A]
+// with GenericParallelTemplate[A, ParallelSeq]
+// with ParallelSeqLike[A, ParallelSeq[A], Seq[A]] {
+// override def companion: GenericCompanion[ParallelSeq] with GenericParallelCompanion[ParallelSeq] = ParallelSeq
+//
+//}
+//
+//
+///** $factoryInfo
+// * @define Coll mutable.ParallelSeq
+// * @define coll mutable parallel sequence
+// */
+//object ParallelSeq extends ParallelFactory[ParallelSeq] {
+// implicit def canBuildFrom[T]: CanBuildFromParallel[Coll, T, ParallelSeq[T]] = new GenericCanBuildFromParallel[T]
+//
+// def newBuilder[A]: Combiner[A, ParallelSeq[A]] = null // TODO
+//
+// def newCombiner[A]: Combiner[A, ParallelSeq[A]] = null // TODO
+//}
+
+
+
diff --git a/src/parallel-collections/scala/collection/parallel/immutable/package.scala b/src/library/scala/collection/parallel/immutable/package.scala
index 054786afaf..054786afaf 100644
--- a/src/parallel-collections/scala/collection/parallel/immutable/package.scala
+++ b/src/library/scala/collection/parallel/immutable/package.scala
diff --git a/src/parallel-collections/scala/collection/parallel/mutable/LazyCombiner.scala b/src/library/scala/collection/parallel/mutable/LazyCombiner.scala
index e45b3f156a..bd17d24ea8 100644
--- a/src/parallel-collections/scala/collection/parallel/mutable/LazyCombiner.scala
+++ b/src/library/scala/collection/parallel/mutable/LazyCombiner.scala
@@ -27,12 +27,12 @@ trait LazyCombiner[Elem, +To, Buff <: Growable[Elem] with Sizing] extends Combin
def +=(elem: Elem) = { lastbuff += elem; this }
def result: To = allocateAndCopy
def clear = { chain.clear }
- def combine[N <: Elem, NewTo >: To](other: Combiner[N, NewTo]): Combiner[N, NewTo] = {
+ def combine[N <: Elem, NewTo >: To](other: Combiner[N, NewTo]): Combiner[N, NewTo] = if (this ne other) {
if (other.isInstanceOf[LazyCombiner[_, _, _]]) {
val that = other.asInstanceOf[LazyCombiner[Elem, To, Buff]]
newLazyCombiner(chain ++= that.chain)
} else throw new UnsupportedOperationException("Cannot combine with combiner of different type.")
- }
+ } else this
def size = chain.foldLeft(0)(_ + _.size)
/** Method that allocates the data structure and copies elements into it using
@@ -40,4 +40,4 @@ trait LazyCombiner[Elem, +To, Buff <: Growable[Elem] with Sizing] extends Combin
*/
def allocateAndCopy: To
def newLazyCombiner(buffchain: ArrayBuffer[Buff]): LazyCombiner[Elem, To, Buff]
-} \ No newline at end of file
+}
diff --git a/src/parallel-collections/scala/collection/parallel/mutable/ParallelArray.scala b/src/library/scala/collection/parallel/mutable/ParallelArray.scala
index 7ea5499aa6..c16cc6da15 100644
--- a/src/parallel-collections/scala/collection/parallel/mutable/ParallelArray.scala
+++ b/src/library/scala/collection/parallel/mutable/ParallelArray.scala
@@ -5,7 +5,8 @@ package scala.collection.parallel.mutable
import scala.collection.generic.GenericParallelTemplate
import scala.collection.generic.GenericCompanion
import scala.collection.generic.GenericParallelCompanion
-import scala.collection.generic.CanBuildFromParallel
+import scala.collection.generic.CanCombineFrom
+import scala.collection.generic.CanBuildFrom
import scala.collection.generic.ParallelFactory
import scala.collection.generic.Sizing
import scala.collection.parallel.Combiner
@@ -357,8 +358,8 @@ extends ParallelSeq[T]
/* transformers */
- override def map2combiner[S, That](f: T => S, cbf: CanBuildFromParallel[ParallelArray[T], S, That]): Combiner[S, That] = {
- val cb = cbf(self.repr)
+ override def map2combiner[S, That](f: T => S, cb: Combiner[S, That]): Combiner[S, That] = {
+ //val cb = cbf(self.repr)
cb.sizeHint(remaining)
map2combiner_quick(f, arr, cb, until, i)
i = until
@@ -373,7 +374,7 @@ extends ParallelSeq[T]
}
}
- override def collect2combiner[S, That](pf: PartialFunction[T, S], pbf: CanBuildFromParallel[ParallelArray[T], S, That]): Combiner[S, That] = {
+ override def collect2combiner[S, That](pf: PartialFunction[T, S], pbf: CanCombineFrom[ParallelArray[T], S, That]): Combiner[S, That] = {
val cb = pbf(self.repr)
collect2combiner_quick(pf, arr, cb, until, i)
i = until
@@ -389,7 +390,7 @@ extends ParallelSeq[T]
}
}
- override def flatmap2combiner[S, That](f: T => Traversable[S], pbf: CanBuildFromParallel[ParallelArray[T], S, That]): Combiner[S, That] = {
+ override def flatmap2combiner[S, That](f: T => Traversable[S], pbf: CanCombineFrom[ParallelArray[T], S, That]): Combiner[S, That] = {
val cb = pbf(self.repr)
while (i < until) {
val traversable = f(arr(i).asInstanceOf[T])
@@ -511,6 +512,42 @@ extends ParallelSeq[T]
}
+ /* operations */
+
+ private def buildsArray[S, That](c: Builder[S, That]) = c.isInstanceOf[ParallelArrayCombiner[_]]
+
+ override def map[S, That](f: T => S)(implicit bf: CanBuildFrom[ParallelArray[T], S, That]) = if (buildsArray(bf(repr))) {
+ // reserve array
+ val targetarr = new Array[Any](length)
+
+ // fill it in parallel
+ executeAndWait(new Map[S](f, targetarr, 0, length))
+
+ // wrap it into a parallel array
+ (new ParallelArray[S](new ExposedArraySeq[S](targetarr.asInstanceOf[Array[AnyRef]], length))).asInstanceOf[That]
+ } else super.map(f)(bf)
+
+ /* tasks */
+
+ class Map[S](f: T => S, targetarr: Array[Any], offset: Int, howmany: Int) extends super.Task[Unit, Map[S]] {
+ var result = ();
+ def leaf(prev: Option[Unit]) = {
+ val tarr = targetarr
+ val sarr = array
+ var i = offset
+ val until = offset + howmany
+ while (i < until) {
+ tarr(i) = f(sarr(i).asInstanceOf[T])
+ i += 1
+ }
+ }
+ def split = {
+ val fp = howmany / 2
+ List(new Map(f, targetarr, offset, fp), new Map(f, targetarr, offset + fp, howmany - fp))
+ }
+ def shouldSplitFurther = howmany > collection.parallel.thresholdFromSize(length, parallelismLevel)
+ }
+
}
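The `Map` task keeps halving until `shouldSplitFurther` fails, so the work granularity tracks both collection size and parallelism level. A minimal sketch in the spirit of `collection.parallel.thresholdFromSize` (the constants here are illustrative, not the committed ones):

    // Give each of roughly 8 * p prospective chunks an equal share of the
    // elements, but never split below a fixed minimum chunk size.
    def thresholdSketch(sz: Int, parallelism: Int): Int =
      math.max(256, sz / (8 * parallelism))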
@@ -518,14 +555,21 @@ extends ParallelSeq[T]
object ParallelArray extends ParallelFactory[ParallelArray] {
- implicit def canBuildFrom[T]: CanBuildFromParallel[Coll, T, ParallelArray[T]] = new GenericCanBuildFromParallel[T]
+ implicit def canBuildFrom[T]: CanCombineFrom[Coll, T, ParallelArray[T]] = new GenericCanCombineFrom[T]
def newBuilder[T]: Combiner[T, ParallelArray[T]] = newCombiner
def newCombiner[T]: Combiner[T, ParallelArray[T]] = ParallelArrayCombiner[T]
/** Creates a new parallel array by wrapping the specified array.
*/
- def handoff[T <: AnyRef](arr: Array[T]): ParallelArray[T] = {
- new ParallelArray[T](new ExposedArraySeq[T](arr.asInstanceOf[Array[AnyRef]], arr.length))
+ def handoff[T](arr: Array[T]): ParallelArray[T] = wrapOrRebuild(arr, arr.length)
+
+ /** Creates a new parallel array by wrapping a part of the specified array.
+ */
+ def handoff[T](arr: Array[T], sz: Int): ParallelArray[T] = wrapOrRebuild(arr, sz)
+
+ private def wrapOrRebuild[T](arr: AnyRef, sz: Int) = arr match {
+ case arr: Array[AnyRef] => new ParallelArray[T](new ExposedArraySeq[T](arr, sz))
+ case _ => new ParallelArray[T](new ExposedArraySeq[T](runtime.ScalaRunTime.toObjectArray(arr), sz))
}
def createFromCopy[T <: AnyRef : ClassManifest](arr: Array[T]): ParallelArray[T] = {
diff --git a/src/parallel-collections/scala/collection/parallel/mutable/ParallelArrayCombiner.scala b/src/library/scala/collection/parallel/mutable/ParallelArrayCombiner.scala
index 9bbad7035e..2991344be2 100644
--- a/src/parallel-collections/scala/collection/parallel/mutable/ParallelArrayCombiner.scala
+++ b/src/library/scala/collection/parallel/mutable/ParallelArrayCombiner.scala
@@ -43,7 +43,7 @@ extends LazyCombiner[T, ParallelArray[T], ExposedArrayBuffer[T]]
class CopyChainToArray(array: Array[Any], offset: Int, howmany: Int) extends super.Task[Unit, CopyChainToArray] {
var result = ()
- def leaf = if (howmany > 0) {
+ def leaf(prev: Option[Unit]) = if (howmany > 0) {
var totalleft = howmany
val (stbuff, stind) = findStart(offset)
var buffind = stbuff
diff --git a/src/parallel-collections/scala/collection/parallel/mutable/ParallelIterable.scala b/src/library/scala/collection/parallel/mutable/ParallelIterable.scala
index f7ba44b67e..bd0a46bc43 100644
--- a/src/parallel-collections/scala/collection/parallel/mutable/ParallelIterable.scala
+++ b/src/library/scala/collection/parallel/mutable/ParallelIterable.scala
@@ -28,8 +28,8 @@ trait ParallelIterable[T] extends collection.mutable.Iterable[T]
/** $factoryinfo
*/
object ParallelIterable extends ParallelFactory[ParallelIterable] {
- implicit def canBuildFrom[T]: CanBuildFromParallel[Coll, T, ParallelIterable[T]] =
- new GenericCanBuildFromParallel[T]
+ implicit def canBuildFrom[T]: CanCombineFrom[Coll, T, ParallelIterable[T]] =
+ new GenericCanCombineFrom[T]
def newBuilder[T]: Combiner[T, ParallelIterable[T]] = ParallelArrayCombiner[T]
diff --git a/src/parallel-collections/scala/collection/parallel/mutable/ParallelSeq.scala b/src/library/scala/collection/parallel/mutable/ParallelSeq.scala
index ed08b59962..636ba1ac3d 100644
--- a/src/parallel-collections/scala/collection/parallel/mutable/ParallelSeq.scala
+++ b/src/library/scala/collection/parallel/mutable/ParallelSeq.scala
@@ -4,7 +4,7 @@ package scala.collection.parallel.mutable
import scala.collection.generic.GenericParallelTemplate
import scala.collection.generic.GenericCompanion
import scala.collection.generic.GenericParallelCompanion
-import scala.collection.generic.CanBuildFromParallel
+import scala.collection.generic.CanCombineFrom
import scala.collection.generic.ParallelFactory
import scala.collection.parallel.ParallelSeqLike
import scala.collection.parallel.Combiner
@@ -38,7 +38,7 @@ trait ParallelSeq[T] extends collection.mutable.Seq[T]
* @define coll mutable parallel sequence
*/
object ParallelSeq extends ParallelFactory[ParallelSeq] {
- implicit def canBuildFrom[T]: CanBuildFromParallel[Coll, T, ParallelSeq[T]] = new GenericCanBuildFromParallel[T]
+ implicit def canBuildFrom[T]: CanCombineFrom[Coll, T, ParallelSeq[T]] = new GenericCanCombineFrom[T]
def newBuilder[T]: Combiner[T, ParallelSeq[T]] = ParallelArrayCombiner[T]
diff --git a/src/parallel-collections/scala/collection/parallel/mutable/package.scala b/src/library/scala/collection/parallel/mutable/package.scala
index f670c7b7c5..f670c7b7c5 100644
--- a/src/parallel-collections/scala/collection/parallel/mutable/package.scala
+++ b/src/library/scala/collection/parallel/mutable/package.scala
diff --git a/src/parallel-collections/scala/collection/parallel/package.scala b/src/library/scala/collection/parallel/package.scala
index 6e8cbe8633..cddf098966 100644
--- a/src/parallel-collections/scala/collection/parallel/package.scala
+++ b/src/library/scala/collection/parallel/package.scala
@@ -4,7 +4,7 @@ package scala.collection
import java.lang.Thread._
import scala.collection.generic.CanBuildFrom
-import scala.collection.generic.CanBuildFromParallel
+import scala.collection.generic.CanCombineFrom
/** Package object for parallel collections.
@@ -28,14 +28,14 @@ package object parallel {
* @param array the array to be parallelized
* @return a `Parallelizable` object with a `par` method
*/
- implicit def array2ParallelArray[T <: AnyRef](array: Array[T]) = new Parallelizable[T, mutable.ParallelArray[T]] {
+ implicit def array2ParallelArray[T <: AnyRef](array: Array[T]) = new Parallelizable[mutable.ParallelArray[T]] {
def par = mutable.ParallelArray.handoff[T](array)
}
implicit def factory2ops[From, Elem, To](bf: CanBuildFrom[From, Elem, To]) = new {
def isParallel = bf.isInstanceOf[Parallel]
- def asParallel = bf.asInstanceOf[CanBuildFromParallel[From, Elem, To]]
- def ifParallel[R](isbody: CanBuildFromParallel[From, Elem, To] => R) = new {
+ def asParallel = bf.asInstanceOf[CanCombineFrom[From, Elem, To]]
+ def ifParallel[R](isbody: CanCombineFrom[From, Elem, To] => R) = new {
def otherwise(notbody: => R) = if (isParallel) isbody(asParallel) else notbody
}
}
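The `factory2ops` wrapper gives any `CanBuildFrom` a guarded downcast to `CanCombineFrom`. A usage sketch, assuming the package object members are in scope via `import scala.collection.parallel._`:

    // Take the parallel path when the builder factory supports combining,
    // otherwise fall back to the sequential path.
    def pickPath[From, Elem, To](bf: CanBuildFrom[From, Elem, To]): String =
      bf.ifParallel(pbf => "parallel: " + pbf) otherwise "sequential"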
diff --git a/src/library/scala/concurrent/SyncVar.scala b/src/library/scala/concurrent/SyncVar.scala
index 46dc415e1f..5b55be1326 100644
--- a/src/library/scala/concurrent/SyncVar.scala
+++ b/src/library/scala/concurrent/SyncVar.scala
@@ -6,12 +6,10 @@
** |/ **
\* */
-
-
package scala.concurrent
-
-/** The class <code>SyncVar</code> ...
+/** A class to provide safe concurrent access to a mutable cell.
+ * All methods are synchronized.
*
* @author Martin Odersky, Stepan Koltsov
* @version 1.0, 10/03/2003
@@ -29,24 +27,17 @@ class SyncVar[A] {
def get(timeout: Long): Option[A] = synchronized {
if (!isDefined) {
- try {
- wait(timeout)
- } catch {
- case _: InterruptedException =>
- }
+ try wait(timeout)
+ catch { case _: InterruptedException => () }
}
- if (exception.isEmpty) {
- if (isDefined) Some(value) else None
- } else
- throw exception.get
+ if (exception.isDefined) throw exception.get
+ else if (isDefined) Some(value)
+ else None
}
def take() = synchronized {
- try {
- get
- } finally {
- unset()
- }
+ try get
+ finally unset()
}
def set(x: A) = synchronized {
@@ -56,12 +47,6 @@ class SyncVar[A] {
notifyAll()
}
- private def setException(e: Throwable) = synchronized {
- exception = Some(e)
- isDefined = true
- notifyAll()
- }
-
def put(x: A) = synchronized {
while (isDefined) wait()
set(x)
@@ -75,5 +60,5 @@ class SyncVar[A] {
isDefined = false
notifyAll()
}
-
}
+
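After the cleanup, `SyncVar` reads as a small synchronized cell. Typical producer/consumer use, as a minimal sketch:

    // One thread puts a value; the main thread blocks in take() until it arrives.
    val cell = new scala.concurrent.SyncVar[Int]
    new Thread(new Runnable { def run() { cell.put(42) } }).start()
    println(cell.take())    // blocks until the value arrives, then unsets the cell
    println(cell.get(100))  // None unless something is set within 100 ms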
diff --git a/src/library/scala/io/Source.scala b/src/library/scala/io/Source.scala
index b5313ef61b..cb7403e255 100644
--- a/src/library/scala/io/Source.scala
+++ b/src/library/scala/io/Source.scala
@@ -114,11 +114,13 @@ object Source {
/** Create a <code>Source</code> from array of bytes, assuming
* one byte per character (ISO-8859-1 encoding.)
*/
- def fromRawBytes(bytes: Array[Byte]): Source = fromString(new String(bytes, Codec.ISO8859.name))
+ def fromRawBytes(bytes: Array[Byte]): Source =
+ fromString(new String(bytes, Codec.ISO8859.name))
/** creates <code>Source</code> from file with given file: URI
*/
- def fromURI(uri: URI)(implicit codec: Codec): BufferedSource = fromFile(new JFile(uri))(codec)
+ def fromURI(uri: URI)(implicit codec: Codec): BufferedSource =
+ fromFile(new JFile(uri))(codec)
/** same as fromURL(new URL(s))(Codec(enc))
*/
@@ -196,7 +198,8 @@ abstract class Source extends Iterator[Char] {
*
*/
@deprecated("Use a collections method such as getLines().toIndexedSeq for random access.")
- def getLine(line: Int): String = getLines() drop (line - 1) next
+ def getLine(line: Int): String = lineNum(line)
+ private def lineNum(line: Int): String = getLines() drop (line - 1) next
class LineIterator() extends Iterator[String] {
private[this] val sb = new StringBuilder
@@ -296,10 +299,10 @@ abstract class Source extends Iterator[Char] {
* @param out PrintStream to use
*/
def report(pos: Int, msg: String, out: PrintStream) {
- val line = Position line pos
- val col = Position column pos
+ val line = Position line pos
+ val col = Position column pos
- out println "%s:%d:%d: %s%s%s^".format(descr, line, col, msg, getLine(line), spaces(col - 1))
+ out println "%s:%d:%d: %s%s%s^".format(descr, line, col, msg, lineNum(line), spaces(col - 1))
}
/**
@@ -340,8 +343,9 @@ abstract class Source extends Iterator[Char] {
}
/** The close() method closes the underlying resource. */
- def close(): Unit =
+ def close() {
if (closeFunction != null) closeFunction()
+ }
/** The reset() method creates a fresh copy of this Source. */
def reset(): Source =
diff --git a/src/library/scala/math/Ordering.scala b/src/library/scala/math/Ordering.scala
index 4527e18338..fdfc4915d9 100644
--- a/src/library/scala/math/Ordering.scala
+++ b/src/library/scala/math/Ordering.scala
@@ -204,15 +204,17 @@ object Ordering extends LowPriorityOrderingImplicits {
}
implicit object String extends StringOrdering
- implicit def Option[T](implicit ord: Ordering[T]) : Ordering[Option[T]] =
- new Ordering[Option[T]] {
- def compare(x : Option[T], y : Option[T]) = (x, y) match {
- case (None, None) => 0
- case (None, _) => -1
- case (_, None) => 1
- case (Some(x), Some(y)) => ord.compare(x, y)
- }
+ trait OptionOrdering[T] extends Ordering[Option[T]] {
+ def optionOrdering: Ordering[T]
+ def compare(x: Option[T], y: Option[T]) = (x, y) match {
+ case (None, None) => 0
+ case (None, _) => -1
+ case (_, None) => 1
+ case (Some(x), Some(y)) => optionOrdering.compare(x, y)
}
+ }
+ implicit def Option[T](implicit ord: Ordering[T]): Ordering[Option[T]] =
+ new OptionOrdering[T] { val optionOrdering = ord }
implicit def Iterable[T](implicit ord: Ordering[T]): Ordering[Iterable[T]] =
new Ordering[Iterable[T]] {
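The refactoring lifts the anonymous `Ordering[Option[T]]` into a named `OptionOrdering` trait without changing behaviour: `None` still sorts before any `Some`. For example:

    // Observable behaviour of the implicit Ordering[Option[T]] above.
    val xs = List(Some(3), None, Some(1))
    println(xs.sorted)   // List(None, Some(1), Some(3))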
diff --git a/src/library/scala/reflect/generic/Symbols.scala b/src/library/scala/reflect/generic/Symbols.scala
index 2f5e0624ab..f1226c7e19 100755
--- a/src/library/scala/reflect/generic/Symbols.scala
+++ b/src/library/scala/reflect/generic/Symbols.scala
@@ -119,6 +119,7 @@ trait Symbols { self: Universe =>
def isTrait: Boolean = isClass && hasFlag(TRAIT) // refined later for virtual classes.
final def hasDefault = isParameter && hasFlag(DEFAULTPARAM)
final def isAbstractClass = isClass && hasFlag(ABSTRACT)
+ // XXX This is unlikely to be correct: it's not looking for the ABSOVERRIDE flag?
final def isAbstractOverride = isTerm && hasFlag(ABSTRACT) && hasFlag(OVERRIDE)
final def isBridge = hasFlag(BRIDGE)
final def isCase = hasFlag(CASE)
diff --git a/src/library/scala/runtime/AnyValCompanion.scala b/src/library/scala/runtime/AnyValCompanion.scala
index 0a6f93805a..0fba1cfd60 100644
--- a/src/library/scala/runtime/AnyValCompanion.scala
+++ b/src/library/scala/runtime/AnyValCompanion.scala
@@ -6,7 +6,7 @@
** |/ **
\* */
-// $Id$
+
package scala.runtime
diff --git a/src/library/scala/runtime/ScalaRunTime.scala b/src/library/scala/runtime/ScalaRunTime.scala
index a1d15c4b7d..a8cb2340ff 100644
--- a/src/library/scala/runtime/ScalaRunTime.scala
+++ b/src/library/scala/runtime/ScalaRunTime.scala
@@ -235,6 +235,12 @@ object ScalaRunTime {
*
*/
def stringOf(arg: Any): String = {
+ import collection.{SortedSet, SortedMap}
+ def mapTraversable(x: Traversable[_], f: Any => String) = x match {
+ case ss: SortedSet[_] => ss.map(f)
+ case ss: SortedMap[_, _] => ss.map(f)
+ case _ => x.map(f)
+ }
def inner(arg: Any): String = arg match {
case null => "null"
// Node extends NodeSeq extends Seq[Node] strikes again
@@ -252,7 +258,7 @@ object ScalaRunTime {
// exception if you call iterator. What a world.
// And they can't be infinite either.
if (x.getClass.getName startsWith "scala.tools.nsc.io") x.toString
- else (x map inner) mkString (x.stringPrefix + "(", ", ", ")")
+ else (mapTraversable(x, inner)) mkString (x.stringPrefix + "(", ", ", ")")
case x => x toString
}
val s = inner(arg)
diff --git a/src/library/scala/util/Random.scala b/src/library/scala/util/Random.scala
index 69cb4bb48d..53e721dcda 100644
--- a/src/library/scala/util/Random.scala
+++ b/src/library/scala/util/Random.scala
@@ -17,6 +17,8 @@ import collection.immutable.List
*
*/
class Random(val self: java.util.Random) {
+ import collection.mutable.ArrayBuffer
+ import collection.generic.CanBuildFrom
/** Creates a new random number generator using a single long seed. */
def this(seed: Long) = this(new java.util.Random(seed))
@@ -97,27 +99,6 @@ class Random(val self: java.util.Random) {
}
def setSeed(seed: Long) { self.setSeed(seed) }
-}
-
-/** The object <code>Random</code> offers a default implementation
- * of scala.util.Random and random-related convenience methods.
- *
- * @since 2.8
- */
-object Random extends Random {
- import collection.mutable.ArrayBuffer
- import collection.generic.CanBuildFrom
-
- /** Returns a Stream of pseudorandomly chosen alphanumeric characters,
- * equally chosen from A-Z, a-z, and 0-9.
- *
- * @since 2.8
- */
- def alphanumeric: Stream[Char] = {
- def isAlphaNum(c: Char) = (c >= 'A' && c <= 'Z') || (c >= 'a' && c <= 'z') || (c >= '0' && c <= '9')
-
- Stream continually nextPrintableChar filter isAlphaNum
- }
/** Returns a new collection of the same type in a randomly chosen order.
*
@@ -140,4 +121,25 @@ object Random extends Random {
bf(xs) ++= buf result
}
+
+}
+
+/** The object <code>Random</code> offers a default implementation
+ * of scala.util.Random and random-related convenience methods.
+ *
+ * @since 2.8
+ */
+object Random extends Random {
+
+ /** Returns a Stream of pseudorandomly chosen alphanumeric characters,
+ * equally chosen from A-Z, a-z, and 0-9.
+ *
+ * @since 2.8
+ */
+ def alphanumeric: Stream[Char] = {
+ def isAlphaNum(c: Char) = (c >= 'A' && c <= 'Z') || (c >= 'a' && c <= 'z') || (c >= '0' && c <= '9')
+
+ Stream continually nextPrintableChar filter isAlphaNum
+ }
+
}
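The move leaves `shuffle` on the class and `alphanumeric` on the companion, so both remain reachable through `scala.util.Random` as before:

    // Illustrative use of the relocated members.
    import scala.util.Random
    println((Random.alphanumeric take 8).mkString)   // e.g. "aZ3k9QxP"
    println(Random.shuffle(List(1, 2, 3, 4)))        // some permutation of the list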
diff --git a/src/library/scala/xml/parsing/MarkupParser.scala b/src/library/scala/xml/parsing/MarkupParser.scala
index 24e0d78c6f..4f6b89c07b 100644
--- a/src/library/scala/xml/parsing/MarkupParser.scala
+++ b/src/library/scala/xml/parsing/MarkupParser.scala
@@ -310,7 +310,7 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
def xEntityValue(): String = {
val endch = ch
nextch
- while (ch != endch) {
+ while (ch != endch && !eof) {
putChar(ch)
nextch
}
@@ -556,7 +556,7 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
if (ch != '\'' && ch != '"')
reportSyntaxError("quote ' or \" expected");
nextch
- while (ch != endch) {
+ while (ch != endch && !eof) {
putChar(ch)
nextch
}
@@ -572,7 +572,7 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
if (ch!='\'' && ch != '"')
reportSyntaxError("quote ' or \" expected");
nextch
- while (ch != endch) {
+ while (ch != endch && !eof) {
putChar(ch)
//Console.println("hello '"+ch+"'"+isPubIDChar(ch));
if (!isPubIDChar(ch))
diff --git a/src/parallel-collections/scala/collection/Parallelizable.scala b/src/parallel-collections/scala/collection/Parallelizable.scala
deleted file mode 100644
index 206285459d..0000000000
--- a/src/parallel-collections/scala/collection/Parallelizable.scala
+++ /dev/null
@@ -1,40 +0,0 @@
-package scala.collection
-
-
-
-import parallel.ParallelIterableLike
-
-
-
-/**
- * This trait describes collections which can be turned into parallel collections
- * by invoking the method `par`. Parallelizable collections may be parametrized with
- * a target type different than their own.
- */
-trait Parallelizable[+T, +Repr <: Parallel] {
-
- /**
- * Returns a parallel implementation of a collection.
- */
- def par: Repr
-
-}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/src/parallel-collections/scala/collection/generic/GenericParallelCompanion.scala b/src/parallel-collections/scala/collection/generic/GenericParallelCompanion.scala
deleted file mode 100644
index 14eb9ab282..0000000000
--- a/src/parallel-collections/scala/collection/generic/GenericParallelCompanion.scala
+++ /dev/null
@@ -1,31 +0,0 @@
-package scala.collection.generic
-
-
-import scala.collection.parallel.Combiner
-import scala.collection.parallel.ParallelIterable
-
-
-
-/**
- * A template class for companion objects of parallel collection classes.
- * They should be mixed in together with `GenericCompanion` type.
- * @tparam CC the type constructor representing the collection class
- * @since 2.8
- */
-trait GenericParallelCompanion[+CC[X] <: ParallelIterable[X]] {
- /**
- * The default builder for $Coll objects.
- */
- def newBuilder[A]: Combiner[A, CC[A]]
-
- /**
- * The parallel builder for $Coll objects.
- */
- def newCombiner[A]: Combiner[A, CC[A]]
-}
-
-
-
-
-
-
diff --git a/src/parallel-collections/scala/collection/parallel/ParallelMap.scala b/src/parallel-collections/scala/collection/parallel/ParallelMap.scala
deleted file mode 100644
index c03f14b198..0000000000
--- a/src/parallel-collections/scala/collection/parallel/ParallelMap.scala
+++ /dev/null
@@ -1,61 +0,0 @@
-package scala.collection.parallel
-
-
-
-
-
-import scala.collection.Map
-import scala.collection.mutable.Builder
-import scala.collection.generic.ParallelMapFactory
-import scala.collection.generic.CanBuildFromParallel
-
-
-
-
-
-
-trait ParallelMap[K, +V]
-extends Map[K, V]
- with ParallelIterable[(K, V)]
- with ParallelMapLike[K, V, ParallelMap[K, V], Map[K, V]]
-{ self =>
-
- override def empty: ParallelMap[K, V] = null // TODO
-
-}
-
-
-
-object ParallelMap extends ParallelMapFactory[ParallelMap] {
- def empty[A, B]: ParallelMap[A, B] = null // TODO
-
- implicit def canBuildFrom[A, B]: CanBuildFromParallel[Coll, (A, B), Map[A, B]] = null // TODO
-}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/src/parallel-collections/scala/collection/parallel/immutable/ParallelHashTrie.scala b/src/parallel-collections/scala/collection/parallel/immutable/ParallelHashTrie.scala
deleted file mode 100644
index be379d9e5e..0000000000
--- a/src/parallel-collections/scala/collection/parallel/immutable/ParallelHashTrie.scala
+++ /dev/null
@@ -1,60 +0,0 @@
-package scala.collection.parallel.immutable
-
-
-
-
-
-
-
-import scala.collection.parallel.ParallelMap
-import scala.collection.parallel.ParallelMapLike
-import scala.collection.immutable.HashMap
-
-
-
-
-
-
-
-
-class ParallelHashTrie[K, +V]
-extends ParallelMap[K, V]
- with ParallelMapLike[K, V, ParallelHashTrie[K, V], HashMap[K, V]]
-{ self =>
-
- override def empty: ParallelHashTrie[K, V] = null // TODO
-
- def parallelIterator = null // TODO
-
- def seq = null // TODO
-
- def -(k: K) = null // TODO
-
- def +[U >: V](kv: (K, U)) = null // TODO
-
- def get(k: K) = None // TODO
-
-}
-
-
-
-
-
-object ParallelHashTrie {
-
-}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/src/partest/scala/tools/partest/PartestTask.scala b/src/partest/scala/tools/partest/PartestTask.scala
index 230a6f73ec..55b4a2a637 100644
--- a/src/partest/scala/tools/partest/PartestTask.scala
+++ b/src/partest/scala/tools/partest/PartestTask.scala
@@ -182,8 +182,10 @@ class PartestTask extends Task with CompilationPathProperty {
private def getScalapFiles = getFiles(scalapFiles)
override def execute() {
- if (isPartestDebug)
+ if (isPartestDebug || debug) {
setProp("partest.debug", "true")
+ nest.NestUI._verbose = true
+ }
srcDir foreach (x => setProp("partest.srcdir", x))
diff --git a/src/partest/scala/tools/partest/nest/Diff.java b/src/partest/scala/tools/partest/nest/Diff.java
index f780712b6b..f69fc6858b 100644
--- a/src/partest/scala/tools/partest/nest/Diff.java
+++ b/src/partest/scala/tools/partest/nest/Diff.java
@@ -49,7 +49,7 @@ public class Diff {
an edit script, if desired.
*/
public Diff(Object[] a,Object[] b) {
- Hashtable h = new Hashtable(a.length + b.length);
+ Hashtable<Object, Integer> h = new Hashtable<Object, Integer>(a.length + b.length);
filevec[0] = new file_data(a,h);
filevec[1] = new file_data(b,h);
}
@@ -744,7 +744,7 @@ public class Diff {
nondiscarded_lines = j;
}
- file_data(Object[] data,Hashtable h) {
+ file_data(Object[] data, Hashtable<Object, Integer> h) {
buffered_lines = data.length;
equivs = new int[buffered_lines];
@@ -752,9 +752,9 @@ public class Diff {
realindexes = new int[buffered_lines];
for (int i = 0; i < data.length; ++i) {
- Integer ir = (Integer)h.get(data[i]);
+ Integer ir = h.get(data[i]);
if (ir == null)
- h.put(data[i],new Integer(equivs[i] = equiv_max++));
+ h.put(data[i], new Integer(equivs[i] = equiv_max++));
else
equivs[i] = ir.intValue();
}
diff --git a/src/partest/scala/tools/partest/nest/DiffPrint.java b/src/partest/scala/tools/partest/nest/DiffPrint.java
index eeb0dd5a09..31f9a1bc79 100644
--- a/src/partest/scala/tools/partest/nest/DiffPrint.java
+++ b/src/partest/scala/tools/partest/nest/DiffPrint.java
@@ -505,7 +505,7 @@ public class DiffPrint {
*/
static String[] slurp(String file) throws IOException {
BufferedReader rdr = new BufferedReader(new FileReader(file));
- Vector s = new Vector();
+ Vector<String> s = new Vector<String>();
for (;;) {
String line = rdr.readLine();
if (line == null) break;
diff --git a/src/partest/scala/tools/partest/nest/Worker.scala b/src/partest/scala/tools/partest/nest/Worker.scala
index 7797c1095a..931bc5cc13 100644
--- a/src/partest/scala/tools/partest/nest/Worker.scala
+++ b/src/partest/scala/tools/partest/nest/Worker.scala
@@ -374,7 +374,7 @@ class Worker(val fileManager: FileManager) extends Actor {
def isInGroup(f: File, num: Int) = SFile(f).stripExtension endsWith ("_" + num)
val groups = (0 to 9).toList map (num => testFiles filter (f => isInGroup(f, num)))
- val noGroupSuffix = testFiles -- groups.flatten
+ val noGroupSuffix = testFiles filterNot (groups.flatten contains)
def compileGroup(g: List[File]) {
val (scalaFiles, javaFiles) = g partition isScala
diff --git a/src/swing/scala/swing/Font.scala b/src/swing/scala/swing/Font.scala.disabled
index a58c8967d7..6eebd667bd 100644
--- a/src/swing/scala/swing/Font.scala
+++ b/src/swing/scala/swing/Font.scala.disabled
@@ -1,36 +1,36 @@
package scala.swing
-/*object Font {
- def apply(fontFormat: Int, fontFile: java.io.File) = java.awt.Font.createFont(fontFormat, fontFile)
- def apply(fontFormat: Int, fontStream: java.io.InputStream) = java.awt.Font.createFont(fontFormat, fontStream)
+/*object Font {
+ def apply(fontFormat: Int, fontFile: java.io.File) = java.awt.Font.createFont(fontFormat, fontFile)
+ def apply(fontFormat: Int, fontStream: java.io.InputStream) = java.awt.Font.createFont(fontFormat, fontStream)
def decode(str: String) = java.awt.Font.decode(str)
-
+
/* TODO: finish implementation
/**
* See [java.awt.Font.getFont].
*/
- def get(attributes: Map[_ <: java.text.AttributedCharacterIterator.Attribute, _]) =
+ def get(attributes: Map[_ <: java.text.AttributedCharacterIterator.Attribute, _]) =
java.awt.Font.getFont(ImmutableMapWrapper(attributes))
-
+
import java.{util => ju}
private case class ImmutableMapWrapper[A, B](underlying : Map[A, B])(m : ClassManifest[A]) extends ju.AbstractMap[A, B] {
self =>
override def size = underlying.size
- override def put(k : A, v : B) =
+ override def put(k : A, v : B) =
throw new UnsupportedOperationException("This is a wrapper that does not support mutation")
- override def remove(k : AnyRef) =
+ override def remove(k : AnyRef) =
throw new UnsupportedOperationException("This is a wrapper that does not support mutation")
-
+
override def entrySet : ju.Set[ju.Map.Entry[A, B]] = new ju.AbstractSet[ju.Map.Entry[A, B]] {
def size = self.size
def iterator = new ju.Iterator[ju.Map.Entry[A, B]] {
val ui = underlying.iterator
var prev : Option[A] = None
-
+
def hasNext = ui.hasNext
-
+
def next = {
val (k, v) = ui.next
prev = Some(k)
@@ -44,7 +44,7 @@ package scala.swing
}
}
}
-
+
def remove = prev match {
case Some(k) => val v = self.remove(k.asInstanceOf[AnyRef]) ; prev = None ; v
case _ => throw new IllegalStateException("next must be called at least once before remove")
@@ -53,7 +53,7 @@ package scala.swing
}
}
*/
-
+
/**
* See [java.awt.Font.getFont].
*/
@@ -62,9 +62,9 @@ package scala.swing
* See [java.awt.Font.getFont].
*/
def get(nm: String, font: Font) = java.awt.Font.getFont(nm, font)
-
+
def Insets(x: Int, y: Int, width: Int, height: Int) = new Insets(x, y, width, height)
def Rectangle(x: Int, y: Int, width: Int, height: Int) = new Insets(x, y, width, height)
def Point(x: Int, y: Int) = new Point(x, y)
- def Dimension(x: Int, y: Int) = new Dimension(x, y)
+ def Dimension(x: Int, y: Int) = new Dimension(x, y)
}*/ \ No newline at end of file
diff --git a/test/benchmarks/src/scala/collection/parallel/Benchmarking.scala b/test/benchmarks/src/scala/collection/parallel/Benchmarking.scala
index f6aa63cb1a..0054893b8a 100644
--- a/test/benchmarks/src/scala/collection/parallel/Benchmarking.scala
+++ b/test/benchmarks/src/scala/collection/parallel/Benchmarking.scala
@@ -93,6 +93,13 @@ trait BenchmarkRegister {
register(hashtries.Construct)
register(hashtries.Lookup)
register(hashtries.Combine)
+ register(hashtries.MultipleCombine)
+
+ // parallel hash trie benchmarks
+ register(hashtries.RefParallelHashTrieBenches.Reduce)
+ register(hashtries.RefParallelHashTrieBenches.ReduceMedium)
+ register(hashtries.RefParallelHashTrieBenches.Map)
+ register(hashtries.RefParallelHashTrieBenches.Map2)
}
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/generic/Dummy.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/generic/Dummy.scala
index 3699b1a255..2b2ad81af6 100644
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/generic/Dummy.scala
+++ b/test/benchmarks/src/scala/collection/parallel/benchmarks/generic/Dummy.scala
@@ -6,6 +6,7 @@ package scala.collection.parallel.benchmarks.generic
class Dummy(val in: Int) {
var num = in
override def toString = in.toString
+ override def hashCode = in
}
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/generic/Operators.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/generic/Operators.scala
index 42399c980a..1268f94bac 100644
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/generic/Operators.scala
+++ b/test/benchmarks/src/scala/collection/parallel/benchmarks/generic/Operators.scala
@@ -11,6 +11,7 @@ trait Operators[T] {
def mediumreducer: (T, T) => T
def filterer: T => Boolean
def mapper: T => T
+ def mapper2: T => T = error("unsupported")
def heavymapper: T => T
def taker: T => Boolean
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/generic/ParallelBenches.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/generic/ParallelBenches.scala
index 4e81cacd4d..3db33ebaed 100644
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/generic/ParallelBenches.scala
+++ b/test/benchmarks/src/scala/collection/parallel/benchmarks/generic/ParallelBenches.scala
@@ -9,7 +9,7 @@ package generic
trait ParallelIterableBench[T, Coll <: ParallelIterable[T]] extends collection.parallel.benchmarks.Bench {
- self =>
+self =>
protected var seqcoll: Iterable[T] = null
protected var parcoll: Coll = null.asInstanceOf[Coll]
@@ -110,6 +110,20 @@ trait StandardParallelIterableBench[T, Coll <: ParallelIterable[T]] extends Para
def companion = ReduceMedium
}
+ object Map extends IterableBenchCompanion {
+ override def defaultSize = 5000
+ def benchName = "map";
+ def apply(sz: Int, p: Int, w: String) = new Map(sz, p, w)
+ }
+
+ class Map(val size: Int, val parallelism: Int, val runWhat: String)
+ extends IterableBench with StandardParallelIterableBench[T, Coll] {
+ def comparisonMap = collection.Map()
+ def runseq = this.seqcoll.map(operators.mapper)
+ def runpar = this.parcoll.map(operators.mapper)
+ def companion = Map
+ }
+
}
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/hashtries/Combine.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/hashtries/Combine.scala
index f04688c7f9..3a070fb6ff 100644
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/hashtries/Combine.scala
+++ b/test/benchmarks/src/scala/collection/parallel/benchmarks/hashtries/Combine.scala
@@ -21,14 +21,23 @@ class Combine(val size: Int, val parallelism: Int, val runWhat: String) extends
def runpar = throw new UnsupportedOperationException
def runseq = runhashtrie
def runhashtrie = {
- hashtrie combine thattrie
+ hashtrie merge thattrie
+ // println
+ // println("both tries: " + HashTrie.bothtries)
+ // println("one trie, one item: " + HashTrie.onetrie)
+ // println("both single: " + HashTrie.bothsingle)
+ // System exit 1
+ }
+ def rundestructive = {
+ hashtrie merge thattrie
}
def runappendtrie = hashtrie ++ thattrie
def runhashmap = hashmap ++ thatmap
def companion = Combine
- def comparisonMap = Map("hashtrie" -> runhashtrie _, "hashmap" -> runhashmap _, "appendtrie" -> runappendtrie _)
+ def comparisonMap = Map("hashtrie" -> runhashtrie _, "hashmap" -> runhashmap _, "destruct" -> rundestructive _, "appendtrie" -> runappendtrie _)
override def reset = runWhat match {
case "appendtrie" => initHashTrie
+ case "destruct" => initHashTrie
case _ => super.reset
}
}
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/hashtries/MultipleCombine.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/hashtries/MultipleCombine.scala
new file mode 100644
index 0000000000..033c211849
--- /dev/null
+++ b/test/benchmarks/src/scala/collection/parallel/benchmarks/hashtries/MultipleCombine.scala
@@ -0,0 +1,87 @@
+package scala.collection.parallel.benchmarks
+package hashtries
+
+
+
+
+import collection.immutable.{HashMap => HashTrie}
+import collection.mutable.HashMap
+
+
+
+
+
+
+class MultipleCombine(val size: Int, val parallelism: Int, val runWhat: String) extends Bench with IntInit {
+ var combines = 10
+
+ var thattries = new Array[HashTrie[Int, Int]](combines)
+ def initTries = for (r <- 0 until combines) {
+ var thattrie = new HashTrie[Int, Int]
+ for (i <- ((r + 1) * size) until ((r + 2) * size)) thattrie += ((i, i))
+ thattries(r) = thattrie
+ }
+ initTries
+
+ val thatmaps = new Array[HashMap[Int, Int]](10)
+ def initMaps = for (r <- 0 until combines) {
+ var thatmap = new HashMap[Int, Int]
+ for (i <- ((r + 1) * size) until ((r + 2) * size)) thatmap += ((i, i))
+ thatmaps(r) = thatmap
+ }
+ initMaps
+
+ override def repetitionsPerRun = 25
+ def runpar = throw new UnsupportedOperationException
+ def runseq = runhashtrie
+ def runhashtrie = {
+ initHashTrie
+ var trie = hashtrie
+ for (r <- 0 until combines) trie = trie merge thattries(r)
+ }
+ def runappendtrie = {
+ initHashTrie
+ var trie = hashtrie
+ for (r <- 0 until combines) trie = trie ++ thattries(r)
+ }
+ def runhashmap = {
+ initHashMap
+ var map = hashmap
+ for (r <- 0 until combines) map = map ++ thatmaps(r)
+ }
+ def rundestructive = {
+ initHashTrie
+ var trie = hashtrie
+ for (r <- 0 until combines) trie = trie merge thattries(r)
+ }
+ def companion = MultipleCombine
+ def comparisonMap = Map("hashtrie" -> runhashtrie _, "hashmap" -> runhashmap _, "appendtrie" -> runappendtrie _, "destruct" -> rundestructive _)
+ override def reset = runWhat match {
+ case "appendtrie" => initHashTrie
+ case "destruct" => initHashTrie
+ case _ => super.reset
+ }
+}
+
+
+object MultipleCombine extends BenchCompanion {
+ def collectionName = "HashTrie"
+ def benchName = "multi-combine";
+ def apply(sz: Int, p: Int, what: String) = new MultipleCombine(sz, p, what)
+ override def defaultSize = 5000
+}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/hashtries/ParallelHashTries.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/hashtries/ParallelHashTries.scala
new file mode 100644
index 0000000000..c617f69161
--- /dev/null
+++ b/test/benchmarks/src/scala/collection/parallel/benchmarks/hashtries/ParallelHashTries.scala
@@ -0,0 +1,121 @@
+package scala.collection.parallel.benchmarks.hashtries
+
+
+
+
+import scala.collection.parallel.benchmarks.generic.StandardParallelIterableBench
+import scala.collection.parallel.benchmarks.generic.NotBenchmark
+import scala.collection.parallel.benchmarks.generic.Dummy
+import scala.collection.parallel.benchmarks.generic.Operators
+import scala.collection.parallel.immutable.ParallelHashTrie
+
+
+
+
+
+trait ParallelHashTrieBenches[K, V] extends StandardParallelIterableBench[(K, V), ParallelHashTrie[K, V]] {
+
+ def nameOfCollection = "ParallelHashTrie"
+ def comparisonMap = collection.Map()
+ val forkJoinPool = new scala.concurrent.forkjoin.ForkJoinPool
+
+ object Map2 extends IterableBenchCompanion {
+ override def defaultSize = 5000
+ def benchName = "map2";
+ def apply(sz: Int, p: Int, w: String) = new Map2(sz, p, w)
+ }
+
+ class Map2(val size: Int, val parallelism: Int, val runWhat: String)
+ extends IterableBench with StandardParallelIterableBench[(K, V), ParallelHashTrie[K, V]] {
+ var result: Int = 0
+ def comparisonMap = collection.Map()
+ def runseq = {
+ val r = this.seqcoll.asInstanceOf[collection.immutable.HashMap[K, V]].map(operators.mapper2)
+ result = r.size
+ }
+ def runpar = {
+ result = this.parcoll.map(operators.mapper2).size
+ //println(collection.parallel.immutable.ParallelHashTrie.totalcombines)
+ //System.exit(1)
+ }
+ def companion = Map2
+ override def repetitionsPerRun = 50
+ override def printResults {
+ println("Total combines: " + collection.parallel.immutable.ParallelHashTrie.totalcombines)
+ println("Size of last result: " + result)
+ }
+ }
+
+}
+
+
+
+
+
+object RefParallelHashTrieBenches extends ParallelHashTrieBenches[Dummy, Dummy] with NotBenchmark {
+
+ type DPair = (Dummy, Dummy)
+
+ object operators extends Operators[DPair] {
+ def gcd(a: Int, b: Int): Int = {
+ val result = if (b == 0) a else {
+ gcd(b, a - b * (a / b))
+ }
+ result + 1000
+ }
+ def heavy(a: Int): Int = {
+ var i = 0
+ var sum = a
+ while (i < 3000) {
+ i += 1
+ sum += a + i
+ }
+ sum
+ }
+ val reducer = (x: DPair, y: DPair) => {
+ //y._2.num = x._2.in + y._2.in
+ y
+ }
+ val mediumreducer = (x: DPair, y: DPair) => {
+ y._2.num = gcd(x._2.in, y._2.in)
+ y
+ }
+ val filterer = (p: DPair) => {
+ p._1.num % 2 == 0
+ }
+ val mapper = (p: DPair) => {
+ val a = p._1
+ a.num = a.in % 2
+ (a, p._2)
+ }
+ override val mapper2 = (p: DPair) => {
+ val a = 1 //heavy(p._1.in)
+ (new Dummy(p._1.in * -2 + a), p._2)
+ }
+ val heavymapper = (p: DPair) => {
+ val a = p._1
+ var i = -100
+ while (i < 0) {
+ if (a.in < i) a.num += 1
+ i += 1
+ }
+ (a, p._2)
+ }
+ val taker = (p: DPair) => true
+ }
+
+ def createSequential(sz: Int, p: Int) = {
+ var ht = new collection.immutable.HashMap[Dummy, Dummy]
+ for (i <- 0 until sz) ht += ((new Dummy(i), new Dummy(i)))
+ ht
+ }
+
+ def createParallel(sz: Int, p: Int) = {
+ var pht = new ParallelHashTrie[Dummy, Dummy]
+ for (i <- 0 until sz) pht += ((new Dummy(i), new Dummy(i)))
+ forkJoinPool.setParallelism(p)
+ pht.environment = forkJoinPool
+ pht
+ }
+
+}
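
Aside: createParallel above shows the environment wiring the whole file relies on. Just that step, condensed (same calls as the file itself; the collection size is illustrative):

import scala.collection.parallel.immutable.ParallelHashTrie
import scala.concurrent.forkjoin.ForkJoinPool

object WirePool {
  def trieWithParallelism(p: Int) = {
    var pht = new ParallelHashTrie[Int, Int]
    for (i <- 0 until 1000) pht += ((i, i))
    val pool = new ForkJoinPool
    pool.setParallelism(p)   // cap the number of worker threads
    pht.environment = pool   // route this collection's operations to that pool
    pht
  }
}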
diff --git a/test/files/jvm/actor-executor2.scala b/test/files/jvm/actor-executor2.scala
index da64a7fc43..f8fcaef69f 100644
--- a/test/files/jvm/actor-executor2.scala
+++ b/test/files/jvm/actor-executor2.scala
@@ -1,6 +1,6 @@
import scala.actors.{Actor, SchedulerAdapter, Exit}
import Actor._
-import java.util.concurrent.Executors
+import java.util.concurrent.{Executors, RejectedExecutionException}
object One extends AdaptedActor {
def act() {
@@ -57,9 +57,15 @@ object Test {
val scheduler =
new SchedulerAdapter {
def execute(block: => Unit) {
- executor.execute(new Runnable {
+ val task = new Runnable {
def run() { block }
- })
+ }
+ try {
+ executor.execute(task)
+ } catch {
+ case ree: RejectedExecutionException =>
+ task.run() // run task on current thread
+ }
}
}
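
Aside: the fix builds the Runnable once so the same task can either go to the executor or, if the executor has been shut down and rejects it, run inline on the submitting thread. The pattern in isolation (illustrative sketch, independent of the actors scheduler):

import java.util.concurrent.{Executors, RejectedExecutionException}

object FallbackSubmit {
  private val executor = Executors.newFixedThreadPool(2)

  def submit(block: => Unit) {
    val task = new Runnable {
      def run() { block }
    }
    try {
      executor.execute(task)
    } catch {
      // a shut-down (or saturated) executor rejects new tasks;
      // degrade gracefully by running the task on the caller's thread
      case ree: RejectedExecutionException => task.run()
    }
  }
}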
diff --git a/test/files/neg/abstract-vars.check b/test/files/neg/abstract-vars.check
new file mode 100644
index 0000000000..8aa47745f6
--- /dev/null
+++ b/test/files/neg/abstract-vars.check
@@ -0,0 +1,21 @@
+abstract-vars.scala:5: error: class Fail1 needs to be abstract, since variable x is not defined
+(Note that variables need to be initialized to be defined)
+class Fail1 extends A {
+ ^
+abstract-vars.scala:9: error: class Fail2 needs to be abstract, since variable x in class A of type Int is not defined
+(Note that variables need to be initialized to be defined)
+class Fail2 extends A { }
+ ^
+abstract-vars.scala:11: error: class Fail3 needs to be abstract, since variable x in class A of type Int is not defined
+(Note that an abstract var requires a setter in addition to the getter)
+class Fail3 extends A {
+ ^
+abstract-vars.scala:14: error: class Fail4 needs to be abstract, since variable x in class A of type Int is not defined
+(Note that an abstract var requires a setter in addition to the getter)
+class Fail4 extends A {
+ ^
+abstract-vars.scala:18: error: class Fail5 needs to be abstract, since variable x in class A of type Int is not defined
+(Note that an abstract var requires a getter in addition to the setter)
+class Fail5 extends A {
+ ^
+5 errors found
diff --git a/test/files/neg/abstract-vars.scala b/test/files/neg/abstract-vars.scala
new file mode 100644
index 0000000000..df6109d3a8
--- /dev/null
+++ b/test/files/neg/abstract-vars.scala
@@ -0,0 +1,29 @@
+abstract class A {
+ var x: Int
+}
+
+class Fail1 extends A {
+ var x: Int
+}
+
+class Fail2 extends A { }
+
+class Fail3 extends A {
+ val x: Int = 5
+}
+class Fail4 extends A {
+ def x: Int = 5
+}
+
+class Fail5 extends A {
+ def x_=(y: Int) = ()
+}
+
+class Success1 extends A {
+ val x: Int = 5
+ def x_=(y: Int) = ()
+}
+
+class Success2 extends A {
+ var x: Int = 5
+}
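
Aside: the Fail3 through Fail5 messages follow from the desugaring of an abstract var into a getter/setter pair, so a concrete subclass must supply both halves. A sketch with illustrative names:

abstract class A2 {
  def x: Int              // getter half of an abstract `var x: Int`
  def x_=(y: Int): Unit   // setter half
}

class Ok extends A2 {
  private var _x = 5
  def x: Int = _x
  def x_=(y: Int) { _x = y }
}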
diff --git a/test/files/neg/bug1275.check b/test/files/neg/bug1275.check
index 9f806c0689..40c5d79d27 100644
--- a/test/files/neg/bug1275.check
+++ b/test/files/neg/bug1275.check
@@ -1,4 +1,6 @@
-bug1275.scala:13: error: The kind of type MyType does not conform to the expected kind of type MyType[+t] <: TestCovariance.Seq[t] in trait Seq.
- def span[a, s <: Seq[a] { type MyType <: s } ](xs: s): s = xs f
- ^
+bug1275.scala:8: error: type mismatch;
+ found : xs.MyType[a]
+ required: s
+ = xs f // xs: s <: Seq[a]{type MyType <: s }
+ ^
one error found
diff --git a/test/files/neg/bug1275.scala b/test/files/neg/bug1275.scala
index e9be13c763..769156fff2 100644
--- a/test/files/neg/bug1275.scala
+++ b/test/files/neg/bug1275.scala
@@ -1,14 +1,12 @@
-// tested using Scala compiler version 2.6.0-RC1 -- (c) 2002-2010 LAMP/EPFL
-
-// prompted by "Covariant return types" mailing list question
-object TestCovariance {
-
- // see Type constructor polymorphism in http://www.scala-lang.org/docu/changelog.html
- trait Seq[+t] {
- type MyType[+t] <: Seq[t]
-
- def f: MyType[t]
- }
-
- def span[a, s <: Seq[a] { type MyType <: s } ](xs: s): s = xs f
-}
+object Test {
+ trait Seq[+t] {
+ type MyType[+t] <: Seq[t]
+ def f: MyType[t]
+ }
+
+ def span[a, s <: Seq[a] { type MyType <: s } ](xs: s): s
+ = xs f // xs: s <: Seq[a]{type MyType <: s }
+ // xs.f : xs.MyType[a] <: Seq[a]
+ // ill-formed type in bound for s: Seq[a] { type MyType <: s }
+ // refinements aren't checked -- should they?
+} \ No newline at end of file
diff --git a/test/files/neg/bug1845.check b/test/files/neg/bug1845.check
new file mode 100644
index 0000000000..164f3f60c1
--- /dev/null
+++ b/test/files/neg/bug1845.check
@@ -0,0 +1,4 @@
+bug1845.scala:9: error: illegal cyclic reference involving value <import>
+ val lexical = new StdLexical
+ ^
+one error found
diff --git a/test/files/neg/bug1845.scala b/test/files/neg/bug1845.scala
new file mode 100644
index 0000000000..ceb43a0552
--- /dev/null
+++ b/test/files/neg/bug1845.scala
@@ -0,0 +1,10 @@
+import scala.util.parsing.combinator.syntactical.TokenParsers
+import scala.util.parsing.combinator.lexical.StdLexical
+import scala.util.parsing.syntax.StdTokens
+
+class MyTokenParsers extends TokenParsers {
+ import lexical._
+ type Tokens = StdTokens
+ type Elem = lexical.Token
+ val lexical = new StdLexical
+}
diff --git a/test/files/neg/bug3209.check b/test/files/neg/bug3209.check
new file mode 100644
index 0000000000..fa50f4ce1d
--- /dev/null
+++ b/test/files/neg/bug3209.check
@@ -0,0 +1,4 @@
+bug3209.scala:2: error: expected start of definition
+package test
+^
+one error found
diff --git a/test/files/neg/bug3209.scala b/test/files/neg/bug3209.scala
new file mode 100644
index 0000000000..d893726659
--- /dev/null
+++ b/test/files/neg/bug3209.scala
@@ -0,0 +1,2 @@
+@javax.annotation.Generated(Array("test"))
+package test \ No newline at end of file
diff --git a/test/files/neg/bug3631.check b/test/files/neg/bug3631.check
new file mode 100644
index 0000000000..12d94aa4dc
--- /dev/null
+++ b/test/files/neg/bug3631.check
@@ -0,0 +1,4 @@
+bug3631.scala:3: error: Implementation restriction: case classes cannot have more than 22 parameters.
+case class X23(x1: Int, x2: Int, x3: Int, x4: Int, x5: Int, x6: Int, x7: Int, x8: Int, x9: Int, x10: Int, x11: Int, x12: Int, x13: Int, x14: Int, x15: Int, x16: Int, x17: Int, x18: Int, x19: Int, x20: Int, x21: Int, x22: Int, x23: Int) { }
+ ^
+one error found
diff --git a/test/files/neg/bug3631.scala b/test/files/neg/bug3631.scala
new file mode 100644
index 0000000000..bcf91619ee
--- /dev/null
+++ b/test/files/neg/bug3631.scala
@@ -0,0 +1,3 @@
+case class X22(x1: Int, x2: Int, x3: Int, x4: Int, x5: Int, x6: Int, x7: Int, x8: Int, x9: Int, x10: Int, x11: Int, x12: Int, x13: Int, x14: Int, x15: Int, x16: Int, x17: Int, x18: Int, x19: Int, x20: Int, x21: Int, x22: Int) { }
+
+case class X23(x1: Int, x2: Int, x3: Int, x4: Int, x5: Int, x6: Int, x7: Int, x8: Int, x9: Int, x10: Int, x11: Int, x12: Int, x13: Int, x14: Int, x15: Int, x16: Int, x17: Int, x18: Int, x19: Int, x20: Int, x21: Int, x22: Int, x23: Int) { } \ No newline at end of file
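
Aside: the ceiling comes from the TupleN/FunctionN families (which stop at 22) backing the case-class machinery in Scala 2.x. One conventional workaround, sketched with illustrative names, is to group fields into nested case classes:

case class Lo(x1: Int, x2: Int, x3: Int /* ... through x11 */)
case class Hi(x12: Int, x13: Int /* ... through x23 */)
case class X23Split(lo: Lo, hi: Hi)   // no single class crosses the limit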
diff --git a/test/files/neg/bug882.check b/test/files/neg/bug882.check
index 8f47fefd9b..4e3e6d0860 100644
--- a/test/files/neg/bug882.check
+++ b/test/files/neg/bug882.check
@@ -1,4 +1,4 @@
-bug882.scala:2: error: traits cannot have type parameters with context bounds `: ...'
+bug882.scala:2: error: traits cannot have type parameters with context bounds `: ...' nor view bounds `<% ...'
trait SortedSet[A <% Ordered[A]] {
^
one error found
diff --git a/test/files/neg/names-defaults-neg.check b/test/files/neg/names-defaults-neg.check
index 38bacc0888..b2b00b7050 100644
--- a/test/files/neg/names-defaults-neg.check
+++ b/test/files/neg/names-defaults-neg.check
@@ -104,17 +104,20 @@ names-defaults-neg.scala:82: error: type mismatch;
Error occurred in an application involving default arguments.
new A2[String]()
^
-names-defaults-neg.scala:115: error: reference to var2 is ambiguous; it is both, a parameter
+names-defaults-neg.scala:86: error: module extending its companion class cannot use default constructor arguments
+ object C extends C()
+ ^
+names-defaults-neg.scala:120: error: reference to var2 is ambiguous; it is both, a parameter
name of the method and the name of a variable currently in scope.
delay(var2 = 40)
^
-names-defaults-neg.scala:118: error: missing parameter type for expanded function ((x$1) => a = x$1)
+names-defaults-neg.scala:123: error: missing parameter type for expanded function ((x$1) => a = x$1)
val taf2: Int => Unit = testAnnFun(a = _, b = get("+"))
^
-names-defaults-neg.scala:119: error: parameter specified twice: a
+names-defaults-neg.scala:124: error: parameter specified twice: a
val taf3 = testAnnFun(b = _: String, a = get(8))
^
-names-defaults-neg.scala:120: error: wrong number of parameters; expected = 2
+names-defaults-neg.scala:125: error: wrong number of parameters; expected = 2
val taf4: (Int, String) => Unit = testAnnFun(_, b = _)
^
-28 errors found
+29 errors found
diff --git a/test/files/neg/names-defaults-neg.scala b/test/files/neg/names-defaults-neg.scala
index e73dc71c9b..43883540a0 100644
--- a/test/files/neg/names-defaults-neg.scala
+++ b/test/files/neg/names-defaults-neg.scala
@@ -81,6 +81,11 @@ object Test extends Application {
// correct error message
new A2[String]()
+ object t3648 {
+ class C(val s: String = "")
+ object C extends C()
+ }
+
// DEFINITIONS
def test1(a: Int, b: String) = a +": "+ b
def test2(x: Unit) = println("test2")
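
Aside: the new t3648 error exists because default-argument getters live on the companion object itself, so consulting them while constructing that very object would be cyclic. Passing the argument explicitly avoids the cycle (sketch, illustrative names):

class D(val s: String = "")
object D extends D("explicit")   // compiles: no default getter is consulted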
diff --git a/test/files/neg/t2416.check b/test/files/neg/t2416.check
new file mode 100644
index 0000000000..0899ad09d5
--- /dev/null
+++ b/test/files/neg/t2416.check
@@ -0,0 +1,10 @@
+t2416.scala:3: error: type arguments [Int] do not conform to trait A's type parameter bounds [X <: Double]
+ def x : A[Int]#B = 10 // no you won't
+ ^
+t2416.scala:8: error: type arguments [Boolean] do not conform to type B's type parameter bounds [Y <: Double]
+ def x : A#B[Boolean] = 10 // seriously?
+ ^
+t2416.scala:13: error: type arguments [String] do not conform to type B's type parameter bounds [Z <: Double]
+ type C[Z <: A] = Z#B[String] // nuh-uh!
+ ^
+three errors found
diff --git a/test/files/neg/t2416.scala b/test/files/neg/t2416.scala
new file mode 100644
index 0000000000..6bb57a984b
--- /dev/null
+++ b/test/files/neg/t2416.scala
@@ -0,0 +1,14 @@
+object t2416a {
+ trait A[X <: Double] { type B = X }
+ def x : A[Int]#B = 10 // no you won't
+}
+
+object t2416b {
+ trait A{type B[Y <: Double] = Int}
+ def x : A#B[Boolean] = 10 // seriously?
+}
+
+object t2416c {
+ trait A{type B[Z <: Double] = Int}
+ type C[Z <: A] = Z#B[String] // nuh-uh!
+} \ No newline at end of file
diff --git a/test/files/neg/t3399.check b/test/files/neg/t3399.check
new file mode 100644
index 0000000000..eb6c679704
--- /dev/null
+++ b/test/files/neg/t3399.check
@@ -0,0 +1,4 @@
+t3399.scala:23: error: could not find implicit value for parameter e: =:=[Nats.Add[Nats._1,Nats._1],Nats._1]
+ implicitly[ Add[_1, _1] =:= _1]
+ ^
+one error found
diff --git a/test/files/neg/t3399.scala b/test/files/neg/t3399.scala
new file mode 100644
index 0000000000..3edaa0724f
--- /dev/null
+++ b/test/files/neg/t3399.scala
@@ -0,0 +1,24 @@
+object Nats {
+ sealed trait Nat {
+ // fold right on N, N-1, ..., 1
+ type FoldR[Init <: Type, Type, F <: Fold[Nat, Type]] <: Type
+ }
+ sealed trait _0 extends Nat {
+ type FoldR[Init <: Type, Type, F <: Fold[Nat, Type]] = Init
+ }
+ sealed trait Succ[N <: Nat] extends Nat {
+ type FoldR[Init <: Type, Type, F <: Fold[Nat, Type]] =
+ F#Apply[Succ[N], N#FoldR[Init, Type, F]]
+ }
+
+ type Add[A <: Nat, B <: Nat] = A#FoldR[B, Nat, Inc]
+ trait Fold[-Elem, Value] {
+ type Apply[N <: Elem, Acc <: Value] <: Value
+ }
+ type Inc = Fold[Any, Nat] {
+ type Apply[N <: Any, Acc <: Nat] = Succ[Acc]
+ }
+
+ type _1 = Succ[_0]
+ implicitly[ Add[_1, _1] =:= _1]
+} \ No newline at end of file
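
Aside: the fold really computes Add[_1, _1] = Succ[Succ[_0]], which is why no evidence for =:= _1 can be found. The corresponding positive check (sketch):

object NatsCheck {
  import Nats._
  type _2 = Succ[_1]
  implicitly[ Add[_1, _1] =:= _2 ]   // resolves: one plus one normalizes to _2
}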
diff --git a/test/files/neg/t3507.check b/test/files/neg/t3507.check
new file mode 100644
index 0000000000..1246a20d09
--- /dev/null
+++ b/test/files/neg/t3507.check
@@ -0,0 +1,4 @@
+t3507.scala:13: error: could not find implicit value for evidence parameter of type Manifest[object _1.b.c]
+ mani/*[object _1.b.c]*/(c) // kaboom in manifestOfType / TreeGen.mkAttributedQualifier
+ ^
+one error found
diff --git a/test/files/neg/t3507.scala b/test/files/neg/t3507.scala
new file mode 100644
index 0000000000..9a8c7c5462
--- /dev/null
+++ b/test/files/neg/t3507.scala
@@ -0,0 +1,15 @@
+class A {
+ object b {
+ object c
+ }
+ def m = b.c
+}
+
+object Test {
+ var a: A = new A // mutable
+ val c /*: object _1.b.c forSome { val _1: A } */ = a.m // widening using existential
+
+ def mani[T: Manifest](x: T) = ()
+ mani/*[object _1.b.c]*/(c) // kaboom in manifestOfType / TreeGen.mkAttributedQualifier
+ // --> _1 is not in scope here
+} \ No newline at end of file
diff --git a/test/files/neg/t3604.check b/test/files/neg/t3604.check
new file mode 100644
index 0000000000..b07c5c9c71
--- /dev/null
+++ b/test/files/neg/t3604.check
@@ -0,0 +1,7 @@
+t3604.scala:3: error: in XML literal: expected closing tag of abbr
+ <abbr></div>
+ ^
+t3604.scala:3: error: start tag was here: abbr>
+ <abbr></div>
+ ^
+two errors found
diff --git a/test/files/neg/t3604.scala b/test/files/neg/t3604.scala
new file mode 100644
index 0000000000..f890a58e58
--- /dev/null
+++ b/test/files/neg/t3604.scala
@@ -0,0 +1,6 @@
+object Main {
+ <div>
+ <abbr></div>
+ { "..." }
+ </div>
+}
diff --git a/test/files/neg/t3653.check b/test/files/neg/t3653.check
new file mode 100644
index 0000000000..ac6e2ca9dc
--- /dev/null
+++ b/test/files/neg/t3653.check
@@ -0,0 +1,7 @@
+t3653.scala:3: error: double definition:
+method x:(implicit x: Int)Int and
+method x:(i: Int)Int at line 2
+have same type after erasure: (x: Int)Int
+ def x(implicit x: Int) = 5
+ ^
+one error found
diff --git a/test/files/neg/t3653.scala b/test/files/neg/t3653.scala
new file mode 100644
index 0000000000..96cf96008a
--- /dev/null
+++ b/test/files/neg/t3653.scala
@@ -0,0 +1,4 @@
+class B {
+ def x(i: Int) = 3
+ def x(implicit x: Int) = 5
+} \ No newline at end of file
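
Aside: an implicit parameter list is erased like any other, so both methods end up with the JVM signature (I)I. Renaming one of them is the usual fix (sketch, illustrative name):

class B2 {
  def x(i: Int) = 3
  def xImplicitly(implicit x: Int) = 5   // distinct name, no erasure clash
}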
diff --git a/test/files/neg/t742.check b/test/files/neg/t742.check
new file mode 100644
index 0000000000..f587948ef1
--- /dev/null
+++ b/test/files/neg/t742.check
@@ -0,0 +1,5 @@
+t742.scala:5: error: kinds of the type arguments (Crash._1,Crash._2,Any) do not conform to the expected kinds of the type parameters (type m,type n,type z).
+Crash._1's type parameters do not match type m's expected parameters: type s1 has one type parameter, but type n has two
+ type p = mul[_1, _2, Any] // mul[_1, _1, Any] needs -Yrecursion
+ ^
+one error found
diff --git a/test/files/neg/t742.scala b/test/files/neg/t742.scala
new file mode 100644
index 0000000000..bb1c2f85ea
--- /dev/null
+++ b/test/files/neg/t742.scala
@@ -0,0 +1,8 @@
+object Crash {
+ type mul[m[n[s[_], z], z], n[s[_], z], z] = m[n, z]
+ type _1[s1[_], z1] = s1[z1]
+ type _2[s1[_], z1] = s1[z1]
+ type p = mul[_1, _2, Any] // mul[_1, _1, Any] needs -Yrecursion
+ // _1[_2, Zero]
+ // _2[Zero]
+} \ No newline at end of file
diff --git a/test/files/neg/tailrec-2.check b/test/files/neg/tailrec-2.check
new file mode 100644
index 0000000000..ab6733946d
--- /dev/null
+++ b/test/files/neg/tailrec-2.check
@@ -0,0 +1,4 @@
+tailrec-2.scala:6: error: could not optimize @tailrec annotated method: it contains a recursive call targetting a supertype
+ @annotation.tailrec final def f[B >: A](mem: List[B]): List[B] = (null: Super[A]).f(mem)
+ ^
+one error found
diff --git a/test/files/neg/tailrec-2.scala b/test/files/neg/tailrec-2.scala
new file mode 100644
index 0000000000..4388815a06
--- /dev/null
+++ b/test/files/neg/tailrec-2.scala
@@ -0,0 +1,26 @@
+sealed abstract class Super[+A] {
+ def f[B >: A](mem: List[B]) : List[B]
+}
+// This one should fail, target is a supertype
+class Bop1[+A](val element: A) extends Super[A] {
+ @annotation.tailrec final def f[B >: A](mem: List[B]): List[B] = (null: Super[A]).f(mem)
+}
+// These succeed
+class Bop2[+A](val element: A) extends Super[A] {
+ @annotation.tailrec final def f[B >: A](mem: List[B]): List[B] = (null: Bop2[A]).f(mem)
+}
+object Bop3 extends Super[Nothing] {
+ @annotation.tailrec final def f[B](mem: List[B]): List[B] = (null: Bop3.type).f(mem)
+}
+class Bop4[+A](val element: A) extends Super[A] {
+ @annotation.tailrec final def f[B >: A](mem: List[B]): List[B] = Other.f[A].f(mem)
+}
+
+object Other {
+ def f[T] : Bop4[T] = error("")
+}
+
+object Bop {
+ def m1[A] : Super[A] = error("")
+ def m2[A] : Bop2[A] = error("")
+} \ No newline at end of file
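
Aside: @tailrec certifies only self-calls on the same receiver and type, which is exactly what the Bop cases above probe. For contrast, a recursion the optimizer does accept (sketch):

object TailrecOk {
  @annotation.tailrec
  final def len[A](xs: List[A], acc: Int): Int = xs match {
    case Nil       => acc
    case _ :: rest => len(rest, acc + 1)   // tail call to the same method
  }
}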
diff --git a/test/files/pos/bug0013.scala b/test/files/pos/bug0013.scala
new file mode 100644
index 0000000000..999a2ab61c
--- /dev/null
+++ b/test/files/pos/bug0013.scala
@@ -0,0 +1,31 @@
+// covariant linked list
+abstract class M { self =>
+
+ type T
+ final type selfType = M {type T <: self.T}
+ type actualSelfType >: self.type <: selfType
+
+ def next: selfType
+
+ // I don't understand why this doesn't compile, but that's a separate matter
+ // error: method all2 cannot be accessed in M.this.selfType
+ // because its instance type => Stream[M{type T <: M.this.selfType#T}]
+ // contains a malformed type: M.this.selfType#T
+ // def all2: Stream[M {type T <: self.T}] = Stream.cons(self: actualSelfType, next.all2)
+
+
+ // compiles successfully
+ // def all3: Stream[M {type T <: self.T}] = all3Impl(self: actualSelfType)
+ // private def all3Impl(first: M {type T <: self.T}): Stream[M {type T <: self.T}] = Stream.cons(first, all3Impl(first.next))
+
+
+
+ def all4: Stream[M {type T <: self.T}] = Unrelated.all4Impl[T](self: actualSelfType)
+}
+
+object Unrelated {
+ def all4Impl[U](first: M {type T <: U}): Stream[M {type T <: U}] = Stream.cons(first, all4Impl(first.next))
+
+ // compiles successfully
+ // def all4Impl[U](first: M {type T <: U}): Stream[M {type T <: U}] = Stream.cons(first, all4Impl[U](first.next))
+} \ No newline at end of file
diff --git a/test/files/pos/bug0095.scala b/test/files/pos/bug0095.scala
new file mode 100644
index 0000000000..71386cf5c7
--- /dev/null
+++ b/test/files/pos/bug0095.scala
@@ -0,0 +1,15 @@
+class ParseResult[+T]
+case class Success[+T](t: T) extends ParseResult[T]
+
+abstract class Nonterminal[Output] {
+
+ type SubNonterminal = Nonterminal[T] forSome { type T <: Output }
+
+ def parse: ParseResult[Output]
+
+ def parse1(nts: List[SubNonterminal]): ParseResult[Output] =
+ nts match {
+ case nt::nts => nt.parse match { case Success(so) => Success(so) }
+ case Nil => throw new Error
+ }
+}
diff --git a/test/files/pos/bug1974.scala b/test/files/pos/bug1974.scala
new file mode 100644
index 0000000000..a0daa13c21
--- /dev/null
+++ b/test/files/pos/bug1974.scala
@@ -0,0 +1,20 @@
+object Broken {
+ private var map = Map[Class[_], String]()
+
+ def addToMap(c : Class[_], s : String) = map += (c -> s)
+ def fetch(c : Class[_]) = map(c)
+}
+
+object Works {
+ private var map = Map[Class[_], String]()
+
+ def addToMap(c : Class[_], s : String) = map += ((c, s))
+ def fetch(c : Class[_]) = map(c)
+}
+
+object Works2 {
+ private var map = Map[Class[_], String]()
+
+ def addToMap(c : Class[_], s : String) = map += ((c : Class[_]) -> s)
+ def fetch(c : Class[_]) = map(c)
+} \ No newline at end of file
diff --git a/test/files/pos/bug261-ab.scala b/test/files/pos/bug261-ab.scala
new file mode 100644
index 0000000000..80699e692c
--- /dev/null
+++ b/test/files/pos/bug261-ab.scala
@@ -0,0 +1,9 @@
+trait A { val foo: String = "A" }
+trait B {
+ private val foo: String = "B"
+ def f = println(foo)
+}
+object Test extends Application with B with A {
+ println(foo) // prints "A", as expected
+ f // prints "B", as expected
+} \ No newline at end of file
diff --git a/test/files/pos/bug261-ba.scala b/test/files/pos/bug261-ba.scala
new file mode 100644
index 0000000000..c66a68d101
--- /dev/null
+++ b/test/files/pos/bug261-ba.scala
@@ -0,0 +1,9 @@
+trait B {
+ private val foo: String = "B"
+ def f = println(foo)
+}
+trait A { val foo: String = "A" }
+object Test extends Application with B with A {
+ println(foo) // prints "A", as expected
+ f // prints "B", as expected
+} \ No newline at end of file
diff --git a/test/files/pos/bug3234.flags b/test/files/pos/bug3234.flags
new file mode 100644
index 0000000000..c9cefdc4b9
--- /dev/null
+++ b/test/files/pos/bug3234.flags
@@ -0,0 +1 @@
+-Yinline -Xfatal-warnings \ No newline at end of file
diff --git a/test/files/pos/bug3234.scala b/test/files/pos/bug3234.scala
new file mode 100644
index 0000000000..1553f1fa05
--- /dev/null
+++ b/test/files/pos/bug3234.scala
@@ -0,0 +1,19 @@
+trait Trait1 {
+ // need more work before this one works
+ // @inline
+ def foo2(n: Int) = n*n
+}
+
+trait Trait2 {
+ @inline def foo3(n: Int) = 1
+}
+
+class Base extends Trait1 {
+ @inline def foo(n: Int) = n
+}
+
+object Test extends Base with Trait2 {
+ def main(args: Array[String]) = {
+ println(foo(42) + foo2(11) + foo3(2))
+ }
+} \ No newline at end of file
diff --git a/test/files/pos/bug3440.scala b/test/files/pos/bug3440.scala
new file mode 100644
index 0000000000..026abfea1f
--- /dev/null
+++ b/test/files/pos/bug3440.scala
@@ -0,0 +1,18 @@
+object test {
+ abstract class SampleFormat1 {
+ def readerFactory: Any
+ }
+
+ case object Int8 extends SampleFormat1 {
+ def readerFactory = error("")
+ }
+ case object Int16 extends SampleFormat1 {
+ def readerFactory = error("")
+ }
+
+ (new {}: Any) match {
+ case 8 => Int8
+ case 16 => Int16
+ case _ => error("")
+ }
+} \ No newline at end of file
diff --git a/test/files/pos/bug3570.scala b/test/files/pos/bug3570.scala
new file mode 100644
index 0000000000..8921f83b2a
--- /dev/null
+++ b/test/files/pos/bug3570.scala
@@ -0,0 +1,7 @@
+class test {
+ object Break extends Throwable
+ def break = throw Break
+ def block(x: => Unit) {
+ try { x } catch { case e: Break.type => }
+ }
+}
diff --git a/test/files/pos/t1263/Test.java b/test/files/pos/t1263/Test.java
index 0eb43e881a..6ca88c21a1 100644
--- a/test/files/pos/t1263/Test.java
+++ b/test/files/pos/t1263/Test.java
@@ -5,7 +5,7 @@ import java.rmi.RemoteException;
import test.Map;
public class Test implements Map<String, String> {
- public Map<String, String>.MapTo plus(String o) {
+ public Map.MapTo plus(String o) {
return null;
}
diff --git a/test/files/pos/t2331.scala b/test/files/pos/t2331.scala
new file mode 100644
index 0000000000..9a15b5c2a9
--- /dev/null
+++ b/test/files/pos/t2331.scala
@@ -0,0 +1,11 @@
+trait C {
+ def m[T]: T
+}
+
+object Test {
+ val o /*: C --> no crash*/ = new C {
+ def m[T]: Nothing /*: T --> no crash*/ = error("omitted")
+ }
+
+ o.m[Nothing]
+} \ No newline at end of file
diff --git a/test/files/pos/t2413/TestJava.java b/test/files/pos/t2413/TestJava.java
new file mode 100644
index 0000000000..252c01fbc0
--- /dev/null
+++ b/test/files/pos/t2413/TestJava.java
@@ -0,0 +1,7 @@
+package pack;
+
+public class TestJava {
+ protected String repeatParam(String ... items) {
+ return "nothing";
+ }
+}
diff --git a/test/files/pos/t2413/TestScalac.scala b/test/files/pos/t2413/TestScalac.scala
new file mode 100644
index 0000000000..098e852dd7
--- /dev/null
+++ b/test/files/pos/t2413/TestScalac.scala
@@ -0,0 +1,23 @@
+import pack.TestJava
+
+class Foo extends TestJava {
+
+ // THIS METHOD YIELDS TO CRASH
+/* def foomethod : Option[String] => Unit = {
+ case None =>
+ val path = repeatParam("s","a","b","c")
+ ()
+ case Some(error) =>
+ ()
+ }
+
+ // THIS IS OK
+ def foomethod2 : String = repeatParam("s","a");
+
+ // THIS IS OK
+ val aVal = repeatParam("1","2","3") */
+
+ // THIS YIELDS TO CRASH
+ for (a <- 1 to 4 ; anotherVal = repeatParam("1","2","3"))
+ yield anotherVal
+}
diff --git a/test/files/pos/t3249/Test.java b/test/files/pos/t3249/Test.java
new file mode 100644
index 0000000000..4cc7cb2ab5
--- /dev/null
+++ b/test/files/pos/t3249/Test.java
@@ -0,0 +1,5 @@
+public class Test {
+ public static void meh() {
+ new A<Integer>().f();
+ }
+} \ No newline at end of file
diff --git a/test/files/pos/t3249/a.scala b/test/files/pos/t3249/a.scala
new file mode 100644
index 0000000000..0394464549
--- /dev/null
+++ b/test/files/pos/t3249/a.scala
@@ -0,0 +1,11 @@
+class A[U] { def f[T] = { class X extends A[T] } }
+
+
+/*
+$ scalac a.scala
+$ javac -cp .:$SCALA_HOME/lib/scala-library.jar -Xprint 'A$X$1'
+
+ public class X$1 extends A<java.lang.Object> implements scala.ScalaObject {
+ public X$1(A<U> null);
+ }
+*/ \ No newline at end of file
diff --git a/test/files/pos/t3374.scala b/test/files/pos/t3374.scala
new file mode 100644
index 0000000000..4c0293181d
--- /dev/null
+++ b/test/files/pos/t3374.scala
@@ -0,0 +1,6 @@
+trait Parent {
+ type Test[A, H[B <: A]]
+}
+trait Sub extends Parent {
+ type Test[AS, HS[B <: AS]] = AS
+} \ No newline at end of file
diff --git a/test/files/pos/t3477.scala b/test/files/pos/t3477.scala
new file mode 100644
index 0000000000..660aa55736
--- /dev/null
+++ b/test/files/pos/t3477.scala
@@ -0,0 +1,7 @@
+class J3 {
+ def f[K, K1 >: K, V](x: Map[K1, V]): Map[K, V] = error("")
+}
+
+object Test {
+ (new J3).f(Map[Int, Int]())
+} \ No newline at end of file
diff --git a/test/files/pos/t3486/JTest.java b/test/files/pos/t3486/JTest.java
new file mode 100644
index 0000000000..0bf388b72d
--- /dev/null
+++ b/test/files/pos/t3486/JTest.java
@@ -0,0 +1,3 @@
+public class JTest<A> extends T2<A> {
+ public A m( A a ) { return a; }
+} \ No newline at end of file
diff --git a/test/files/pos/t3486/test.scala b/test/files/pos/t3486/test.scala
new file mode 100644
index 0000000000..544232b0d1
--- /dev/null
+++ b/test/files/pos/t3486/test.scala
@@ -0,0 +1,6 @@
+trait Test[A] {
+ def m( a: A ): A
+ def specified(a:A):A = a
+}
+
+abstract class T2[A] extends Test[A] \ No newline at end of file
diff --git a/test/files/pos/t3494.scala b/test/files/pos/t3494.scala
new file mode 100644
index 0000000000..35a4bcde5d
--- /dev/null
+++ b/test/files/pos/t3494.scala
@@ -0,0 +1,7 @@
+object Test {
+ def f[T](xs: T*) = ()
+
+ val x = "abc"
+
+ f[x.type](x)
+} \ No newline at end of file
diff --git a/test/files/pos/t3622/test/AsyncTask.java b/test/files/pos/t3622/test/AsyncTask.java
new file mode 100644
index 0000000000..cfcea3fe1a
--- /dev/null
+++ b/test/files/pos/t3622/test/AsyncTask.java
@@ -0,0 +1,5 @@
+package test;
+
+public abstract class AsyncTask<Params, Progress, Result> {
+ protected abstract Result doInBackground(Params... args);
+} \ No newline at end of file
diff --git a/test/files/pos/t3622/test/MyAsyncTask.java b/test/files/pos/t3622/test/MyAsyncTask.java
new file mode 100644
index 0000000000..9ef4947052
--- /dev/null
+++ b/test/files/pos/t3622/test/MyAsyncTask.java
@@ -0,0 +1,9 @@
+package test;
+
+public abstract class MyAsyncTask extends AsyncTask<String, String, String> {
+ protected abstract String doInBackground1(String[] args);
+ @Override
+ protected String doInBackground(String... args) {
+ return doInBackground1(new String[]{"dummy"});
+ }
+} \ No newline at end of file
diff --git a/test/files/pos/t3622/test/Test.scala b/test/files/pos/t3622/test/Test.scala
new file mode 100644
index 0000000000..fb82c581f9
--- /dev/null
+++ b/test/files/pos/t3622/test/Test.scala
@@ -0,0 +1,5 @@
+package test
+
+class Test extends MyAsyncTask {
+ protected[test] def doInBackground1(args: Array[String]): String = ""
+} \ No newline at end of file
diff --git a/test/files/run/bug1766.scala b/test/files/run/bug1766.scala
new file mode 100644
index 0000000000..901f9ae664
--- /dev/null
+++ b/test/files/run/bug1766.scala
@@ -0,0 +1,16 @@
+object Test extends Application {
+
+ class C(s: String) {
+
+ def this(i: Int) = this("bar")
+
+ def f = {
+ val v: { def n: Int } = new { val n = 3 }
+ v.n
+ }
+
+ }
+
+ new C("foo").f
+
+}
diff --git a/test/files/run/bug2106.flags b/test/files/run/bug2106.flags
new file mode 100644
index 0000000000..eb4d19bcb9
--- /dev/null
+++ b/test/files/run/bug2106.flags
@@ -0,0 +1 @@
+-optimise \ No newline at end of file
diff --git a/test/files/run/bug2106.scala b/test/files/run/bug2106.scala
new file mode 100644
index 0000000000..e8124dabab
--- /dev/null
+++ b/test/files/run/bug2106.scala
@@ -0,0 +1,8 @@
+class A extends Cloneable {
+ @inline final def foo = clone()
+}
+
+object Test {
+ val x = new A
+ def main(args: Array[String]) = x.foo
+}
diff --git a/test/files/run/bug3616.check b/test/files/run/bug3616.check
new file mode 100644
index 0000000000..f31e21baff
--- /dev/null
+++ b/test/files/run/bug3616.check
@@ -0,0 +1 @@
+Fruit.ValueSet(A, B, C)
diff --git a/test/files/run/bug3616.scala b/test/files/run/bug3616.scala
new file mode 100644
index 0000000000..777b97f9ab
--- /dev/null
+++ b/test/files/run/bug3616.scala
@@ -0,0 +1,12 @@
+object X extends Enumeration {
+ val Y = Value
+}
+object Fruit extends Enumeration {
+ val x = X.Y
+ val A,B,C = Value
+}
+object Test {
+ def main(args: Array[String]): Unit = {
+ println(Fruit.values)
+ }
+}
diff --git a/test/files/run/colltest1.scala b/test/files/run/colltest1.scala
index dd9ecdbd5e..943fe4c4e7 100644
--- a/test/files/run/colltest1.scala
+++ b/test/files/run/colltest1.scala
@@ -29,8 +29,8 @@ object Test extends Application {
val (o, e) = ten.partition(_ % 2 == 0)
assert(o.size == e.size)
val gs = ten groupBy (x => x / 4)
- val vs1 = (for (k <- gs.keysIterator; v <- gs(k).toIterable.iterator) yield v).toList.sorted
- val vs2 = gs.values.toList.flatten.sorted
+ val vs1 = (for (k <- gs.keysIterator; v <- gs(k).toIterable.iterator) yield v).toList
+ val vs2 = gs.values.toList.flatten
// val vs2 = gs.values.toList flatMap (xs => xs)
assert(ten.head == 1)
assert(ten.tail.head == 2)
diff --git a/test/files/run/names-defaults.scala b/test/files/run/names-defaults.scala
index f197d2ff11..8ddfcd950d 100644
--- a/test/files/run/names-defaults.scala
+++ b/test/files/run/names-defaults.scala
@@ -345,6 +345,11 @@ object Test extends Application {
(new t3338.Test).a
+ // subclassing and defaults in both class constructors
+ class CBLAH(val x: Int = 1)
+ class DBLAH(val y: String = "2") extends CBLAH()
+ (new DBLAH())
+
// DEFINITIONS
def test1(a: Int, b: String) = println(a +": "+ b)
diff --git a/test/files/run/slice-strings.scala b/test/files/run/slice-strings.scala
new file mode 100644
index 0000000000..129314387a
--- /dev/null
+++ b/test/files/run/slice-strings.scala
@@ -0,0 +1,19 @@
+object Test {
+ def cmp(x1: String) = {
+ val x2 = x1.toList
+
+ -10 to 10 foreach { i =>
+ assert(x1.take(i) == x2.take(i).mkString)
+ assert(x1.drop(i) == x2.drop(i).mkString)
+ assert(x1.takeRight(i) == x2.takeRight(i).mkString)
+ assert(x1.dropRight(i) == x2.dropRight(i).mkString)
+ }
+ for (idx1 <- -3 to 3 ; idx2 <- -3 to 3) {
+ assert(x1.slice(idx1, idx2) == x2.slice(idx1, idx2).mkString)
+ }
+ }
+
+ def main(args: Array[String]): Unit = {
+ cmp("abcde")
+ }
+}
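
Aside: the loop leans on the convention that out-of-range arguments clamp rather than throw, for strings and lists alike. Spelled out on a few cases (sketch):

object ClampDemo {
  def main(args: Array[String]) {
    assert("abcde".take(-3) == "")        // negative count clamps to empty
    assert("abcde".drop(10) == "")        // past-the-end drop clamps to empty
    assert("abcde".slice(-2, 3) == "abc") // negative start clamps to 0
  }
}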
diff --git a/test/files/run/t0432.scala b/test/files/run/t0432.scala
new file mode 100644
index 0000000000..8ba9015d81
--- /dev/null
+++ b/test/files/run/t0432.scala
@@ -0,0 +1,15 @@
+object Test {
+ type valueType = { def value: this.type }
+
+ class StringValue(x: String) {
+ def value: this.type = this
+ }
+
+ def m(x: valueType) = x.value
+
+ val s = new StringValue("hei")
+
+ def main(args: Array[String]) {
+ m(s)
+ }
+}
diff --git a/test/files/run/t3493.scala b/test/files/run/t3493.scala
new file mode 100644
index 0000000000..aafe7a3a4a
--- /dev/null
+++ b/test/files/run/t3493.scala
@@ -0,0 +1,15 @@
+
+
+
+
+object Test {
+
+ def main(args: Array[String]) {
+ import scala.collection.immutable._
+ val x = TreeSet("a", "b", "c", "d")
+ val x2 = x + "e"
+ assert(x2.toString == "TreeSet(a, b, c, d, e)")
+ assert(x2.toString == runtime.ScalaRunTime.stringOf(x2).trim)
+ }
+
+}
diff --git a/test/files/run/t3580.scala b/test/files/run/t3580.scala
new file mode 100644
index 0000000000..50ff6c4551
--- /dev/null
+++ b/test/files/run/t3580.scala
@@ -0,0 +1,17 @@
+
+
+
+
+
+object Test {
+
+ class Empty extends Traversable[Nothing] {
+ def foreach[U](f: Nothing => U) {}
+ }
+
+ def main(args: Array[String]) {
+ val t = new Empty
+ t.toStream
+ }
+
+}
diff --git a/test/files/run/t3603.scala b/test/files/run/t3603.scala
new file mode 100644
index 0000000000..a89cb7080a
--- /dev/null
+++ b/test/files/run/t3603.scala
@@ -0,0 +1,18 @@
+
+
+
+object Test {
+
+ def main(args: Array[String]) {
+ import collection.immutable._
+
+ val intmap = IntMap(1 -> 1, 2 -> 2)
+ val intres = intmap.map { case (a, b) => (a, b.toString) }
+ assert(intres.isInstanceOf[IntMap[_]])
+
+ val longmap = LongMap(1L -> 1, 2L -> 2)
+ val longres = longmap.map { case (a, b) => (a, b.toString) }
+ assert(longres.isInstanceOf[LongMap[_]])
+ }
+
+}
diff --git a/test/files/run/t3645.scala b/test/files/run/t3645.scala
new file mode 100644
index 0000000000..af2543377b
--- /dev/null
+++ b/test/files/run/t3645.scala
@@ -0,0 +1,6 @@
+object Test {
+ def main(args: Array[String]) {
+ val s = Stream.tabulate(5)(x => x+2)
+ assert( s.toList == List(2,3,4,5,6) )
+ }
+}
diff --git a/test/files/run/xml-loop-bug.scala b/test/files/run/xml-loop-bug.scala
new file mode 100644
index 0000000000..3894779f7c
--- /dev/null
+++ b/test/files/run/xml-loop-bug.scala
@@ -0,0 +1,6 @@
+object Test {
+ def main(args: Array[String]): Unit = {
+ scala.tools.nsc.io.NullPrintStream.setOutAndErr()
+ scala.xml.parsing.ConstructingParser.fromSource(scala.io.Source.fromString("<!DOCTYPE xmeml SYSTEM> <xmeml> <sequence> </sequence> </xmeml> "), true).document.docElem
+ }
+}
diff --git a/test/pending/continuations-neg/t3628.check b/test/pending/continuations-neg/t3628.check
new file mode 100644
index 0000000000..4df94cdfcc
--- /dev/null
+++ b/test/pending/continuations-neg/t3628.check
@@ -0,0 +1,3 @@
+pending/continuations-neg/t3628.scala:4: error: not found: type Actor
+ val impl: Actor = actor {
+ ^
diff --git a/test/pending/continuations-neg/t3628.scala b/test/pending/continuations-neg/t3628.scala
new file mode 100644
index 0000000000..c56e7752c4
--- /dev/null
+++ b/test/pending/continuations-neg/t3628.scala
@@ -0,0 +1,11 @@
+import scala.actors.Actor._
+
+object Test {
+ val impl: Actor = actor {
+ loop {
+ react {
+ case 1 => impl ! 2
+ }
+ }
+ }
+}
diff --git a/test/pending/continuations-pos/t3620.scala b/test/pending/continuations-pos/t3620.scala
new file mode 100644
index 0000000000..8496ae2858
--- /dev/null
+++ b/test/pending/continuations-pos/t3620.scala
@@ -0,0 +1,73 @@
+import scala.collection.mutable.HashMap
+import scala.util.continuations._
+
+object Test extends Application {
+
+ class Store[K,V] {
+
+ trait Waiting {
+ def key: K
+ def inform(value: V): Unit
+ }
+
+ private val map = new HashMap[K, V]
+ private var waiting: List[Waiting] = Nil
+
+ def waitFor(k: K, f: (V => Unit)) {
+ map.get(k) match {
+ case Some(v) => f(v)
+ case None => {
+ val w = new Waiting {
+ def key = k
+ def inform(v: V) = f(v)
+ }
+ waiting = w :: waiting
+ }
+ }
+ }
+
+
+ def add(key: K, value: V) {
+ map(key) = value
+ val p = waiting.partition(_.key == key)
+ waiting = p._2
+ p._1.foreach(_.inform(value))
+ }
+
+ def required(key: K) = {
+ shift {
+ c: (V => Unit) => {
+ waitFor(key, c)
+ }
+ }
+ }
+
+ def option(key: Option[K]) = {
+ shift {
+ c: (Option[V] => Unit) => {
+ key match {
+ case Some(key) => waitFor(key, (v: V) => c(Some(v)))
+ case None => c(None)
+ }
+
+ }
+ }
+ }
+
+ }
+
+ val store = new Store[String, Int]
+
+ def test(p: Option[String]): Unit = {
+ reset {
+ // uncommenting the following two lines makes the compiler happy!
+// val o = store.option(p)
+// println(o)
+ val i = store.option(p).getOrElse(1)
+ println(i)
+ }
+ }
+
+ test(Some("a"))
+
+}
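
Aside: for orientation, the smallest shift/reset round trip (needs the continuations compiler plugin enabled; sketch):

import scala.util.continuations._

object CpsDemo {
  def main(args: Array[String]) {
    val r = reset {
      1 + shift { (k: Int => Int) => k(k(10)) }
    }
    println(r)   // prints 12: the captured continuation `1 + _` runs twice
  }
}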
diff --git a/test/pending/jvm/serialization.check b/test/pending/jvm/serialization.check
deleted file mode 100644
index 397578bcba..0000000000
--- a/test/pending/jvm/serialization.check
+++ /dev/null
@@ -1,198 +0,0 @@
-x0 = List(1, 2, 3)
-y0 = List(1, 2, 3)
-x0 eq y0: false, y0 eq x0: false
-x0 equals y0: true, y0 equals x0: true
-
-x1 = List()
-y1 = List()
-x1 eq y1: true, y1 eq x1: true
-
-x2 = None
-y2 = None
-x2 eq y2: true, y2 eq x2: true
-
-x3 = Array[1,2,3]
-y3 = Array[1,2,3]
-arrayEquals(x3, y3): true
-
-x4 = <na>
-y4 = <na>
-x4(2): 4 - y4(2): 4
-
-x5 = 'hello
-y5 = 'hello
-x5 eq y5: true, y5 eq x5: true
-x5 equals y5: true, y5 equals x5: true
-
-x6 = (BannerLimit,12345)
-y6 = (BannerLimit,12345)
-x6 eq y6: false, y6 eq x6: false
-x6 equals y6: true, y6 equals x6: true
-
-x7 = RoundingMode
-y7 = RoundingMode
-x7 eq y7: true, y7 eq x7: true
-x7 equals y7: true, y7 equals x7: true
-
-x8 = WeekDay
-y8 = WeekDay
-x8 eq y8: true, y8 eq x8: true
-x8 equals y8: true, y8 equals x8: true
-
-x9 = UP
-y9 = UP
-x9 eq y9: true, y9 eq x9: true
-x9 equals y9: true, y9 equals x9: true
-
-x10 = WeekDay(0)
-y10 = WeekDay(0)
-x10 eq y10: true, y10 eq x10: true
-x10 equals y10: true, y10 equals x10: true
-
-x9 eq x10: false, x10 eq x9: false
-x9 equals x10: false, x10 equals x9: false
-x9 eq y10: false, y10 eq x9: false
-x9 equals y10: false, y10 equals x9: false
-
-x = List((buffers,20), (layers,2), (title,3))
-y = List((buffers,20), (layers,2), (title,3))
-x equals y: true, y equals x: true
-
-x = Map(2 -> C, 1 -> B, 0 -> A)
-y = Map(2 -> C, 1 -> B, 0 -> A)
-x equals y: true, y equals x: true
-
-x = Map(buffers -> 20, layers -> 2, title -> 3)
-y = Map(buffers -> 20, layers -> 2, title -> 3)
-x equals y: true, y equals x: true
-
-x = Set(2, 3)
-y = Set(2, 3)
-x equals y: true, y equals x: true
-
-x = Set(5, 3)
-y = Set(5, 3)
-x equals y: true, y equals x: true
-
-x = Queue(a, b, c)
-y = Queue(a, b, c)
-x equals y: true, y equals x: true
-
-x = Stack(a, b, c)
-y = Stack(a, b, c)
-x equals y: true, y equals x: true
-
-x = Map(42 -> FortyTwo)
-y = Map(42 -> FortyTwo)
-x equals y: true, y equals x: true
-
-x = TreeSet(0, 2)
-y = TreeSet(0, 2)
-x equals y: true, y equals x: true
-
-x = Vector(1, 2, 3)
-y = Vector(1, 2, 3)
-x equals y: true, y equals x: true
-
-x = ArrayBuffer(one, two)
-y = ArrayBuffer(one, two)
-x equals y: true, y equals x: true
-
-x = Map(title -> 3, buffers -> 20, layers -> 2)
-y = Map(title -> 3, buffers -> 20, layers -> 2)
-x equals y: true, y equals x: true
-
-x = Set(0, 8, 9)
-y = Set(0, 8, 9)
-x equals y: true, y equals x: true
-
-x = Set(layers, buffers, title)
-y = Set(layers, buffers, title)
-x equals y: true, y equals x: true
-
-x = LinkedList(2, 3)
-y = LinkedList(2, 3)
-x equals y: true, y equals x: true
-
-x = Queue(20, 2, 3)
-y = Queue(20, 2, 3)
-x equals y: true, y equals x: true
-
-x = Stack(3, 2, 20)
-y = Stack(3, 2, 20)
-x equals y: true, y equals x: true
-
-x = ListBuffer(white, black)
-y = ListBuffer(white, black)
-x equals y: true, y equals x: true
-
-x = History((Feed,hello))
-y = History((Feed,hello))
-x equals y: true, y equals x: true
-
-x = <html><title>title</title><body></body></html>
-y = <html><title>title</title><body></body></html>
-x equals y: true, y equals x: true
-
-x = <html>
- <body>
- <table cellpadding="2" cellspacing="0">
- <tr>
- <th>Last Name</th>
- <th>First Name</th>
- </tr>
- <tr>
- <td> Tom </td>
- <td> 20 </td>
- </tr><tr>
- <td> Bob </td>
- <td> 22 </td>
- </tr><tr>
- <td> James </td>
- <td> 19 </td>
- </tr>
- </table>
- </body>
- </html>
-y = <html>
- <body>
- <table cellpadding="2" cellspacing="0">
- <tr>
- <th>Last Name</th>
- <th>First Name</th>
- </tr>
- <tr>
- <td> Tom </td>
- <td> 20 </td>
- </tr><tr>
- <td> Bob </td>
- <td> 22 </td>
- </tr><tr>
- <td> James </td>
- <td> 19 </td>
- </tr>
- </table>
- </body>
- </html>
-x equals y: true, y equals x: true
-
-x = Tim
-y = Tim
-x equals y: true, y equals x: true
-
-x = Bob
-y = Bob
-x equals y: true, y equals x: true
-
-x = John
-y = John
-x equals y: true, y equals x: true
-
-x = Bill
-y = Bill
-x equals y: true, y equals x: true
-
-x = Paul
-y = Paul
-x equals y: true, y equals x: true
-
diff --git a/test/pending/jvm/serialization.scala b/test/pending/jvm/serialization.scala
deleted file mode 100644
index ff9413ae4b..0000000000
--- a/test/pending/jvm/serialization.scala
+++ /dev/null
@@ -1,411 +0,0 @@
-//############################################################################
-// Serialization
-//############################################################################
-
-import java.lang.System
-
-object Serialize {
- @throws(classOf[java.io.IOException])
- def write[A](o: A): Array[Byte] = {
- val ba = new java.io.ByteArrayOutputStream(512)
- val out = new java.io.ObjectOutputStream(ba)
- out.writeObject(o)
- out.close()
- ba.toByteArray()
- }
- @throws(classOf[java.io.IOException])
- @throws(classOf[ClassNotFoundException])
- def read[A](buffer: Array[Byte]): A = {
- val in =
- new java.io.ObjectInputStream(new java.io.ByteArrayInputStream(buffer))
- in.readObject().asInstanceOf[A]
- }
- def check[A, B](x: A, y: B) {
- println("x = " + x)
- println("y = " + y)
- println("x equals y: " + (x equals y) + ", y equals x: " + (y equals x))
- println()
- }
-}
-import Serialize._
-
-//############################################################################
-// Test classes in package "scala"
-
-object Test1_scala {
-
- private def arrayToString[A](arr: Array[A]): String =
- arr.mkString("Array[",",","]")
-
- private def arrayEquals[A, B](a1: Array[A], a2: Array[B]): Boolean =
- (a1.length == a2.length) &&
- (Iterator.range(0, a1.length) forall { i => a1(i) == a2(i) })
-
- @serializable
- object WeekDay extends Enumeration {
- type WeekDay = Value
- val Monday, Tuesday, Wednesday, Thusday, Friday, Saturday, Sunday = Value
- }
- import WeekDay._, BigDecimal._, RoundingMode._
-
- val x0 = List(1, 2, 3)
- val x1 = Nil
- val x2 = None
- val x3 = Array(1, 2, 3)
- val x4 = { x: Int => 2 * x }
- val x5 = 'hello
- val x6 = ("BannerLimit", 12345)
- val x7 = BigDecimal.RoundingMode
- val x8 = WeekDay
- val x9 = UP // named element
- val x10 = Monday // unamed element
-
- try {
- val y0: List[Int] = read(write(x0))
- val y1: List[Nothing] = read(write(x1))
- val y2: Option[Nothing] = read(write(x2))
- val y3: Array[Int] = read(write(x3))
- val y4: Function[Int, Int] = read(write(x4))
- val y5: Symbol = read(write(x5))
- val y6: (String, Int) = read(write(x6))
- val y7: RoundingMode.type = read(write(x7))
- val y8: WeekDay.type = read(write(x8))
- val y9: RoundingMode = read(write(x9))
- val y10: WeekDay = read(write(x10))
-
- println("x0 = " + x0)
- println("y0 = " + y0)
- println("x0 eq y0: " + (x0 eq y0) + ", y0 eq x0: " + (y0 eq x0))
- println("x0 equals y0: " + (x0 equals y0) + ", y0 equals x0: " + (y0 equals x0))
- println()
- println("x1 = " + x1)
- println("y1 = " + y1)
- println("x1 eq y1: " + (x1 eq y1) + ", y1 eq x1: " + (y1 eq x1))
- println()
- println("x2 = " + x2)
- println("y2 = " + y2)
- println("x2 eq y2: " + (x2 eq y2) + ", y2 eq x2: " + (y2 eq x2))
- println()
- println("x3 = " + arrayToString(x3))
- println("y3 = " + arrayToString(y3))
- println("arrayEquals(x3, y3): " + arrayEquals(x3, y3))
- println()
- println("x4 = <na>")
- println("y4 = <na>")
- println("x4(2): " + x4(2) + " - y4(2): " + y4(2))
- println()
- println("x5 = " + x5)
- println("y5 = " + y5)
- println("x5 eq y5: " + (x5 eq y5) + ", y5 eq x5: " + (y5 eq x5))
- println("x5 equals y5: " + (x5 equals y5) + ", y5 equals x5: " + (y5 equals x5))
- println()
- println("x6 = " + x6)
- println("y6 = " + y6)
- println("x6 eq y6: " + (x6 eq y6) + ", y6 eq x6: " + (y6 eq x6))
- println("x6 equals y6: " + (x6 equals y6) + ", y6 equals x6: " + (y6 equals x6))
- println()
- println("x7 = " + x7)
- println("y7 = " + y7)
- println("x7 eq y7: " + (x7 eq y7) + ", y7 eq x7: " + (y7 eq x7))
- println("x7 equals y7: " + (x7 equals y7) + ", y7 equals x7: " + (y7 equals x7))
- println()
- println("x8 = " + x8)
- println("y8 = " + y8)
- println("x8 eq y8: " + (x8 eq y8) + ", y8 eq x8: " + (y8 eq x8))
- println("x8 equals y8: " + (x8 equals y8) + ", y8 equals x8: " + (y8 equals x8))
- println()
- println("x9 = " + x9)
- println("y9 = " + y9)
- println("x9 eq y9: " + (x9 eq y9) + ", y9 eq x9: " + (y9 eq x9))
- println("x9 equals y9: " + (x9 equals y9) + ", y9 equals x9: " + (y9 equals x9))
- println()
- println("x10 = " + x10)
- println("y10 = " + y10)
- println("x10 eq y10: " + (x10 eq y10) + ", y10 eq x10: " + (y10 eq x10))
- println("x10 equals y10: " + (x10 equals y10) + ", y10 equals x10: " + (y10 equals x10))
- println()
- println("x9 eq x10: " + (x9 eq x10) + ", x10 eq x9: " + (x10 eq x9))
- println("x9 equals x10: " + (x9 equals x10) + ", x10 equals x9: " + (x10 equals x9))
- println("x9 eq y10: " + (x9 eq y10) + ", y10 eq x9: " + (y10 eq x9))
- println("x9 equals y10: " + (x9 equals y10) + ", y10 equals x9: " + (y10 equals x9))
- println()
- }
- catch {
- case e: Exception =>
- e.printStackTrace()
- println("Error in Test1_scala: " + e)
- }
-}
-
-//############################################################################
-// Test classes in package "scala.collection.immutable"
-
-@serializable
-object Test2_immutable {
- import scala.collection.immutable.{
- BitSet, HashMap, ListMap, ListSet, Queue, Stack, TreeSet, TreeMap, Vector}
-
- val x1 = List(
- ("buffers", 20),
- ("layers", 2),
- ("title", 3)
- )
-
- val m1 = new HashMap[Int, String] + (0 -> "A", 1 -> "B", 2 -> "C")
-
- val x2 = new ListMap[String, Int] + ("buffers" -> 20, "layers" -> 2, "title" -> 3)
-
- val x3 = {
- val bs = new collection.mutable.BitSet()
- bs += 2; bs += 3
- bs.toImmutable
- }
-
- val x4 = new ListSet[Int]() + 3 + 5
-
- val x5 = Queue("a", "b", "c")
-
- val x6 = Stack("a", "b", "c")
-
- val x7 = new TreeMap[Int, String] + (42 -> "FortyTwo")
-
- val x8 = new TreeSet[Int]() + 2 + 0
-
- val x9 = Vector(1, 2, 3)
-
- try {
- val y1: List[Pair[String, Int]] = read(write(x1))
- val n1: HashMap[Int, String] = read(write(m1))
- val y2: ListMap[String, Int] = read(write(x2))
- val y3: BitSet = read(write(x3))
- val y4: ListSet[Int] = read(write(x4))
- val y5: Queue[String] = read(write(x5))
- val y6: Stack[String] = read(write(x6))
- val y7: TreeMap[Int, String] = read(write(x7))
- val y8: TreeSet[Int] = read(write(x8))
- val y9: Vector[Int] = read(write(x9))
-
- check(x1, y1)
- check(m1, n1)
- check(x2, y2)
- check(x3, y3)
- check(x4, y4)
- check(x5, y5)
- check(x6, y6)
- check(x7, y7)
- check(x8, y8)
- check(x9, y9)
- }
- catch {
- case e: Exception =>
- println("Error in Test2_immutable: " + e)
- throw e
- }
-}
-
-//############################################################################
-// Test classes in package "scala.collection.mutable"
-
-object Test3_mutable {
- import scala.collection.mutable.{
- ArrayBuffer, BitSet, HashMap, HashSet, History, LinkedList, ListBuffer,
- Publisher, Queue, RevertableHistory, Stack}
-
- val x0 = new ArrayBuffer[String]
- x0 ++= List("one", "two")
-
- val x2 = new BitSet()
- x2 += 0
- x2 += 8
- x2 += 9
-
- val x1 = new HashMap[String, Int]
- x1 ++= Test2_immutable.x1
-
- val x3 = new HashSet[String]
- x3 ++= Test2_immutable.x1.map(p => p._1)
-
- val x4 = new LinkedList[Int](2, null)
- x4.append(new LinkedList(3, null))
-
- val x5 = new Queue[Int]
- x5 ++= Test2_immutable.x1.map(p => p._2)
-
- val x6 = new Stack[Int]
- x6 pushAll x5
-
- val x7 = new ListBuffer[String]
- x7 ++= List("white", "black")
-
- @serializable
- class Feed extends Publisher[String, Feed] {
- override def toString() = "Feed"
- }
- val feed = new Feed
-
- val x8 = new History[String, Feed]
- x8.notify(feed, "hello")
-
- try {
- val y0: ArrayBuffer[String] = read(write(x0))
- val y1: HashMap[String, Int] = read(write(x1))
- val y2: BitSet = read(write(x2))
- val y3: HashSet[String] = read(write(x3))
- val y4: LinkedList[Int] = read(write(x4))
- val y5: Queue[Int] = read(write(x5))
- val y6: Stack[Int] = read(write(x6))
- val y7: ListBuffer[String] = read(write(x7))
- val y8: History[String, Feed] = read(write(x8))
-
- check(x0, y0)
- check(x1, y1)
- check(x2, y2)
- check(x3, y3)
- check(x4, y4)
- check(x5, y5)
- check(x6, y6)
- check(x7, y7)
- check(x8, y8)
- }
- catch {
- case e: Exception =>
- println("Error in Test3_mutable: " + e)
- }
-}
-
-//############################################################################
-// Test classes in package "scala.xml"
-
-object Test4_xml {
- import scala.xml.Elem
-
- val x1 = <html><title>title</title><body></body></html>;
-
- case class Person(name: String, age: Int)
-
- class AddressBook(a: Person*) {
- private val people: List[Person] = a.toList
- def toXHTML =
- <table cellpadding="2" cellspacing="0">
- <tr>
- <th>Last Name</th>
- <th>First Name</th>
- </tr>
- { for (p <- people) yield
- <tr>
- <td> { p.name } </td>
- <td> { p.age.toString() } </td>
- </tr> }
- </table>;
- }
-
- val people = new AddressBook(
- Person("Tom", 20),
- Person("Bob", 22),
- Person("James", 19))
-
- val x2 =
- <html>
- <body>
- { people.toXHTML }
- </body>
- </html>;
-
- try {
- val y1: scala.xml.Elem = read(write(x1))
- val y2: scala.xml.Elem = read(write(x2))
-
- check(x1, y1)
- check(x2, y2)
- }
- catch {
- case e: Exception =>
- println("Error in Test4_xml: " + e)
- }
-}
-
-//############################################################################
-// Test user-defined classes WITHOUT nesting
-
-@serializable
-class Person(_name: String) {
- private var name = _name
- override def toString() = name
- override def equals(that: Any): Boolean =
- that.isInstanceOf[Person] &&
- (name == that.asInstanceOf[Person].name)
-}
-
-@serializable
-class Employee(_name: String) {
- private var name = _name
- override def toString() = name
-}
-@serializable
-object bob extends Employee("Bob")
-
-object Test5 {
- val x1 = new Person("Tim")
- val x2 = bob
-
- try {
- val y1: Person = read(write(x1))
- val y2: Employee = read(write(x2))
-
- check(x1, y1)
- check(x2, y2)
- }
- catch {
- case e: Exception =>
- println("Error in Test5: " + e)
- }
-}
-
-//############################################################################
-// Test user-defined classes WITH nesting
-
-@serializable
-object Test6 {
- @serializable
- object bill extends Employee("Bill") {
- val x = paul
- }
- @serializable
- object paul extends Person("Paul") {
- val x = 4 // bill; => StackOverflowException !!!
- }
- val x1 = new Person("John")
- val x2 = bill
- val x3 = paul
-
- try {
- val y1: Person = read(write(x1))
- val y2: Employee = read(write(x2))
- val y3: Person = read(write(x3))
-
- check(x1, y1)
- check(x2, y2)
- check(x3, y3)
- }
- catch {
- case e: Exception =>
- println("Error in Test6: " + e)
- }
-}
-
-//############################################################################
-// Test code
-
-object Test {
- def main(args: Array[String]) {
- Test1_scala
- Test2_immutable
- Test3_mutable
- Test4_xml
- Test5
- Test6
- }
-}
-
-//############################################################################
-
diff --git a/test/pending/pos/t3636.scala b/test/pending/pos/t3636.scala
new file mode 100644
index 0000000000..24d18c653d
--- /dev/null
+++ b/test/pending/pos/t3636.scala
@@ -0,0 +1,49 @@
+class CTxnLocal[ T ] {
+ def set( x: T )( implicit t: Txn ) {}
+ def get( implicit t: Txn ) : T = null.asInstanceOf[ T ]
+ def initialValue( t: Txn ) : T = null.asInstanceOf[ T ]
+}
+
+trait Txn
+
+trait ProcTxn {
+ def ccstm: Txn
+}
+
+trait TxnLocal[ @specialized T ] {
+ def apply()( implicit tx: ProcTxn ) : T
+ def set( v: T )( implicit tx: ProcTxn ) : Unit
+ def swap( v: T )( implicit tx: ProcTxn ) : T
+ def transform( f: T => T )( implicit tx: ProcTxn ) : Unit
+}
+
+object TxnLocal {
+ def apply[ @specialized T ] : TxnLocal[ T ] = new Impl( new CTxnLocal[ T ])
+ def apply[ @specialized T ]( initValue: => T ) : TxnLocal[ T ] = new Impl( new CTxnLocal[ T ] {
+ override def initialValue( tx: Txn ): T = initValue
+ })
+
+ private class Impl[ T ]( c: CTxnLocal[ T ]) extends TxnLocal[ T ] {
+ def apply()( implicit tx: ProcTxn ) : T = c.get( tx.ccstm )
+ def set( v: T )( implicit tx: ProcTxn ) : Unit = c.set( v )( tx.ccstm )
+ def swap( v: T )( implicit tx: ProcTxn ) : T = {
+ // currently not implemented in CTxnLocal
+ val oldV = apply
+ set( v )
+ oldV
+ }
+ def transform( f: T => T )( implicit tx: ProcTxn ) {
+ set( f( apply ))
+ }
+ }
+}
+
+
+object Transition {
+ private val currentRef = TxnLocal[ Transition ]( Instant )
+ def current( implicit tx: ProcTxn ) : Transition = currentRef()
+}
+
+sealed abstract class Transition
+case object Instant extends Transition
+