summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--build.number4
-rw-r--r--build.xml81
-rw-r--r--docs/licenses/apache_android.txt16
-rw-r--r--docs/licenses/bsd_jline.txt34
-rw-r--r--docs/licenses/mit_jquery.txt13
-rw-r--r--docs/licenses/mit_sizzle.txt13
-rw-r--r--docs/licenses/mit_tools.tooltip.txt13
-rw-r--r--lib/scala-compiler.jar.desired.sha12
-rw-r--r--lib/scala-library-src.jar.desired.sha12
-rw-r--r--lib/scala-library.jar.desired.sha12
-rw-r--r--src/actors/scala/actors/threadpool/AbstractExecutorService.java10
-rw-r--r--src/actors/scala/actors/threadpool/BlockingQueue.java24
-rw-r--r--src/actors/scala/actors/threadpool/Executors.java6
-rw-r--r--src/actors/scala/actors/threadpool/LinkedBlockingQueue.java716
-rw-r--r--src/actors/scala/actors/threadpool/ThreadPoolExecutor.java2
-rw-r--r--src/actors/scala/actors/threadpool/helpers/FIFOWaitQueue.java2
-rw-r--r--src/actors/scala/actors/threadpool/helpers/Utils.java12
-rw-r--r--src/actors/scala/actors/threadpool/locks/CondVar.java1
-rw-r--r--src/actors/scala/actors/threadpool/locks/FIFOCondVar.java1
-rw-r--r--src/actors/scala/actors/threadpool/locks/ReentrantReadWriteLock.java8
-rw-r--r--src/compiler/scala/tools/nsc/Global.scala6
-rw-r--r--src/compiler/scala/tools/nsc/Interpreter.scala17
-rw-r--r--src/compiler/scala/tools/nsc/MainGenericRunner.scala2
-rw-r--r--src/compiler/scala/tools/nsc/ScriptRunner.scala50
-rwxr-xr-xsrc/compiler/scala/tools/nsc/ast/DocComments.scala2
-rw-r--r--src/compiler/scala/tools/nsc/ast/TreeDSL.scala22
-rw-r--r--src/compiler/scala/tools/nsc/ast/TreeGen.scala2
-rw-r--r--src/compiler/scala/tools/nsc/ast/TreeInfo.scala6
-rw-r--r--src/compiler/scala/tools/nsc/ast/TreePrinters.scala6
-rw-r--r--src/compiler/scala/tools/nsc/ast/Trees.scala15
-rw-r--r--src/compiler/scala/tools/nsc/ast/parser/Parsers.scala15
-rw-r--r--src/compiler/scala/tools/nsc/backend/opt/Inliners.scala1003
-rw-r--r--src/compiler/scala/tools/nsc/doc/DocFactory.scala2
-rw-r--r--src/compiler/scala/tools/nsc/doc/Settings.scala2
-rw-r--r--src/compiler/scala/tools/nsc/doc/Universe.scala5
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/HtmlFactory.scala2
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/HtmlPage.scala7
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/page/Index.scala11
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/page/Template.scala142
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_left.pngbin481 -> 3519 bytes
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_left.psdbin30823 -> 31923 bytes
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_right.pngbin533 -> 2977 bytes
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_right.psdbin31295 -> 28574 bytes
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/index.css37
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/index.js8
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/old.css206
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/old.js126
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/template.css69
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/template.js148
-rw-r--r--src/compiler/scala/tools/nsc/doc/model/Entity.scala68
-rw-r--r--src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala119
-rw-r--r--src/compiler/scala/tools/nsc/doc/model/comment/Comment.scala1
-rw-r--r--src/compiler/scala/tools/nsc/doc/model/comment/CommentFactory.scala22
-rw-r--r--src/compiler/scala/tools/nsc/interactive/CompilerControl.scala3
-rw-r--r--src/compiler/scala/tools/nsc/interactive/Global.scala10
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/Completion.scala27
-rw-r--r--src/compiler/scala/tools/nsc/io/AbstractFile.scala3
-rw-r--r--src/compiler/scala/tools/nsc/io/File.scala11
-rw-r--r--src/compiler/scala/tools/nsc/io/NullPrintStream.scala19
-rw-r--r--src/compiler/scala/tools/nsc/io/Path.scala7
-rw-r--r--src/compiler/scala/tools/nsc/io/ZipArchive.scala34
-rw-r--r--src/compiler/scala/tools/nsc/matching/ParallelMatching.scala1
-rw-r--r--src/compiler/scala/tools/nsc/plugins/Plugin.scala2
-rw-r--r--src/compiler/scala/tools/nsc/settings/StandardScalaSettings.scala2
-rw-r--r--src/compiler/scala/tools/nsc/symtab/Definitions.scala8
-rw-r--r--src/compiler/scala/tools/nsc/symtab/StdNames.scala1
-rw-r--r--src/compiler/scala/tools/nsc/symtab/Symbols.scala3
-rw-r--r--src/compiler/scala/tools/nsc/symtab/Types.scala81
-rw-r--r--src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala2
-rw-r--r--src/compiler/scala/tools/nsc/transform/CleanUp.scala105
-rw-r--r--src/compiler/scala/tools/nsc/transform/Erasure.scala289
-rw-r--r--src/compiler/scala/tools/nsc/transform/Mixin.scala19
-rw-r--r--src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala10
-rw-r--r--src/compiler/scala/tools/nsc/transform/TailCalls.scala29
-rw-r--r--src/compiler/scala/tools/nsc/transform/UnCurry.scala4
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Analyzer.scala2
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Implicits.scala4
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Infer.scala126
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Namers.scala36
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala2
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/RefChecks.scala109
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala3
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Typers.scala119
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Unapplies.scala24
-rw-r--r--src/compiler/scala/tools/nsc/util/HashSet.scala25
-rw-r--r--src/compiler/scala/tools/nsc/util/InterruptReq.scala28
-rw-r--r--src/compiler/scala/tools/nsc/util/WorkScheduler.scala30
-rw-r--r--src/compiler/scala/tools/nsc/util/package.scala22
-rw-r--r--src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala1
-rw-r--r--src/continuations/plugin/scala/tools/selectivecps/CPSUtils.scala3
-rw-r--r--src/library/scala/Array.scala17
-rw-r--r--src/library/scala/Enumeration.scala154
-rw-r--r--src/library/scala/Option.scala10
-rw-r--r--src/library/scala/collection/IterableLike.scala3
-rw-r--r--src/library/scala/collection/Parallel.scala17
-rw-r--r--src/library/scala/collection/Parallelizable.scala38
-rw-r--r--src/library/scala/collection/Sequentializable.scala15
-rw-r--r--src/library/scala/collection/SetLike.scala1
-rw-r--r--src/library/scala/collection/TraversableLike.scala2
-rw-r--r--src/library/scala/collection/TraversableOnce.scala2
-rw-r--r--src/library/scala/collection/generic/CanCombineFrom.scala23
-rw-r--r--src/library/scala/collection/generic/GenericParallelCompanion.scala29
-rw-r--r--src/library/scala/collection/generic/GenericParallelTemplate.scala66
-rw-r--r--src/library/scala/collection/generic/HasNewCombiner.scala26
-rw-r--r--src/library/scala/collection/generic/ParallelFactory.scala43
-rw-r--r--src/library/scala/collection/generic/ParallelMapFactory.scala42
-rw-r--r--src/library/scala/collection/generic/Signalling.scala192
-rw-r--r--src/library/scala/collection/generic/Sizing.scala9
-rw-r--r--src/library/scala/collection/generic/TraversableFactory.scala16
-rw-r--r--src/library/scala/collection/immutable/HashMap.scala230
-rw-r--r--src/library/scala/collection/immutable/IntMap.scala16
-rw-r--r--src/library/scala/collection/immutable/LongMap.scala25
-rw-r--r--src/library/scala/collection/immutable/NumericRange.scala45
-rw-r--r--src/library/scala/collection/immutable/Stream.scala32
-rw-r--r--src/library/scala/collection/immutable/StreamView.scala12
-rw-r--r--src/library/scala/collection/immutable/StreamViewLike.scala76
-rw-r--r--src/library/scala/collection/immutable/StringOps.scala12
-rw-r--r--src/library/scala/collection/immutable/package.scala81
-rw-r--r--src/library/scala/collection/mutable/ArrayBuffer.scala6
-rw-r--r--src/library/scala/collection/mutable/ArrayOps.scala15
-rw-r--r--src/library/scala/collection/mutable/BufferLike.scala7
-rw-r--r--src/library/scala/collection/mutable/BufferProxy.scala10
-rw-r--r--src/library/scala/collection/mutable/PriorityQueue.scala1
-rw-r--r--src/library/scala/collection/mutable/Seq.scala8
-rw-r--r--src/library/scala/collection/mutable/SeqLike.scala31
-rw-r--r--src/library/scala/collection/mutable/Stack.scala5
-rw-r--r--src/library/scala/collection/parallel/Combiner.scala66
-rw-r--r--src/library/scala/collection/parallel/ParallelIterable.scala49
-rw-r--r--src/library/scala/collection/parallel/ParallelIterableLike.scala940
-rw-r--r--src/library/scala/collection/parallel/ParallelIterableView.scala33
-rw-r--r--src/library/scala/collection/parallel/ParallelIterableViewLike.scala59
-rw-r--r--src/library/scala/collection/parallel/ParallelMap.scala71
-rw-r--r--src/library/scala/collection/parallel/ParallelMapLike.scala43
-rw-r--r--src/library/scala/collection/parallel/ParallelSeq.scala64
-rw-r--r--src/library/scala/collection/parallel/ParallelSeqLike.scala473
-rw-r--r--src/library/scala/collection/parallel/ParallelSeqView.scala64
-rw-r--r--src/library/scala/collection/parallel/ParallelSeqViewLike.scala192
-rw-r--r--src/library/scala/collection/parallel/RemainsIterator.scala438
-rw-r--r--src/library/scala/collection/parallel/Splitter.scala86
-rw-r--r--src/library/scala/collection/parallel/TaskSupport.scala27
-rw-r--r--src/library/scala/collection/parallel/Tasks.scala230
-rw-r--r--src/library/scala/collection/parallel/immutable/ParallelHashTrie.scala248
-rw-r--r--src/library/scala/collection/parallel/immutable/ParallelIterable.scala.disabled53
-rw-r--r--src/library/scala/collection/parallel/immutable/ParallelRange.scala88
-rw-r--r--src/library/scala/collection/parallel/immutable/ParallelSeq.scala.disabled44
-rw-r--r--src/library/scala/collection/parallel/immutable/package.scala56
-rw-r--r--src/library/scala/collection/parallel/mutable/LazyCombiner.scala43
-rw-r--r--src/library/scala/collection/parallel/mutable/ParallelArray.scala605
-rw-r--r--src/library/scala/collection/parallel/mutable/ParallelArrayCombiner.scala105
-rw-r--r--src/library/scala/collection/parallel/mutable/ParallelIterable.scala51
-rw-r--r--src/library/scala/collection/parallel/mutable/ParallelSeq.scala61
-rw-r--r--src/library/scala/collection/parallel/mutable/package.scala32
-rw-r--r--src/library/scala/collection/parallel/package.scala70
-rw-r--r--src/library/scala/concurrent/SyncVar.scala35
-rw-r--r--src/library/scala/io/Source.scala18
-rw-r--r--src/library/scala/math/BigInt.scala6
-rw-r--r--src/library/scala/math/Ordering.scala18
-rwxr-xr-xsrc/library/scala/reflect/generic/Symbols.scala1
-rw-r--r--src/library/scala/runtime/AnyValCompanion.scala2
-rw-r--r--src/library/scala/runtime/ScalaRunTime.scala8
-rw-r--r--src/library/scala/util/Random.scala44
-rw-r--r--src/library/scala/xml/parsing/MarkupParser.scala6
-rw-r--r--src/manual/scala/man1/scaladoc.scala128
-rw-r--r--src/partest/scala/tools/partest/PartestTask.scala4
-rw-r--r--src/partest/scala/tools/partest/nest/Diff.java8
-rw-r--r--src/partest/scala/tools/partest/nest/DiffPrint.java2
-rw-r--r--src/partest/scala/tools/partest/nest/Worker.scala2
-rw-r--r--src/swing/scala/swing/Font.scala (renamed from src/swing/scala/swing/Font.scala.disabled)30
-rwxr-xr-xtest/benchmarks/bench57
-rw-r--r--test/benchmarks/lib/jsr166_and_extra.jar.desired.sha11
-rw-r--r--test/benchmarks/source.list68
-rw-r--r--test/benchmarks/src/scala/collection/parallel/Benchmarking.scala186
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/Bench.scala122
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/arrays/Arrays.scala63
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/arrays/Dummy.scala22
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/arrays/IntAccess.scala68
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/arrays/ObjectAccess.scala75
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/arrays/Resetting.scala39
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/arrays/UnknownManif.scala38
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/generic/Dummy.scala59
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/generic/Operators.scala52
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/generic/ParallelBenches.scala227
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/hashtries/Combine.scala66
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/hashtries/Construct.scala54
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/hashtries/Foreach.scala45
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/hashtries/IntInit.scala31
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/hashtries/Iterate.scala51
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/hashtries/Lookup.scala57
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/hashtries/MultipleCombine.scala87
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/hashtries/ParallelHashTries.scala121
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/AggregateLight.scala39
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/Companion.scala9
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/CopyToArray.scala21
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/Corresponds.scala47
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/CountHeavy.scala36
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/CountLight.scala21
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/CountList.scala30
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/DiffHalf.scala48
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/DropMany.scala47
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ExistsLight.scala49
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/FilterLight.scala64
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/FindLight.scala52
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/FlatMapLight.scala24
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ForallHeavy.scala59
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ForallLight.scala46
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ForallQuickStop.scala46
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ForallStop80k.scala46
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ForeachHeavy.scala42
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ForeachLight.scala26
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/IndexWhere.scala47
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/IntersectHalf.scala48
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/LastIndexWhere.scala47
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/MapLight.scala27
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/MatrixMultiplication.scala84
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/MinLight.scala28
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/PadToDouble.scala53
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/PartialMapLight.scala24
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/PartitionLight.scala61
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/PatchHalf.scala46
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/PlusPlus.scala29
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ReduceHeavy.scala22
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ReduceLight.scala50
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ReduceList.scala53
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ReduceNew.scala30
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ReducePrime.scala65
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/RemoveDuplicates.scala44
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/Resettable.scala127
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/Reverse.scala35
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ReverseMap.scala48
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/SameElementsLong.scala45
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/SegmentLength.scala42
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/SequentialOps.scala547
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/SliceFew.scala47
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/SliceMany.scala47
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/SliceMedium.scala47
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/SpanLight.scala62
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/SplitHalf.scala47
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/SumLight.scala28
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/TakeMany.scala47
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/TakeWhileLight.scala61
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_range/RangeBenches.scala208
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_view/SeqViewBenches.scala50
-rw-r--r--test/files/jvm/actor-executor2.scala12
-rw-r--r--[-rwxr-xr-x]test/files/jvm/libnatives.jnilibbin4248 -> 8456 bytes
-rwxr-xr-xtest/files/jvm/mkLibNatives.sh2
-rw-r--r--test/files/neg/abstract-vars.check21
-rw-r--r--test/files/neg/abstract-vars.scala29
-rw-r--r--test/files/neg/bug1275.check8
-rw-r--r--test/files/neg/bug1275.scala26
-rw-r--r--test/files/neg/bug1845.check4
-rw-r--r--test/files/neg/bug1845.scala10
-rw-r--r--test/files/neg/bug3209.check4
-rw-r--r--test/files/neg/bug3209.scala2
-rw-r--r--test/files/neg/bug3631.check4
-rw-r--r--test/files/neg/bug3631.scala3
-rw-r--r--test/files/neg/bug882.check2
-rw-r--r--test/files/neg/names-defaults-neg.check13
-rw-r--r--test/files/neg/names-defaults-neg.scala5
-rw-r--r--test/files/neg/t2416.check10
-rw-r--r--test/files/neg/t2416.scala14
-rw-r--r--test/files/neg/t3399.check4
-rw-r--r--test/files/neg/t3399.scala24
-rw-r--r--test/files/neg/t3507.check4
-rw-r--r--test/files/neg/t3507.scala15
-rw-r--r--test/files/neg/t3604.check7
-rw-r--r--test/files/neg/t3604.scala6
-rw-r--r--test/files/neg/t3653.check7
-rw-r--r--test/files/neg/t3653.scala4
-rw-r--r--test/files/neg/t742.check5
-rw-r--r--test/files/neg/t742.scala8
-rw-r--r--test/files/neg/tailrec-2.check4
-rw-r--r--test/files/neg/tailrec-2.scala26
-rw-r--r--test/files/pos/bug0013.scala31
-rw-r--r--test/files/pos/bug0095.scala15
-rw-r--r--test/files/pos/bug1974.scala20
-rw-r--r--test/files/pos/bug261-ab.scala9
-rw-r--r--test/files/pos/bug261-ba.scala9
-rw-r--r--test/files/pos/bug3234.flags1
-rw-r--r--test/files/pos/bug3234.scala19
-rw-r--r--test/files/pos/bug3440.scala18
-rw-r--r--test/files/pos/bug3521/DoubleValue.java7
-rw-r--r--test/files/pos/bug3521/a.scala4
-rw-r--r--test/files/pos/bug3570.scala7
-rw-r--r--test/files/pos/t1263/Test.java2
-rw-r--r--test/files/pos/t2133.scala18
-rw-r--r--test/files/pos/t2331.scala11
-rw-r--r--test/files/pos/t2413/TestJava.java7
-rw-r--r--test/files/pos/t2413/TestScalac.scala23
-rwxr-xr-xtest/files/pos/t3174.scala14
-rwxr-xr-xtest/files/pos/t3174b.scala12
-rw-r--r--test/files/pos/t3249/Test.java5
-rw-r--r--test/files/pos/t3249/a.scala11
-rw-r--r--test/files/pos/t3374.scala6
-rw-r--r--test/files/pos/t3477.scala7
-rw-r--r--test/files/pos/t3486/JTest.java3
-rw-r--r--test/files/pos/t3486/test.scala6
-rw-r--r--test/files/pos/t3494.scala7
-rwxr-xr-xtest/files/pos/t3568.scala46
-rw-r--r--test/files/pos/t3622/test/AsyncTask.java5
-rw-r--r--test/files/pos/t3622/test/MyAsyncTask.java9
-rw-r--r--test/files/pos/t3622/test/Test.scala5
-rw-r--r--test/files/run/bitsets-msil.check32
-rw-r--r--test/files/run/bug1766.scala16
-rw-r--r--test/files/run/bug2106.flags1
-rw-r--r--test/files/run/bug2106.scala8
-rw-r--r--test/files/run/bug3518.scala16
-rw-r--r--test/files/run/bug3540.scala7
-rw-r--r--test/files/run/bug3616.check1
-rw-r--r--test/files/run/bug3616.scala12
-rw-r--r--test/files/run/exceptions-nest.check12
-rw-r--r--test/files/run/exceptions-nest.scala139
-rw-r--r--test/files/run/names-defaults.scala5
-rw-r--r--test/files/run/slice-strings.scala19
-rw-r--r--test/files/run/t0432.scala15
-rw-r--r--test/files/run/t3493.scala15
-rw-r--r--test/files/run/t3496.scala15
-rw-r--r--test/files/run/t3502.scala24
-rw-r--r--test/files/run/t3508.scala11
-rw-r--r--test/files/run/t3511.scala36
-rw-r--r--test/files/run/t3580.scala17
-rw-r--r--test/files/run/t3603.scala18
-rw-r--r--test/files/run/t3645.scala6
-rwxr-xr-xtest/files/run/weakconform.scala4
-rw-r--r--test/files/run/xml-loop-bug.scala6
-rw-r--r--test/pending/continuations-neg/t3628.check3
-rw-r--r--test/pending/continuations-neg/t3628.scala11
-rw-r--r--test/pending/continuations-pos/t3620.scala73
-rw-r--r--test/pending/jvm/serialization.check198
-rw-r--r--test/pending/jvm/serialization.scala411
-rw-r--r--test/pending/pos/t3636.scala49
330 files changed, 2834 insertions, 13760 deletions
diff --git a/build.number b/build.number
index e00461fe6c..31a2b93530 100644
--- a/build.number
+++ b/build.number
@@ -1,5 +1,5 @@
#Tue Sep 11 19:21:09 CEST 2007
version.minor=8
-version.patch=1
-version.suffix=alpha
+version.patch=0
+version.suffix=final
version.major=2
diff --git a/build.xml b/build.xml
index 656c45a8c5..5c2c8f0add 100644
--- a/build.xml
+++ b/build.xml
@@ -261,15 +261,6 @@ INITIALISATION
<pathelement location="${lib.dir}/forkjoin.jar"/>
<pathelement location="${ant.jar}"/>
</path>
- <!-- What to have on the compilation path when compiling during certain phases -->
- <path id="quick.compilation.path">
- <pathelement location="${build-quick.dir}/classes/library"/>
- <pathelement location="${lib.dir}/forkjoin.jar"/>
- </path>
- <path id="strap.compilation.path">
- <pathelement location="${build-strap.dir}/classes/library"/>
- <pathelement location="${lib.dir}/forkjoin.jar"/>
- </path>
<taskdef resource="scala/tools/ant/sabbus/antlib.xml" classpathref="starr.classpath"/>
</target>
@@ -313,7 +304,6 @@ LOCAL REFERENCE BUILD (LOCKER)
<include name="**/*.scala"/>
<compilationpath>
<pathelement location="${build-locker.dir}/classes/library"/>
- <pathelement location="${lib.dir}/forkjoin.jar"/>
</compilationpath>
</scalacfork>
<propertyfile file="${build-locker.dir}/classes/library/library.properties">
@@ -447,14 +437,13 @@ QUICK BUILD (QUICK)
============================================================================ -->
<target name="quick.start" depends="locker.done"/>
-
+
<target name="quick.pre-lib" depends="quick.start">
<uptodate property="quick.lib.available" targetfile="${build-quick.dir}/library.complete">
<srcfiles dir="${src.dir}">
<include name="library/**"/>
<include name="dbc/**"/>
<include name="actors/**"/>
- <!--<include name="parallel-collections/**"/>-->
<include name="continuations/**"/>
<include name="swing/**"/>
</srcfiles>
@@ -488,7 +477,9 @@ QUICK BUILD (QUICK)
srcdir="${src.dir}/library"
jvmargs="${scalacfork.jvmargs}">
<include name="**/*.scala"/>
- <compilationpath refid="quick.compilation.path"/>
+ <compilationpath>
+ <pathelement location="${build-quick.dir}/classes/library"/>
+ </compilationpath>
</scalacfork>
<scalacfork
destdir="${build-quick.dir}/classes/library"
@@ -497,20 +488,11 @@ QUICK BUILD (QUICK)
srcdir="${src.dir}/actors"
jvmargs="${scalacfork.jvmargs}">
<include name="**/*.scala"/>
- <compilationpath refid="quick.compilation.path"/>
- </scalacfork>
- <!--<scalacfork
- destdir="${build-quick.dir}/classes/library"
- compilerpathref="locker.classpath"
- params="${scalac.args.quick}"
- srcdir="${src.dir}/parallel-collections"
- jvmargs="${scalacfork.jvmargs}">
- <include name="**/*.scala"/>
<compilationpath>
<pathelement location="${build-quick.dir}/classes/library"/>
<pathelement location="${lib.dir}/forkjoin.jar"/>
</compilationpath>
- </scalacfork>-->
+ </scalacfork>
<scalacfork
destdir="${build-quick.dir}/classes/library"
compilerpathref="locker.classpath"
@@ -518,7 +500,9 @@ QUICK BUILD (QUICK)
srcdir="${src.dir}/dbc"
jvmargs="${scalacfork.jvmargs}">
<include name="**/*.scala"/>
- <compilationpath refid="quick.compilation.path"/>
+ <compilationpath>
+ <pathelement location="${build-quick.dir}/classes/library"/>
+ </compilationpath>
</scalacfork>
<scalacfork
destdir="${build-quick.dir}/classes/library"
@@ -527,7 +511,9 @@ QUICK BUILD (QUICK)
srcdir="${src.dir}/swing"
jvmargs="${scalacfork.jvmargs}">
<include name="**/*.scala"/>
- <compilationpath refid="quick.compilation.path"/>
+ <compilationpath>
+ <pathelement location="${build-quick.dir}/classes/library"/>
+ </compilationpath>
</scalacfork>
<propertyfile file="${build-quick.dir}/classes/library/library.properties">
<entry key="version.number" value="${version.number}"/>
@@ -630,7 +616,6 @@ QUICK BUILD (QUICK)
<pathelement location="${build-quick.dir}/classes/library"/>
<pathelement location="${build-quick.dir}/classes/compiler"/>
<pathelement location="${build-quick.dir}/classes/continuations-plugin"/>
- <pathelement location="${lib.dir}/forkjoin.jar"/>
</compilationpath>
</scalacfork>
<copy
@@ -649,7 +634,10 @@ QUICK BUILD (QUICK)
srcdir="${src.dir}/continuations/library"
jvmargs="${scalacfork.jvmargs}">
<include name="**/*.scala"/>
- <compilationpath refid="quick.compilation.path"/>
+ <compilationpath>
+ <pathelement location="${build-quick.dir}/classes/library"/>
+ <pathelement location="${lib.dir}/forkjoin.jar"/>
+ </compilationpath>
</scalacfork>
<touch file="${build-quick.dir}/plugins.complete" verbose="no"/>
<stopwatch name="quick.plugins.timer" action="total"/>
@@ -677,7 +665,6 @@ QUICK BUILD (QUICK)
<pathelement location="${build-quick.dir}/classes/scalap"/>
<pathelement location="${build-quick.dir}/classes/partest"/>
<pathelement location="${ant.jar}"/>
- <pathelement location="${lib.dir}/forkjoin.jar"/>
</compilationpath>
</scalacfork>
<touch file="${build-quick.dir}/scalap.complete" verbose="no"/>
@@ -719,7 +706,6 @@ QUICK BUILD (QUICK)
<pathelement location="${build-quick.dir}/classes/scalap"/>
<pathelement location="${build-quick.dir}/classes/partest"/>
<pathelement location="${ant.jar}"/>
- <pathelement location="${lib.dir}/forkjoin.jar"/>
<pathelement location="${scalacheck.jar}"/>
</compilationpath>
</scalacfork>
@@ -1010,7 +996,9 @@ BOOTSTRAPPING BUILD (STRAP)
srcdir="${src.dir}/library"
jvmargs="${scalacfork.jvmargs}">
<include name="**/*.scala"/>
- <compilationpath refid="strap.compilation.path"/>
+ <compilationpath>
+ <pathelement location="${build-strap.dir}/classes/library"/>
+ </compilationpath>
</scalacfork>
<scalacfork
destdir="${build-strap.dir}/classes/library"
@@ -1019,20 +1007,11 @@ BOOTSTRAPPING BUILD (STRAP)
srcdir="${src.dir}/actors"
jvmargs="${scalacfork.jvmargs}">
<include name="**/*.scala"/>
- <compilationpath refid="strap.compilation.path"/>
- </scalacfork>
- <!--<scalacfork
- destdir="${build-strap.dir}/classes/library"
- compilerpathref="pack.classpath"
- params="${scalac.args.all}"
- srcdir="${src.dir}/parallel-collections"
- jvmargs="${scalacfork.jvmargs}">
- <include name="**/*.scala"/>
<compilationpath>
<pathelement location="${build-strap.dir}/classes/library"/>
<pathelement location="${forkjoin.jar}"/>
</compilationpath>
- </scalacfork>-->
+ </scalacfork>
<scalacfork
destdir="${build-strap.dir}/classes/library"
compilerpathref="pack.classpath"
@@ -1040,7 +1019,9 @@ BOOTSTRAPPING BUILD (STRAP)
srcdir="${src.dir}/dbc"
jvmargs="${scalacfork.jvmargs}">
<include name="**/*.scala"/>
- <compilationpath refid="strap.compilation.path"/>
+ <compilationpath>
+ <pathelement location="${build-strap.dir}/classes/library"/>
+ </compilationpath>
</scalacfork>
<scalacfork
destdir="${build-strap.dir}/classes/library"
@@ -1049,7 +1030,9 @@ BOOTSTRAPPING BUILD (STRAP)
srcdir="${src.dir}/swing"
jvmargs="${scalacfork.jvmargs}">
<include name="**/*.scala"/>
- <compilationpath refid="strap.compilation.path"/>
+ <compilationpath>
+ <pathelement location="${build-strap.dir}/classes/library"/>
+ </compilationpath>
</scalacfork>
<propertyfile file="${build-strap.dir}/classes/library/library.properties">
<entry key="version.number" value="${version.number}"/>
@@ -1132,7 +1115,6 @@ BOOTSTRAPPING BUILD (STRAP)
<pathelement location="${build-strap.dir}/classes/library"/>
<pathelement location="${build-strap.dir}/classes/compiler"/>
<pathelement location="${build-strap.dir}/classes/continuations-plugin"/>
- <pathelement location="${lib.dir}/forkjoin.jar"/>
</compilationpath>
</scalacfork>
<copy
@@ -1151,7 +1133,10 @@ BOOTSTRAPPING BUILD (STRAP)
srcdir="${src.dir}/continuations/library"
jvmargs="${scalacfork.jvmargs}">
<include name="**/*.scala"/>
- <compilationpath refid="strap.compilation.path"/>
+ <compilationpath>
+ <pathelement location="${build-strap.dir}/classes/library"/>
+ <pathelement location="${lib.dir}/forkjoin.jar"/>
+ </compilationpath>
</scalacfork>
<touch file="${build-strap.dir}/plugins.complete" verbose="no"/>
<stopwatch name="strap.plugins.timer" action="total"/>
@@ -1179,7 +1164,6 @@ BOOTSTRAPPING BUILD (STRAP)
<pathelement location="${build-strap.dir}/classes/scalap"/>
<pathelement location="${build-strap.dir}/classes/partest"/>
<pathelement location="${ant.jar}"/>
- <pathelement location="${lib.dir}/forkjoin.jar"/>
</compilationpath>
</scalacfork>
<touch file="${build-strap.dir}/scalap.complete" verbose="no"/>
@@ -1220,9 +1204,8 @@ BOOTSTRAPPING BUILD (STRAP)
<pathelement location="${build-strap.dir}/classes/compiler"/>
<pathelement location="${build-strap.dir}/classes/scalap"/>
<pathelement location="${build-strap.dir}/classes/partest"/>
- <pathelement location="${ant.jar}"/>
- <pathelement location="${lib.dir}/forkjoin.jar"/>
<pathelement location="${scalacheck.jar}"/>
+ <pathelement location="${ant.jar}"/>
</compilationpath>
</scalacfork>
<copy todir="${build-strap.dir}/classes/partest">
@@ -1385,7 +1368,6 @@ DOCUMENTATION
<include name="library/**"/>
<include name="dbc/**"/>
<include name="actors/**"/>
- <!--<include name="parallel-collections/**"/>-->
<include name="swing/**"/>
</srcfiles>
</uptodate>
@@ -1403,7 +1385,6 @@ DOCUMENTATION
classpathref="pack.classpath">
<src>
<files includes="${src.dir}/actors"/>
- <!--<files includes="${src.dir}/parallel-collections"/>-->
<files includes="${src.dir}/library/scala"/>
<files includes="${src.dir}/swing"/>
<files includes="${src.dir}/continuations/library"/>
@@ -1642,7 +1623,6 @@ DISTRIBUTION
<jar destfile="${dist.dir}/src/scala-library-src.jar">
<fileset dir="${src.dir}/library"/>
<fileset dir="${src.dir}/actors"/>
- <!--<fileset dir="${src.dir}/parallel-collections"/>-->
<fileset dir="${src.dir}/continuations/library"/>
</jar>
<jar destfile="${dist.dir}/src/scala-dbc-src.jar">
@@ -1731,7 +1711,6 @@ STABLE REFERENCE (STARR)
<jar destfile="${basedir}/lib/scala-library-src.jar">
<fileset dir="${basedir}/src/library"/>
<fileset dir="${basedir}/src/actors"/>
- <!--<fileset dir="${basedir}/src/parallel-collections"/>-->
<fileset dir="${basedir}/src/swing"/>
<fileset dir="${basedir}/src/dbc"/>
</jar>
diff --git a/docs/licenses/apache_android.txt b/docs/licenses/apache_android.txt
deleted file mode 100644
index 00f339625f..0000000000
--- a/docs/licenses/apache_android.txt
+++ /dev/null
@@ -1,16 +0,0 @@
-Scala includes various example files for Android:
-
-Copyright (c) 2005-2009, The Android Open Source Project
-Copyright (c) 2007, Steven Osborn
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License. \ No newline at end of file
diff --git a/docs/licenses/bsd_jline.txt b/docs/licenses/bsd_jline.txt
deleted file mode 100644
index 4ac4a378ed..0000000000
--- a/docs/licenses/bsd_jline.txt
+++ /dev/null
@@ -1,34 +0,0 @@
-Scala includes the jLine library:
-
-Copyright (c) 2002-2006, Marc Prud'hommeaux <mwp1@cornell.edu>
-All rights reserved.
-
-Redistribution and use in source and binary forms, with or
-without modification, are permitted provided that the following
-conditions are met:
-
-Redistributions of source code must retain the above copyright
-notice, this list of conditions and the following disclaimer.
-
-Redistributions in binary form must reproduce the above copyright
-notice, this list of conditions and the following disclaimer
-in the documentation and/or other materials provided with
-the distribution.
-
-Neither the name of JLine nor the names of its contributors
-may be used to endorse or promote products derived from this
-software without specific prior written permission.
-
-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING,
-BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY
-AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
-EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
-FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY,
-OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
-PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
-AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
-LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING
-IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
-OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/docs/licenses/mit_jquery.txt b/docs/licenses/mit_jquery.txt
deleted file mode 100644
index ef2c570469..0000000000
--- a/docs/licenses/mit_jquery.txt
+++ /dev/null
@@ -1,13 +0,0 @@
-Scala includes the jQuery library:
-
-Copyright (c) 2010 John Resig
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
diff --git a/docs/licenses/mit_sizzle.txt b/docs/licenses/mit_sizzle.txt
deleted file mode 100644
index d81d30aa0f..0000000000
--- a/docs/licenses/mit_sizzle.txt
+++ /dev/null
@@ -1,13 +0,0 @@
-Scala includes the Sizzle library:
-
-Copyright (c) 2010 The Dojo Foundation
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
diff --git a/docs/licenses/mit_tools.tooltip.txt b/docs/licenses/mit_tools.tooltip.txt
deleted file mode 100644
index 27a4dbc788..0000000000
--- a/docs/licenses/mit_tools.tooltip.txt
+++ /dev/null
@@ -1,13 +0,0 @@
-Scala includes the Tools Tooltip library:
-
-Copyright (c) 2009 Tero Piirainen
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
diff --git a/lib/scala-compiler.jar.desired.sha1 b/lib/scala-compiler.jar.desired.sha1
index 7f771c65d9..f2a687e44e 100644
--- a/lib/scala-compiler.jar.desired.sha1
+++ b/lib/scala-compiler.jar.desired.sha1
@@ -1 +1 @@
-dac47f86e72a0d4d1c196b30e5fab00671ae72cc ?scala-compiler.jar
+47cd1c12567af0aa7d93b4cf2877db26bd908fe7 ?scala-compiler.jar
diff --git a/lib/scala-library-src.jar.desired.sha1 b/lib/scala-library-src.jar.desired.sha1
index 3ca7e74721..f25112f05b 100644
--- a/lib/scala-library-src.jar.desired.sha1
+++ b/lib/scala-library-src.jar.desired.sha1
@@ -1 +1 @@
-4d83364ebab2a2d6da1cc9c7401e9fce40868b58 ?scala-library-src.jar
+d42ea573aada13c9ea6b05da483c3e08522ea1fe ?scala-library-src.jar
diff --git a/lib/scala-library.jar.desired.sha1 b/lib/scala-library.jar.desired.sha1
index 80d5c6b541..10dba65a67 100644
--- a/lib/scala-library.jar.desired.sha1
+++ b/lib/scala-library.jar.desired.sha1
@@ -1 +1 @@
-01733ad3ce01044f067f06de66a3aba634077b59 ?scala-library.jar
+82a0de3721dc7299d57d385b1d19286d63a5e763 ?scala-library.jar
diff --git a/src/actors/scala/actors/threadpool/AbstractExecutorService.java b/src/actors/scala/actors/threadpool/AbstractExecutorService.java
index 4a12aa3c28..7953bfe30f 100644
--- a/src/actors/scala/actors/threadpool/AbstractExecutorService.java
+++ b/src/actors/scala/actors/threadpool/AbstractExecutorService.java
@@ -120,7 +120,7 @@ public abstract class AbstractExecutorService implements ExecutorService {
int ntasks = tasks.size();
if (ntasks == 0)
throw new IllegalArgumentException();
- List<Future> futures = new ArrayList<Future>(ntasks);
+ List futures= new ArrayList(ntasks);
ExecutorCompletionService ecs =
new ExecutorCompletionService(this);
@@ -203,10 +203,10 @@ public abstract class AbstractExecutorService implements ExecutorService {
return doInvokeAny(tasks, true, unit.toNanos(timeout));
}
- public List<Future> invokeAll(Collection tasks) throws InterruptedException {
+ public List invokeAll(Collection tasks) throws InterruptedException {
if (tasks == null)
throw new NullPointerException();
- List<Future> futures = new ArrayList<Future>(tasks.size());
+ List futures = new ArrayList(tasks.size());
boolean done = false;
try {
for (Iterator t = tasks.iterator(); t.hasNext();) {
@@ -235,13 +235,13 @@ public abstract class AbstractExecutorService implements ExecutorService {
}
}
- public List<Future> invokeAll(Collection tasks,
+ public List invokeAll(Collection tasks,
long timeout, TimeUnit unit)
throws InterruptedException {
if (tasks == null || unit == null)
throw new NullPointerException();
long nanos = unit.toNanos(timeout);
- List<Future> futures = new ArrayList<Future>(tasks.size());
+ List futures = new ArrayList(tasks.size());
boolean done = false;
try {
for (Iterator t = tasks.iterator(); t.hasNext();)
diff --git a/src/actors/scala/actors/threadpool/BlockingQueue.java b/src/actors/scala/actors/threadpool/BlockingQueue.java
index 1b4e808d84..880c2580da 100644
--- a/src/actors/scala/actors/threadpool/BlockingQueue.java
+++ b/src/actors/scala/actors/threadpool/BlockingQueue.java
@@ -7,10 +7,9 @@
package scala.actors.threadpool;
import java.util.Collection;
-import java.util.Queue;
/**
- * A {@link java.util.Queue} that additionally supports operations
+ * A {@link edu.emory.mathcs.backport.java.util.Queue} that additionally supports operations
* that wait for the queue to become non-empty when retrieving an
* element, and wait for space to become available in the queue when
* storing an element.
@@ -147,9 +146,8 @@ import java.util.Queue;
*
* @since 1.5
* @author Doug Lea
- * @param <E> the type of elements held in this collection
*/
-public interface BlockingQueue<E> extends Queue<E> {
+public interface BlockingQueue extends Queue {
/**
* Inserts the specified element into this queue if it is possible to do
* so immediately without violating capacity restrictions, returning
@@ -159,7 +157,7 @@ public interface BlockingQueue<E> extends Queue<E> {
* use {@link #offer(Object) offer}.
*
* @param e the element to add
- * @return <tt>true</tt> (as specified by {@link Collection#add})
+ * @return <tt>true</tt> (as specified by {@link java.util.Collection#add})
* @throws IllegalStateException if the element cannot be added at this
* time due to capacity restrictions
* @throws ClassCastException if the class of the specified element
@@ -168,7 +166,7 @@ public interface BlockingQueue<E> extends Queue<E> {
* @throws IllegalArgumentException if some property of the specified
* element prevents it from being added to this queue
*/
- boolean add(E e);
+ boolean add(Object e);
/**
* Inserts the specified element into this queue if it is possible to do
@@ -187,7 +185,7 @@ public interface BlockingQueue<E> extends Queue<E> {
* @throws IllegalArgumentException if some property of the specified
* element prevents it from being added to this queue
*/
- boolean offer(E e);
+ boolean offer(Object e);
/**
* Inserts the specified element into this queue, waiting if necessary
@@ -201,7 +199,7 @@ public interface BlockingQueue<E> extends Queue<E> {
* @throws IllegalArgumentException if some property of the specified
* element prevents it from being added to this queue
*/
- void put(E e) throws InterruptedException;
+ void put(Object e) throws InterruptedException;
/**
* Inserts the specified element into this queue, waiting up to the
@@ -221,7 +219,7 @@ public interface BlockingQueue<E> extends Queue<E> {
* @throws IllegalArgumentException if some property of the specified
* element prevents it from being added to this queue
*/
- boolean offer(E e, long timeout, TimeUnit unit)
+ boolean offer(Object e, long timeout, TimeUnit unit)
throws InterruptedException;
/**
@@ -231,7 +229,7 @@ public interface BlockingQueue<E> extends Queue<E> {
* @return the head of this queue
* @throws InterruptedException if interrupted while waiting
*/
- E take() throws InterruptedException;
+ Object take() throws InterruptedException;
/**
* Retrieves and removes the head of this queue, waiting up to the
@@ -245,7 +243,7 @@ public interface BlockingQueue<E> extends Queue<E> {
* specified waiting time elapses before an element is available
* @throws InterruptedException if interrupted while waiting
*/
- E poll(long timeout, TimeUnit unit)
+ Object poll(long timeout, TimeUnit unit)
throws InterruptedException;
/**
@@ -315,7 +313,7 @@ public interface BlockingQueue<E> extends Queue<E> {
* queue, or some property of an element of this queue prevents
* it from being added to the specified collection
*/
- int drainTo(Collection<? super E> c);
+ int drainTo(Collection c);
/**
* Removes at most the given number of available elements from
@@ -340,5 +338,5 @@ public interface BlockingQueue<E> extends Queue<E> {
* queue, or some property of an element of this queue prevents
* it from being added to the specified collection
*/
- int drainTo(Collection<? super E> c, int maxElements);
+ int drainTo(Collection c, int maxElements);
}
diff --git a/src/actors/scala/actors/threadpool/Executors.java b/src/actors/scala/actors/threadpool/Executors.java
index 49a127a8db..e74d665f33 100644
--- a/src/actors/scala/actors/threadpool/Executors.java
+++ b/src/actors/scala/actors/threadpool/Executors.java
@@ -605,12 +605,12 @@ public class Executors {
public Future submit(Runnable task, Object result) {
return e.submit(task, result);
}
- public List<Future> invokeAll(Collection tasks)
+ public List invokeAll(Collection tasks)
throws InterruptedException {
return e.invokeAll(tasks);
}
- public List<Future> invokeAll(Collection tasks,
- long timeout, TimeUnit unit)
+ public List invokeAll(Collection tasks,
+ long timeout, TimeUnit unit)
throws InterruptedException {
return e.invokeAll(tasks, timeout, unit);
}
diff --git a/src/actors/scala/actors/threadpool/LinkedBlockingQueue.java b/src/actors/scala/actors/threadpool/LinkedBlockingQueue.java
index f434ab0e7b..87fecff09c 100644
--- a/src/actors/scala/actors/threadpool/LinkedBlockingQueue.java
+++ b/src/actors/scala/actors/threadpool/LinkedBlockingQueue.java
@@ -6,13 +6,11 @@
package scala.actors.threadpool;
-import java.util.concurrent.atomic.AtomicInteger;
-import java.util.concurrent.locks.Condition;
-import java.util.concurrent.locks.ReentrantLock;
-import java.util.AbstractQueue;
import java.util.Collection;
import java.util.Iterator;
import java.util.NoSuchElementException;
+//import edu.emory.mathcs.backport.java.util.*;
+import scala.actors.threadpool.helpers.*;
/**
* An optionally-bounded {@linkplain BlockingQueue blocking queue} based on
@@ -43,11 +41,10 @@ import java.util.NoSuchElementException;
*
* @since 1.5
* @author Doug Lea
- * @param <E> the type of elements held in this collection
*
*/
-public class LinkedBlockingQueue<E> extends AbstractQueue<E>
- implements BlockingQueue<E>, java.io.Serializable {
+public class LinkedBlockingQueue extends AbstractQueue
+ implements BlockingQueue, java.io.Serializable {
private static final long serialVersionUID = -6903933977591709194L;
/*
@@ -62,87 +59,43 @@ public class LinkedBlockingQueue<E> extends AbstractQueue<E>
* items have been entered since the signal. And symmetrically for
* takes signalling puts. Operations such as remove(Object) and
* iterators acquire both locks.
- *
- * Visibility between writers and readers is provided as follows:
- *
- * Whenever an element is enqueued, the putLock is acquired and
- * count updated. A subsequent reader guarantees visibility to the
- * enqueued Node by either acquiring the putLock (via fullyLock)
- * or by acquiring the takeLock, and then reading n = count.get();
- * this gives visibility to the first n items.
- *
- * To implement weakly consistent iterators, it appears we need to
- * keep all Nodes GC-reachable from a predecessor dequeued Node.
- * That would cause two problems:
- * - allow a rogue Iterator to cause unbounded memory retention
- * - cause cross-generational linking of old Nodes to new Nodes if
- * a Node was tenured while live, which generational GCs have a
- * hard time dealing with, causing repeated major collections.
- * However, only non-deleted Nodes need to be reachable from
- * dequeued Nodes, and reachability does not necessarily have to
- * be of the kind understood by the GC. We use the trick of
- * linking a Node that has just been dequeued to itself. Such a
- * self-link implicitly means to advance to head.next.
*/
/**
* Linked list node class
*/
- static class Node<E> {
- E item;
-
- /**
- * One of:
- * - the real successor Node
- * - this Node, meaning the successor is head.next
- * - null, meaning there is no successor (this is the last node)
- */
- Node<E> next;
-
- Node(E x) { item = x; }
+ static class Node {
+ /** The item, volatile to ensure barrier separating write and read */
+ volatile Object item;
+ Node next;
+ Node(Object x) { item = x; }
}
/** The capacity bound, or Integer.MAX_VALUE if none */
private final int capacity;
/** Current number of elements */
- private final AtomicInteger count = new AtomicInteger(0);
+ private volatile int count = 0;
- /**
- * Head of linked list.
- * Invariant: head.item == null
- */
- private transient Node<E> head;
+ /** Head of linked list */
+ private transient Node head;
- /**
- * Tail of linked list.
- * Invariant: last.next == null
- */
- private transient Node<E> last;
+ /** Tail of linked list */
+ private transient Node last;
/** Lock held by take, poll, etc */
- private final ReentrantLock takeLock = new ReentrantLock();
-
- /** Wait queue for waiting takes */
- private final Condition notEmpty = takeLock.newCondition();
+ private final Object takeLock = new SerializableLock();
/** Lock held by put, offer, etc */
- private final ReentrantLock putLock = new ReentrantLock();
-
- /** Wait queue for waiting puts */
- private final Condition notFull = putLock.newCondition();
+ private final Object putLock = new SerializableLock();
/**
* Signals a waiting take. Called only from put/offer (which do not
* otherwise ordinarily lock takeLock.)
*/
private void signalNotEmpty() {
- final ReentrantLock takeLock = this.takeLock;
- takeLock.lock();
- try {
- notEmpty.signal();
- } finally {
- takeLock.unlock();
+ synchronized (takeLock) {
+ takeLock.notify();
}
}
@@ -150,69 +103,34 @@ public class LinkedBlockingQueue<E> extends AbstractQueue<E>
* Signals a waiting put. Called only from take/poll.
*/
private void signalNotFull() {
- final ReentrantLock putLock = this.putLock;
- putLock.lock();
- try {
- notFull.signal();
- } finally {
- putLock.unlock();
+ synchronized (putLock) {
+ putLock.notify();
}
}
/**
* Creates a node and links it at end of queue.
- *
* @param x the item
*/
- private void enqueue(E x) {
- // assert putLock.isHeldByCurrentThread();
- // assert last.next == null;
- last = last.next = new Node<E>(x);
+ private void insert(Object x) {
+ last = last.next = new Node(x);
}
/**
- * Removes a node from head of queue.
- *
+ * Removes a node from head of queue,
* @return the node
*/
- private E dequeue() {
- // assert takeLock.isHeldByCurrentThread();
- // assert head.item == null;
- Node<E> h = head;
- Node<E> first = h.next;
- h.next = h; // help GC
+ private Object extract() {
+ Node first = head.next;
head = first;
- E x = first.item;
+ Object x = first.item;
first.item = null;
return x;
}
- /**
- * Lock to prevent both puts and takes.
- */
- void fullyLock() {
- putLock.lock();
- takeLock.lock();
- }
/**
- * Unlock to allow both puts and takes.
- */
- void fullyUnlock() {
- takeLock.unlock();
- putLock.unlock();
- }
-
-// /**
-// * Tells whether both locks are held by current thread.
-// */
-// boolean isFullyLocked() {
-// return (putLock.isHeldByCurrentThread() &&
-// takeLock.isHeldByCurrentThread());
-// }
-
- /**
- * Creates a {@code LinkedBlockingQueue} with a capacity of
+ * Creates a <tt>LinkedBlockingQueue</tt> with a capacity of
* {@link Integer#MAX_VALUE}.
*/
public LinkedBlockingQueue() {
@@ -220,20 +138,20 @@ public class LinkedBlockingQueue<E> extends AbstractQueue<E>
}
/**
- * Creates a {@code LinkedBlockingQueue} with the given (fixed) capacity.
+ * Creates a <tt>LinkedBlockingQueue</tt> with the given (fixed) capacity.
*
* @param capacity the capacity of this queue
- * @throws IllegalArgumentException if {@code capacity} is not greater
+ * @throws IllegalArgumentException if <tt>capacity</tt> is not greater
* than zero
*/
public LinkedBlockingQueue(int capacity) {
if (capacity <= 0) throw new IllegalArgumentException();
this.capacity = capacity;
- last = head = new Node<E>(null);
+ last = head = new Node(null);
}
/**
- * Creates a {@code LinkedBlockingQueue} with a capacity of
+ * Creates a <tt>LinkedBlockingQueue</tt> with a capacity of
* {@link Integer#MAX_VALUE}, initially containing the elements of the
* given collection,
* added in traversal order of the collection's iterator.
@@ -242,23 +160,11 @@ public class LinkedBlockingQueue<E> extends AbstractQueue<E>
* @throws NullPointerException if the specified collection or any
* of its elements are null
*/
- public LinkedBlockingQueue(Collection<? extends E> c) {
+ public LinkedBlockingQueue(Collection c) {
this(Integer.MAX_VALUE);
- final ReentrantLock putLock = this.putLock;
- putLock.lock(); // Never contended, but necessary for visibility
- try {
- int n = 0;
- for (E e : c) {
- if (e == null)
- throw new NullPointerException();
- if (n == capacity)
- throw new IllegalStateException("Queue full");
- enqueue(e);
- ++n;
- }
- count.set(n);
- } finally {
- putLock.unlock();
+ for (Iterator itr = c.iterator(); itr.hasNext();) {
+ Object e = itr.next();
+ add(e);
}
}
@@ -271,7 +177,7 @@ public class LinkedBlockingQueue<E> extends AbstractQueue<E>
* @return the number of elements in this queue
*/
public int size() {
- return count.get();
+ return count;
}
// this doc comment is a modified copy of the inherited doc comment,
@@ -280,15 +186,15 @@ public class LinkedBlockingQueue<E> extends AbstractQueue<E>
* Returns the number of additional elements that this queue can ideally
* (in the absence of memory or resource constraints) accept without
* blocking. This is always equal to the initial capacity of this queue
- * less the current {@code size} of this queue.
+ * less the current <tt>size</tt> of this queue.
*
* <p>Note that you <em>cannot</em> always tell if an attempt to insert
- * an element will succeed by inspecting {@code remainingCapacity}
+ * an element will succeed by inspecting <tt>remainingCapacity</tt>
* because it may be the case that another thread is about to
* insert or remove an element.
*/
public int remainingCapacity() {
- return capacity - count.get();
+ return capacity - count;
}
/**
@@ -298,33 +204,34 @@ public class LinkedBlockingQueue<E> extends AbstractQueue<E>
* @throws InterruptedException {@inheritDoc}
* @throws NullPointerException {@inheritDoc}
*/
- public void put(E e) throws InterruptedException {
+ public void put(Object e) throws InterruptedException {
if (e == null) throw new NullPointerException();
- // Note: convention in all put/take/etc is to preset local var
- // holding count negative to indicate failure unless set.
+ // Note: convention in all put/take/etc is to preset
+ // local var holding count negative to indicate failure unless set.
int c = -1;
- final ReentrantLock putLock = this.putLock;
- final AtomicInteger count = this.count;
- putLock.lockInterruptibly();
- try {
+ synchronized (putLock) {
/*
* Note that count is used in wait guard even though it is
* not protected by lock. This works because count can
* only decrease at this point (all other puts are shut
* out by lock), and we (or some other waiting put) are
- * signalled if it ever changes from capacity. Similarly
- * for all other uses of count in other wait guards.
+ * signalled if it ever changes from
+ * capacity. Similarly for all other uses of count in
+ * other wait guards.
*/
- while (count.get() == capacity) {
- notFull.await();
+ try {
+ while (count == capacity)
+ putLock.wait();
+ } catch (InterruptedException ie) {
+ putLock.notify(); // propagate to a non-interrupted thread
+ throw ie;
}
- enqueue(e);
- c = count.getAndIncrement();
+ insert(e);
+ synchronized (this) { c = count++; }
if (c + 1 < capacity)
- notFull.signal();
- } finally {
- putLock.unlock();
+ putLock.notify();
}
+
if (c == 0)
signalNotEmpty();
}
@@ -333,32 +240,37 @@ public class LinkedBlockingQueue<E> extends AbstractQueue<E>
* Inserts the specified element at the tail of this queue, waiting if
* necessary up to the specified wait time for space to become available.
*
- * @return {@code true} if successful, or {@code false} if
+ * @return <tt>true</tt> if successful, or <tt>false</tt> if
* the specified waiting time elapses before space is available.
* @throws InterruptedException {@inheritDoc}
* @throws NullPointerException {@inheritDoc}
*/
- public boolean offer(E e, long timeout, TimeUnit unit)
+ public boolean offer(Object e, long timeout, TimeUnit unit)
throws InterruptedException {
if (e == null) throw new NullPointerException();
long nanos = unit.toNanos(timeout);
int c = -1;
- final ReentrantLock putLock = this.putLock;
- final AtomicInteger count = this.count;
- putLock.lockInterruptibly();
- try {
- while (count.get() == capacity) {
+ synchronized (putLock) {
+ long deadline = Utils.nanoTime() + nanos;
+ for (;;) {
+ if (count < capacity) {
+ insert(e);
+ synchronized (this) { c = count++; }
+ if (c + 1 < capacity)
+ putLock.notify();
+ break;
+ }
if (nanos <= 0)
return false;
- nanos = notFull.awaitNanos(nanos);
+ try {
+ TimeUnit.NANOSECONDS.timedWait(putLock, nanos);
+ nanos = deadline - Utils.nanoTime();
+ } catch (InterruptedException ie) {
+ putLock.notify(); // propagate to a non-interrupted thread
+ throw ie;
+ }
}
- enqueue(e);
- c = count.getAndIncrement();
- if (c + 1 < capacity)
- notFull.signal();
- } finally {
- putLock.unlock();
}
if (c == 0)
signalNotEmpty();
@@ -368,7 +280,7 @@ public class LinkedBlockingQueue<E> extends AbstractQueue<E>
/**
* Inserts the specified element at the tail of this queue if it is
* possible to do so immediately without exceeding the queue's capacity,
- * returning {@code true} upon success and {@code false} if this queue
+ * returning <tt>true</tt> upon success and <tt>false</tt> if this queue
* is full.
* When using a capacity-restricted queue, this method is generally
* preferable to method {@link BlockingQueue#add add}, which can fail to
@@ -376,23 +288,18 @@ public class LinkedBlockingQueue<E> extends AbstractQueue<E>
*
* @throws NullPointerException if the specified element is null
*/
- public boolean offer(E e) {
+ public boolean offer(Object e) {
if (e == null) throw new NullPointerException();
- final AtomicInteger count = this.count;
- if (count.get() == capacity)
+ if (count == capacity)
return false;
int c = -1;
- final ReentrantLock putLock = this.putLock;
- putLock.lock();
- try {
- if (count.get() < capacity) {
- enqueue(e);
- c = count.getAndIncrement();
+ synchronized (putLock) {
+ if (count < capacity) {
+ insert(e);
+ synchronized (this) { c = count++; }
if (c + 1 < capacity)
- notFull.signal();
+ putLock.notify();
}
- } finally {
- putLock.unlock();
}
if (c == 0)
signalNotEmpty();
@@ -400,134 +307,128 @@ public class LinkedBlockingQueue<E> extends AbstractQueue<E>
}
- public E take() throws InterruptedException {
- E x;
+ public Object take() throws InterruptedException {
+ Object x;
int c = -1;
- final AtomicInteger count = this.count;
- final ReentrantLock takeLock = this.takeLock;
- takeLock.lockInterruptibly();
- try {
- while (count.get() == 0) {
- notEmpty.await();
+ synchronized (takeLock) {
+ try {
+ while (count == 0)
+ takeLock.wait();
+ } catch (InterruptedException ie) {
+ takeLock.notify(); // propagate to a non-interrupted thread
+ throw ie;
}
- x = dequeue();
- c = count.getAndDecrement();
+
+ x = extract();
+ synchronized (this) { c = count--; }
if (c > 1)
- notEmpty.signal();
- } finally {
- takeLock.unlock();
+ takeLock.notify();
}
if (c == capacity)
signalNotFull();
return x;
}
- public E poll(long timeout, TimeUnit unit) throws InterruptedException {
- E x = null;
+ public Object poll(long timeout, TimeUnit unit) throws InterruptedException {
+ Object x = null;
int c = -1;
long nanos = unit.toNanos(timeout);
- final AtomicInteger count = this.count;
- final ReentrantLock takeLock = this.takeLock;
- takeLock.lockInterruptibly();
- try {
- while (count.get() == 0) {
+ synchronized (takeLock) {
+ long deadline = Utils.nanoTime() + nanos;
+ for (;;) {
+ if (count > 0) {
+ x = extract();
+ synchronized (this) { c = count--; }
+ if (c > 1)
+ takeLock.notify();
+ break;
+ }
if (nanos <= 0)
return null;
- nanos = notEmpty.awaitNanos(nanos);
+ try {
+ TimeUnit.NANOSECONDS.timedWait(takeLock, nanos);
+ nanos = deadline - Utils.nanoTime();
+ } catch (InterruptedException ie) {
+ takeLock.notify(); // propagate to a non-interrupted thread
+ throw ie;
+ }
}
- x = dequeue();
- c = count.getAndDecrement();
- if (c > 1)
- notEmpty.signal();
- } finally {
- takeLock.unlock();
}
if (c == capacity)
signalNotFull();
return x;
}
- public E poll() {
- final AtomicInteger count = this.count;
- if (count.get() == 0)
+ public Object poll() {
+ if (count == 0)
return null;
- E x = null;
+ Object x = null;
int c = -1;
- final ReentrantLock takeLock = this.takeLock;
- takeLock.lock();
- try {
- if (count.get() > 0) {
- x = dequeue();
- c = count.getAndDecrement();
+ synchronized (takeLock) {
+ if (count > 0) {
+ x = extract();
+ synchronized (this) { c = count--; }
if (c > 1)
- notEmpty.signal();
+ takeLock.notify();
}
- } finally {
- takeLock.unlock();
}
if (c == capacity)
signalNotFull();
return x;
}
- public E peek() {
- if (count.get() == 0)
+
+ public Object peek() {
+ if (count == 0)
return null;
- final ReentrantLock takeLock = this.takeLock;
- takeLock.lock();
- try {
- Node<E> first = head.next;
+ synchronized (takeLock) {
+ Node first = head.next;
if (first == null)
return null;
else
return first.item;
- } finally {
- takeLock.unlock();
}
}
/**
- * Unlinks interior Node p with predecessor trail.
- */
- void unlink(Node<E> p, Node<E> trail) {
- // assert isFullyLocked();
- // p.next is not changed, to allow iterators that are
- // traversing p to maintain their weak-consistency guarantee.
- p.item = null;
- trail.next = p.next;
- if (last == p)
- last = trail;
- if (count.getAndDecrement() == capacity)
- notFull.signal();
- }
-
- /**
* Removes a single instance of the specified element from this queue,
- * if it is present. More formally, removes an element {@code e} such
- * that {@code o.equals(e)}, if this queue contains one or more such
+ * if it is present. More formally, removes an element <tt>e</tt> such
+ * that <tt>o.equals(e)</tt>, if this queue contains one or more such
* elements.
- * Returns {@code true} if this queue contained the specified element
+ * Returns <tt>true</tt> if this queue contained the specified element
* (or equivalently, if this queue changed as a result of the call).
*
* @param o element to be removed from this queue, if present
- * @return {@code true} if this queue changed as a result of the call
+ * @return <tt>true</tt> if this queue changed as a result of the call
*/
public boolean remove(Object o) {
if (o == null) return false;
- fullyLock();
- try {
- for (Node<E> trail = head, p = trail.next;
- p != null;
- trail = p, p = p.next) {
- if (o.equals(p.item)) {
- unlink(p, trail);
- return true;
+ boolean removed = false;
+ synchronized (putLock) {
+ synchronized (takeLock) {
+ Node trail = head;
+ Node p = head.next;
+ while (p != null) {
+ if (o.equals(p.item)) {
+ removed = true;
+ break;
+ }
+ trail = p;
+ p = p.next;
+ }
+ if (removed) {
+ p.item = null;
+ trail.next = p.next;
+ if (last == p)
+ last = trail;
+ synchronized (this) {
+ if (count-- == capacity)
+ putLock.notifyAll();
+ }
}
}
- return false;
- } finally {
- fullyUnlock();
}
+ return removed;
}
/**
@@ -544,16 +445,15 @@ public class LinkedBlockingQueue<E> extends AbstractQueue<E>
* @return an array containing all of the elements in this queue
*/
public Object[] toArray() {
- fullyLock();
- try {
- int size = count.get();
- Object[] a = new Object[size];
- int k = 0;
- for (Node<E> p = head.next; p != null; p = p.next)
- a[k++] = p.item;
- return a;
- } finally {
- fullyUnlock();
+ synchronized (putLock) {
+ synchronized (takeLock) {
+ int size = count;
+ Object[] a = new Object[size];
+ int k = 0;
+ for (Node p = head.next; p != null; p = p.next)
+ a[k++] = p.item;
+ return a;
+ }
}
}
@@ -567,22 +467,22 @@ public class LinkedBlockingQueue<E> extends AbstractQueue<E>
* <p>If this queue fits in the specified array with room to spare
* (i.e., the array has more elements than this queue), the element in
* the array immediately following the end of the queue is set to
- * {@code null}.
+ * <tt>null</tt>.
*
* <p>Like the {@link #toArray()} method, this method acts as bridge between
* array-based and collection-based APIs. Further, this method allows
* precise control over the runtime type of the output array, and may,
* under certain circumstances, be used to save allocation costs.
*
- * <p>Suppose {@code x} is a queue known to contain only strings.
+ * <p>Suppose <tt>x</tt> is a queue known to contain only strings.
* The following code can be used to dump the queue into a newly
- * allocated array of {@code String}:
+ * allocated array of <tt>String</tt>:
*
* <pre>
* String[] y = x.toArray(new String[0]);</pre>
*
- * Note that {@code toArray(new Object[0])} is identical in function to
- * {@code toArray()}.
+ * Note that <tt>toArray(new Object[0])</tt> is identical in function to
+ * <tt>toArray()</tt>.
*
* @param a the array into which the elements of the queue are to
* be stored, if it is big enough; otherwise, a new array of the
@@ -593,32 +493,29 @@ public class LinkedBlockingQueue<E> extends AbstractQueue<E>
* this queue
* @throws NullPointerException if the specified array is null
*/
- @SuppressWarnings("unchecked")
- public <T> T[] toArray(T[] a) {
- fullyLock();
- try {
- int size = count.get();
- if (a.length < size)
- a = (T[])java.lang.reflect.Array.newInstance
- (a.getClass().getComponentType(), size);
-
- int k = 0;
- for (Node<E> p = head.next; p != null; p = p.next)
- a[k++] = (T)p.item;
- if (a.length > k)
- a[k] = null;
- return a;
- } finally {
- fullyUnlock();
+ public Object[] toArray(Object[] a) {
+ synchronized (putLock) {
+ synchronized (takeLock) {
+ int size = count;
+ if (a.length < size)
+ a = (Object[])java.lang.reflect.Array.newInstance
+ (a.getClass().getComponentType(), size);
+
+ int k = 0;
+ for (Node p = head.next; p != null; p = p.next)
+ a[k++] = (Object)p.item;
+ if (a.length > k)
+ a[k] = null;
+ return a;
+ }
}
}
public String toString() {
- fullyLock();
- try {
- return super.toString();
- } finally {
- fullyUnlock();
+ synchronized (putLock) {
+ synchronized (takeLock) {
+ return super.toString();
+ }
}
}
@@ -627,18 +524,19 @@ public class LinkedBlockingQueue<E> extends AbstractQueue<E>
* The queue will be empty after this call returns.
*/
public void clear() {
- fullyLock();
- try {
- for (Node<E> p, h = head; (p = h.next) != null; h = p) {
- h.next = h;
- p.item = null;
+ synchronized (putLock) {
+ synchronized (takeLock) {
+ head.next = null;
+ assert head.item == null;
+ last = head;
+ int c;
+ synchronized (this) {
+ c = count;
+ count = 0;
+ }
+ if (c == capacity)
+ putLock.notifyAll();
}
- head = last;
- // assert head.item == null && head.next == null;
- if (count.getAndSet(0) == capacity)
- notFull.signal();
- } finally {
- fullyUnlock();
}
}
@@ -648,8 +546,35 @@ public class LinkedBlockingQueue<E> extends AbstractQueue<E>
* @throws NullPointerException {@inheritDoc}
* @throws IllegalArgumentException {@inheritDoc}
*/
- public int drainTo(Collection<? super E> c) {
- return drainTo(c, Integer.MAX_VALUE);
+ public int drainTo(Collection c) {
+ if (c == null)
+ throw new NullPointerException();
+ if (c == this)
+ throw new IllegalArgumentException();
+ Node first;
+ synchronized (putLock) {
+ synchronized (takeLock) {
+ first = head.next;
+ head.next = null;
+ assert head.item == null;
+ last = head;
+ int cold;
+ synchronized (this) {
+ cold = count;
+ count = 0;
+ }
+ if (cold == capacity)
+ putLock.notifyAll();
+ }
+ }
+ // Transfer the elements outside of locks
+ int n = 0;
+ for (Node p = first; p != null; p = p.next) {
+ c.add(p.item);
+ p.item = null;
+ ++n;
+ }
+ return n;
}
/**
@@ -658,77 +583,70 @@ public class LinkedBlockingQueue<E> extends AbstractQueue<E>
* @throws NullPointerException {@inheritDoc}
* @throws IllegalArgumentException {@inheritDoc}
*/
- public int drainTo(Collection<? super E> c, int maxElements) {
+ public int drainTo(Collection c, int maxElements) {
if (c == null)
throw new NullPointerException();
if (c == this)
throw new IllegalArgumentException();
- boolean signalNotFull = false;
- final ReentrantLock takeLock = this.takeLock;
- takeLock.lock();
- try {
- int n = Math.min(maxElements, count.get());
- // count.get provides visibility to first n Nodes
- Node<E> h = head;
- int i = 0;
- try {
- while (i < n) {
- Node<E> p = h.next;
+ synchronized (putLock) {
+ synchronized (takeLock) {
+ int n = 0;
+ Node p = head.next;
+ while (p != null && n < maxElements) {
c.add(p.item);
p.item = null;
- h.next = h;
- h = p;
- ++i;
+ p = p.next;
+ ++n;
}
- return n;
- } finally {
- // Restore invariants even if c.add() threw
- if (i > 0) {
- // assert h.item == null;
- head = h;
- signalNotFull = (count.getAndAdd(-i) == capacity);
+ if (n != 0) {
+ head.next = p;
+ assert head.item == null;
+ if (p == null)
+ last = head;
+ int cold;
+ synchronized (this) {
+ cold = count;
+ count -= n;
+ }
+ if (cold == capacity)
+ putLock.notifyAll();
}
+ return n;
}
- } finally {
- takeLock.unlock();
- if (signalNotFull)
- signalNotFull();
}
}
/**
* Returns an iterator over the elements in this queue in proper sequence.
- * The returned {@code Iterator} is a "weakly consistent" iterator that
- * will never throw {@link java.util.ConcurrentModificationException
- * ConcurrentModificationException},
+ * The returned <tt>Iterator</tt> is a "weakly consistent" iterator that
+ * will never throw {@link java.util.ConcurrentModificationException},
* and guarantees to traverse elements as they existed upon
* construction of the iterator, and may (but is not guaranteed to)
* reflect any modifications subsequent to construction.
*
* @return an iterator over the elements in this queue in proper sequence
*/
- public Iterator<E> iterator() {
+ public Iterator iterator() {
return new Itr();
}
- private class Itr implements Iterator<E> {
+ private class Itr implements Iterator {
/*
- * Basic weakly-consistent iterator. At all times hold the next
+ * Basic weak-consistent iterator. At all times hold the next
* item to hand out so that if hasNext() reports true, we will
* still have it to return even if lost race with a take etc.
*/
- private Node<E> current;
- private Node<E> lastRet;
- private E currentElement;
+ private Node current;
+ private Node lastRet;
+ private Object currentElement;
Itr() {
- fullyLock();
- try {
- current = head.next;
- if (current != null)
- currentElement = current.item;
- } finally {
- fullyUnlock();
+ synchronized (putLock) {
+ synchronized (takeLock) {
+ current = head.next;
+ if (current != null)
+ currentElement = current.item;
+ }
}
}
@@ -736,56 +654,45 @@ public class LinkedBlockingQueue<E> extends AbstractQueue<E>
return current != null;
}
- /**
- * Returns the next live successor of p, or null if no such.
- *
- * Unlike other traversal methods, iterators need to handle both:
- * - dequeued nodes (p.next == p)
- * - (possibly multiple) interior removed nodes (p.item == null)
- */
- private Node<E> nextNode(Node<E> p) {
- for (;;) {
- Node<E> s = p.next;
- if (s == p)
- return head.next;
- if (s == null || s.item != null)
- return s;
- p = s;
- }
- }
-
- public E next() {
- fullyLock();
- try {
- if (current == null)
- throw new NoSuchElementException();
- E x = currentElement;
- lastRet = current;
- current = nextNode(current);
- currentElement = (current == null) ? null : current.item;
- return x;
- } finally {
- fullyUnlock();
+ public Object next() {
+ synchronized (putLock) {
+ synchronized (takeLock) {
+ if (current == null)
+ throw new NoSuchElementException();
+ Object x = currentElement;
+ lastRet = current;
+ current = current.next;
+ if (current != null)
+ currentElement = current.item;
+ return x;
+ }
}
}
public void remove() {
if (lastRet == null)
throw new IllegalStateException();
- fullyLock();
- try {
- Node<E> node = lastRet;
- lastRet = null;
- for (Node<E> trail = head, p = trail.next;
- p != null;
- trail = p, p = p.next) {
+ synchronized (putLock) {
+ synchronized (takeLock) {
+ Node node = lastRet;
+ lastRet = null;
+ Node trail = head;
+ Node p = head.next;
+ while (p != null && p != node) {
+ trail = p;
+ p = p.next;
+ }
if (p == node) {
- unlink(p, trail);
- break;
+ p.item = null;
+ trail.next = p.next;
+ if (last == p)
+ last = trail;
+ int c;
+ synchronized (this) { c = count--; }
+ if (c == capacity)
+ putLock.notifyAll();
}
}
- } finally {
- fullyUnlock();
}
}
}
@@ -794,33 +701,31 @@ public class LinkedBlockingQueue<E> extends AbstractQueue<E>
* Save the state to a stream (that is, serialize it).
*
* @serialData The capacity is emitted (int), followed by all of
- * its elements (each an {@code Object}) in the proper order,
+ * its elements (each an <tt>Object</tt>) in the proper order,
* followed by a null
* @param s the stream
*/
private void writeObject(java.io.ObjectOutputStream s)
throws java.io.IOException {
- fullyLock();
- try {
- // Write out any hidden stuff, plus capacity
- s.defaultWriteObject();
+ synchronized (putLock) {
+ synchronized (takeLock) {
+ // Write out any hidden stuff, plus capacity
+ s.defaultWriteObject();
- // Write out all elements in the proper order.
- for (Node<E> p = head.next; p != null; p = p.next)
- s.writeObject(p.item);
+ // Write out all elements in the proper order.
+ for (Node p = head.next; p != null; p = p.next)
+ s.writeObject(p.item);
- // Use trailing null as sentinel
- s.writeObject(null);
- } finally {
- fullyUnlock();
+ // Use trailing null as sentinel
+ s.writeObject(null);
+ }
}
}
/**
* Reconstitute this queue instance from a stream (that is,
* deserialize it).
- *
* @param s the stream
*/
private void readObject(java.io.ObjectInputStream s)
@@ -828,16 +733,19 @@ public class LinkedBlockingQueue<E> extends AbstractQueue<E>
// Read in capacity, and any hidden stuff
s.defaultReadObject();
- count.set(0);
- last = head = new Node<E>(null);
+ synchronized (this) { count = 0; }
+ last = head = new Node(null);
// Read in all elements and place in queue
for (;;) {
- @SuppressWarnings("unchecked")
- E item = (E)s.readObject();
+ Object item = (Object)s.readObject();
if (item == null)
break;
add(item);
}
}
+
+ private static class SerializableLock implements java.io.Serializable {
+ private final static long serialVersionUID = -8856990691138858668L;
+ }
}
diff --git a/src/actors/scala/actors/threadpool/ThreadPoolExecutor.java b/src/actors/scala/actors/threadpool/ThreadPoolExecutor.java
index 11e35b034c..f41b2790b6 100644
--- a/src/actors/scala/actors/threadpool/ThreadPoolExecutor.java
+++ b/src/actors/scala/actors/threadpool/ThreadPoolExecutor.java
@@ -791,7 +791,7 @@ public class ThreadPoolExecutor extends AbstractExecutorService {
*/
private List drainQueue() {
BlockingQueue q = workQueue;
- List<Runnable> taskList = new ArrayList<Runnable>();
+ List taskList = new ArrayList();
q.drainTo(taskList);
if (!q.isEmpty()) {
Runnable[] arr = (Runnable[])q.toArray(new Runnable[0]);
diff --git a/src/actors/scala/actors/threadpool/helpers/FIFOWaitQueue.java b/src/actors/scala/actors/threadpool/helpers/FIFOWaitQueue.java
index 432b851f3e..6306faa08f 100644
--- a/src/actors/scala/actors/threadpool/helpers/FIFOWaitQueue.java
+++ b/src/actors/scala/actors/threadpool/helpers/FIFOWaitQueue.java
@@ -64,7 +64,7 @@ public class FIFOWaitQueue extends WaitQueue implements java.io.Serializable {
}
public Collection getWaitingThreads() {
- List<Thread> list = new ArrayList<Thread>();
+ List list = new ArrayList();
int count = 0;
WaitNode node = head_;
while (node != null) {
diff --git a/src/actors/scala/actors/threadpool/helpers/Utils.java b/src/actors/scala/actors/threadpool/helpers/Utils.java
index d12389215d..df1dbd4960 100644
--- a/src/actors/scala/actors/threadpool/helpers/Utils.java
+++ b/src/actors/scala/actors/threadpool/helpers/Utils.java
@@ -41,9 +41,9 @@ public final class Utils {
static {
NanoTimer timer = null;
try {
- String nanoTimerClassName =
- AccessController.doPrivileged(new PrivilegedAction<String>() {
- public String run() {
+ String nanoTimerClassName = (String)
+ AccessController.doPrivileged(new PrivilegedAction() {
+ public Object run() {
return System.getProperty(providerProp);
}
});
@@ -206,9 +206,9 @@ public final class Utils {
final Perf perf;
final long multiplier, divisor;
SunPerfProvider() {
- perf =
- AccessController.doPrivileged(new PrivilegedAction<Perf>() {
- public Perf run() {
+ perf = (Perf)
+ AccessController.doPrivileged(new PrivilegedAction() {
+ public Object run() {
return Perf.getPerf();
}
});
diff --git a/src/actors/scala/actors/threadpool/locks/CondVar.java b/src/actors/scala/actors/threadpool/locks/CondVar.java
index 44df1c0b97..132e72fe2a 100644
--- a/src/actors/scala/actors/threadpool/locks/CondVar.java
+++ b/src/actors/scala/actors/threadpool/locks/CondVar.java
@@ -17,7 +17,6 @@ import scala.actors.threadpool.*;
import scala.actors.threadpool.helpers.*;
class CondVar implements Condition, java.io.Serializable {
- private static final long serialVersionUID = -5009898475638427940L;
/** The lock **/
protected final ExclusiveLock lock;
diff --git a/src/actors/scala/actors/threadpool/locks/FIFOCondVar.java b/src/actors/scala/actors/threadpool/locks/FIFOCondVar.java
index 144ac54d37..7495a8a884 100644
--- a/src/actors/scala/actors/threadpool/locks/FIFOCondVar.java
+++ b/src/actors/scala/actors/threadpool/locks/FIFOCondVar.java
@@ -17,7 +17,6 @@ import scala.actors.threadpool.*;
import scala.actors.threadpool.helpers.*;
class FIFOCondVar extends CondVar implements Condition, java.io.Serializable {
- private static final long serialVersionUID = -497497271881010475L;
private static final WaitQueue.QueuedSync sync = new WaitQueue.QueuedSync() {
public boolean recheck(WaitQueue.WaitNode node) { return false; }
diff --git a/src/actors/scala/actors/threadpool/locks/ReentrantReadWriteLock.java b/src/actors/scala/actors/threadpool/locks/ReentrantReadWriteLock.java
index 437af77c7a..6411bbea01 100644
--- a/src/actors/scala/actors/threadpool/locks/ReentrantReadWriteLock.java
+++ b/src/actors/scala/actors/threadpool/locks/ReentrantReadWriteLock.java
@@ -190,7 +190,7 @@ public class ReentrantReadWriteLock implements ReadWriteLock, java.io.Serializab
transient int writeHolds_ = 0;
/** Number of acquires on read lock by any reader thread **/
- transient HashMap<Thread, Integer> readers_ = new HashMap<Thread, Integer>();
+ transient HashMap readers_ = new HashMap();
/** cache/reuse the special Integer value one to speed up readlocks **/
static final Integer IONE = new Integer(1);
@@ -344,7 +344,7 @@ public class ReentrantReadWriteLock implements ReadWriteLock, java.io.Serializab
synchronized int getReadHoldCount() {
if (activeReaders_ == 0) return 0;
Thread t = Thread.currentThread();
- Integer i = readers_.get(t);
+ Integer i = (Integer)readers_.get(t);
return (i == null) ? 0 : i.intValue();
}
@@ -363,7 +363,7 @@ public class ReentrantReadWriteLock implements ReadWriteLock, java.io.Serializab
// and ensure visibility by synchronizing (all other accesses to
// readers_ are also synchronized on "this")
synchronized (this) {
- readers_ = new HashMap<Thread, Integer>();
+ readers_ = new HashMap();
}
}
}
@@ -372,8 +372,6 @@ public class ReentrantReadWriteLock implements ReadWriteLock, java.io.Serializab
* Nonfair version of Sync
*/
private static class NonfairSync extends Sync {
- private static final long serialVersionUID = -2392241841540339773L;
-
NonfairSync() {}
}
diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala
index 768f207968..7efd8ad2a0 100644
--- a/src/compiler/scala/tools/nsc/Global.scala
+++ b/src/compiler/scala/tools/nsc/Global.scala
@@ -216,10 +216,8 @@ class Global(var settings: Settings, var reporter: Reporter) extends SymbolTable
}
}
- if (settings.verbose.value || settings.Ylogcp.value) {
- inform("[search path for source files: " + classPath.sourcepaths.mkString(",") + "]")
- inform("[search path for class files: " + classPath.asClasspathString + "]")
- }
+ if (settings.verbose.value || settings.Ylogcp.value)
+ inform("[Classpath = " + classPath.asClasspathString + "]")
/** True if -Xscript has been set, indicating a script run.
*/
diff --git a/src/compiler/scala/tools/nsc/Interpreter.scala b/src/compiler/scala/tools/nsc/Interpreter.scala
index 5d11973a74..128c378e23 100644
--- a/src/compiler/scala/tools/nsc/Interpreter.scala
+++ b/src/compiler/scala/tools/nsc/Interpreter.scala
@@ -146,7 +146,7 @@ class Interpreter(val settings: Settings, out: PrintWriter) {
else null
}
- import compiler.{ Traverser, CompilationUnit, Symbol, Name, Type, TypeRef, PolyType }
+ import compiler.{ Traverser, CompilationUnit, Symbol, Name, Type }
import compiler.{
Tree, TermTree, ValOrDefDef, ValDef, DefDef, Assign, ClassDef,
ModuleDef, Ident, Select, TypeDef, Import, MemberDef, DocDef,
@@ -946,19 +946,14 @@ class Interpreter(val settings: Settings, out: PrintWriter) {
lazy val typeOf: Map[Name, String] = {
def getTypes(names: List[Name], nameMap: Name => Name): Map[Name, String] = {
names.foldLeft(Map.empty[Name, String]) { (map, name) =>
- val tp1 = atNextPhase(resObjSym.info.nonPrivateDecl(name).tpe)
+ val rawType = atNextPhase(resObjSym.info.member(name).tpe)
// the types are all =>T; remove the =>
- val tp2 = tp1 match {
- case PolyType(Nil, tp) => tp
- case tp => tp
+ val cleanedType = rawType match {
+ case compiler.PolyType(Nil, rt) => rt
+ case rawType => rawType
}
- // normalize non-public types so we don't see protected aliases like Self
- val tp3 = compiler.atPhase(objRun.typerPhase)(tp2 match {
- case TypeRef(_, sym, _) if !sym.isPublic => tp2.normalize.toString
- case tp => tp.toString
- })
- map + (name -> tp3)
+ map + (name -> atNextPhase(cleanedType.toString))
}
}
diff --git a/src/compiler/scala/tools/nsc/MainGenericRunner.scala b/src/compiler/scala/tools/nsc/MainGenericRunner.scala
index 96d6846d64..936ee3c1db 100644
--- a/src/compiler/scala/tools/nsc/MainGenericRunner.scala
+++ b/src/compiler/scala/tools/nsc/MainGenericRunner.scala
@@ -13,7 +13,7 @@ import java.net.{ URL, MalformedURLException }
import scala.tools.util.PathResolver
import io.{ File, Process }
-import util.{ ClassPath, ScalaClassLoader, waitingForThreads }
+import util.{ ClassPath, ScalaClassLoader }
import Properties.{ versionString, copyrightString }
/** An object that runs Scala code. It has three possible
diff --git a/src/compiler/scala/tools/nsc/ScriptRunner.scala b/src/compiler/scala/tools/nsc/ScriptRunner.scala
index 9a6aa78352..032e8d0bb8 100644
--- a/src/compiler/scala/tools/nsc/ScriptRunner.scala
+++ b/src/compiler/scala/tools/nsc/ScriptRunner.scala
@@ -17,7 +17,6 @@ import java.lang.reflect.InvocationTargetException
import java.net.URL
import java.util.jar.{ JarEntry, JarOutputStream }
-import util.waitingForThreads
import scala.tools.util.PathResolver
import scala.tools.nsc.reporters.{Reporter,ConsoleReporter}
@@ -199,36 +198,31 @@ object ScriptRunner {
else None
}
- /** The script runner calls System.exit to communicate a return value, but this must
- * not take place until there are no non-daemon threads running. Tickets #1955, #2006.
- */
- waitingForThreads {
- if (settings.savecompiled.value) {
- val jarFile = jarFileFor(scriptFile)
- def jarOK = jarFile.canRead && (jarFile isFresher File(scriptFile))
-
- def recompile() = {
- jarFile.delete()
-
- compile match {
- case Some(compiledPath) =>
- tryMakeJar(jarFile, compiledPath)
- if (jarOK) {
- compiledPath.deleteRecursively()
- handler(jarFile.toAbsolute.path)
- }
- // jar failed; run directly from the class files
- else handler(compiledPath.path)
- case _ => false
- }
+ if (settings.savecompiled.value) {
+ val jarFile = jarFileFor(scriptFile)
+ def jarOK = jarFile.canRead && (jarFile isFresher File(scriptFile))
+
+ def recompile() = {
+ jarFile.delete()
+
+ compile match {
+ case Some(compiledPath) =>
+ tryMakeJar(jarFile, compiledPath)
+ if (jarOK) {
+ compiledPath.deleteRecursively()
+ handler(jarFile.toAbsolute.path)
+ }
+ // jar failed; run directly from the class files
+ else handler(compiledPath.path)
+ case _ => false
}
-
- if (jarOK) handler(jarFile.toAbsolute.path) // pre-compiled jar is current
- else recompile() // jar old - recompile the script.
}
- // don't use a cache jar at all--just use the class files
- else compile map (cp => handler(cp.path)) getOrElse false
+
+ if (jarOK) handler(jarFile.toAbsolute.path) // pre-compiled jar is current
+ else recompile() // jar old - recompile the script.
}
+ // don't use a cache jar at all--just use the class files
+ else compile map (cp => handler(cp.path)) getOrElse false
}
/** Run a script after it has been compiled
diff --git a/src/compiler/scala/tools/nsc/ast/DocComments.scala b/src/compiler/scala/tools/nsc/ast/DocComments.scala
index d851d6ffdf..735416874d 100755
--- a/src/compiler/scala/tools/nsc/ast/DocComments.scala
+++ b/src/compiler/scala/tools/nsc/ast/DocComments.scala
@@ -415,7 +415,7 @@ trait DocComments { self: SymbolTable =>
}
for (defn <- defined) yield {
- defn.cloneSymbol.setFlag(Flags.SYNTHETIC).setInfo(
+ defn.cloneSymbol(defn.owner).setFlag(Flags.SYNTHETIC).setInfo(
substAliases(defn.info).asSeenFrom(site.thisType, defn.owner))
}
}
diff --git a/src/compiler/scala/tools/nsc/ast/TreeDSL.scala b/src/compiler/scala/tools/nsc/ast/TreeDSL.scala
index a24c8c01d3..34d3423401 100644
--- a/src/compiler/scala/tools/nsc/ast/TreeDSL.scala
+++ b/src/compiler/scala/tools/nsc/ast/TreeDSL.scala
@@ -76,24 +76,20 @@ trait TreeDSL {
else gen.mkAnd(target, other)
/** Note - calling ANY_== in the matcher caused primitives to get boxed
- * for the comparison, whereas looking up nme.EQ does not. See #3570 for
- * an example of how target.tpe can be non-null, yet it claims not to have
- * a mmeber called nme.EQ. Not sure if that should happen, but we can be
- * robust by dragging in Any regardless.
+ * for the comparison, whereas looking up nme.EQ does not.
*/
def MEMBER_== (other: Tree) = {
- val opSym = if (target.tpe == null) NoSymbol else target.tpe member nme.EQ
- if (opSym == NoSymbol) ANY_==(other)
- else fn(target, opSym, other)
+ if (target.tpe == null) ANY_==(other)
+ else fn(target, target.tpe member nme.EQ, other)
}
- def ANY_EQ (other: Tree) = fn(target, nme.eq, toAnyRef(other))
def ANY_NE (other: Tree) = fn(target, nme.ne, toAnyRef(other))
+ def ANY_EQ (other: Tree) = fn(target, nme.eq, toAnyRef(other))
def ANY_== (other: Tree) = fn(target, Any_==, other)
- def ANY_!= (other: Tree) = fn(target, Any_!=, other)
- def OBJ_== (other: Tree) = fn(target, Object_==, other)
- def OBJ_!= (other: Tree) = fn(target, Object_!=, other)
- def OBJ_EQ (other: Tree) = fn(target, Object_eq, other)
- def OBJ_NE (other: Tree) = fn(target, Object_ne, other)
+ def ANY_>= (other: Tree) = fn(target, nme.GE, other)
+ def ANY_<= (other: Tree) = fn(target, nme.LE, other)
+ def OBJ_!= (other: Tree) = fn(target, Object_ne, other)
+ def OBJ_EQ (other: Tree) = fn(target, nme.eq, other)
+ def OBJ_NE (other: Tree) = fn(target, nme.ne, other)
def INT_| (other: Tree) = fn(target, getMember(IntClass, nme.OR), other)
def INT_& (other: Tree) = fn(target, getMember(IntClass, nme.AND), other)
diff --git a/src/compiler/scala/tools/nsc/ast/TreeGen.scala b/src/compiler/scala/tools/nsc/ast/TreeGen.scala
index 41e93ae386..5044105684 100644
--- a/src/compiler/scala/tools/nsc/ast/TreeGen.scala
+++ b/src/compiler/scala/tools/nsc/ast/TreeGen.scala
@@ -137,7 +137,7 @@ abstract class TreeGen {
assert(!tree.tpe.isInstanceOf[MethodType], tree)
assert(!pt.typeSymbol.isPackageClass)
assert(!pt.typeSymbol.isPackageObjectClass)
- assert(pt eq pt.normalize, tree +" : "+ debugString(pt) +" ~>"+ debugString(pt.normalize)) //@MAT only called during erasure, which already takes care of that
+ assert(pt eq pt.normalize) //@MAT only called during erasure, which already takes care of that
atPos(tree.pos)(mkAsInstanceOf(tree, pt, false))
}
diff --git a/src/compiler/scala/tools/nsc/ast/TreeInfo.scala b/src/compiler/scala/tools/nsc/ast/TreeInfo.scala
index 46ddf7b24b..f21b1b20ff 100644
--- a/src/compiler/scala/tools/nsc/ast/TreeInfo.scala
+++ b/src/compiler/scala/tools/nsc/ast/TreeInfo.scala
@@ -151,12 +151,6 @@ abstract class TreeInfo {
case _ :: stats1 => firstConstructor(stats1)
}
- /** The arguments to the first constructor in `stats'. */
- def firstConstructorArgs(stats: List[Tree]): List[Tree] = firstConstructor(stats) match {
- case DefDef(_, _, _, args :: _, _, _) => args
- case _ => Nil
- }
-
/** The value definitions marked PRESUPER in this statement sequence */
def preSuperFields(stats: List[Tree]): List[ValDef] =
for (vdef @ ValDef(mods, _, _, _) <- stats if mods hasFlag PRESUPER) yield vdef
diff --git a/src/compiler/scala/tools/nsc/ast/TreePrinters.scala b/src/compiler/scala/tools/nsc/ast/TreePrinters.scala
index 10b50db6d5..ddc1c3169a 100644
--- a/src/compiler/scala/tools/nsc/ast/TreePrinters.scala
+++ b/src/compiler/scala/tools/nsc/ast/TreePrinters.scala
@@ -117,7 +117,7 @@ trait TreePrinters { trees: SymbolTable =>
def pw = tree.symbol.privateWithin
val args =
if (tree.symbol == NoSymbol) (mods.flags, mods.privateWithin)
- else if (pw == NoSymbol) (tree.symbol.flags, "")
+ else if (pw == NoSymbol || pw == tree.symbol.owner) (tree.symbol.flags, "")
else (tree.symbol.flags, pw.name)
printFlags(args._1, args._2.toString)
@@ -379,9 +379,6 @@ trait TreePrinters { trees: SymbolTable =>
case SelectFromArray(qualifier, name, _) =>
print(qualifier); print(".<arr>"); print(symName(tree, name))
- case TypeTreeWithDeferredRefCheck() =>
- print("<tree with deferred refcheck>")
-
case tree =>
print("<unknown tree of class "+tree.getClass+">")
}
@@ -578,7 +575,6 @@ trait TreePrinters { trees: SymbolTable =>
// eliminated by refchecks
case ModuleDef(mods, name, impl) =>
- case TypeTreeWithDeferredRefCheck() =>
// eliminated by erasure
case TypeDef(mods, name, tparams, rhs) =>
diff --git a/src/compiler/scala/tools/nsc/ast/Trees.scala b/src/compiler/scala/tools/nsc/ast/Trees.scala
index dbe4a587ba..35db3c0984 100644
--- a/src/compiler/scala/tools/nsc/ast/Trees.scala
+++ b/src/compiler/scala/tools/nsc/ast/Trees.scala
@@ -338,9 +338,6 @@ trait Trees extends reflect.generic.Trees { self: SymbolTable =>
case class Parens(args: List[Tree]) extends Tree // only used during parsing
- /** emitted by typer, eliminated by refchecks **/
- case class TypeTreeWithDeferredRefCheck()(val check: () => TypeTree) extends AbsTypeTree
-
// ----- subconstructors --------------------------------------------
class ApplyToImplicitArgs(fun: Tree, args: List[Tree]) extends Apply(fun, args)
@@ -386,7 +383,6 @@ trait Trees extends reflect.generic.Trees { self: SymbolTable =>
def Ident(tree: Tree, name: Name): Ident
def Literal(tree: Tree, value: Constant): Literal
def TypeTree(tree: Tree): TypeTree
- def TypeTreeWithDeferredRefCheck(tree: Tree): TypeTreeWithDeferredRefCheck
def Annotated(tree: Tree, annot: Tree, arg: Tree): Annotated
def SingletonTypeTree(tree: Tree, ref: Tree): SingletonTypeTree
def SelectFromTypeTree(tree: Tree, qualifier: Tree, selector: Name): SelectFromTypeTree
@@ -474,9 +470,6 @@ trait Trees extends reflect.generic.Trees { self: SymbolTable =>
new Literal(value).copyAttrs(tree)
def TypeTree(tree: Tree) =
new TypeTree().copyAttrs(tree)
- def TypeTreeWithDeferredRefCheck(tree: Tree) = tree match {
- case dc@TypeTreeWithDeferredRefCheck() => new TypeTreeWithDeferredRefCheck()(dc.check).copyAttrs(tree)
- }
def Annotated(tree: Tree, annot: Tree, arg: Tree) =
new Annotated(annot, arg).copyAttrs(tree)
def SingletonTypeTree(tree: Tree, ref: Tree) =
@@ -677,10 +670,6 @@ trait Trees extends reflect.generic.Trees { self: SymbolTable =>
case t @ TypeTree() => t
case _ => treeCopy.TypeTree(tree)
}
- def TypeTreeWithDeferredRefCheck(tree: Tree) = tree match {
- case t @ TypeTreeWithDeferredRefCheck() => t
- case _ => treeCopy.TypeTreeWithDeferredRefCheck(tree)
- }
def Annotated(tree: Tree, annot: Tree, arg: Tree) = tree match {
case t @ Annotated(annot0, arg0)
if (annot0==annot) => t
@@ -827,8 +816,6 @@ trait Trees extends reflect.generic.Trees { self: SymbolTable =>
treeCopy.Literal(tree, value)
case TypeTree() =>
treeCopy.TypeTree(tree)
- case TypeTreeWithDeferredRefCheck() =>
- treeCopy.TypeTreeWithDeferredRefCheck(tree)
case Annotated(annot, arg) =>
treeCopy.Annotated(tree, transform(annot), transform(arg))
case SingletonTypeTree(ref) =>
@@ -891,8 +878,6 @@ trait Trees extends reflect.generic.Trees { self: SymbolTable =>
traverse(definition)
case Parens(ts) =>
traverseTrees(ts)
- case TypeTreeWithDeferredRefCheck() => // TODO: should we traverse the wrapped tree?
- // (and rewrap the result? how to update the deferred check? would need to store wrapped tree instead of returning it from check)
case _ => super.traverse(tree)
}
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
index 7a28fd0538..e28f07e840 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
@@ -1312,9 +1312,6 @@ self =>
}
def simpleExprRest(t: Tree, canApply: Boolean): Tree = {
- // Various errors in XML literals can cause xmlLiteral to propagate
- // EmptyTree's. Watch out for them here (see also postfixExpr).
- if (EmptyTree == t) return EmptyTree // #3604 (mics)
if (canApply) newLineOptWhenFollowedBy(LBRACE)
in.token match {
case DOT =>
@@ -1823,7 +1820,7 @@ self =>
if (in.token != RPAREN) {
if (in.token == IMPLICIT) {
if (!contextBounds.isEmpty)
- syntaxError("cannot have both implicit parameters and context bounds `: ...' or view bounds `<% ...' on type parameters", false)
+ syntaxError("cannot have both implicit parameters and context bounds `: ...' on type parameters", false)
in.nextToken()
implicitmod = Flags.IMPLICIT
}
@@ -2331,7 +2328,7 @@ self =>
classContextBounds = contextBoundBuf.toList
val tstart = (in.offset::classContextBounds.map(_.pos.startOrPoint)).min
if (!classContextBounds.isEmpty && mods.hasFlag(Flags.TRAIT)) {
- syntaxError("traits cannot have type parameters with context bounds `: ...' nor view bounds `<% ...'", false)
+ syntaxError("traits cannot have type parameters with context bounds `: ...'", false)
classContextBounds = List()
}
val constrAnnots = annotations(false, true)
@@ -2734,10 +2731,10 @@ self =>
topstats() match {
case List(stat @ PackageDef(_, _)) => stat
case stats =>
- val start =
- if (stats forall (_ == EmptyTree)) 0
- else wrappingPos(stats).startOrPoint
-
+ val start = stats match {
+ case Nil => 0
+ case _ => wrappingPos(stats).startOrPoint
+ }
makePackaging(start, atPos(start, start, start) { Ident(nme.EMPTY_PACKAGE_NAME) }, stats)
}
}
diff --git a/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala b/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala
index b86e22787b..5e9898a7e6 100644
--- a/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala
+++ b/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala
@@ -7,9 +7,9 @@
package scala.tools.nsc
package backend.opt
+
import scala.util.control.Breaks._
-import scala.collection.{ mutable, immutable }
-import mutable.{ HashMap, HashSet }
+import scala.collection.mutable.{Map, HashMap, Set, HashSet}
import scala.tools.nsc.symtab._
/**
@@ -19,11 +19,6 @@ abstract class Inliners extends SubComponent {
import global._
import icodes._
import icodes.opcodes._
- import definitions.{
- NullClass, NothingClass, ObjectClass,
- PredefModule, RuntimePackage, ScalaInlineClass, ScalaNoInlineClass,
- isFunctionType
- }
val phaseName = "inliner"
@@ -33,24 +28,15 @@ abstract class Inliners extends SubComponent {
val res = body
val t2 = System.currentTimeMillis()
val ms = (t2 - t1).toInt
- if (ms >= MAX_INLINE_MILLIS)
+ if (ms >= 2000)
println("%s: %d milliseconds".format(s, ms))
res
}
- /* A warning threshold */
- private final val MAX_INLINE_MILLIS = 2000
-
/** The maximum size in basic blocks of methods considered for inlining. */
final val MAX_INLINE_SIZE = 16
- /** Maximum loop iterations. */
- final val MAX_INLINE_RETRY = 15
-
- /** Small method size (in blocks) */
- val SMALL_METHOD_SIZE = 1
-
/** Create a new phase */
override def newPhase(p: Phase) = new InliningPhase(p)
@@ -61,59 +47,251 @@ abstract class Inliners extends SubComponent {
val inliner = new Inliner
override def apply(c: IClass) {
- inliner analyzeClass c
+ inliner.analyzeClass(c)
}
}
- def isBottomType(sym: Symbol) = sym == NullClass || sym == NothingClass
- def posToStr(pos: util.Position) = if (pos.isDefined) pos.point.toString else "<nopos>"
-
- /** Is the given class a closure? */
- def isClosureClass(cls: Symbol): Boolean =
- cls.isFinal && cls.isSynthetic && !cls.isModuleClass && cls.isAnonymousFunction
-
/**
* Simple inliner.
+ *
*/
class Inliner {
- object NonPublicRefs extends Enumeration {
- val Public, Protected, Private = Value
- /** Cache whether a method calls private members. */
- val usesNonPublics: mutable.Map[IMethod, Value] = new HashMap
- }
- import NonPublicRefs._
+ val fresh = new HashMap[String, Int]
/* fresh name counter */
- val fresh = new HashMap[String, Int]
var count = 0
- def freshName(s: String) = {
- val count = fresh.getOrElseUpdate(s, 0)
- fresh(s) += 1
- s + count
+
+ def freshName(s: String) = fresh.get(s) match {
+ case Some(count) =>
+ fresh(s) = count + 1
+ s + count
+ case None =>
+ fresh(s) = 1
+ s + "0"
}
- private def hasInline(sym: Symbol) = sym hasAnnotation ScalaInlineClass
- private def hasNoInline(sym: Symbol) = sym hasAnnotation ScalaNoInlineClass
+ lazy val ScalaInlineAttr = definitions.getClass("scala.inline")
+ lazy val ScalaNoInlineAttr = definitions.getClass("scala.noinline")
+
+ /** Inline the 'callee' method inside the 'caller' in the given
+ * basic block, at the given instruction (which has to be a CALL_METHOD).
+ */
+ def inline(caller: IMethod,
+ block: BasicBlock,
+ instr: Instruction,
+ callee: IMethod) {
+ def posToStr(pos: util.Position) = if (pos.isDefined) pos.point.toString else "<nopos>"
+ log("Inlining " + callee + " in " + caller + " at pos: " + posToStr(instr.pos))
+
+ val targetPos = instr.pos
+ val a = new analysis.MethodTFA(callee)
+
+ /* The exception handlers that are active at the current block. */
+ val activeHandlers = caller.exh.filter(_.covered.contains(block))
+
+ /* Map 'original' blocks to the ones inlined in the caller. */
+ val inlinedBlock: Map[BasicBlock, BasicBlock] = new HashMap
+
+ val varsInScope: Set[Local] = HashSet() ++= block.varsInScope
+
+ val instrBefore = block.toList.takeWhile {
+ case i @ SCOPE_ENTER(l) => varsInScope += l
+ i ne instr
+ case i =>
+ i ne instr
+ }
+ val instrAfter = block.toList.drop(instrBefore.length + 1);
+
+ assert(!instrAfter.isEmpty, "CALL_METHOD cannot be the last instruction in block!");
+
+ // store the '$this' into the special local
+ val inlinedThis = new Local(caller.symbol.newVariable(instr.pos, freshName("$inlThis")), REFERENCE(definitions.ObjectClass), false);
+
+ /** buffer for the returned value */
+ val retVal =
+ if (callee.returnType != UNIT)
+ new Local(caller.symbol.newVariable(instr.pos, freshName("$retVal")), callee.returnType, false);
+ else
+ null;
+
+ /** Add a new block in the current context. */
+ def newBlock = {
+ val b = caller.code.newBlock
+ activeHandlers.foreach (_.addCoveredBlock(b))
+ if (retVal ne null) b.varsInScope += retVal
+ b.varsInScope += inlinedThis
+ b.varsInScope ++= varsInScope
+ b
+ }
+
+ def translateExh(e: ExceptionHandler) = {
+ var handler: ExceptionHandler = e.dup
+ handler.covered = handler.covered.map(inlinedBlock)
+ handler.setStartBlock(inlinedBlock(e.startBlock))
+ handler
+ }
+
+ var inlinedLocals: Map[Local, Local] = new HashMap
+
+ /** alfa-rename `l' in caller's context. */
+ def dupLocal(l: Local): Local = {
+ val sym = caller.symbol.newVariable(l.sym.pos, freshName(l.sym.name.toString()));
+// sym.setInfo(l.sym.tpe);
+ val dupped = new Local(sym, l.kind, false)
+ inlinedLocals(l) = dupped
+ dupped
+ }
+
+ def addLocals(m: IMethod, ls: List[Local]) =
+ m.locals = m.locals ::: ls;
+ def addLocal(m: IMethod, l: Local): Unit =
+ addLocals(m, List(l));
+
+ val afterBlock = newBlock;
+
+ /** Map from nw.init instructions to their matching NEW call */
+ val pending: collection.mutable.Map[Instruction, NEW] = new collection.mutable.HashMap
+
+ /** Map an instruction from the callee to one suitable for the caller. */
+ def map(i: Instruction): Instruction = {
+ val newInstr = i match {
+ case THIS(clasz) =>
+ LOAD_LOCAL(inlinedThis);
+
+ case STORE_THIS(_) =>
+ STORE_LOCAL(inlinedThis)
+
+ case JUMP(whereto) =>
+ JUMP(inlinedBlock(whereto));
+
+ case CJUMP(success, failure, cond, kind) =>
+ CJUMP(inlinedBlock(success), inlinedBlock(failure), cond, kind);
+
+ case CZJUMP(success, failure, cond, kind) =>
+ CZJUMP(inlinedBlock(success), inlinedBlock(failure), cond, kind);
+
+ case SWITCH(tags, labels) =>
+ SWITCH(tags, labels map inlinedBlock);
+
+ case RETURN(kind) =>
+ JUMP(afterBlock);
+
+ case LOAD_LOCAL(l) if inlinedLocals.isDefinedAt(l) =>
+ LOAD_LOCAL(inlinedLocals(l))
+
+ case STORE_LOCAL(l) if inlinedLocals.isDefinedAt(l) =>
+ STORE_LOCAL(inlinedLocals(l))
+
+ case LOAD_LOCAL(l) =>
+ assert(caller.locals contains l,
+ "Could not find local '" + l + "' in locals, nor in inlinedLocals: " + inlinedLocals)
+ i
+ case STORE_LOCAL(l) =>
+ assert(caller.locals contains l,
+ "Could not find local '" + l + "' in locals, nor in inlinedLocals: " + inlinedLocals)
+ i
+
+ case SCOPE_ENTER(l) if inlinedLocals.isDefinedAt(l) =>
+ SCOPE_ENTER(inlinedLocals(l))
+
+ case SCOPE_EXIT(l) if inlinedLocals.isDefinedAt(l) =>
+ SCOPE_EXIT(inlinedLocals(l))
+
+ case nw @ NEW(sym) =>
+ val r = NEW(sym)
+ pending(nw.init) = r
+ r
+
+ case CALL_METHOD(meth, Static(true)) if (meth.isClassConstructor) =>
+ CALL_METHOD(meth, Static(true))
+
+ case _ => i.clone
+ }
+ // check any pending NEW's
+ if (pending isDefinedAt i) {
+ pending(i).init = newInstr.asInstanceOf[CALL_METHOD]
+ pending -= i
+ }
+ newInstr
+ }
+
+ addLocals(caller, callee.locals map dupLocal);
+ addLocal(caller, inlinedThis);
+ if (retVal ne null)
+ addLocal(caller, retVal);
+ callee.code.blocks.foreach { b =>
+ inlinedBlock += (b -> newBlock)
+ inlinedBlock(b).varsInScope ++= (b.varsInScope map inlinedLocals)
+ }
+
+ // analyse callee
+ a.run
+
+ // re-emit the instructions before the call
+ block.open
+ block.clear
+ instrBefore.foreach(i => block.emit(i, i.pos))
+
+ // store the arguments into special locals
+ callee.params.reverse.foreach { param =>
+ block.emit(STORE_LOCAL(inlinedLocals(param)), targetPos);
+ }
+ block.emit(STORE_LOCAL(inlinedThis), targetPos);
+
+ // jump to the start block of the callee
+ block.emit(JUMP(inlinedBlock(callee.code.startBlock)), targetPos);
+ block.close
+
+ // duplicate the other blocks in the callee
+ linearizer.linearize(callee).foreach { bb =>
+ var info = a.in(bb);
+ for (i <- bb) {
+ i match {
+ case RETURN(kind) => kind match {
+ case UNIT =>
+ if (!info.stack.types.isEmpty) {
+ info.stack.types foreach { t => inlinedBlock(bb).emit(DROP(t), targetPos); }
+ }
+ case _ =>
+ if (info.stack.length > 1) {
+ inlinedBlock(bb).emit(STORE_LOCAL(retVal), targetPos);
+ info.stack.types.drop(1) foreach { t => inlinedBlock(bb).emit(DROP(t), targetPos); }
+ inlinedBlock(bb).emit(LOAD_LOCAL(retVal), targetPos);
+ }
+ }
+ case _ => ();
+ }
+ inlinedBlock(bb).emit(map(i), targetPos);
+ info = a.interpret(info, i);
+ }
+ inlinedBlock(bb).close
+ }
+
+ instrAfter.foreach(i => afterBlock.emit(i, i.pos));
+ afterBlock.close;
+ count += 1
+
+ // add exception handlers of the callee
+ caller.exh = (callee.exh map translateExh) ::: caller.exh;
+ assert(pending.isEmpty, "Pending NEW elements: " + pending)
+ }
/** The current iclass */
private var currentIClazz: IClass = _
- private def warn(pos: Position, msg: String) = currentIClazz.cunit.warning(pos, msg)
-
- def analyzeClass(cls: IClass): Unit =
- if (settings.inline.value) {
- if (settings.debug.value)
- log("Analyzing " + cls)
- this.currentIClazz = cls
- cls.methods filterNot (_.symbol.isConstructor) foreach analyzeMethod
- }
+ def analyzeClass(cls: IClass): Unit = if (settings.inline.value) {
+ if (settings.debug.value)
+ log("Analyzing " + cls);
+ this.currentIClazz = cls
+ cls.methods filterNot (_.symbol.isConstructor) foreach analyzeMethod
+ }
- val tfa = new analysis.MethodTFA()
- tfa.stat = settings.Ystatistics.value
+ val tfa = new analysis.MethodTFA();
+ tfa.stat = settings.Ystatistics.value
// how many times have we already inlined this method here?
- private val inlinedMethodCount: mutable.Map[Symbol, Int] = new HashMap[Symbol, Int] {
+ private val inlinedMethods: Map[Symbol, Int] = new HashMap[Symbol, Int] {
override def default(k: Symbol) = 0
}
@@ -121,525 +299,272 @@ abstract class Inliners extends SubComponent {
var retry = false
var count = 0
fresh.clear
- inlinedMethodCount.clear
- val caller = new IMethodInfo(m)
- var info: tfa.lattice.Elem = null
-
- def analyzeInc(msym: Symbol, i: Instruction, bb: BasicBlock) = {
- def paramTypes = msym.info.paramTypes
- val receiver = (info.stack.types drop paramTypes.length).head match {
- case REFERENCE(s) => s
- case _ => NoSymbol
- }
- val concreteMethod = lookupImplFor(msym, receiver)
-
- def warnNoInline(reason: String) = {
- if (hasInline(msym) && !caller.isBridge)
- warn(i.pos, "Could not inline required method %s because %s.".format(msym.originalName.decode, reason))
- }
-
- if (shouldLoadImplFor(concreteMethod, receiver))
- icodes.icode(receiver, true)
-
- def isAvailable = icodes available receiver
- def isCandidate = isClosureClass(receiver) || concreteMethod.isEffectivelyFinal || receiver.isFinal
- def isApply = concreteMethod.name == nme.apply
- def isCountable = !(isClosureClass(receiver) && isApply) // only count non-closures
-
- if (settings.debug.value)
- log("Treating " + i
- + "\n\treceiver: " + receiver
- + "\n\ticodes.available: " + isAvailable
- + "\n\tconcreteMethod.isEffectivelyFinal: " + concreteMethod.isEffectivelyFinal)
-
- if (isAvailable && isCandidate) {
- lookupIMethod(concreteMethod, receiver) match {
- case Some(callee) =>
- val inc = new IMethodInfo(callee)
- val pair = new CallerCalleeInfo(caller, inc)
-
- if (pair isStampedForInlining info.stack) {
- retry = true
- if (isCountable)
- count += 1
-
- pair.doInline(bb, i)
- inlinedMethodCount(inc.sym) += 1
-
- /* Remove this method from the cache, as the calls-private relation
- * might have changed after the inlining.
- */
- usesNonPublics -= m
- }
- else {
- if (settings.debug.value)
- pair logFailure info.stack
-
- warnNoInline(pair failureReason info.stack)
- }
- case None =>
- warnNoInline("bytecode was not available")
- if (settings.debug.value)
- log("could not find icode\n\treceiver: " + receiver + "\n\tmethod: " + concreteMethod)
- }
- }
- else warnNoInline(
- if (!isAvailable) "bytecode was not available"
- else "it is not final"
- )
- }
+ inlinedMethods.clear
do {
- retry = false
- if (caller.inline) {
- log("Not inlining into " + caller.sym.originalName.decode + " because it is marked @inline.")
+ retry = false;
+ if (m.symbol.hasAnnotation(ScalaInlineAttr)) {
+ log("Not inlining into " + m.symbol.originalName.decode + " because it is marked @inline.")
}
- else if (caller.hasCode) {
- log("Analyzing " + m + " count " + count + " with " + caller.length + " blocks")
- tfa init m
+ else if (m.code ne null) {
+ log("Analyzing " + m + " count " + count + " with " + m.code.blocks.length + " blocks");
+ tfa.init(m)
tfa.run
- caller.linearized foreach { bb =>
- info = tfa in bb
-
+ for (bb <- linearizer.linearize(m)) {
+ var info = tfa.in(bb);
for (i <- bb) {
if (!retry) {
i match {
- case CALL_METHOD(msym, Dynamic) => analyzeInc(msym, i, bb)
- case _ => ()
+ case CALL_METHOD(msym, Dynamic) =>
+ def warnNoInline(reason: String) = {
+ if (msym.hasAnnotation(ScalaInlineAttr) && !m.symbol.hasFlag(Flags.BRIDGE))
+ currentIClazz.cunit.warning(i.pos,
+ "Could not inline required method %s because %s.".format(msym.originalName.decode, reason))
+ }
+
+ val receiver = info.stack.types.drop(msym.info.paramTypes.length).head match {
+ case REFERENCE(s) => s;
+ case _ => NoSymbol;
+ }
+ var concreteMethod = msym;
+ if (receiver != msym.owner && receiver != NoSymbol) {
+ if (settings.debug.value)
+ log("" + i + " has actual receiver: " + receiver);
+ if (!concreteMethod.isEffectivelyFinal && receiver.isFinal) {
+ concreteMethod = lookupImpl(concreteMethod, receiver)
+ if (settings.debug.value)
+ log("\tlooked up method: " + concreteMethod.fullName)
+ }
+ }
+
+ if (shouldLoad(receiver, concreteMethod)) {
+ icodes.icode(receiver, true)
+ }
+ if (settings.debug.value)
+ log("Treating " + i
+ + "\n\treceiver: " + receiver
+ + "\n\ticodes.available: " + icodes.available(receiver)
+ + "\n\tconcreteMethod.isEffectivelyFinal: " + concreteMethod.isFinal);
+
+ if ( icodes.available(receiver)
+ && (isClosureClass(receiver)
+ || concreteMethod.isEffectivelyFinal
+ || receiver.isFinal)) {
+ icodes.icode(receiver).get.lookupMethod(concreteMethod) match {
+ case Some(inc) =>
+ if (inc.symbol != m.symbol
+ && (inc.code ne null)
+ && shouldInline(m, inc)
+ && isSafeToInline(m, inc, info.stack)) {
+ retry = true;
+ if (!(isClosureClass(receiver) && (concreteMethod.name == nme.apply))) // only count non-closures
+ count = count + 1;
+ inline(m, bb, i, inc);
+ inlinedMethods(inc.symbol) = inlinedMethods(inc.symbol) + 1
+
+ /* Remove this method from the cache, as the calls-private relation
+ might have changed after the inlining. */
+ usesNonPublics -= m;
+ } else {
+ if (settings.debug.value)
+ log("inline failed for " + inc + " because:\n\tinc.symbol != m.symbol: " + (inc.symbol != m.symbol)
+ + "\n\t(inlinedMethods(inc.symbol) < 2): " + (inlinedMethods(inc.symbol) < 2)
+ + "\n\tinc.code ne null: " + (inc.code ne null) + (if (inc.code ne null)
+ "\n\tisSafeToInline(m, inc, info.stack): " + isSafeToInline(m, inc, info.stack)
+ + "\n\tshouldInline heuristics: " + shouldInline(m, inc) else ""));
+ warnNoInline(
+ if (inc.code eq null) "bytecode was unavailable"
+ else if (!isSafeToInline(m, inc, info.stack)) "it is unsafe (target may reference private fields)"
+ else "a bug (run with -Ylog:inline -Ydebug for more information)")
+ }
+ case None =>
+ warnNoInline("bytecode was not available")
+ if (settings.debug.value)
+ log("could not find icode\n\treceiver: " + receiver + "\n\tmethod: " + concreteMethod)
+ }
+ } else
+ warnNoInline(if (icodes.available(receiver)) "it is not final" else "bytecode was not available")
+
+ case _ => ();
}
info = tfa.interpret(info, i)
- }
- }
- }
-
- if (tfa.stat)
- log(m.symbol.fullName + " iterations: " + tfa.iterations + " (size: " + caller.length + ")")
- }
- }
- while (retry && count < MAX_INLINE_RETRY)
-
+ }}}
+ if (tfa.stat) log(m.symbol.fullName + " iterations: " + tfa.iterations + " (size: " + m.code.blocks.length + ")")
+ }} while (retry && count < 15)
m.normalize
}
- private def isMonadicMethod(sym: Symbol) = sym.name match {
- case nme.foreach | nme.filter | nme.map | nme.flatMap => true
- case _ => false
- }
- private def isHigherOrderMethod(sym: Symbol) =
- sym.isMethod && atPhase(currentRun.erasurePhase.prev)(sym.info.paramTypes exists isFunctionType)
- /** Should method 'sym' being called in 'receiver' be loaded from disk? */
- def shouldLoadImplFor(sym: Symbol, receiver: Symbol): Boolean = {
- if (settings.debug.value)
- log("shouldLoadImplFor: " + receiver + "." + sym)
+ def isMonadMethod(method: Symbol): Boolean =
+ (method.name == nme.foreach
+ || method.name == nme.filter
+ || method.name == nme.map
+ || method.name == nme.flatMap)
- def alwaysLoad = (receiver.enclosingPackage == RuntimePackage) || (receiver == PredefModule.moduleClass)
- def loadCondition = sym.isEffectivelyFinal && isMonadicMethod(sym) && isHigherOrderMethod(sym)
-
- hasInline(sym) || alwaysLoad || loadCondition
+ /** Should the given method be loaded from disk? */
+ def shouldLoad(receiver: Symbol, method: Symbol): Boolean = {
+ if (settings.debug.value) log("shouldLoad: " + receiver + "." + method)
+ ((method.isEffectivelyFinal && isMonadMethod(method) && isHigherOrderMethod(method))
+ || (receiver.enclosingPackage == definitions.ScalaRunTimeModule.enclosingPackage)
+ || (receiver == definitions.PredefModule.moduleClass)
+ || (method.hasAnnotation(ScalaInlineAttr)))
}
- /** Look up implementation of method 'sym in 'clazz'.
- */
- def lookupImplFor(sym: Symbol, clazz: Symbol): Symbol = {
- // TODO: verify that clazz.superClass is equivalent here to clazz.tpe.parents(0).typeSymbol (.tpe vs .info)
- def needsLookup = (clazz != NoSymbol) && (clazz != sym.owner) && !sym.isEffectivelyFinal && clazz.isFinal
-
- def lookup(clazz: Symbol): Symbol = {
- // println("\t\tlooking up " + meth + " in " + clazz.fullName + " meth.owner = " + meth.owner)
- if (sym.owner == clazz || isBottomType(clazz)) sym
- else sym.overridingSymbol(clazz) match {
- case NoSymbol => if (sym.owner.isTrait) sym else lookup(clazz.superClass)
- case imp => imp
- }
- }
- if (needsLookup) {
- val concreteMethod = lookup(clazz)
- if (settings.debug.value)
- log("\tlooked up method: " + concreteMethod.fullName)
+ /** Cache whether a method calls private members. */
+ val usesNonPublics: Map[IMethod, NonPublicRefs.Value] = new HashMap;
- concreteMethod
- }
- else sym
- }
-
- class IMethodInfo(val m: IMethod) {
- val sym = m.symbol
- val name = sym.name
- def owner = sym.owner
- def paramTypes = sym.info.paramTypes
- def minimumStack = paramTypes.length + 1
-
- def inline = hasInline(sym)
- def noinline = hasNoInline(sym)
- def numInlined = inlinedMethodCount(sym)
-
- def isBridge = sym.isBridge
- def isInClosure = isClosureClass(owner)
- def isHigherOrder = isHigherOrderMethod(sym)
- def isMonadic = isMonadicMethod(sym)
-
- def handlers = m.exh
- def blocks = m.code.blocks
- def locals = m.locals
- def length = blocks.length
- def openBlocks = blocks filterNot (_.closed)
- def instructions = blocks.flatten
- def linearized = linearizer linearize m
-
- def isSmall = length <= SMALL_METHOD_SIZE
- def isLarge = length > MAX_INLINE_SIZE
- def isRecursive = m.recursive
- def hasCode = m.code != null
- def hasSourceFile = m.sourceFile != null
- def hasHandlers = handlers.nonEmpty
-
- def addLocals(ls: List[Local]) = m.locals ++= ls
- def addLocal(l: Local) = addLocals(List(l))
- def addHandlers(exhs: List[ExceptionHandler]) = m.exh = exhs ::: m.exh
+ object NonPublicRefs extends Enumeration {
+ val Public, Protected, Private = Value
}
- class CallerCalleeInfo(val caller: IMethodInfo, val inc: IMethodInfo) {
- def isLargeSum = caller.length + inc.length - 1 > SMALL_METHOD_SIZE
-
-
- /** Inline 'inc' into 'caller' at the given block and instruction.
- * The instruction must be a CALL_METHOD.
- */
- def doInline(block: BasicBlock, instr: Instruction) {
- val targetPos = instr.pos
- log("Inlining " + inc.m + " in " + caller.m + " at pos: " + posToStr(targetPos))
-
- def blockEmit(i: Instruction) = block.emit(i, targetPos)
- def newLocal(baseName: String, kind: TypeKind) =
- new Local(caller.sym.newVariable(targetPos, freshName(baseName)), kind, false)
-
- val a = new analysis.MethodTFA(inc.m)
-
- /* The exception handlers that are active at the current block. */
- val activeHandlers = caller.handlers filter (_ covered block)
-
- /* Map 'original' blocks to the ones inlined in the caller. */
- val inlinedBlock: mutable.Map[BasicBlock, BasicBlock] = new HashMap
-
- val varsInScope: mutable.Set[Local] = HashSet() ++= block.varsInScope
-
- /** Side effects varsInScope when it sees SCOPE_ENTERs. */
- def instrBeforeFilter(i: Instruction): Boolean = {
- i match { case SCOPE_ENTER(l) => varsInScope += l ; case _ => () }
- i ne instr
- }
- val instrBefore = block.toList takeWhile instrBeforeFilter
- val instrAfter = block.toList drop (instrBefore.length + 1)
-
- assert(!instrAfter.isEmpty, "CALL_METHOD cannot be the last instruction in block!")
-
- // store the '$this' into the special local
- val inlinedThis = newLocal("$inlThis", REFERENCE(ObjectClass))
-
- /** buffer for the returned value */
- val retVal = inc.m.returnType match {
- case UNIT => null
- case x => newLocal("$retVal", x)
- }
-
- val inlinedLocals: mutable.Map[Local, Local] = new HashMap
-
- /** Add a new block in the current context. */
- def newBlock() = {
- val b = caller.m.code.newBlock
- activeHandlers foreach (_ addCoveredBlock b)
- if (retVal ne null) b.varsInScope += retVal
- b.varsInScope += inlinedThis
- b.varsInScope ++= varsInScope
- b
- }
-
- def translateExh(e: ExceptionHandler) = {
- val handler: ExceptionHandler = e.dup
- handler.covered = handler.covered map inlinedBlock
- handler setStartBlock inlinedBlock(e.startBlock)
- handler
- }
-
- /** alfa-rename `l' in caller's context. */
- def dupLocal(l: Local): Local = {
- val sym = caller.sym.newVariable(l.sym.pos, freshName(l.sym.name.toString()))
- // sym.setInfo(l.sym.tpe)
- val dupped = new Local(sym, l.kind, false)
- inlinedLocals(l) = dupped
- dupped
- }
-
- val afterBlock = newBlock()
-
- /** Map from nw.init instructions to their matching NEW call */
- val pending: mutable.Map[Instruction, NEW] = new HashMap
-
- /** Map an instruction from the callee to one suitable for the caller. */
- def map(i: Instruction): Instruction = {
- def assertLocal(l: Local) = {
- assert(caller.locals contains l, "Could not find local '" + l + "' in locals, nor in inlinedLocals: " + inlinedLocals)
- i
- }
- def isInlined(l: Local) = inlinedLocals isDefinedAt l
-
- val newInstr = i match {
- case THIS(clasz) =>
- LOAD_LOCAL(inlinedThis)
-
- case STORE_THIS(_) =>
- STORE_LOCAL(inlinedThis)
-
- case JUMP(whereto) =>
- JUMP(inlinedBlock(whereto))
-
- case CJUMP(success, failure, cond, kind) =>
- CJUMP(inlinedBlock(success), inlinedBlock(failure), cond, kind)
-
- case CZJUMP(success, failure, cond, kind) =>
- CZJUMP(inlinedBlock(success), inlinedBlock(failure), cond, kind)
-
- case SWITCH(tags, labels) =>
- SWITCH(tags, labels map inlinedBlock)
-
- case RETURN(kind) =>
- JUMP(afterBlock)
-
- case LOAD_LOCAL(l) if isInlined(l) =>
- LOAD_LOCAL(inlinedLocals(l))
-
- case STORE_LOCAL(l) if isInlined(l) =>
- STORE_LOCAL(inlinedLocals(l))
-
- case LOAD_LOCAL(l) => assertLocal(l)
- case STORE_LOCAL(l) => assertLocal(l)
-
- case SCOPE_ENTER(l) if isInlined(l) =>
- SCOPE_ENTER(inlinedLocals(l))
-
- case SCOPE_EXIT(l) if isInlined(l) =>
- SCOPE_EXIT(inlinedLocals(l))
-
- case nw @ NEW(sym) =>
- val r = NEW(sym)
- pending(nw.init) = r
- r
-
- case CALL_METHOD(meth, Static(true)) if meth.isClassConstructor =>
- CALL_METHOD(meth, Static(true))
-
- case _ => i.clone()
- }
- // check any pending NEW's
- pending remove i foreach (_.init = newInstr.asInstanceOf[CALL_METHOD])
- newInstr
- }
-
- caller addLocals (inc.locals map dupLocal)
- caller addLocal inlinedThis
-
- if (retVal ne null)
- caller addLocal retVal
-
- inc.blocks foreach { b =>
- inlinedBlock += (b -> newBlock())
- inlinedBlock(b).varsInScope ++= (b.varsInScope map inlinedLocals)
- }
-
- // analyse callee
- a.run
-
- // re-emit the instructions before the call
- block.open
- block.clear
- block emit instrBefore
-
- // store the arguments into special locals
- inc.m.params.reverse foreach (p => blockEmit(STORE_LOCAL(inlinedLocals(p))))
- blockEmit(STORE_LOCAL(inlinedThis))
-
- // jump to the start block of the callee
- blockEmit(JUMP(inlinedBlock(inc.m.code.startBlock)))
- block.close
-
- // duplicate the other blocks in the callee
- linearizer linearize inc.m foreach { bb =>
- var info = a in bb
- def emitInlined(i: Instruction) = inlinedBlock(bb).emit(i, targetPos)
- def emitDrops(toDrop: Int) = info.stack.types drop toDrop foreach (t => emitInlined(DROP(t)))
-
- for (i <- bb) {
- i match {
- case RETURN(UNIT) => emitDrops(0)
- case RETURN(kind) =>
- if (info.stack.length > 1) {
- emitInlined(STORE_LOCAL(retVal))
- emitDrops(1)
- emitInlined(LOAD_LOCAL(retVal))
- }
- case _ => ()
- }
- emitInlined(map(i))
- info = a.interpret(info, i)
- }
- inlinedBlock(bb).close
- }
-
- afterBlock emit instrAfter
- afterBlock.close
- count += 1
-
- // add exception handlers of the callee
- caller addHandlers (inc.handlers map translateExh)
- assert(pending.isEmpty, "Pending NEW elements: " + pending)
- }
-
- def isStampedForInlining(stack: TypeStack) =
- !sameSymbols && inc.hasCode && shouldInline && isSafeToInline(stack)
-
- def logFailure(stack: TypeStack) = log(
- """|inline failed for %s:
- | pair.sameSymbols: %s
- | inc.numInlined < 2: %s
- | inc.hasCode: %s
- | isSafeToInline: %s
- | shouldInline: %s
- """.stripMargin.format(
- inc.m, sameSymbols, inc.numInlined < 2,
- inc.hasCode, isSafeToInline(stack), shouldInline
- )
- )
-
- def failureReason(stack: TypeStack) =
- if (!inc.hasCode) "bytecode was unavailable"
- else if (!isSafeToInline(stack)) "it is unsafe (target may reference private fields)"
- else "of a bug (run with -Ylog:inline -Ydebug for more information)"
-
- def canAccess(level: NonPublicRefs.Value) = level match {
- case Private => caller.owner == inc.owner
- case Protected => caller.owner.tpe <:< inc.owner.tpe
- case Public => true
- }
- private def sameSymbols = caller.sym == inc.sym
-
- /** A method is safe to inline when:
- * - it does not contain calls to private methods when
- * called from another class
- * - it is not inlined into a position with non-empty stack,
- * while having a top-level finalizer (see liftedTry problem)
- * - it is not recursive
- * Note:
- * - synthetic private members are made public in this pass.
- */
- def isSafeToInline(stack: TypeStack): Boolean = {
- def makePublic(f: Symbol): Boolean =
- inc.hasSourceFile && (f.isSynthetic || f.isParamAccessor) && {
- if (settings.debug.value)
- log("Making not-private symbol out of synthetic: " + f)
-
- f setFlag Flags.notPRIVATE
- true
- }
-
- if (!inc.hasCode || inc.isRecursive)
- return false
-
- val accessNeeded = usesNonPublics.getOrElseUpdate(inc.m, {
+ def isRecursive(m: IMethod): Boolean = m.recursive
+
+ /** A method is safe to inline when:
+ * - it does not contain calls to private methods when
+ * called from another class
+ * - it is not inlined into a position with non-empty stack,
+ * while having a top-level finalizer (see liftedTry problem)
+ * - it is not recursive
+ * Note:
+ * - synthetic private members are made public in this pass.
+ */
+ def isSafeToInline(caller: IMethod, callee: IMethod, stack: TypeStack): Boolean = {
+ def makePublic(f: Symbol): Boolean =
+ if ((callee.sourceFile ne null)
+ && (f.hasFlag(Flags.SYNTHETIC | Flags.PARAMACCESSOR))) {
+ if (settings.debug.value) log("Making not-private symbol out of synthetic: " + f)
+ f.setFlag(Flags.notPRIVATE)
+ true
+ } else false
+
+ import NonPublicRefs._
+ var callsNonPublic = Public
+
+ if (callee.recursive) return false
+
+ usesNonPublics.get(callee) match {
+ case Some(b) =>
+ callsNonPublic = b
+ case None =>
// Avoiding crashing the compiler if there are open blocks.
- inc.openBlocks foreach { b =>
- warn(inc.sym.pos,
- "Encountered open block in isSafeToInline: this indicates a bug in the optimizer!\n" +
- " caller = " + caller.m + ", callee = " + inc.m
- )
+ callee.code.blocks filterNot (_.closed) foreach { b =>
+ currentIClazz.cunit.warning(callee.symbol.pos,
+ "Encountered open block in isSafeToInline: this indicates a bug in the optimizer!\n" +
+ " caller = " + caller + ", callee = " + callee
+ )
return false
}
- def check(sym: Symbol, cond: Boolean) =
- if (cond) Private
- else if (sym.isProtected) Protected
- else Public
-
- def checkField(f: Symbol) = check(f, f.isPrivate && !makePublic(f))
- def checkSuper(m: Symbol) = check(m, m.isPrivate || !m.isClassConstructor)
- def checkMethod(m: Symbol) = check(m, m.isPrivate)
-
- def getAccess(i: Instruction) = i match {
- case CALL_METHOD(m, SuperCall(_)) => checkSuper(m)
- case CALL_METHOD(m, _) => checkMethod(m)
- case LOAD_FIELD(f, _) => checkField(f)
- case STORE_FIELD(f, _) => checkField(f)
- case _ => Public
- }
- def iterate(): NonPublicRefs.Value = {
- var seenProtected = false
- inc.instructions foreach { i =>
- getAccess(i) match {
- case Private => return Private
- case Protected => seenProtected = true
- case _ => ()
+ breakable {
+ for (b <- callee.code.blocks; i <- b)
+ i match {
+ case CALL_METHOD(m, style) =>
+ if (m.hasFlag(Flags.PRIVATE) ||
+ (style.isSuper && !m.isClassConstructor)) {
+ callsNonPublic = Private
+ break
+ }
+ if (m.hasFlag(Flags.PROTECTED)) callsNonPublic = Protected
+
+ case LOAD_FIELD(f, _) =>
+ if (f.hasFlag(Flags.PRIVATE) && !makePublic(f)) {
+ callsNonPublic = Private;
+ break
+ }
+ if (f.hasFlag(Flags.PROTECTED)) callsNonPublic = Protected
+
+ case STORE_FIELD(f, _) =>
+ if (f.hasFlag(Flags.PRIVATE) && !makePublic(f)) {
+ callsNonPublic = Private;
+ break
+ }
+ if (f.hasFlag(Flags.PROTECTED)) callsNonPublic = Protected
+
+ case _ => ()
}
- }
- if (seenProtected) Protected else Public
}
- iterate()
- })
+ usesNonPublics += (callee -> callsNonPublic)
+ }
- def isIllegalStack = (stack.length > inc.minimumStack && inc.hasHandlers) || {
- if (settings.debug.value)
- log("method " + inc.sym + " is used on a non-empty stack with finalizer.")
+ if ((callsNonPublic == Private && (caller.symbol.owner != callee.symbol.owner))
+ || callsNonPublic == Protected && !(caller.symbol.owner.tpe <:< callee.symbol.owner.tpe))
+ return false;
- false
- }
+ if (stack.length > (1 + callee.symbol.info.paramTypes.length) &&
+ callee.exh != Nil) {
+ if (settings.debug.value) log("method " + callee.symbol + " is used on a non-empty stack with finalizer.");
+ false
+ } else
+ true
+ }
- canAccess(accessNeeded) && !isIllegalStack
+ private def lookupImpl(meth: Symbol, clazz: Symbol): Symbol = {
+ //println("\t\tlooking up " + meth + " in " + clazz.fullName + " meth.owner = " + meth.owner)
+ if (meth.owner == clazz
+ || clazz == definitions.NullClass
+ || clazz == definitions.NothingClass) meth
+ else {
+ val implementingMethod = meth.overridingSymbol(clazz)
+ if (implementingMethod != NoSymbol)
+ implementingMethod
+ else if (meth.owner.isTrait)
+ meth
+ else
+ lookupImpl(meth, clazz.tpe.parents(0).typeSymbol)
}
-
- /** Decide whether to inline or not. Heuristics:
- * - it's bad to make the caller larger (> SMALL_METHOD_SIZE) if it was small
- * - it's bad to inline large methods
- * - it's good to inline higher order functions
- * - it's good to inline closures functions.
- * - it's bad (useless) to inline inside bridge methods
- */
- private def neverInline = caller.isBridge || !inc.hasCode || inc.noinline
- private def alwaysInline = inc.inline
-
- def shouldInline: Boolean = !neverInline && (alwaysInline || {
- if (settings.debug.value)
- log("shouldInline: " + caller.m + " with " + inc.m)
-
- var score = 0
- if (inc.isSmall)
- score += 1
- if (caller.isSmall && isLargeSum) {
- score -= 1
- if (settings.debug.value)
- log("shouldInline: score decreased to " + score + " because small " + caller + " would become large")
- }
- if (inc.isLarge)
- score -= 1
-
- if (inc.isMonadic)
- score += 2
- else if (inc.isHigherOrder)
- score += 1
- if (inc.isInClosure)
- score += 2
- if (inc.numInlined > 2)
- score -= 2
-
- if (settings.debug.value)
- log("shouldInline(" + inc.m + ") score: " + score)
-
- score > 0
- })
}
- def lookupIMethod(meth: Symbol, receiver: Symbol): Option[IMethod] = {
- def tryParent(sym: Symbol) = icodes icode sym flatMap (_ lookupMethod meth)
-
- receiver.info.baseClasses.iterator map tryParent find (_.isDefined) getOrElse None
- }
+ /** small method size (in blocks) */
+ val SMALL_METHOD_SIZE = 1
+
+ /** Decide whether to inline or not. Heuristics:
+ * - it's bad to make the caller larger (> SMALL_METHOD_SIZE)
+ * if it was small
+ * - it's bad to inline large methods
+ * - it's good to inline higher order functions
+ * - it's good to inline closures functions.
+ * - it's bad (useless) to inline inside bridge methods
+ */
+ def shouldInline(caller: IMethod, callee: IMethod): Boolean = {
+ if (caller.symbol.hasFlag(Flags.BRIDGE)) return false;
+ if (callee.symbol.hasAnnotation(ScalaNoInlineAttr)) return false
+ if (callee.symbol.hasAnnotation(ScalaInlineAttr)) return true
+ if (settings.debug.value)
+ log("shouldInline: " + caller + " with " + callee)
+ var score = 0
+ if (callee.code.blocks.length <= SMALL_METHOD_SIZE) score = score + 1
+ if (caller.code.blocks.length <= SMALL_METHOD_SIZE
+ && ((caller.code.blocks.length + callee.code.blocks.length - 1) > SMALL_METHOD_SIZE)) {
+ score -= 1
+ if (settings.debug.value)
+ log("shouldInline: score decreased to " + score + " because small " + caller + " would become large")
+ }
+ if (callee.code.blocks.length > MAX_INLINE_SIZE)
+ score -= 1
+
+ if (isMonadMethod(callee.symbol))
+ score += 2
+ else if (isHigherOrderMethod(callee.symbol))
+ score += 1
+ if (isClosureClass(callee.symbol.owner))
+ score += 2
+
+ if (inlinedMethods(callee.symbol) > 2) score -= 2
+ if (settings.debug.value) log("shouldInline(" + callee + ") score: " + score)
+ score > 0
+ }
} /* class Inliner */
+
+ /** Is the given class a closure? */
+ def isClosureClass(cls: Symbol): Boolean = {
+ val res = (cls.isFinal && cls.hasFlag(Flags.SYNTHETIC)
+ && !cls.isModuleClass && cls.isAnonymousFunction)
+ res
+ }
+
+ /** Does 'sym' denote a higher order method? */
+ def isHigherOrderMethod(sym: Symbol): Boolean =
+ (sym.isMethod
+ && atPhase(currentRun.erasurePhase.prev)(sym.info.paramTypes exists definitions.isFunctionType))
+
} /* class Inliners */
diff --git a/src/compiler/scala/tools/nsc/doc/DocFactory.scala b/src/compiler/scala/tools/nsc/doc/DocFactory.scala
index 066f800f79..42aab918f7 100644
--- a/src/compiler/scala/tools/nsc/doc/DocFactory.scala
+++ b/src/compiler/scala/tools/nsc/doc/DocFactory.scala
@@ -54,7 +54,7 @@ class DocFactory(val reporter: Reporter, val settings: doc.Settings) { processor
val modelFactory = (new model.ModelFactory(compiler, settings) with model.comment.CommentFactory)
val docModel = modelFactory.makeModel
println("model contains " + modelFactory.templatesCount + " documentable templates")
- (new html.HtmlFactory(docModel)).generate
+ (new html.HtmlFactory(docModel)) generate docModel
}
}
diff --git a/src/compiler/scala/tools/nsc/doc/Settings.scala b/src/compiler/scala/tools/nsc/doc/Settings.scala
index b628b3b19c..f3fe8c384b 100644
--- a/src/compiler/scala/tools/nsc/doc/Settings.scala
+++ b/src/compiler/scala/tools/nsc/doc/Settings.scala
@@ -29,8 +29,6 @@ class Settings(error: String => Unit) extends scala.tools.nsc.Settings(error) {
* If needed the sourcepath option can be used to exclude undesired initial part of the link to sources */
val docsourceurl = StringSetting ("-doc-source-url", "url", "The URL prefix where documentation will link to sources", "")
- val useStupidTypes = BooleanSetting ("-Yuse-stupid-types", "Print the types of inherited members as seen from their original definition context. Hint: you don't want to do that!")
-
// working around issue described in r18708.
suppressVTWarn.value = true
diff --git a/src/compiler/scala/tools/nsc/doc/Universe.scala b/src/compiler/scala/tools/nsc/doc/Universe.scala
index 71b4a4a4b0..666a06dc4b 100644
--- a/src/compiler/scala/tools/nsc/doc/Universe.scala
+++ b/src/compiler/scala/tools/nsc/doc/Universe.scala
@@ -5,7 +5,4 @@ package scala.tools.nsc.doc
* @author Pedro Furlanetto
* @author Gilles Dubochet
*/
-trait Universe {
- def settings: Settings
- def rootPackage: model.Package
-}
+class Universe(val settings: Settings, val rootPackage: model.Package)
diff --git a/src/compiler/scala/tools/nsc/doc/html/HtmlFactory.scala b/src/compiler/scala/tools/nsc/doc/html/HtmlFactory.scala
index 94b1f38b65..348dc4b26c 100644
--- a/src/compiler/scala/tools/nsc/doc/html/HtmlFactory.scala
+++ b/src/compiler/scala/tools/nsc/doc/html/HtmlFactory.scala
@@ -26,7 +26,7 @@ class HtmlFactory(val universe: Universe) {
/** Generates the Scaladoc site for a model into the site root. A scaladoc site is a set of HTML and related files
* that document a model extracted from a compiler run.
* @param model The model to generate in the form of a sequence of packages. */
- def generate : Unit = {
+ def generate(universe: Universe): Unit = {
def copyResource(subPath: String) {
val bytes = new Streamable.Bytes {
diff --git a/src/compiler/scala/tools/nsc/doc/html/HtmlPage.scala b/src/compiler/scala/tools/nsc/doc/html/HtmlPage.scala
index 014dee3b20..66e2ba2260 100644
--- a/src/compiler/scala/tools/nsc/doc/html/HtmlPage.scala
+++ b/src/compiler/scala/tools/nsc/doc/html/HtmlPage.scala
@@ -47,6 +47,7 @@ abstract class HtmlPage { thisPage =>
<head>
<title>{ title }</title>
<meta http-equiv="content-type" content={ "text/html; charset=" + site.encoding }/>
+ <script type="text/javascript" src={ relativeLinkTo{List("jquery.js", "lib")} }></script>
{ headers }
</head>
{ body }
@@ -200,12 +201,6 @@ abstract class HtmlPage { thisPage =>
xml.Text(string)
}
- def typesToHtml(tpess: List[model.TypeEntity], hasLinks: Boolean, sep: NodeSeq): NodeSeq = tpess match {
- case Nil => NodeSeq.Empty
- case tpe :: Nil => typeToHtml(tpe, hasLinks)
- case tpe :: tpes => typeToHtml(tpe, hasLinks) ++ sep ++ typesToHtml(tpes, hasLinks, sep)
- }
-
/** Returns the HTML code that represents the template in `tpl` as a hyperlinked name. */
def templateToHtml(tpl: TemplateEntity) = tpl match {
case dTpl: DocTemplateEntity =>
diff --git a/src/compiler/scala/tools/nsc/doc/html/page/Index.scala b/src/compiler/scala/tools/nsc/doc/html/page/Index.scala
index 8675058c24..784a92f1ff 100644
--- a/src/compiler/scala/tools/nsc/doc/html/page/Index.scala
+++ b/src/compiler/scala/tools/nsc/doc/html/page/Index.scala
@@ -26,18 +26,17 @@ class Index(universe: Universe) extends HtmlPage {
def headers =
<xml:group>
<link href={ relativeLinkTo(List("index.css", "lib")) } media="screen" type="text/css" rel="stylesheet"/>
- <script type="text/javascript" src={ relativeLinkTo{List("jquery.js", "lib")} }></script>
- <script type="text/javascript" src={ relativeLinkTo{List("scheduler.js", "lib")} }></script>
<script type="text/javascript" src={ relativeLinkTo{List("index.js", "lib")} }></script>
+ <script type="text/javascript" src={ relativeLinkTo{List("scheduler.js", "lib")} }></script>
</xml:group>
def body =
<body>
<div id="library">
- <img class='class icon' width="13" height="13" src='lib/class.png'/>
- <img class='trait icon' width="13" height="13" src='lib/trait.png'/>
- <img class='object icon' width="13" height="13" src='lib/object.png'/>
- <img class='package icon' width="13" height="13" src='lib/package.png'/>
+ <img class='class icon' src='lib/class.png'/>
+ <img class='trait icon' src='lib/trait.png'/>
+ <img class='object icon' src='lib/object.png'/>
+ <img class='package icon' src='lib/package.png'/>
</div>
<div id="browser">
<div id="filter"></div>
diff --git a/src/compiler/scala/tools/nsc/doc/html/page/Template.scala b/src/compiler/scala/tools/nsc/doc/html/page/Template.scala
index 1e35a10e1d..72cfd9c662 100644
--- a/src/compiler/scala/tools/nsc/doc/html/page/Template.scala
+++ b/src/compiler/scala/tools/nsc/doc/html/page/Template.scala
@@ -23,9 +23,8 @@ class Template(tpl: DocTemplateEntity) extends HtmlPage {
val headers =
<xml:group>
<link href={ relativeLinkTo(List("template.css", "lib")) } media="screen" type="text/css" rel="stylesheet"/>
- <script type="text/javascript" src={ relativeLinkTo{List("jquery.js", "lib")} }></script>
- <script type="text/javascript" src={ relativeLinkTo{List("tools.tooltip.js", "lib")} }></script>
<script type="text/javascript" src={ relativeLinkTo{List("template.js", "lib")} }></script>
+ <script type="text/javascript" src={ relativeLinkTo{List("tools.tooltip.js", "lib")} }></script>
</xml:group>
val valueMembers =
@@ -59,32 +58,17 @@ class Template(tpl: DocTemplateEntity) extends HtmlPage {
<div id="template">
<div id="mbrsel">
- <div id='textfilter'><span class='pre'/><input type='text' accesskey='/'/><span class='post'/></div>
- { if (tpl.linearization.isEmpty) NodeSeq.Empty else
- <div id="order">
- <span class="filtertype">Ordering</span>
- <ol><li class="alpha in">Alphabetic</li><li class="inherit out">By inheritance</li></ol>
- </div>
- }
{ if (tpl.linearization.isEmpty) NodeSeq.Empty else
<div id="ancestors">
<span class="filtertype">Inherited</span>
<ol><li class="hideall">Hide All</li><li class="showall">Show all</li></ol>
- <ol id="linearization">{ (tpl :: tpl.linearizationTemplates) map { wte => <li class="in" name={ wte.qualifiedName }>{ wte.name }</li> } }</ol>
+ <ol id="linearization">{ tpl.linearization map { wte => <li class="in" name={ wte.qualifiedName }>{ wte.name }</li> } }</ol>
</div>
}
- {
- <div id="visbl">
- <span class="filtertype">Visibility</span>
- <ol><li class="public in">Public</li><li class="all out">All</li></ol>
- </div>
- }
- {
- <div id="impl">
- <span class="filtertype">Impl.</span>
- <ol><li class="concrete in">Concrete</li><li class="abstract in">Abstract</li></ol>
- </div>
- }
+ <div id="visbl">
+ <span class="filtertype">Visibility</span>
+ <ol><li class="public in">Public</li><li class="all out">All</li></ol>
+ </div>
</div>
{ if (constructors.isEmpty) NodeSeq.Empty else
@@ -95,77 +79,28 @@ class Template(tpl: DocTemplateEntity) extends HtmlPage {
}
{ if (typeMembers.isEmpty) NodeSeq.Empty else
- <div id="types" class="types members">
+ <div id="types" class="members">
<h3>Type Members</h3>
<ol>{ typeMembers map (memberToHtml(_)) }</ol>
</div>
}
{ if (valueMembers.isEmpty) NodeSeq.Empty else
- <div id="values" class="values members">
+ <div id="values" class="members">
<h3>Value Members</h3>
<ol>{ valueMembers map (memberToHtml(_)) }</ol>
</div>
}
- {
- NodeSeq fromSeq (for ((superTpl, superType) <- tpl.linearization) yield
- <div class="parent" name={ superTpl.qualifiedName }>
- <h3>Inherited from {
- if (tpl.universe.settings.useStupidTypes.value)
- superTpl match {
- case dtpl: DocTemplateEntity =>
- val sig = signature(dtpl, false, true) \ "_"
- sig
- case tpl: TemplateEntity =>
- tpl.name
- }
- else
- typeToHtml(superType, true)
- }</h3>
- </div>
- )
- }
-
</div>
<div id="tooltip" ></div>
</body>
- def boundsToString(hi: Option[TypeEntity], lo: Option[TypeEntity]): String = {
- def bound0(bnd: Option[TypeEntity], pre: String): String = bnd match {
- case None => ""
- case Some(tpe) => pre ++ tpe.toString
- }
- bound0(hi, "<:") ++ bound0(lo, ">:")
- }
-
- def tparamsToString(tpss: List[TypeParam]): String =
- if (tpss.isEmpty) "" else {
- def tparam0(tp: TypeParam): String =
- tp.variance + tp.name + boundsToString(tp.hi, tp.lo)
- def tparams0(tpss: List[TypeParam]): String = (tpss: @unchecked) match {
- case tp :: Nil => tparam0(tp)
- case tp :: tps => tparam0(tp) ++ ", " ++ tparams0(tps)
- }
- "[" + tparams0(tpss) + "]"
- }
-
- def defParamsToString(d: MemberEntity with Def):String = {
- val namess = for( ps <- d.valueParams ) yield
- for( p <- ps ) yield p.resultType.name
- tparamsToString(d.typeParams) + namess.foldLeft("") { (s,names) => s + (names mkString("(",",",")")) }
- }
-
def memberToHtml(mbr: MemberEntity): NodeSeq = {
- val defParamsString = mbr match {
- case d:MemberEntity with Def => defParamsToString(d)
- case _ => ""
- }
- <li name={ mbr.definitionName } visbl={ if (mbr.visibility.isProtected) "prt" else "pub" }
- data-isabs={ mbr.isAbstract.toString }>
- <a id={ mbr.name +defParamsString +":"+ mbr.resultType.name}/>
+ val attributes: List[comment.Body] = Nil
+ <li name={ mbr.definitionName } visbl={ if (mbr.visibility.isProtected) "prt" else "pub" }>
{ signature(mbr, false) }
{ memberToCommentHtml(mbr, false) }
</li>
@@ -270,14 +205,6 @@ class Template(tpl: DocTemplateEntity) extends HtmlPage {
}
} ++
{ mbr match {
- case dtpl: DocTemplateEntity if (isSelf && !dtpl.linearization.isEmpty) =>
- <div class="block">
- linear super types: { typesToHtml(dtpl.linearizationTypes, hasLinks = true, sep = xml.Text(", ")) }
- </div>
- case _ => NodeSeq.Empty
- }
- } ++
- { mbr match {
case dtpl: DocTemplateEntity if (isSelf && !dtpl.subClasses.isEmpty) =>
<div class="block">
known subclasses: { templatesToHtml(dtpl.subClasses, xml.Text(", ")) }
@@ -286,14 +213,6 @@ class Template(tpl: DocTemplateEntity) extends HtmlPage {
}
} ++
{ mbr match {
- case dtpl: DocTemplateEntity if (isSelf && !dtpl.selfType.isEmpty) =>
- <div class="block">
- self type: { typeToHtml(dtpl.selfType.get, hasLinks = true) }
- </div>
- case _ => NodeSeq.Empty
- }
- } ++
- { mbr match {
case dtpl: DocTemplateEntity if (isSelf && dtpl.sourceUrl.isDefined) =>
val sourceUrl = tpl.sourceUrl.get
<div class="block">
@@ -360,7 +279,7 @@ class Template(tpl: DocTemplateEntity) extends HtmlPage {
case None => NodeSeq.Empty
case Some(tpe) => xml.Text(pre) ++ typeToHtml(tpe, hasLinks)
}
- bound0(lo, " >: ") ++ bound0(hi, " <: ")
+ bound0(hi, " <: ") ++ bound0(lo, " >: ")
}
def visibility(mbr: MemberEntity): Option[comment.Paragraph] = {
@@ -385,30 +304,28 @@ class Template(tpl: DocTemplateEntity) extends HtmlPage {
}
/** name, tparams, params, result */
- def signature(mbr: MemberEntity, isSelf: Boolean, isReduced: Boolean = false): NodeSeq = {
+ def signature(mbr: MemberEntity, isSelf: Boolean): NodeSeq = {
def inside(hasLinks: Boolean): NodeSeq =
<xml:group>
<span class="kind">{ kindToString(mbr) }</span>
<span class="symbol">
- <span class={"name" + (if (mbr.deprecation.isDefined) " deprecated" else "") }>{ if (mbr.isConstructor) tpl.name else mbr.name }</span>
- {
- def tparamsToHtml(mbr: Entity): NodeSeq = mbr match {
- case hk: HigherKinded =>
- val tpss = hk.typeParams
- if (tpss.isEmpty) NodeSeq.Empty else {
- def tparam0(tp: TypeParam): NodeSeq =
- <span name={ tp.name }>{ tp.variance + tp.name }{ tparamsToHtml(tp) }{ boundsToHtml(tp.hi, tp.lo, hasLinks)}</span>
- def tparams0(tpss: List[TypeParam]): NodeSeq = (tpss: @unchecked) match {
- case tp :: Nil => tparam0(tp)
- case tp :: tps => tparam0(tp) ++ Text(", ") ++ tparams0(tps)
- }
- <span class="tparams">[{ tparams0(tpss) }]</span>
+ <span class={"name" + (if (mbr.deprecation.isDefined) " deprecated" else "") }>{ if (mbr.isConstructor) tpl.name else mbr.name }</span>{
+ def tparamsToHtml(tpss: List[TypeParam]): NodeSeq =
+ if (tpss.isEmpty) NodeSeq.Empty else {
+ def tparam0(tp: TypeParam): NodeSeq =
+ <span name={ tp.name }>{ tp.variance + tp.name }{ boundsToHtml(tp.hi, tp.lo, hasLinks)}</span>
+ def tparams0(tpss: List[TypeParam]): NodeSeq = (tpss: @unchecked) match {
+ case tp :: Nil => tparam0(tp)
+ case tp :: tps => tparam0(tp) ++ Text(", ") ++ tparams0(tps)
}
- case _ => NodeSeq.Empty
+ <span class="tparams">[{ tparams0(tpss) }]</span>
+ }
+ mbr match {
+ case trt: Trait => tparamsToHtml(trt.typeParams)
+ case dfe: Def => tparamsToHtml(dfe.typeParams)
+ case _ => NodeSeq.Empty
}
- tparamsToHtml(mbr)
- }
- { if (isReduced) NodeSeq.Empty else {
+ }{
def paramsToHtml(vlsss: List[List[ValueParam]]): NodeSeq = {
def param0(vl: ValueParam): NodeSeq =
// notice the }{ in the next lines, they are necessary to avoid a undesired withspace in output
@@ -436,8 +353,7 @@ class Template(tpl: DocTemplateEntity) extends HtmlPage {
case dfe: Def => paramsToHtml(dfe.valueParams)
case _ => NodeSeq.Empty
}
- }}
- { if (isReduced) NodeSeq.Empty else {
+ }{
mbr match {
case tpl: DocTemplateEntity if (!tpl.isPackage) =>
tpl.parentType match {
@@ -455,7 +371,7 @@ class Template(tpl: DocTemplateEntity) extends HtmlPage {
<span class="result"> = { typeToHtml(alt.alias, hasLinks) }</span>
case _ => NodeSeq.Empty
}
- }}
+ }
</span>
</xml:group>
mbr match {
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_left.png b/src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_left.png
index 4127dbf3c2..f4cca45dc0 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_left.png
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_left.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_left.psd b/src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_left.psd
index 4d740f3b17..9fb3991b14 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_left.psd
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_left.psd
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_right.png b/src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_right.png
index 942736e44d..1fda869beb 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_right.png
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_right.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_right.psd b/src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_right.psd
index 63a1ae8349..860833d2b5 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_right.psd
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_right.psd
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/index.css b/src/compiler/scala/tools/nsc/doc/html/resource/lib/index.css
index 0d30662da6..fc3f6d4c29 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/index.css
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/index.css
@@ -47,39 +47,30 @@ h1 {
#textfilter {
position: relative;
display: block;
- height: 20px;
- margin-bottom: 5px;
}
-#textfilter > .pre {
+#textfilter:before {
display: block;
- position: absolute;
- top: 0;
- left: 0;
- height: 20px;
- width: 20px;
- background: url("filter_box_left.png");
+ content: url("filter_box_left.png");
}
-#textfilter > .post {
+#textfilter:after {
display: block;
- position: absolute;
- top: 0;
- right: 0;
- height: 20px;
- width: 20px;
- background: url("filter_box_right.png");
+ position: absolute;
+ top: 0;
+ right: 0;
+ content: url("filter_box_right.png");
}
#textfilter input {
display: block;
- position: absolute;
- top: 0;
- right: 20px;
- left: 20px;
- height: 16px;
- width: 246px;
- padding: 2px;
+ position: absolute;
+ top: 0;
+ left: 32px;
+ right: 16px;
+ height: 22px;
+ width: 232px;
+ padding: 5px;
font-weight: bold;
color: #993300;
background-color: white;
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/index.js b/src/compiler/scala/tools/nsc/doc/html/resource/lib/index.js
index 4d361a5c5a..e7cf484fa2 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/index.js
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/index.js
@@ -95,7 +95,7 @@ function prepareEntityList() {
/* Configures the text filter */
function configureTextFilter() {
scheduler.add("init", function() {
- $("#filter").append("<div id='textfilter'><span class='pre'/><input type='text' accesskey='/'/><span class='post'/></div>");
+ $("#filter").append("<div id='textfilter'><input type='text' accesskey='/'/></div>");
var input = $("#textfilter > input");
resizeFilterBlock();
input.bind("keyup", function(event) {
@@ -106,12 +106,6 @@ function configureTextFilter() {
});
input.focus(function(event) { input.select(); });
});
- scheduler.add("init", function() {
- $("#textfilter > .post").click(function(){
- $("#textfilter > input").attr("value", "");
- textFilter();
- });
- });
}
// Filters all focused templates and packages. This function should be made less-blocking.
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/old.css b/src/compiler/scala/tools/nsc/doc/html/resource/lib/old.css
new file mode 100644
index 0000000000..d3b7ae90ba
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/old.css
@@ -0,0 +1,206 @@
+@import "reset.css";
+
+/* Javadoc style sheet */
+
+/* Define colors, fonts and other style attributes here to override the defaults */
+
+/* Page background color */
+body { background-color: #FFFFFF }
+
+a:link.selfref, a:visited.selfref {
+ color: #555 !important;
+}
+
+a:link, a:visited {
+ font-family: sans-serif;
+ color:#000099;
+ text-decoration: none;
+}
+
+a:active, a:hover {
+ color:#990000;
+ text-decoration: underline;
+}
+
+a.noref {
+ text-decoration: none;
+ cursor:default;
+}
+
+
+
+font.FrameItemFont {
+ font-size:10pt;
+}
+
+a:link.selfref, a:visited.selfref {
+ font-family: sans-serif;
+ color: #555 !important;
+ }
+
+.a td {
+ background: #ddd;
+ color: #000;
+ font-family: sans-serif;
+ }
+
+h1 {
+ font-size: 200%;
+ font-weight: bold;
+ text-align: center;
+}
+
+h2 {
+ font-family: sans-serif;
+ border:solid 1px #999999;
+ padding:10px;
+ background-color:#EEEEEE;
+ color:#990000;
+ font-family:sans-serif;
+ font-weight:bold;
+ /*line-height:2em;*/
+}
+
+h3 {
+ font-family: sans-serif;
+ border: 1px solid #555555;
+ background: #cccccc;
+ color:#990000;
+ padding:0.2em;
+}
+
+table {
+ border: none;
+ border-spacing: 2px;
+}
+
+
+tr.TableHeadingColor td {
+ font-family: sans-serif;
+ font-size: 9pt;
+ border: 1px solid #555555;
+}
+
+tr.TableRowColor td {
+ font-family: sans-serif;
+ font-size: 9pt;
+ border: 1px solid #999999;
+ cell-padding: 2px;
+}
+
+dt {
+ font-family: sans-serif;
+ font-size: 10pt;
+ color:#990000;
+ font-weight: bold;
+}
+
+dd {
+ font-size:10pt;
+ margin-top:4px;
+ margin-bottom:4px;
+}
+
+dd code {
+ color:#333333;
+ font-size:9pt;
+ font-size:normal;
+}
+
+/* Table colors */
+.TableHeadingColor { background: #cccccc; color:#990000}
+.TableSubHeadingColor { background: #bbbbbb; color:black;}
+.TableRowColor { background: #EEEEEE }
+
+/* Font used in left-hand frame lists */
+.FrameTitleFont { font-weight: 900; font-size: normal; font-family: sans-serif; color:#FFFFFF }
+.FrameHeadingFont {
+ font-weight: bold;
+ font-size: normal;
+ font-family: sans-serif;
+ color:#990000;
+ line-height: 2em;
+ border: 1px solid #aaaaaa;
+ padding-left:4px;
+ padding-right:4px;
+ padding-top:2px;
+ padding-bottom:2px;
+}
+.FrameItemFont { font-size: normal; font-family: sans-serif; color:#FFFFFF }
+
+/* Example of smaller, sans-serif font in frames */
+/* .FrameItemFont { font-size: 10pt; font-family: Helvetica, Arial, sans-serif } */
+
+/* Navigation bar fonts and colors */
+
+.NavBar {
+ background-color:#ddd;
+ border:none;
+ padding: 2px;
+ font-family: Arial, Helvetica, sans-serif;
+ font-size: 8pt;
+ color:#000000;
+ text-transform: uppercase;
+}
+.NavBar td {
+ white-space:nowrap;
+}
+td.NavBarCell1 {
+ text-align: left;
+}
+td.NavBarCell2 {
+ text-align: center;
+ font-weight: bold
+}
+td.NavBarCell3 {
+ text-align: right;
+
+}
+
+li, dd {
+ padding-left:2em;
+}
+
+.off {
+ font-weight: normal;
+ font-style: italic;
+}
+
+.isInherited {
+ background-color: #ede1e1;
+}
+
+.isDeprecated td.name {
+ text-decoration: line-through;
+}
+.apiComments {
+ margin-top : .3em;
+ margin-bottom: .5em;
+ margin-left: 2em;
+ font-size: 90%;
+}
+
+#intro .apiComments {
+ margin: 2em;
+}
+
+.detailsBtn {
+ font-size: 80%;
+ margin-left: 2em;
+}
+
+code.signature {
+}
+
+code.signature * {
+ font-family:inherit;
+}
+
+pre.codeAsDoc {
+ border: dashed 1px #909090;
+ padding: 5px;
+}
+
+td.name, td.type, td.remarks {
+ white-space:nowrap;
+}
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/old.js b/src/compiler/scala/tools/nsc/doc/html/resource/lib/old.js
new file mode 100644
index 0000000000..a884d9e661
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/old.js
@@ -0,0 +1,126 @@
+var showInherited = true;
+
+var toggleInherited= function() {
+ showInherited = !showInherited;
+ $.cookie('showInherited', showInherited);
+ updateInherited();
+}
+
+var updateInherited = function() {
+ $("input.filter_inherited_cb").each(function(){this.checked = showInherited});
+ if (showInherited) {
+ $("tr.isInherited").show();
+ } else {
+ $("tr.isInherited").hide();
+ }
+}
+
+$(document).ready(function(){
+ parent.document.title=document.title;
+ showInherited = $.cookie('showInherited');
+ updateInherited();
+ $("div.apiCommentsDetails").hide();
+});
+
+var selectPackage = function(name) {
+ if(parent.navFrame) {
+ parent.navFrame.selectPackage(name);
+ }
+}
+
+/**
+ * Cookie plugin
+ *
+ * Copyright (c) 2006 Klaus Hartl (stilbuero.de)
+ * Dual licensed under the MIT and GPL licenses:
+ * http://www.opensource.org/licenses/mit-license.php
+ * http://www.gnu.org/licenses/gpl.html
+ *
+ */
+
+/**
+ * Create a cookie with the given name and value and other optional parameters.
+ *
+ * @example $.cookie('the_cookie', 'the_value');
+ * @desc Set the value of a cookie.
+ * @example $.cookie('the_cookie', 'the_value', { expires: 7, path: '/', domain: 'jquery.com', secure: true });
+ * @desc Create a cookie with all available options.
+ * @example $.cookie('the_cookie', 'the_value');
+ * @desc Create a session cookie.
+ * @example $.cookie('the_cookie', null);
+ * @desc Delete a cookie by passing null as value. Keep in mind that you have to use the same path and domain
+ * used when the cookie was set.
+ *
+ * @param String name The name of the cookie.
+ * @param String value The value of the cookie.
+ * @param Object options An object literal containing key/value pairs to provide optional cookie attributes.
+ * @option Number|Date expires Either an integer specifying the expiration date from now on in days or a Date object.
+ * If a negative value is specified (e.g. a date in the past), the cookie will be deleted.
+ * If set to null or omitted, the cookie will be a session cookie and will not be retained
+ * when the the browser exits.
+ * @option String path The value of the path atribute of the cookie (default: path of page that created the cookie).
+ * @option String domain The value of the domain attribute of the cookie (default: domain of page that created the cookie).
+ * @option Boolean secure If true, the secure attribute of the cookie will be set and the cookie transmission will
+ * require a secure protocol (like HTTPS).
+ * @type undefined
+ *
+ * @name $.cookie
+ * @cat Plugins/Cookie
+ * @author Klaus Hartl/klaus.hartl@stilbuero.de
+ */
+
+/**
+ * Get the value of a cookie with the given name.
+ *
+ * @example $.cookie('the_cookie');
+ * @desc Get the value of a cookie.
+ *
+ * @param String name The name of the cookie.
+ * @return The value of the cookie.
+ * @type String
+ *
+ * @name $.cookie
+ * @cat Plugins/Cookie
+ * @author Klaus Hartl/klaus.hartl@stilbuero.de
+ */
+jQuery.cookie = function(name, value, options) {
+ if (typeof value != 'undefined') { // name and value given, set cookie
+ options = options || {};
+ if (value === null) {
+ value = '';
+ options.expires = -1;
+ }
+ var expires = '';
+ if (options.expires && (typeof options.expires == 'number' || options.expires.toUTCString)) {
+ var date;
+ if (typeof options.expires == 'number') {
+ date = new Date();
+ date.setTime(date.getTime() + (options.expires * 24 * 60 * 60 * 1000));
+ } else {
+ date = options.expires;
+ }
+ expires = '; expires=' + date.toUTCString(); // use expires attribute, max-age is not supported by IE
+ }
+ // CAUTION: Needed to parenthesize options.path and options.domain
+ // in the following expressions, otherwise they evaluate to undefined
+ // in the packed version for some reason...
+ var path = options.path ? '; path=' + (options.path) : '';
+ var domain = options.domain ? '; domain=' + (options.domain) : '';
+ var secure = options.secure ? '; secure' : '';
+ document.cookie = [name, '=', encodeURIComponent(value), expires, path, domain, secure].join('');
+ } else { // only name given, get cookie
+ var cookieValue = null;
+ if (document.cookie && document.cookie != '') {
+ var cookies = document.cookie.split(';');
+ for (var i = 0; i < cookies.length; i++) {
+ var cookie = jQuery.trim(cookies[i]);
+ // Does this cookie string begin with the name we want?
+ if (cookie.substring(0, name.length + 1) == (name + '=')) {
+ cookieValue = decodeURIComponent(cookie.substring(name.length + 1));
+ break;
+ }
+ }
+ }
+ return cookieValue;
+ }
+};
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/template.css b/src/compiler/scala/tools/nsc/doc/html/resource/lib/template.css
index 8cfd9e9ce1..0c17d9fa2a 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/template.css
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/template.css
@@ -15,6 +15,7 @@ body {
}
a {
+ cursor: pointer;
text-decoration: underline;
color: #69481D; /* brown */
}
@@ -84,7 +85,7 @@ a:hover {
padding-left: 8px;
}
-#template .values > h3 {
+#values > h3 {
color: white;
padding: 4px;
background-color: #7996AC;
@@ -92,7 +93,7 @@ a:hover {
font-weight: bold;
}
-#template .types > h3 {
+#types > h3 {
padding: 4px;
color: white;
font-weight: bold;
@@ -108,18 +109,6 @@ a:hover {
background-color: #333;
}
-#template > div.parent > h3 {
- color: white;
- padding: 4px;
- background-color: #385E1A;
- font-size: 12pt;
- font-weight: bold;
-}
-
-#template > div.parent > h3 * {
- color: white;
-}
-
/* Member cells */
div.members > ol {
@@ -192,11 +181,11 @@ div.members > ol > li {
font-style: italic;
}
-#template .values .signature .name {
+#values .signature .name {
color: blue;
}
-#template .types .signature .name {
+#types .signature .name {
color: green;
}
@@ -310,15 +299,15 @@ p.comment {
p.shortcomment {
display: block;
margin-left: 8.7em;
- cursor: pointer;
+ cursor: help;
}
div.fullcomment {
+ display: block;
margin: 10px 0 10px 0;
}
#template div.fullcomment {
- display:none;
margin: 6px 0 6px 8.7em;
}
@@ -375,46 +364,6 @@ div.fullcomment dl.paramcmts > dd + dt + dd {
/* Members filter tool */
-#textfilter {
- position: relative;
- display: block;
- height: 20px;
- margin-bottom: 5px;
-}
-
-#textfilter > .pre {
- display: block;
- position: absolute;
- top: 0;
- left: 0;
- height: 20px;
- width: 20px;
- background: url("filter_box_left.png");
-}
-
-#textfilter > .post {
- display: block;
- position: absolute;
- top: 0;
- right: 0;
- height: 20px;
- width: 20px;
- background: url("filter_box_right.png");
-}
-
-#textfilter input {
- display: block;
- position: absolute;
- top: 0;
- right: 20px;
- left: 20px;
- height: 16px;
- padding: 2px;
- font-weight: bold;
- color: #993300;
- background-color: white;
-}
-
#mbrsel {
padding: 4px;
background-color: #B78E99; /* grayish pink */
@@ -443,7 +392,7 @@ div.fullcomment dl.paramcmts > dd + dt + dd {
padding: 4px 8px 4px 8px;
background-color: white;
display: inline-block;
- cursor: pointer;
+ cursor: crosshair;
}
#mbrsel > div > ol > li.in {
@@ -464,4 +413,4 @@ div.fullcomment dl.paramcmts > dd + dt + dd {
#mbrsel .showall {
color: #4C4C4C;
font-weight: bold;
-}
+} \ No newline at end of file
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/template.js b/src/compiler/scala/tools/nsc/doc/html/resource/lib/template.js
index 506edd7746..a13c502ba5 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/template.js
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/template.js
@@ -9,20 +9,6 @@ $(document).ready(function(){
prefilters.removeClass("in");
prefilters.addClass("out");
filter();
-
- var input = $("#textfilter > input");
- input.bind("keyup", function(event) {
- if (event.keyCode == 27) { // escape
- input.attr("value", "");
- }
- filter();
- });
- input.focus(function(event) { input.select(); });
- $("#textfilter > .post").click(function(){
- $("#textfilter > input").attr("value", "");
- filter();
- });
-
$("#ancestors > ol > li").click(function(){
if ($(this).hasClass("in")) {
$(this).removeClass("in");
@@ -61,39 +47,6 @@ $(document).ready(function(){
filter();
};
});
- $("#impl > ol > li.concrete").click(function() {
- if ($(this).hasClass("out")) {
- $(this).removeClass("out").addClass("in");
- $("li[data-isabs='false']").show();
- } else {
- $(this).removeClass("in").addClass("out");
- $("li[data-isabs='false']").hide();
- }
- });
- $("#impl > ol > li.abstract").click(function() {
- if ($(this).hasClass("out")) {
- $(this).removeClass("out").addClass("in");
- $("li[data-isabs='true']").show();
- } else {
- $(this).removeClass("in").addClass("out");
- $("li[data-isabs='true']").hide();
- }
- });
- $("#order > ol > li.alpha").click(function() {
- if ($(this).hasClass("out")) {
- $(this).removeClass("out").addClass("in");
- $("#order > ol > li.inherit").removeClass("in").addClass("out");
- orderAlpha();
- };
- })
- $("#order > ol > li.inherit").click(function() {
- if ($(this).hasClass("out")) {
- $(this).removeClass("out").addClass("in");
- $("#order > ol > li.alpha").removeClass("in").addClass("out");
- orderInherit();
- };
- });
- initInherit();
//http://flowplayer.org/tools/tooltip.html
$(".extype").tooltip({
tip: "#tooltip",
@@ -102,6 +55,7 @@ $(document).ready(function(){
$(this.getTip()).text(this.getTrigger().attr("name"));
}
});
+ $("#template div.fullcomment").hide();
var docAllSigs = $("#template .signature");
function commentShowFct(fullComment){
var vis = $(":visible", fullComment);
@@ -115,7 +69,7 @@ $(document).ready(function(){
var docShowSigs = docAllSigs.filter(function(){
return $("+ div.fullcomment", $(this)).length > 0;
});
- docShowSigs.css("cursor", "pointer");
+ docShowSigs.css("cursor", "help");
docShowSigs.click(function(){
commentShowFct($("+ div.fullcomment", $(this)));
});
@@ -134,7 +88,7 @@ $(document).ready(function(){
var docToggleSigs = docAllSigs.filter(function(){
return $("+ p.shortcomment", $(this)).length > 0;
});
- docToggleSigs.css("cursor", "pointer");
+ docToggleSigs.css("cursor", "help");
docToggleSigs.click(function(){
commentToggleFct($("+ p.shortcomment", $(this)));
});
@@ -143,79 +97,9 @@ $(document).ready(function(){
});
});
-function orderAlpha() {
- $("#template > div.parent").hide();
- $("#ancestors").show();
- filter();
-};
-
-function orderInherit() {
- $("#template > div.parent").show();
- $("#ancestors").hide();
- filter();
-};
-
-/** Prepares the DOM for inheritance-based display. To do so it will:
- * - hide all statically-generated parents headings;
- * - copy all members from the value and type members lists (flat members) to corresponding lists nested below the
- * parent headings (inheritance-grouped members);
- * - initialises a control variable used by the filter method to control whether filtering happens on flat members
- * or on inheritance-grouped members. */
-function initInherit() {
- // parents is a map from fully-qualified names to the DOM node of parent headings.
- var parents = new Object();
- $("#template > div.parent").each(function(){
- parents[$(this).attr("name")] = $(this);
- });
- //
- $("#types > ol > li").each(function(){
- var qualName = $(this).attr("name");
- var owner = qualName.slice(0, qualName.indexOf("#"));
- var name = qualName.slice(qualName.indexOf("#") + 1);
- var parent = parents[owner];
- if (parent != undefined) {
- var types = $("> .types > ol", parent);
- if (types.length == 0) {
- parent.append("<div class='types members'><h3>Type Members</h3><ol></ol></div>");
- types = $("> .types > ol", parent);
- }
- types.append($(this).clone());
- }
- });
- $("#values > ol > li").each(function(){
- var qualName = $(this).attr("name");
- var owner = qualName.slice(0, qualName.indexOf("#"));
- var name = qualName.slice(qualName.indexOf("#") + 1);
- var parent = parents[owner];
- if (parent != undefined) {
- var values = $("> .values > ol", parent);
- if (values.length == 0) {
- parent.append("<div class='values members'><h3>Value Members</h3><ol></ol></div>");
- values = $("> .values > ol", parent);
- }
- values.append($(this).clone());
- }
- });
- $("#template > div.parent").each(function(){
- if ($("> div.members", this).length == 0) { $(this).remove(); };
- });
- $("#template > div.parent").each(function(){
- $(this).hide();
- });
-};
-
function filter() {
- var query = $("#textfilter > input").attr("value").toLowerCase();
- var queryRegExp = new RegExp(query, "i");
- var inheritHides = null
- if ($("#order > ol > li.inherit").hasClass("in")) {
- inheritHides = $("#linearization > li:gt(0)");
- }
- else {
- inheritHides = $("#linearization > li.out");
- }
var outOwners =
- inheritHides.map(function(){
+ $("#mbrsel ol#linearization > li.out").map(function(){
var r = $(this).attr("name");
return r
}).get();
@@ -223,36 +107,24 @@ function filter() {
$(".members > ol > li").each(function(){
var vis1 = $(this).attr("visbl");
var qualName1 = $(this).attr("name");
+ var owner1 = qualName1.slice(0, qualName1.indexOf("#"));
//var name1 = qualName1.slice(qualName1.indexOf("#") + 1);
var showByOwned = true;
- if ($(this).parents(".parent").length == 0) {
- // owner filtering must not happen in "inherited from" member lists
- var owner1 = qualName1.slice(0, qualName1.indexOf("#"));
- for (out in outOwners) {
- if (outOwners[out] == owner1) {
- showByOwned = false;
- };
+ for (out in outOwners) {
+ if (outOwners[out] == owner1) {
+ showByOwned = false;
};
};
- var showByVis = true;
+ var showByVis = true
if (vis1 == "prt") {
showByVis = prtVisbl;
};
- var showByName = true;
- if (query != "") {
- var content = $(this).attr("name") + $("> .fullcomment .cmt", this).text();
- showByName = queryRegExp.test(content);
- };
- if (showByOwned && showByVis && showByName) {
+ if (showByOwned && showByVis) {
$(this).show();
}
else {
$(this).hide();
};
});
- $(".members").each(function(){
- $(this).show();
- if ($(" > ol > li:visible", this).length == 0) { $(this).hide(); }
- });
return false
};
diff --git a/src/compiler/scala/tools/nsc/doc/model/Entity.scala b/src/compiler/scala/tools/nsc/doc/model/Entity.scala
index fc84ed8f6c..b5f12d312d 100644
--- a/src/compiler/scala/tools/nsc/doc/model/Entity.scala
+++ b/src/compiler/scala/tools/nsc/doc/model/Entity.scala
@@ -17,15 +17,13 @@ trait Entity {
def toRoot: List[Entity]
def qualifiedName: String
override def toString = qualifiedName
- def universe: Universe
}
-/** A class, trait, object or package. A package is represented as an instance
- * of the `Package` subclass. A class, trait, object or package may be
- * directly an instance of `WeakTemplateEntity` if it is not ''documentable''
- * (that is, if there is no documentation page for it in the current site),
- * otherwise, it will be represented as an instance of the `TemplateEntity`
- * subclass. */
+
+/** A class, trait, object or package. A package is represented as an instance of the `Package` subclass. A class,
+ * trait, object or package may be directly an instance of `WeakTemplateEntity` if it is not ''documentable'' (that
+ * is, if there is no documentation page for it in the current site), otherwise, it will be represented as an instance
+ * of the `TemplateEntity` subclass. */
trait TemplateEntity extends Entity {
def isPackage: Boolean
def isRootPackage: Boolean
@@ -33,7 +31,6 @@ trait TemplateEntity extends Entity {
def isClass: Boolean
def isObject: Boolean
def isDocTemplate: Boolean
- def selfType : Option[TypeEntity]
}
trait NoDocTemplate extends TemplateEntity
@@ -54,27 +51,22 @@ trait MemberEntity extends Entity {
def isLazyVal: Boolean
def isVar: Boolean
def isImplicit: Boolean
- def isAbstract: Boolean
def isConstructor: Boolean
def isAliasType: Boolean
def isAbstractType: Boolean
def isTemplate: Boolean
}
-trait HigherKinded extends Entity {
- def typeParams: List[TypeParam]
-}
-
-/** A ''documentable'' class, trait or object (that is, a documentation page
- * will be generated for it in the current site). */
+/** A ''documentable'' class, trait or object (that is, a documentation page will be generated for it in the current
+ * site). */
trait DocTemplateEntity extends TemplateEntity with MemberEntity {
def toRoot: List[DocTemplateEntity]
def inSource: Option[(io.AbstractFile, Int)]
def sourceUrl: Option[java.net.URL]
+ def typeParams: List[TypeParam]
def parentType: Option[TypeEntity]
- def linearization: List[(TemplateEntity, TypeEntity)]
- def linearizationTemplates: List[TemplateEntity]
- def linearizationTypes: List[TypeEntity]
+ def parentTemplates: List[TemplateEntity]
+ def linearization: List[TemplateEntity]
def subClasses: List[DocTemplateEntity]
def members: List[MemberEntity]
def templates: List[DocTemplateEntity]
@@ -101,12 +93,12 @@ trait DocTemplateEntity extends TemplateEntity with MemberEntity {
}
/** A ''documentable'' trait. */
-trait Trait extends DocTemplateEntity with HigherKinded {
+trait Trait extends DocTemplateEntity {
def valueParams : List[List[ValueParam]]
}
/** A ''documentable'' class. */
-trait Class extends Trait with HigherKinded {
+trait Class extends Trait {
def primaryConstructor: Option[Constructor]
def constructors: List[Constructor]
def isCaseClass: Boolean
@@ -115,8 +107,7 @@ trait Class extends Trait with HigherKinded {
/** A ''documentable'' object. */
trait Object extends DocTemplateEntity
-/** A package that contains at least one ''documentable'' class, trait,
- * object or package. */
+/** A package that contains at least one ''documentable'' class, trait, object or package. */
trait Package extends Object {
def inTemplate: Package
def toRoot: List[Package]
@@ -131,7 +122,8 @@ trait NonTemplateMemberEntity extends MemberEntity {
}
/** A method (`def`) of a ''documentable'' class, trait or object. */
-trait Def extends NonTemplateMemberEntity with HigherKinded {
+trait Def extends NonTemplateMemberEntity {
+ def typeParams: List[TypeParam]
def valueParams : List[List[ValueParam]]
}
@@ -140,28 +132,30 @@ trait Constructor extends NonTemplateMemberEntity {
def valueParams : List[List[ValueParam]]
}
-/** A value (`val`), lazy val (`lazy val`) or variable (`var`) of a
- * ''documentable'' class, trait or object. */
+/** A value (`val`), lazy val (`lazy val`) or variable (`var`) of a ''documentable'' class, trait or object. */
trait Val extends NonTemplateMemberEntity
/** An abstract type of a ''documentable'' class, trait or object. */
-trait AbstractType extends NonTemplateMemberEntity with HigherKinded {
+trait AbstractType extends NonTemplateMemberEntity {
+ // TODO: typeParams
def lo: Option[TypeEntity]
def hi: Option[TypeEntity]
}
/** An abstract type of a ''documentable'' class, trait or object. */
-trait AliasType extends NonTemplateMemberEntity with HigherKinded {
+trait AliasType extends NonTemplateMemberEntity {
+ // TODO: typeParams
def alias: TypeEntity
}
trait ParameterEntity extends Entity {
+ def inTemplate: DocTemplateEntity
def isTypeParam: Boolean
def isValueParam: Boolean
}
/** A type parameter to a class or trait or to a method. */
-trait TypeParam extends ParameterEntity with HigherKinded {
+trait TypeParam extends ParameterEntity {
def variance: String
def lo: Option[TypeEntity]
def hi: Option[TypeEntity]
@@ -181,26 +175,26 @@ sealed trait Visibility {
}
/** The visibility of `private[this]` members. */
-case class PrivateInInstance() extends Visibility
+case class PrivateInInstance extends Visibility
/** The visibility of `protected[this]` members. */
-case class ProtectedInInstance() extends Visibility {
+case class ProtectedInInstance extends Visibility {
override def isProtected = true
}
-/** The visibility of `private[owner]` members. An unqualified private members
- * is encoded with `owner` equal to the members's `inTemplate`. */
+/** The visibility of `private[owner]` members. An unqualified private members is encoded with `owner` equal to the
+ * members's `inTemplate`. */
case class PrivateInTemplate(owner: TemplateEntity) extends Visibility
-/** The visibility of `protected[owner]` members. An unqualified protected
- * members is encoded with `owner` equal to the members's `inTemplate`.
- * Note that whilst the member is visible in any template owned by `owner`,
- * it is only visible in subclasses of the member's `inTemplate`. */
+/** The visibility of `protected[owner]` members. An unqualified protected members is encoded with `owner` equal to the
+ * members's `inTemplate`.
+ * Note that whilst the member is visible in any template owned by `owner`, it is only visible in subclasses of the
+ * member's `inTemplate`. */
case class ProtectedInTemplate(owner: TemplateEntity) extends Visibility {
override def isProtected = true
}
/** The visibility of public members. */
-case class Public() extends Visibility {
+case class Public extends Visibility {
override def isPublic = true
}
diff --git a/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala b/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala
index 87242eebf5..e2a25d7ea4 100644
--- a/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala
+++ b/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala
@@ -22,18 +22,13 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { thisFactory
def templatesCount = templatesCache.size - droppedPackages
private var modelFinished = false
- private var universe: Universe = null
/** */
def makeModel: Universe = {
- val universe = new Universe { thisUniverse =>
- thisFactory.universe = thisUniverse
- val settings = thisFactory.settings
- val rootPackage =
- makeRootPackage getOrElse { throw new Error("no documentable class found in compilation units") }
- }
+ val rootPackage =
+ makeRootPackage getOrElse { throw new Error("no documentable class found in compilation units") }
+ val universe = new Universe(settings, rootPackage)
modelFinished = true
- thisFactory.universe = null
universe
}
@@ -54,23 +49,20 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { thisFactory
/** Provides a default implementation for instances of the `Entity` type. */
abstract class EntityImpl(val sym: Symbol, inTpl: => TemplateImpl) extends Entity {
val name = optimize(sym.nameString)
- def inTemplate: TemplateImpl = inTpl
+ def inTemplate = inTpl
def toRoot: List[EntityImpl] = this :: inTpl.toRoot
def qualifiedName = name
- val universe = thisFactory.universe
}
/** Provides a default implementation for instances of the `WeakTemplateEntity` type. It must be instantiated as a
* `SymbolicEntity` to access the compiler symbol that underlies the entity. */
trait TemplateImpl extends EntityImpl with TemplateEntity {
- override def qualifiedName: String =
- if (inTemplate.isRootPackage) name else optimize(inTemplate.qualifiedName + "." + name)
+ override def qualifiedName = if (inTemplate.isRootPackage) name else optimize(inTemplate.qualifiedName + "." + name)
def isPackage = sym.isPackage
def isTrait = sym.isTrait
def isClass = sym.isClass && !sym.isTrait
def isObject = sym.isModule && !sym.isPackage
def isRootPackage = false
- def selfType = if (sym.thisSym eq sym) None else Some(makeType(sym.thisSym.typeOfThis, this))
}
/** Provides a default implementation for instances of the `WeakTemplateEntity` type. It must be instantiated as a
@@ -125,14 +117,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { thisFactory
def inheritedFrom =
if (inTemplate.sym == this.sym.owner || inTemplate.sym.isPackage) Nil else
makeTemplate(this.sym.owner) :: (sym.allOverriddenSymbols map { os => makeTemplate(os.owner) })
- def resultType = {
- def resultTpe(tpe: Type): Type = tpe match { // similar to finalResultType, except that it leaves singleton types alone
- case PolyType(_, res) => resultTpe(res)
- case MethodType(_, res) => resultTpe(res)
- case _ => tpe
- }
- makeType(resultTpe(sym.tpe), inTemplate, sym)
- }
+ def resultType = makeType(sym.tpe.finalResultType, inTemplate, sym)
def isDef = false
def isVal = false
def isLazyVal = false
@@ -141,9 +126,6 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { thisFactory
def isConstructor = false
def isAliasType = false
def isAbstractType = false
- def isAbstract =
- ((!sym.isTrait && ((sym hasFlag Flags.ABSTRACT) || (sym hasFlag Flags.DEFERRED))) ||
- sym.isAbstractClass || sym.isAbstractType) && !sym.isSynthetic
def isTemplate = false
}
@@ -155,7 +137,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { thisFactory
* * The owner of the template (as a full template);
* * All ancestors of the template (as weak templates);
* * All non-package members (including other templates, as full templates). */
- abstract class DocTemplateImpl(sym: Symbol, inTpl: => DocTemplateImpl) extends MemberImpl(sym, inTpl) with TemplateImpl with HigherKindedImpl with DocTemplateEntity {
+ abstract class DocTemplateImpl(sym: Symbol, inTpl: => DocTemplateImpl) extends MemberImpl(sym, inTpl) with TemplateImpl with DocTemplateEntity {
//if (inTpl != null) println("mbr " + sym + " in " + (inTpl.toRoot map (_.sym)).mkString(" > "))
templatesCache += (sym -> this)
lazy val definitionName = optimize(inDefinitionTemplates.head.qualifiedName + "." + name)
@@ -173,27 +155,22 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { thisFactory
}
else None
}
- def parentType = {
- if (sym.isPackage) None else {
- val tps =
- (sym.tpe.parents filter (_ != ScalaObjectClass.tpe)) map { _.asSeenFrom(sym.thisType, sym) }
- Some(makeType(RefinedType(tps, EmptyScope), inTpl))
- }
- }
- val linearization: List[(TemplateEntity, TypeEntity)] = {
- val acs = sym.ancestors filter { _ != ScalaObjectClass }
- val tps = acs map { cls => makeType(sym.info.baseType(cls), this) }
- val tpls = acs map { makeTemplate(_) }
- tpls map {
+ def typeParams = if (sym.isClass) sym.typeParams map (makeTypeParam(_, this)) else Nil
+ def parentTemplates = sym.info.parents map { x: Type => makeTemplate(x.typeSymbol) }
+ def parentType =
+ if (sym.isPackage) None else
+ Some(makeType(RefinedType(sym.tpe.parents filter (_ != ScalaObjectClass.tpe), EmptyScope)))
+ val linearization = {
+ sym.info.parents map { prt =>
+ makeTemplate(prt.typeSymbol) match {
case dtpl: DocTemplateImpl => dtpl.registerSubClass(this)
case _ =>
+ }
}
- tpls zip tps
+ sym.ancestors filter (_ != ScalaObjectClass) map { makeTemplate(_) }
}
- def linearizationTemplates = linearization map { _._1 }
- def linearizationTypes = linearization map { _._2 }
private lazy val subClassesCache = mutable.Buffer.empty[DocTemplateEntity]
- def registerSubClass(sc: DocTemplateEntity): Unit = {
+ def registerSubClass(sc: DocTemplateEntity) = {
assert(subClassesCache != null)
subClassesCache += sc
}
@@ -236,28 +213,21 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { thisFactory
}}
}
- abstract class ParameterImpl(sym: Symbol, inTpl: => TemplateImpl) extends EntityImpl(sym, inTpl) with ParameterEntity {
+ abstract class ParameterImpl(sym: Symbol, inTpl: => DocTemplateImpl) extends EntityImpl(sym, inTpl) with ParameterEntity {
override def inTemplate = inTpl
}
private trait TypeBoundsImpl extends EntityImpl {
- def lo = sym.info.bounds match {
- case TypeBounds(lo, hi) if lo.typeSymbol != NothingClass =>
- Some(makeType(appliedType(lo, sym.info.typeParams map {_.tpe}), inTemplate))
+ def lo = sym.info.normalize match {
+ case TypeBounds(lo, hi) if lo.typeSymbol != NothingClass => Some(makeType(lo, inTemplate, sym))
case _ => None
}
- def hi = sym.info.bounds match {
- case TypeBounds(lo, hi) if hi.typeSymbol != AnyClass =>
- Some(makeType(appliedType(hi, sym.info.typeParams map {_.tpe}), inTemplate))
+ def hi = sym.info.normalize match {
+ case TypeBounds(lo, hi) if hi.typeSymbol != AnyClass => Some(makeType(hi, inTemplate, sym))
case _ => None
}
}
- trait HigherKindedImpl extends EntityImpl with HigherKinded {
- def typeParams =
- sym.typeParams map (makeTypeParam(_, inTemplate))
- }
-
/* ============== MAKER METHODS ============== */
/** */
@@ -372,8 +342,10 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { thisFactory
override def isVar = true
})
else if (bSym.isMethod && !bSym.isGetterOrSetter && !bSym.isConstructor && !bSym.isModule)
- Some(new NonTemplateParamMemberImpl(bSym, inTpl) with HigherKindedImpl with Def {
+ Some(new NonTemplateParamMemberImpl(bSym, inTpl) with Def {
override def isDef = true
+ def typeParams =
+ sym.tpe.typeParams map (makeTypeParam(_, inTpl))
})
else if (bSym.isConstructor)
Some(new NonTemplateParamMemberImpl(bSym, inTpl) with Constructor {
@@ -385,13 +357,13 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { thisFactory
override def isVal = true
})
else if (bSym.isAbstractType)
- Some(new NonTemplateMemberImpl(bSym, inTpl) with TypeBoundsImpl with HigherKindedImpl with AbstractType {
+ Some(new NonTemplateMemberImpl(bSym, inTpl) with TypeBoundsImpl with AbstractType {
override def isAbstractType = true
})
else if (bSym.isAliasType)
- Some(new NonTemplateMemberImpl(bSym, inTpl) with HigherKindedImpl with AliasType {
+ Some(new NonTemplateMemberImpl(bSym, inTpl) with AliasType {
override def isAliasType = true
- def alias = makeType(appliedType(sym.tpe, sym.info.typeParams map {_.tpe}).normalize, inTpl, sym)
+ def alias = makeType(sym.tpe, inTpl, sym)
})
else if (bSym.isPackage)
inTpl match { case inPkg: PackageImpl => makePackage(bSym, inPkg) }
@@ -413,8 +385,8 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { thisFactory
}
/** */
- def makeTypeParam(aSym: Symbol, inTpl: => TemplateImpl): TypeParam =
- new ParameterImpl(aSym, inTpl) with TypeBoundsImpl with HigherKindedImpl with TypeParam {
+ def makeTypeParam(aSym: Symbol, inTpl: => DocTemplateImpl): TypeParam = {
+ new ParameterImpl(aSym, inTpl) with TypeBoundsImpl with TypeParam {
def isTypeParam = true
def isValueParam = false
def variance: String = {
@@ -423,6 +395,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { thisFactory
else ""
}
}
+ }
/** */
def makeValueParam(aSym: Symbol, inTpl: => DocTemplateImpl): ValueParam = {
@@ -453,21 +426,14 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { thisFactory
}
/** */
- def makeType(aType: Type, inTpl: => TemplateImpl, dclSym: Symbol): TypeEntity = {
+ def makeType(aType: Type, seeInTpl: => TemplateImpl, dclSym: Symbol): TypeEntity = {
def ownerTpl(sym: Symbol): Symbol =
if (sym.isClass || sym.isModule || sym == NoSymbol) sym else ownerTpl(sym.owner)
- val tpe =
- if (thisFactory.settings.useStupidTypes.value) aType else {
- def ownerTpl(sym: Symbol): Symbol =
- if (sym.isClass || sym.isModule || sym == NoSymbol) sym else ownerTpl(sym.owner)
- val fixedSym = if (inTpl.sym.isModule) inTpl.sym.moduleClass else inTpl.sym
- aType.asSeenFrom(fixedSym.thisType, ownerTpl(dclSym))
- }
- makeType(tpe, inTpl)
+ makeType(aType.asSeenFrom(seeInTpl.sym.thisType, ownerTpl(dclSym)))
}
/** */
- def makeType(aType: Type, inTpl: => TemplateImpl): TypeEntity =
+ def makeType(aType: Type): TypeEntity =
new TypeEntity {
private val nameBuffer = new StringBuilder
private var refBuffer = new immutable.TreeMap[Int, (TemplateEntity, Int)]
@@ -480,14 +446,9 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { thisFactory
nameBuffer append sep
appendTypes0(tps, sep)
}
- private def checkFunctionType(tpe: TypeRef): Boolean = {
- val TypeRef(_, sym, args) = tpe
- (args.length > 0) && (args.length - 1 <= definitions.MaxFunctionArity) &&
- (sym == definitions.FunctionClass(args.length - 1))
- }
- private def appendType0(tpe: Type): Unit = tpe match {
+ private def appendType0(tpe: Type): Unit = tpe.normalize match {
/* Type refs */
- case tp: TypeRef if (checkFunctionType(tp)) =>
+ case tp: TypeRef if (definitions.isFunctionType(tp)) =>
nameBuffer append '('
appendTypes0(tp.args.init, ", ")
nameBuffer append ") ⇒ "
@@ -525,7 +486,11 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { thisFactory
}
/* Polymorphic types */
case PolyType(tparams, result) if (!tparams.isEmpty) =>
- throw new Error("Polymorphic type '" + tpe + "' cannot be printed as a type")
+ appendType0(result)
+ nameBuffer append '['
+ appendTypes0(tparams map (_.tpe), ", ") // TODO: actually print the polytype's symbols (not just types)
+ nameBuffer append ']'
+ /* Eval-by-name types */
case PolyType(tparams, result) if (tparams.isEmpty) =>
nameBuffer append '⇒'
appendType0(result)
diff --git a/src/compiler/scala/tools/nsc/doc/model/comment/Comment.scala b/src/compiler/scala/tools/nsc/doc/model/comment/Comment.scala
index 2a463959e5..7fe2e58991 100644
--- a/src/compiler/scala/tools/nsc/doc/model/comment/Comment.scala
+++ b/src/compiler/scala/tools/nsc/doc/model/comment/Comment.scala
@@ -69,3 +69,4 @@ abstract class Comment {
(version map ("@version " + _.toString)).mkString
}
+
diff --git a/src/compiler/scala/tools/nsc/doc/model/comment/CommentFactory.scala b/src/compiler/scala/tools/nsc/doc/model/comment/CommentFactory.scala
index 6fe1fe06a4..475fbf584e 100644
--- a/src/compiler/scala/tools/nsc/doc/model/comment/CommentFactory.scala
+++ b/src/compiler/scala/tools/nsc/doc/model/comment/CommentFactory.scala
@@ -71,29 +71,14 @@ trait CommentFactory { thisFactory: ModelFactory with CommentFactory =>
case "/h3" => " ===\n"
case "h4" | "h5" | "h6" => "\n==== "
case "/h4" | "/h5" | "/h6" => " ====\n"
+ case "code" | "/code" => "`"
case "li" => "\n * - "
case _ => ""
}
- /** Javadoc tags that should be replaced by something useful, such as wiki syntax, or that should be dropped. */
- protected val JavadocTags =
- new Regex("""\{\@(code|docRoot|inheritDoc|link|linkplain|literal|value)([^}]*)\}""")
-
- /** Maps a javadoc tag to a useful wiki replacement, or an empty string if it cannot be salvaged. */
- protected def javadocReplacement(mtch: Regex.Match): String = mtch.group(1) match {
- case "code" => "`" + mtch.group(2) + "`"
- case "docRoot" => ""
- case "inheritDoc" => ""
- case "link" => "`" + mtch.group(2) + "`"
- case "linkplain" => "`" + mtch.group(2) + "`"
- case "literal" => mtch.group(2)
- case "value" => "`" + mtch.group(2) + "`"
- case _ => ""
- }
-
/** Safe HTML tags that can be kept. */
protected val SafeTags =
- new Regex("""((<code( [^>]*)?>.*</code>)|(</?(abbr|acronym|address|area|a|bdo|big|blockquote|br|button|b|caption|cite|col|colgroup|dd|del|dfn|em|fieldset|form|hr|img|input|ins|i|kbd|label|legend|link|map|object|optgroup|option|param|pre|q|samp|select|small|span|strong|sub|sup|table|tbody|td|textarea|tfoot|th|thead|tr|tt|var)( [^>]*)?/?>))""")
+ new Regex("""(</?(abbr|acronym|address|area|a|bdo|big|blockquote|br|button|b|caption|code|cite|col|colgroup|dd|del|dfn|em|fieldset|form|hr|img|input|ins|i|kbd|label|legend|link|map|object|optgroup|option|param|pre|q|samp|select|small|span|strong|sub|sup|table|tbody|td|textarea|tfoot|th|thead|tr|tt|var)( [^>]*)?/?>)""")
protected val safeTagMarker = '\u000E'
@@ -140,9 +125,8 @@ trait CommentFactory { thisFactory: ModelFactory with CommentFactory =>
}
val strippedComment = comment.trim.stripPrefix("/*").stripSuffix("*/")
val safeComment = DangerousTags.replaceAllIn(strippedComment, { htmlReplacement(_) })
- val javadoclessComment = JavadocTags.replaceAllIn(safeComment, { javadocReplacement(_) })
val markedTagComment =
- SafeTags.replaceAllIn(javadoclessComment, { mtch =>
+ SafeTags.replaceAllIn(safeComment, { mtch =>
java.util.regex.Matcher.quoteReplacement(safeTagMarker + mtch.matched + safeTagMarker)
})
markedTagComment.lines.toList map (cleanLine(_))
diff --git a/src/compiler/scala/tools/nsc/interactive/CompilerControl.scala b/src/compiler/scala/tools/nsc/interactive/CompilerControl.scala
index c6106a9865..26f7fb1115 100644
--- a/src/compiler/scala/tools/nsc/interactive/CompilerControl.scala
+++ b/src/compiler/scala/tools/nsc/interactive/CompilerControl.scala
@@ -134,9 +134,6 @@ trait CompilerControl { self: Global =>
/** Tell the compile server to shutdown, and do not restart again */
def askShutdown() = scheduler raise ShutdownReq
- /** Ask for a computation to be done quickly on the presentation compiler thread */
- def ask[A](op: () => A): A = scheduler doQuickly op
-
// ---------------- Interpreted exceptions -------------------
object CancelActionReq extends ControlThrowable
diff --git a/src/compiler/scala/tools/nsc/interactive/Global.scala b/src/compiler/scala/tools/nsc/interactive/Global.scala
index cb2f4a0340..2c174860e4 100644
--- a/src/compiler/scala/tools/nsc/interactive/Global.scala
+++ b/src/compiler/scala/tools/nsc/interactive/Global.scala
@@ -110,16 +110,10 @@ self =>
// ----------------- Polling ---------------------------------------
/** Called from runner thread and signalDone:
- * Poll for interrupts and execute them immediately.
- * Then, poll for exceptions and execute them.
- * Then, poll for work reload/typedTreeAt/doFirst commands during background checking.
+ * Poll for exceptions.
+ * Poll for work reload/typedTreeAt/doFirst commands during background checking.
*/
def pollForWork() {
- scheduler.pollInterrupt() match {
- case Some(ir) =>
- ir.execute(); pollForWork()
- case _ =>
- }
scheduler.pollThrowable() match {
case Some(ex @ CancelActionReq) => if (acting) throw ex
case Some(ex @ FreshRunReq) =>
diff --git a/src/compiler/scala/tools/nsc/interpreter/Completion.scala b/src/compiler/scala/tools/nsc/interpreter/Completion.scala
index 22a95a4bf8..58ce85f1f6 100644
--- a/src/compiler/scala/tools/nsc/interpreter/Completion.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/Completion.scala
@@ -100,12 +100,10 @@ class Completion(val repl: Interpreter) extends CompletionOutput {
def imported(tp: Type) = new ImportCompletion(tp)
}
- class TypeMemberCompletion(val tp: Type) extends CompletionAware
- with CompilerCompletion {
+ class TypeMemberCompletion(val tp: Type) extends CompletionAware with CompilerCompletion {
def excludeEndsWith: List[String] = Nil
def excludeStartsWith: List[String] = List("<") // <byname>, <repeated>, etc.
- def excludeNames: List[String] =
- anyref.methodNames.filterNot(anyRefMethodsToShow contains) ++ List("_root_")
+ def excludeNames: List[String] = anyref.methodNames -- anyRefMethodsToShow ++ List("_root_")
def methodSignatureString(sym: Symbol) = {
def asString = new MethodSymbolOutput(sym).methodString()
@@ -300,8 +298,7 @@ class Completion(val repl: Interpreter) extends CompletionOutput {
private var lastCursor: Int = -1
// Does this represent two consecutive tabs?
- def isConsecutiveTabs(buf: String, cursor: Int) =
- cursor == lastCursor && buf == lastBuf
+ def isConsecutiveTabs(buf: String, cursor: Int) = cursor == lastCursor && buf == lastBuf
// Longest common prefix
def commonPrefix(xs: List[String]) =
@@ -312,7 +309,7 @@ class Completion(val repl: Interpreter) extends CompletionOutput {
override def complete(_buf: String, cursor: Int, candidates: JList[String]): Int = {
val buf = onull(_buf)
verbosity = if (isConsecutiveTabs(buf, cursor)) verbosity + 1 else 0
- DBG("\ncomplete(%s, %d) last = (%s, %d), verbosity: %s".format(buf, cursor, lastBuf, lastCursor, verbosity))
+ DBG("complete(%s, %d) last = (%s, %d), verbosity: %s".format(buf, cursor, lastBuf, lastCursor, verbosity))
// we don't try lower priority completions unless higher ones return no results.
def tryCompletion(p: Parsed, completionFunction: Parsed => List[String]): Option[Int] = {
@@ -346,21 +343,7 @@ class Completion(val repl: Interpreter) extends CompletionOutput {
def regularCompletion = tryCompletion(mkDotted, topLevelFor)
def fileCompletion = tryCompletion(mkUndelimited, FileCompletion completionsFor _.buffer)
- /** This is the kickoff point for all manner of theoretically possible compiler
- * unhappiness - fault may be here or elsewhere, but we don't want to crash the
- * repl regardless. Hopefully catching Exception is enough, but because the
- * compiler still throws some Errors it may not be.
- */
- try {
- (lastResultCompletion orElse regularCompletion orElse fileCompletion) getOrElse cursor
- }
- catch {
- case ex: Exception =>
- DBG("Error: complete(%s, %s, _) provoked %s".format(_buf, cursor, ex))
- candidates add " "
- candidates add "<completion error>"
- cursor
- }
+ (lastResultCompletion orElse regularCompletion orElse fileCompletion) getOrElse cursor
}
}
}
diff --git a/src/compiler/scala/tools/nsc/io/AbstractFile.scala b/src/compiler/scala/tools/nsc/io/AbstractFile.scala
index 4d86dbf114..5d4f7b8464 100644
--- a/src/compiler/scala/tools/nsc/io/AbstractFile.scala
+++ b/src/compiler/scala/tools/nsc/io/AbstractFile.scala
@@ -17,7 +17,8 @@ import scala.collection.mutable.ArrayBuffer
* @author Philippe Altherr
* @version 1.0, 23/03/2004
*/
-object AbstractFile {
+object AbstractFile
+{
/** Returns "getFile(new File(path))". */
def getFile(path: String): AbstractFile = getFile(Path(path))
def getFile(path: Path): AbstractFile = getFile(path.toFile)
diff --git a/src/compiler/scala/tools/nsc/io/File.scala b/src/compiler/scala/tools/nsc/io/File.scala
index e9741ed5cb..d8e1410375 100644
--- a/src/compiler/scala/tools/nsc/io/File.scala
+++ b/src/compiler/scala/tools/nsc/io/File.scala
@@ -33,18 +33,13 @@ object File {
// this is a workaround for http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=6503430
// we are using a static initializer to statically initialize a java class so we don't
- // trigger java.lang.InternalErrors later when using it concurrently. We ignore all
- // the exceptions so as not to cause spurious failures when no write access is available,
- // e.g. google app engine.
- try {
+ // trigger java.lang.InternalErrors later when using it concurrently.
+ {
val tmp = JFile.createTempFile("bug6503430", null, null)
val in = new FileInputStream(tmp).getChannel()
val out = new FileOutputStream(tmp, true).getChannel()
out.transferFrom(in, 0, 0)
- tmp.delete()
- }
- catch {
- case _: IllegalArgumentException | _: IllegalStateException | _: IOException | _: SecurityException => ()
+ ()
}
}
import File._
diff --git a/src/compiler/scala/tools/nsc/io/NullPrintStream.scala b/src/compiler/scala/tools/nsc/io/NullPrintStream.scala
deleted file mode 100644
index 9340796a83..0000000000
--- a/src/compiler/scala/tools/nsc/io/NullPrintStream.scala
+++ /dev/null
@@ -1,19 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2010 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.nsc
-package io
-
-import java.io.{ PrintStream, ByteArrayOutputStream }
-
-/** A sink for when you want to discard all output.
- */
-class NullPrintStream extends PrintStream(new ByteArrayOutputStream()) { }
-
-object NullPrintStream extends NullPrintStream {
- def setOut() = Console setOut this
- def setErr() = Console setErr this
- def setOutAndErr() = { setOut() ; setErr() }
-}
diff --git a/src/compiler/scala/tools/nsc/io/Path.scala b/src/compiler/scala/tools/nsc/io/Path.scala
index 4373cc64a5..afef38be3c 100644
--- a/src/compiler/scala/tools/nsc/io/Path.scala
+++ b/src/compiler/scala/tools/nsc/io/Path.scala
@@ -26,14 +26,15 @@ import scala.util.Random.alphanumeric
* @since 2.8
*/
-object Path {
+object Path
+{
private val ZipMagicNumber = List[Byte](80, 75, 3, 4)
- private def magicNumberIsZip(f: Path) = f.isFile && (f.toFile.bytes().take(4).toList == ZipMagicNumber)
/** If examineFile is true, it will look at the first four bytes of the file
* and see if the magic number indicates it may be a jar or zip.
*/
- def isJarOrZip(f: Path): Boolean = isJarOrZip(f, true)
+ private def magicNumberIsZip(f: Path) = f.isFile && (f.toFile.bytes().take(4).toList == ZipMagicNumber)
+ def isJarOrZip(f: Path): Boolean = isJarOrZip(f, false)
def isJarOrZip(f: Path, examineFile: Boolean): Boolean =
f.hasExtension("zip", "jar") || (examineFile && magicNumberIsZip(f))
diff --git a/src/compiler/scala/tools/nsc/io/ZipArchive.scala b/src/compiler/scala/tools/nsc/io/ZipArchive.scala
index e65e0040c0..4be11fc9a8 100644
--- a/src/compiler/scala/tools/nsc/io/ZipArchive.scala
+++ b/src/compiler/scala/tools/nsc/io/ZipArchive.scala
@@ -15,13 +15,13 @@ import PartialFunction._
import scala.collection.mutable.{ Map, HashMap }
import scala.collection.JavaConversions.asIterator
-import annotation.tailrec
/**
* @author Philippe Altherr
* @version 1.0, 23/03/2004
*/
-object ZipArchive {
+object ZipArchive
+{
def fromPath(path: Path): ZipArchive = fromFile(path.toFile)
/**
@@ -48,24 +48,15 @@ object ZipArchive {
def fromURL(url: URL): AbstractFile = new URLZipArchive(url)
private[io] class ZipEntryTraversableClass(in: InputStream) extends Traversable[ZipEntry] {
- val zis = () => new ZipInputStream(in)
+ val zis = new ZipInputStream(in)
def foreach[U](f: ZipEntry => U) = {
- var in: ZipInputStream = null
- @tailrec def loop(): Unit = {
- val entry = in.getNextEntry()
- if (entry != null) {
- f(entry)
- in.closeEntry()
- loop()
- }
- }
-
- try {
- in = zis()
- loop()
+ def loop(x: ZipEntry): Unit = if (x != null) {
+ f(x)
+ zis.closeEntry()
+ loop(zis.getNextEntry())
}
- finally in.close()
+ loop(zis.getNextEntry())
}
}
}
@@ -79,7 +70,7 @@ private[io] trait ZipContainer extends AbstractFile
/** Abstract types */
type SourceType // InputStream or AbstractFile
type CreationType // InputStream or ZipFile
- type ZipTrav = Traversable[ZipEntry] { def zis: () => ZipInputStream }
+ type ZipTrav = Traversable[ZipEntry] { def zis: ZipInputStream }
/** Abstract values */
protected val creationSource: CreationType
@@ -194,7 +185,8 @@ private[io] trait ZipContainer extends AbstractFile
* @author Philippe Altherr
* @version 1.0, 23/03/2004
*/
-final class ZipArchive(file: File, val archive: ZipFile) extends PlainFile(file) with ZipContainer {
+final class ZipArchive(file: File, val archive: ZipFile) extends PlainFile(file) with ZipContainer
+{
self =>
type SourceType = AbstractFile
@@ -244,7 +236,7 @@ final class ZipArchive(file: File, val archive: ZipFile) extends PlainFile(file)
private def zipTraversableFromZipFile(z: ZipFile): ZipTrav =
new Iterable[ZipEntry] {
- def zis: () => ZipInputStream = null // not valid for this type
+ def zis: ZipInputStream = null // not valid for this type
def iterator = asIterator(z.entries())
}
}
@@ -262,7 +254,7 @@ final class URLZipArchive(url: URL) extends AbstractFile with ZipContainer
type CreationType = InputStream
protected lazy val creationSource = input
- protected lazy val root = new ZipRootCreator(x => byteInputStream(x.traverser.zis()))()
+ protected lazy val root = new ZipRootCreator(x => byteInputStream(x.traverser.zis))()
protected def DirEntryConstructor = (_, name, path) => new DirEntry(name, path)
protected def FileEntryConstructor = new FileEntry(_, _, _, _)
diff --git a/src/compiler/scala/tools/nsc/matching/ParallelMatching.scala b/src/compiler/scala/tools/nsc/matching/ParallelMatching.scala
index f9e7a1bdcf..77997c4565 100644
--- a/src/compiler/scala/tools/nsc/matching/ParallelMatching.scala
+++ b/src/compiler/scala/tools/nsc/matching/ParallelMatching.scala
@@ -886,7 +886,6 @@ trait ParallelMatching extends ast.TreeDSL
}
case _: SingletonType if useEqTest =>
val eqTest = REF(tpe.termSymbol) MEMBER_== scrutTree
-
// See ticket #1503 for the motivation behind checking for a binding.
// The upshot is that it is unsound to assume equality means the right
// type, but if the value doesn't appear on the right hand side of the
diff --git a/src/compiler/scala/tools/nsc/plugins/Plugin.scala b/src/compiler/scala/tools/nsc/plugins/Plugin.scala
index 49bbb12a9a..1b7e208334 100644
--- a/src/compiler/scala/tools/nsc/plugins/Plugin.scala
+++ b/src/compiler/scala/tools/nsc/plugins/Plugin.scala
@@ -133,7 +133,7 @@ object Plugin {
val alljars = (jars ::: (for {
dir <- dirs if dir.isDirectory
entry <- dir.toDirectory.files.toList sortBy (_.name)
- if Path.isJarOrZip(entry)
+ if entry.extension == "jar"
pdesc <- loadDescription(entry)
if !(ignoring contains pdesc.name)
} yield entry)).distinct
diff --git a/src/compiler/scala/tools/nsc/settings/StandardScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/StandardScalaSettings.scala
index dcefd8bb16..12ae5c9d0e 100644
--- a/src/compiler/scala/tools/nsc/settings/StandardScalaSettings.scala
+++ b/src/compiler/scala/tools/nsc/settings/StandardScalaSettings.scala
@@ -24,7 +24,7 @@ trait StandardScalaSettings {
val extdirs = PathSetting ("-extdirs", "dirs", "Override location of installed extensions", Defaults.scalaExtDirs)
val javabootclasspath = PathSetting ("-javabootclasspath", "path", "Override java boot classpath.", Defaults.javaBootClassPath)
val javaextdirs = PathSetting ("-javaextdirs", "path", "Override java extdirs classpath.", Defaults.javaExtDirs)
- val sourcepath = PathSetting ("-sourcepath", "path", "Specify where to find input source files", "") // Defaults.scalaSourcePath
+ val sourcepath = StringSetting ("-sourcepath", "path", "Specify where to find input source files", "")
/** Other settings.
*/
diff --git a/src/compiler/scala/tools/nsc/symtab/Definitions.scala b/src/compiler/scala/tools/nsc/symtab/Definitions.scala
index 1684b5f071..4a16f774ec 100644
--- a/src/compiler/scala/tools/nsc/symtab/Definitions.scala
+++ b/src/compiler/scala/tools/nsc/symtab/Definitions.scala
@@ -37,9 +37,6 @@ trait Definitions extends reflect.generic.StandardDefinitions {
lazy val ScalaPackage = getModule("scala")
lazy val ScalaPackageClass = ScalaPackage.tpe.typeSymbol
- lazy val RuntimePackage = getModule("scala.runtime")
- lazy val RuntimePackageClass = RuntimePackage.tpe.typeSymbol
-
lazy val ScalaCollectionImmutablePackage: Symbol = getModule("scala.collection.immutable")
lazy val ScalaCollectionImmutablePackageClass: Symbol = ScalaCollectionImmutablePackage.tpe.typeSymbol
@@ -129,8 +126,6 @@ trait Definitions extends reflect.generic.StandardDefinitions {
lazy val BeanGetterTargetClass = getClass("scala.annotation.target.beanGetter")
lazy val BeanSetterTargetClass = getClass("scala.annotation.target.beanSetter")
lazy val ParamTargetClass = getClass("scala.annotation.target.param")
- lazy val ScalaInlineClass = getClass("scala.inline")
- lazy val ScalaNoInlineClass = getClass("scala.noinline")
// fundamental reference classes
lazy val ScalaObjectClass = getClass("scala.ScalaObject")
@@ -219,12 +214,12 @@ trait Definitions extends reflect.generic.StandardDefinitions {
def Array_length = getMember(ArrayClass, nme.length)
lazy val Array_clone = getMember(ArrayClass, nme.clone_)
lazy val ArrayModule = getModule("scala.Array")
+ def ArrayModule_apply = getMember(ArrayModule, nme.apply)
// reflection / structural types
lazy val SoftReferenceClass = getClass("java.lang.ref.SoftReference")
lazy val WeakReferenceClass = getClass("java.lang.ref.WeakReference")
lazy val MethodClass = getClass(sn.MethodAsObject)
- def methodClass_setAccessible = getMember(MethodClass, nme.setAccessible)
lazy val EmptyMethodCacheClass = getClass("scala.runtime.EmptyMethodCache")
lazy val MethodCacheClass = getClass("scala.runtime.MethodCache")
def methodCache_find = getMember(MethodCacheClass, nme.find_)
@@ -450,6 +445,7 @@ trait Definitions extends reflect.generic.StandardDefinitions {
lazy val BooleanBeanPropertyAttr: Symbol = getClass(sn.BooleanBeanProperty)
lazy val AnnotationDefaultAttr: Symbol = {
+ val RuntimePackageClass = getModule("scala.runtime").tpe.typeSymbol
val attr = newClass(RuntimePackageClass, nme.AnnotationDefaultATTR, List(AnnotationClass.typeConstructor))
// This attribute needs a constructor so that modifiers in parsed Java code make sense
attr.info.decls enter (attr newConstructor NoPosition setInfo MethodType(Nil, attr.tpe))
diff --git a/src/compiler/scala/tools/nsc/symtab/StdNames.scala b/src/compiler/scala/tools/nsc/symtab/StdNames.scala
index 8c4078e91e..ed72fc16fa 100644
--- a/src/compiler/scala/tools/nsc/symtab/StdNames.scala
+++ b/src/compiler/scala/tools/nsc/symtab/StdNames.scala
@@ -335,7 +335,6 @@ trait StdNames extends reflect.generic.StdNames { self: SymbolTable =>
val sameElements = newTermName("sameElements")
val scala_ = newTermName("scala")
val self = newTermName("self")
- val setAccessible = newTermName("setAccessible")
val synchronized_ = newTermName("synchronized")
val tail = newTermName("tail")
val toArray = newTermName("toArray")
diff --git a/src/compiler/scala/tools/nsc/symtab/Symbols.scala b/src/compiler/scala/tools/nsc/symtab/Symbols.scala
index e386508bdd..3157e5cc20 100644
--- a/src/compiler/scala/tools/nsc/symtab/Symbols.scala
+++ b/src/compiler/scala/tools/nsc/symtab/Symbols.scala
@@ -776,7 +776,6 @@ trait Symbols extends reflect.generic.Symbols { self: SymbolTable =>
assert(phaseId(infos.validFrom) <= phase.id)
if (phaseId(infos.validFrom) == phase.id) infos = infos.prev
infos = TypeHistory(currentPeriod, info, infos)
- validTo = if (info.isComplete) currentPeriod else NoPeriod
this
}
@@ -1962,7 +1961,7 @@ trait Symbols extends reflect.generic.Symbols { self: SymbolTable =>
newTypeName(rawname+"$trait") // (part of DEVIRTUALIZE)
} else if (phase.flatClasses && rawowner != NoSymbol && !rawowner.isPackageClass) {
if (flatname == nme.EMPTY) {
- assert(rawowner.isClass, "fatal: %s has owner %s, but a class owner is required".format(rawname+idString, rawowner))
+ assert(rawowner.isClass, "fatal: %s has owner %s, but a class owner is required".format(rawname, rawowner))
flatname = newTypeName(compactify(rawowner.name.toString() + "$" + rawname))
}
flatname
diff --git a/src/compiler/scala/tools/nsc/symtab/Types.scala b/src/compiler/scala/tools/nsc/symtab/Types.scala
index 9e10d3a408..2eca101aab 100644
--- a/src/compiler/scala/tools/nsc/symtab/Types.scala
+++ b/src/compiler/scala/tools/nsc/symtab/Types.scala
@@ -394,10 +394,9 @@ trait Types extends reflect.generic.Types { self: SymbolTable =>
/** Replace formal type parameter symbols with actual type arguments.
*
- * Amounts to substitution except for higher-kinded types. (See overridden method in TypeRef) -- @M
+ * Amounts to substitution except for higher-kinded types. (See overridden method in TypeRef) -- @M (contact adriaan.moors at cs.kuleuven.be)
*/
- def instantiateTypeParams(formals: List[Symbol], actuals: List[Type]): Type =
- if(formals.length == actuals.length) this.subst(formals, actuals) else ErrorType
+ def instantiateTypeParams(formals: List[Symbol], actuals: List[Type]): Type = this.subst(formals, actuals)
/** If this type is an existential, turn all existentially bound variables to type skolems.
* @param owner The owner of the created type skolems
@@ -1328,7 +1327,7 @@ trait Types extends reflect.generic.Types { self: SymbolTable =>
* to take the intersection of their bounds
*/
override def normalize = {
- if (isHigherKinded) {
+ if (isHigherKinded)
PolyType(
typeParams,
RefinedType(
@@ -1338,7 +1337,6 @@ trait Types extends reflect.generic.Types { self: SymbolTable =>
},
decls,
typeSymbol))
- }
else super.normalize
}
@@ -1707,9 +1705,8 @@ A type's typeSymbol should never be inspected directly.
if (substTps.length == typeParams.length)
typeRef(pre, sym, actuals)
- else if(formals.length == actuals.length) // partial application (needed in infer when bunching type arguments from classes and methods together)
+ else // partial application (needed in infer when bunching type arguments from classes and methods together)
typeRef(pre, sym, dummyArgs).subst(formals, actuals)
- else ErrorType
}
else
super.instantiateTypeParams(formals, actuals)
@@ -1728,15 +1725,21 @@ A type's typeSymbol should never be inspected directly.
if (sym == clazz && !args.isEmpty) args.head else this
def normalize0: Type =
- if (isHigherKinded) {
+ if (sym.isAliasType) { // beta-reduce
+ if (sym.info.typeParams.length == args.length || !isHigherKinded) {
+ /* !isHigherKinded && sym.info.typeParams.length != args.length only happens when compiling e.g.,
+ `val x: Class' with -Xgenerics, while `type Class = java.lang.Class' had already been compiled without -Xgenerics */
+ val xform = transform(sym.info.resultType)
+ assert(xform ne this, this)
+ xform.normalize // cycles have been checked in typeRef
+ } else { // should rarely happen, if at all
+ PolyType(sym.info.typeParams, transform(sym.info.resultType).normalize) // eta-expand -- for regularity, go through sym.info for typeParams
+ // @M TODO: should not use PolyType, as that's the type of a polymorphic value -- we really want a type *function*
+ }
+ } else if (isHigherKinded) {
// @M TODO: should not use PolyType, as that's the type of a polymorphic value -- we really want a type *function*
// @M: initialize (by sym.info call) needed (see test/files/pos/ticket0137.scala)
PolyType(sym.info.typeParams, typeRef(pre, sym, dummyArgs)) // must go through sym.info for typeParams
- } else if (sym.isAliasType) { // beta-reduce
- if(sym.info.typeParams.length == args.length) // don't do partial application
- transform(sym.info.resultType).normalize // cycles have been checked in typeRef
- else
- ErrorType
} else if (sym.isRefinementClass) {
sym.info.normalize // @MO to AM: OK?
//@M I think this is okay, but changeset 12414 (which fixed #1241) re-introduced another bug (#2208)
@@ -5083,41 +5086,37 @@ A type's typeSymbol should never be inspected directly.
case List(tp) =>
Some(tp)
case TypeRef(_, sym, _) :: rest =>
- val pres = tps map (_.prefix) // prefix normalizes automatically
+ val pres = tps map (_.prefix)
val pre = if (variance == 1) lub(pres, depth) else glb(pres, depth)
- val argss = tps map (_.normalize.typeArgs) // symbol equality (of the tp in tps) was checked using typeSymbol, which normalizes, so should normalize before retrieving arguments
+ val argss = tps map (_.typeArgs)
val capturedParams = new ListBuffer[Symbol]
- try {
- val args = (sym.typeParams, argss.transpose).zipped map {
- (tparam, as) =>
- if (depth == 0)
- if (tparam.variance == variance) AnyClass.tpe
- else if (tparam.variance == -variance) NothingClass.tpe
- else NoType
- else
- if (tparam.variance == variance) lub(as, decr(depth))
- else if (tparam.variance == -variance) glb(as, decr(depth))
- else {
- val l = lub(as, decr(depth))
- val g = glb(as, decr(depth))
- if (l <:< g) l
- else { // Martin: I removed this, because incomplete. Not sure there is a good way to fix it. For the moment we
- // just err on the conservative side, i.e. with a bound that is too high.
- // if(!(tparam.info.bounds contains tparam)){ //@M can't deal with f-bounds, see #2251
- val qvar = commonOwner(as) freshExistential "" setInfo TypeBounds(g, l)
- capturedParams += qvar
- qvar.tpe
- }
+ val args = (sym.typeParams, argss.transpose).zipped map {
+ (tparam, as) =>
+ if (depth == 0)
+ if (tparam.variance == variance) AnyClass.tpe
+ else if (tparam.variance == -variance) NothingClass.tpe
+ else NoType
+ else
+ if (tparam.variance == variance) lub(as, decr(depth))
+ else if (tparam.variance == -variance) glb(as, decr(depth))
+ else {
+ val l = lub(as, decr(depth))
+ val g = glb(as, decr(depth))
+ if (l <:< g) l
+ else { // Martin: I removed this, because incomplete. Not sure there is a good way to fix it. For the moment we
+ // just err on the conservative side, i.e. with a bound that is too high.
+ // if(!(tparam.info.bounds contains tparam)){ //@M can't deal with f-bounds, see #2251
+ val qvar = commonOwner(as) freshExistential "" setInfo TypeBounds(g, l)
+ capturedParams += qvar
+ qvar.tpe
}
- }
+ }
+ }
+ try {
if (args contains NoType) None
else Some(existentialAbstraction(capturedParams.toList, typeRef(pre, sym, args)))
} catch {
case ex: MalformedType => None
- case ex: IndexOutOfBoundsException => // transpose freaked out because of irregular argss
- // catching just in case (shouldn't happen, but also doesn't cost us)
- if (settings.debug.value) log("transposed irregular matrix!?"+ (tps, argss))
- None
}
case SingleType(_, sym) :: rest =>
val pres = tps map (_.prefix)
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
index b7110b66df..9c382439bc 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
@@ -386,7 +386,7 @@ abstract class ClassfileParser {
val start = starts(index)
if (in.buf(start).toInt != CONSTANT_UTF8) errorBadTag(start)
val len = in.getChar(start + 1)
- bytesBuffer ++= in.buf.view(start + 3, len)
+ bytesBuffer ++= (in.buf, start + 3, len)
}
val bytes = bytesBuffer.toArray
val decodedLength = reflect.generic.ByteCodecs.decode(bytes)
diff --git a/src/compiler/scala/tools/nsc/transform/CleanUp.scala b/src/compiler/scala/tools/nsc/transform/CleanUp.scala
index 9b569fa45e..c3c60253b9 100644
--- a/src/compiler/scala/tools/nsc/transform/CleanUp.scala
+++ b/src/compiler/scala/tools/nsc/transform/CleanUp.scala
@@ -8,7 +8,7 @@ package transform
import symtab._
import Flags._
-import scala.collection._
+import scala.collection.mutable.{ListBuffer, HashMap}
abstract class CleanUp extends Transform with ast.TreeDSL {
import global._
@@ -22,12 +22,11 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
new CleanUpTransformer(unit)
class CleanUpTransformer(unit: CompilationUnit) extends Transformer {
- private val newStaticMembers = mutable.Buffer.empty[Tree]
- private val newStaticInits = mutable.Buffer.empty[Tree]
- private val symbolsStoredAsStatic = mutable.Map.empty[String, Symbol]
+ private val newDefs = new ListBuffer[Tree]
+ private val newInits = new ListBuffer[Tree]
- //private val classConstantMeth = new HashMap[String, Symbol]
- //private val symbolStaticFields = new HashMap[String, (Symbol, Tree, Tree)]
+ private val classConstantMeth = new HashMap[String, Symbol]
+ private val symbolStaticFields = new HashMap[String, (Symbol, Tree, Tree)]
private var localTyper: analyzer.Typer = null
@@ -117,10 +116,10 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
currentClass.info.decls enter varSym
val varDef = typedPos( VAL(varSym) === forInit )
- newStaticMembers append transform(varDef)
+ newDefs append transform(varDef)
val varInit = typedPos( REF(varSym) === forInit )
- newStaticInits append transform(varInit)
+ newInits append transform(varInit)
varSym
}
@@ -134,7 +133,7 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
currentClass.info.decls enter methSym
val methDef = typedPos( DefDef(methSym, { forBody(Pair(methSym, methSym.paramss(0))) }) )
- newStaticMembers append transform(methDef)
+ newDefs append transform(methDef)
methSym
}
@@ -216,12 +215,13 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
case POLY_CACHE =>
/* Implementation of the cache is as follows for method "def xyz(a: A, b: B)"
- (SoftReference so that it does not interfere with classloader garbage collection, see ticket
+ (but with the addition of a SoftReference wrapped around the MethodCache holder
+ so that it does not interfere with classloader garbage collection, see ticket
#2365 for details):
var reflParams$Cache: Array[Class[_]] = Array[JClass](classOf[A], classOf[B])
- var reflPoly$Cache: SoftReference[scala.runtime.MethodCache] = new SoftReference(new EmptyMethodCache())
+ var reflPoly$Cache: scala.runtime.MethodCache = new EmptyMethodCache()
def reflMethod$Method(forReceiver: JClass[_]): JMethod = {
var method: JMethod = reflPoly$Cache.find(forReceiver)
@@ -229,8 +229,7 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
return method
else {
method = forReceiver.getMethod("xyz", reflParams$Cache)
- method.setAccessible(true) // issue #2381
- reflPoly$Cache = new SoftReference(reflPoly$Cache.get.add(forReceiver, method))
+ reflPoly$Cache = reflPoly$Cache.add(forReceiver, method)
return method
}
}
@@ -258,7 +257,6 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
def cacheRHS = ((getPolyCache DOT methodCache_add)(REF(forReceiverSym), REF(methodSym)))
BLOCK(
REF(methodSym) === methodSymRHS,
- (REF(methodSym) DOT methodClass_setAccessible)(LIT(true)),
REF(reflPolyCacheSym) === gen.mkSoftRef(cacheRHS),
Return(REF(methodSym))
)
@@ -518,25 +516,39 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
/* Some cleanup transformations add members to templates (classes, traits, etc).
* When inside a template (i.e. the body of one of its members), two maps
- * (newStaticMembers and newStaticInits) are available in the tree transformer. Any mapping from
- * a symbol to a MemberDef (DefDef, ValDef, etc.) that is in newStaticMembers once the
+ * (newDefs and newInits) are available in the tree transformer. Any mapping from
+ * a symbol to a MemberDef (DefDef, ValDef, etc.) that is in newDefs once the
* transformation of the template is finished will be added as a member to the
- * template. Any mapping from a symbol to a tree that is in newStaticInits, will be added
+ * template. Any mapping from a symbol to a tree that is in newInits, will be added
* as a statement of the form "symbol = tree" to the beginning of the default
* constructor. */
case Template(parents, self, body) =>
localTyper = typer.atOwner(tree, currentClass)
- newStaticMembers.clear
- newStaticInits.clear
- symbolsStoredAsStatic.clear
- val transformedTemplate: Template =
- if (!forMSIL) {
- var newBody =
- transformTrees(body)
- treeCopy.Template(tree, parents, self, transformTrees(newStaticMembers.toList) ::: newBody)
- }
- else super.transform(tree).asInstanceOf[Template]
- addStaticInits(transformedTemplate) // postprocess to include static ctors
+ val transformedTemplate = if (!forMSIL) {
+ classConstantMeth.clear
+ newDefs.clear
+ newInits.clear
+ var newBody =
+ transformTrees(body)
+ val firstConstructor =
+ treeInfo.firstConstructor(newBody)
+ newBody =
+ transformTrees(newDefs.toList) ::: (
+ for (member <- newBody) yield member match {
+ case thePrimaryConstructor@DefDef(mods, name, tparams, vparamss, tpt, rhs) if (thePrimaryConstructor == firstConstructor) =>
+ val newRhs = rhs match {
+ case theRhs@Block(stats, expr) =>
+ treeCopy.Block(theRhs, transformTrees(newInits.toList) ::: stats, expr)
+ }
+ treeCopy.DefDef(thePrimaryConstructor, mods, name, tparams, vparamss, tpt, newRhs)
+ case notThePrimaryConstructor =>
+ notThePrimaryConstructor
+ }
+ )
+ treeCopy.Template(tree, parents, self, newBody)
+ }
+ else super.transform(tree)
+ applySymbolFieldInitsToStaticCtor(transformedTemplate.asInstanceOf[Template]) // postprocess to include static ctors
case Literal(c) if (c.tag == ClassTag) && !forMSIL=>
val tpe = c.typeValue
@@ -616,7 +628,7 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
List(Literal(Constant(symname: String)))) =>
// add the symbol name to a map if it's not there already
val rhs = gen.mkCast(Apply(gen.scalaDot(nme.Symbol), List(Literal(Constant(symname)))), symbolType)
- val staticFieldSym = getSymbolStaticField(symapp.pos, symname, rhs, symapp)
+ val (staticFieldSym, sfdef, sfinit) = getSymbolStaticField(symapp.pos, symname, rhs, symapp)
// create a reference to a static field
val ntree = typedWithPos(symapp.pos)(REF(staticFieldSym))
@@ -630,8 +642,8 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
* If it doesn't exist, i.e. the symbol is encountered the first time,
* it creates a new static field definition and initialization and returns it.
*/
- private def getSymbolStaticField(pos: Position, symname: String, rhs: Tree, tree: Tree): Symbol =
- symbolsStoredAsStatic.getOrElseUpdate(symname, {
+ private def getSymbolStaticField(pos: Position, symname: String, rhs: Tree, tree: Tree): (Symbol, Tree, Tree) =
+ symbolStaticFields.getOrElseUpdate(symname, {
val freshname = unit.fresh.newName(pos, "symbol$")
val theTyper = typer.atOwner(tree, currentClass)
@@ -646,14 +658,20 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
val stfieldInit = theTyper.typed { atPos(pos)(REF(stfieldSym) === rhs) }
// add field definition to new defs
- newStaticMembers append stfieldDef
- newStaticInits append stfieldInit
+ newDefs append stfieldDef
- stfieldSym
+ (stfieldSym, stfieldDef, stfieldInit)
})
+ /* returns a list of all trees for symbol static fields, and clear the list */
+ private def flushSymbolFieldsInitializations: List[Tree] = {
+ val fields = (symbolStaticFields.valuesIterator map (_._3)).toList
+ symbolStaticFields.clear
+ fields
+ }
+
/* finds the static ctor DefDef tree within the template if it exists. */
- private def findStaticCtor(template: Template): Option[Tree] =
+ def findStaticCtor(template: Template): Option[Tree] =
template.body find {
case defdef @ DefDef(mods, nme.CONSTRUCTOR, tparam, vparam, tp, rhs) => defdef.symbol hasFlag STATIC
case _ => false
@@ -662,10 +680,11 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
/* changes the template for the class so that it contains a static constructor with symbol fields inits,
* augments an existing static ctor if one already existed.
*/
- private def addStaticInits(template: Template): Template =
- if (newStaticInits.isEmpty)
- template
+ def applySymbolFieldInitsToStaticCtor(template: Template): Template = {
+ val symbolInitTrees = flushSymbolFieldsInitializations
+ if (symbolInitTrees.isEmpty) template
else {
+ val theTyper = typer.atOwner(template, currentClass)
val newCtor = findStaticCtor(template) match {
// in case there already were static ctors - augment existing ones
// currently, however, static ctors aren't being generated anywhere else
@@ -674,10 +693,10 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
val newBlock = rhs match {
case block @ Block(stats, expr) =>
// need to add inits to existing block
- treeCopy.Block(block, newStaticInits.toList ::: stats, expr)
+ treeCopy.Block(block, symbolInitTrees ::: stats, expr)
case term: TermTree =>
// need to create a new block with inits and the old term
- treeCopy.Block(term, newStaticInits.toList, term)
+ treeCopy.Block(term, symbolInitTrees, term)
}
treeCopy.DefDef(ctor, mods, name, tparams, vparamss, tpt, newBlock)
case None =>
@@ -685,13 +704,13 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
val staticCtorSym = currentClass.newConstructor(template.pos)
.setFlag(STATIC)
.setInfo(UnitClass.tpe)
- val rhs = Block(newStaticInits.toList, Literal(()))
+ val rhs = Block(symbolInitTrees, Literal(()))
val staticCtorTree = DefDef(staticCtorSym, rhs)
- localTyper.typed { atPos(template.pos)(staticCtorTree) }
+ theTyper.typed { atPos(template.pos)(staticCtorTree) }
}
treeCopy.Template(template, template.parents, template.self, newCtor :: template.body)
}
-
+ }
} // CleanUpTransformer
}
diff --git a/src/compiler/scala/tools/nsc/transform/Erasure.scala b/src/compiler/scala/tools/nsc/transform/Erasure.scala
index 8c2f79374c..4c000ce3f7 100644
--- a/src/compiler/scala/tools/nsc/transform/Erasure.scala
+++ b/src/compiler/scala/tools/nsc/transform/Erasure.scala
@@ -228,7 +228,7 @@ abstract class Erasure extends AddInterfaces with typechecker.Analyzer with ast.
/** The Java signature of type 'info', for symbol sym. The symbol is used to give the right return
* type for constructors.
*/
- def javaSig(sym0: Symbol, info: Type): Option[String] = atPhase(currentRun.erasurePhase) {
+ def javaSig(sym: Symbol, info: Type): Option[String] = atPhase(currentRun.erasurePhase) {
def jsig(tp: Type): String = jsig2(false, List(), tp)
@@ -260,11 +260,7 @@ abstract class Erasure extends AddInterfaces with typechecker.Analyzer with ast.
"."+sym.name
if (sym == ArrayClass)
ARRAY_TAG.toString+(args map jsig).mkString
- else if (sym.isTypeParameterOrSkolem &&
- // only refer to type params that will actually make it into the sig, this excludes:
- !sym.owner.isTypeParameterOrSkolem && // higher-order type parameters (!sym.owner.isTypeParameterOrSkolem), and parameters of methods
- (!sym0.isClass || sym.owner.isClass) // if we're generating the sig for a class, type params must be owned by a class (not a method -- #3249)
- )
+ else if (sym.isTypeParameterOrSkolem && !sym.owner.isTypeParameterOrSkolem /*not a higher-order type parameter, as these are suppressed*/)
TVAR_TAG.toString+sym.name+";"
else if (sym == AnyClass || sym == AnyValClass || sym == SingletonClass)
jsig(ObjectClass.tpe)
@@ -306,7 +302,7 @@ abstract class Erasure extends AddInterfaces with typechecker.Analyzer with ast.
(if (toplevel) "<"+(tparams map paramSig).mkString+">" else "")+jsig(restpe)
case MethodType(params, restpe) =>
"("+(params map (_.tpe) map jsig).mkString+")"+
- (if (restpe.typeSymbol == UnitClass || sym0.isConstructor) VOID_TAG.toString else jsig(restpe))
+ (if (restpe.typeSymbol == UnitClass || sym.isConstructor) VOID_TAG.toString else jsig(restpe))
case RefinedType(parents, decls) if (!parents.isEmpty) =>
jsig(parents.head)
case ClassInfoType(parents, _, _) =>
@@ -314,7 +310,7 @@ abstract class Erasure extends AddInterfaces with typechecker.Analyzer with ast.
case AnnotatedType(_, atp, _) =>
jsig(atp)
case BoundedWildcardType(bounds) =>
- println("something's wrong: "+sym0+":"+sym0.tpe+" has a bounded wildcard type")
+ println("something's wrong: "+sym+":"+sym.tpe+" has a bounded wildcard type")
jsig(bounds.hi)
case _ =>
val etp = erasure(tp)
@@ -324,7 +320,7 @@ abstract class Erasure extends AddInterfaces with typechecker.Analyzer with ast.
}
if (needsJavaSig(info)) {
try {
- //println("Java sig of "+sym0+" is "+jsig2(true, List(), sym0.info))//DEBUG
+ //println("Java sig of "+sym+" is "+jsig2(true, List(), sym.info))//DEBUG
Some(jsig2(true, List(), info))
} catch {
case ex: UnknownSig => None
@@ -499,7 +495,6 @@ abstract class Erasure extends AddInterfaces with typechecker.Analyzer with ast.
}
/** Generate a synthetic cast operation from <code>tree.tpe</code> to <code>pt</code>.
- * @pre pt eq pt.normalize
*/
private def cast(tree: Tree, pt: Type): Tree =
tree AS_ATTR pt
@@ -510,7 +505,7 @@ abstract class Erasure extends AddInterfaces with typechecker.Analyzer with ast.
/** Adapt <code>tree</code> to expected type <code>pt</code>.
*
* @param tree the given tree
- * @param pt the expected type
+ * @param pt the expected type.
* @return the adapted tree
*/
private def adaptToType(tree: Tree, pt: Type): Tree = {
@@ -926,154 +921,154 @@ abstract class Erasure extends AddInterfaces with typechecker.Analyzer with ast.
* </ul>
*/
private val preTransformer = new Transformer {
- def preErase(tree: Tree): Tree = tree match {
- case ClassDef(mods, name, tparams, impl) =>
- if (settings.debug.value)
- log("defs of " + tree.symbol + " = " + tree.symbol.info.decls)
- treeCopy.ClassDef(tree, mods, name, List(), impl)
- case DefDef(mods, name, tparams, vparamss, tpt, rhs) =>
- treeCopy.DefDef(tree, mods, name, List(), vparamss, tpt, rhs)
- case TypeDef(_, _, _, _) =>
- EmptyTree
- case Apply(instanceOf @ TypeApply(fun @ Select(qual, name), args @ List(arg)), List()) // !!! todo: simplify by having GenericArray also extract trees
- if ((fun.symbol == Any_isInstanceOf || fun.symbol == Object_isInstanceOf) &&
- unboundedGenericArrayLevel(arg.tpe) > 0) =>
- val level = unboundedGenericArrayLevel(arg.tpe)
- def isArrayTest(arg: Tree) =
- gen.mkRuntimeCall("isArray", List(arg, Literal(Constant(level))))
- typedPos(tree.pos) {
- if (level == 1) isArrayTest(qual)
+ override def transform(tree: Tree): Tree = {
+ if (tree.symbol == ArrayClass && !tree.isType) return tree // !!! needed?
+ val tree1 = tree match {
+ case ClassDef(mods, name, tparams, impl) =>
+ if (settings.debug.value)
+ log("defs of " + tree.symbol + " = " + tree.symbol.info.decls)
+ treeCopy.ClassDef(tree, mods, name, List(), impl)
+ case DefDef(mods, name, tparams, vparamss, tpt, rhs) =>
+ treeCopy.DefDef(tree, mods, name, List(), vparamss, tpt, rhs)
+ case TypeDef(_, _, _, _) =>
+ EmptyTree
+ case Apply(instanceOf @ TypeApply(fun @ Select(qual, name), args @ List(arg)), List()) // !!! todo: simplify by having GenericArray also extract trees
+ if ((fun.symbol == Any_isInstanceOf || fun.symbol == Object_isInstanceOf) &&
+ unboundedGenericArrayLevel(arg.tpe) > 0) =>
+ val level = unboundedGenericArrayLevel(arg.tpe)
+ def isArrayTest(arg: Tree) =
+ gen.mkRuntimeCall("isArray", List(arg, Literal(Constant(level))))
+ typedPos(tree.pos) {
+ if (level == 1) isArrayTest(qual)
+ else
+ gen.evalOnce(qual, currentOwner, unit) { qual1 =>
+ gen.mkAnd(
+ Apply(TypeApply(Select(qual1(), fun.symbol),
+ List(TypeTree(erasure(arg.tpe)))),
+ List()),
+ isArrayTest(qual1()))
+ }
+ }
+ case TypeApply(fun, args) if (fun.symbol.owner != AnyClass &&
+ fun.symbol != Object_asInstanceOf &&
+ fun.symbol != Object_isInstanceOf) =>
+ // leave all other type tests/type casts, remove all other type applications
+ fun
+ case Apply(fn @ Select(qual, name), args) if (fn.symbol.owner == ArrayClass) =>
+ if (unboundedGenericArrayLevel(qual.tpe.widen) == 1)
+ // convert calls to apply/update/length on generic arrays to
+ // calls of ScalaRunTime.array_xxx method calls
+ typedPos(tree.pos) { gen.mkRuntimeCall("array_"+name, qual :: args) }
else
- gen.evalOnce(qual, currentOwner, unit) { qual1 =>
- gen.mkAnd(
- Apply(TypeApply(Select(qual1(), fun.symbol),
- List(TypeTree(erasure(arg.tpe)))),
- List()),
- isArrayTest(qual1()))
- }
- }
- case TypeApply(fun, args) if (fun.symbol.owner != AnyClass &&
- fun.symbol != Object_asInstanceOf &&
- fun.symbol != Object_isInstanceOf) =>
- // leave all other type tests/type casts, remove all other type applications
- preErase(fun)
- case Apply(fn @ Select(qual, name), args) if (fn.symbol.owner == ArrayClass) =>
- if (unboundedGenericArrayLevel(qual.tpe.widen) == 1)
- // convert calls to apply/update/length on generic arrays to
- // calls of ScalaRunTime.array_xxx method calls
- typedPos(tree.pos) { gen.mkRuntimeCall("array_"+name, qual :: args) }
- else
- // store exact array erasure in map to be retrieved later when we might
- // need to do the cast in adaptMember
- treeCopy.Apply(
- tree,
- SelectFromArray(qual, name, erasure(qual.tpe)).copyAttrs(fn),
- args)
+ // store exact array erasure in map to be retrieved later when we might
+ // need to do the cast in adaptMember
+ treeCopy.Apply(
+ tree,
+ SelectFromArray(qual, name, erasure(qual.tpe)).copyAttrs(fn),
+ args)
- case Apply(fn @ Select(qual, _), Nil) if (fn.symbol == Any_## || fn.symbol == Object_##) =>
- Apply(gen.mkAttributedRef(scalaRuntimeHash), List(qual))
+ case Apply(fn @ Select(qual, _), Nil) if (fn.symbol == Any_## || fn.symbol == Object_##) =>
+ Apply(gen.mkAttributedRef(scalaRuntimeHash), List(qual))
- case Apply(fn, args) =>
- if (fn.symbol == Any_asInstanceOf)
- fn match {
- case TypeApply(Select(qual, _), List(targ)) =>
- if (qual.tpe <:< targ.tpe) {
- atPos(tree.pos) { Typed(qual, TypeTree(targ.tpe)) }
- } else if (isNumericValueClass(qual.tpe.typeSymbol) &&
- isNumericValueClass(targ.tpe.typeSymbol)) {
- // convert numeric type casts
- val cname = newTermName("to" + targ.tpe.typeSymbol.name)
- val csym = qual.tpe.member(cname)
- assert(csym != NoSymbol)
- atPos(tree.pos) { Apply(Select(qual, csym), List()) }
- } else
- tree
- }
- // todo: also handle the case where the singleton type is buried in a compound
- else if (fn.symbol == Any_isInstanceOf)
- fn match {
- case TypeApply(sel @ Select(qual, name), List(targ)) =>
- def mkIsInstanceOf(q: () => Tree)(tp: Type): Tree =
- Apply(
- TypeApply(
- Select(q(), Object_isInstanceOf) setPos sel.pos,
- List(TypeTree(tp) setPos targ.pos)) setPos fn.pos,
- List()) setPos tree.pos
- targ.tpe match {
- case SingleType(_, _) | ThisType(_) | SuperType(_, _) =>
- val cmpOp = if (targ.tpe <:< AnyValClass.tpe) Any_equals else Object_eq
- atPos(tree.pos) {
- Apply(Select(qual, cmpOp), List(gen.mkAttributedQualifier(targ.tpe)))
- }
- case RefinedType(parents, decls) if (parents.length >= 2) =>
- gen.evalOnce(qual, currentOwner, unit) { q =>
+ case Apply(fn, args) =>
+ if (fn.symbol == Any_asInstanceOf)
+ fn match {
+ case TypeApply(Select(qual, _), List(targ)) =>
+ if (qual.tpe <:< targ.tpe) {
+ atPos(tree.pos) { Typed(qual, TypeTree(targ.tpe)) }
+ } else if (isNumericValueClass(qual.tpe.typeSymbol) &&
+ isNumericValueClass(targ.tpe.typeSymbol)) {
+ // convert numeric type casts
+ val cname = newTermName("to" + targ.tpe.typeSymbol.name)
+ val csym = qual.tpe.member(cname)
+ assert(csym != NoSymbol)
+ atPos(tree.pos) { Apply(Select(qual, csym), List()) }
+ } else
+ tree
+ }
+ // todo: also handle the case where the singleton type is buried in a compound
+ else if (fn.symbol == Any_isInstanceOf)
+ fn match {
+ case TypeApply(sel @ Select(qual, name), List(targ)) =>
+ def mkIsInstanceOf(q: () => Tree)(tp: Type): Tree =
+ Apply(
+ TypeApply(
+ Select(q(), Object_isInstanceOf) setPos sel.pos,
+ List(TypeTree(tp) setPos targ.pos)) setPos fn.pos,
+ List()) setPos tree.pos
+ targ.tpe match {
+ case SingleType(_, _) | ThisType(_) | SuperType(_, _) =>
+ val cmpOp = if (targ.tpe <:< AnyValClass.tpe) Any_equals else Object_eq
atPos(tree.pos) {
- parents map mkIsInstanceOf(q) reduceRight gen.mkAnd
+ Apply(Select(qual, cmpOp), List(gen.mkAttributedQualifier(targ.tpe)))
}
- }
- case _ =>
- tree
- }
- case _ => tree
- }
- else {
- def doDynamic(fn: Tree, qual: Tree): Tree = {
- if (fn.symbol.owner.isRefinementClass && fn.symbol.allOverriddenSymbols.isEmpty)
- ApplyDynamic(qual, args) setSymbol fn.symbol setPos tree.pos
- else tree
- }
- fn match {
- case Select(qual, _) => doDynamic(fn, qual)
- case TypeApply(fni@Select(qual, _), _) => doDynamic(fni, qual)// type parameters are irrelevant in case of dynamic call
- case _ =>
- tree
+ case RefinedType(parents, decls) if (parents.length >= 2) =>
+ gen.evalOnce(qual, currentOwner, unit) { q =>
+ atPos(tree.pos) {
+ parents map mkIsInstanceOf(q) reduceRight gen.mkAnd
+ }
+ }
+ case _ =>
+ tree
+ }
+ case _ => tree
+ }
+ else {
+ def doDynamic(fn: Tree, qual: Tree): Tree = {
+ if (fn.symbol.owner.isRefinementClass && fn.symbol.allOverriddenSymbols.isEmpty)
+ ApplyDynamic(qual, args) setSymbol fn.symbol setPos tree.pos
+ else tree
+ }
+ fn match {
+ case Select(qual, _) => doDynamic(fn, qual)
+ case TypeApply(fni@Select(qual, _), _) => doDynamic(fni, qual)// type parameters are irrelevant in case of dynamic call
+ case _ =>
+ tree
+ }
}
- }
- case Select(_, _) =>
- // println("preXform: "+ (tree, tree.symbol, tree.symbol.owner, tree.symbol.owner.isRefinementClass))
- if (tree.symbol.owner.isRefinementClass) {
- val overridden = tree.symbol.allOverriddenSymbols
- assert(!overridden.isEmpty, tree.symbol)
- tree.symbol = overridden.head
- }
- tree
+ case Select(_, _) =>
+ if (tree.symbol.owner.isRefinementClass) {
+ val overridden = tree.symbol.allOverriddenSymbols
+ assert(!overridden.isEmpty, tree.symbol)
+ tree.symbol = overridden.head
+ }
+ tree
- case Template(parents, self, body) =>
- assert(!currentOwner.isImplClass)
- //Console.println("checking no dble defs " + tree)//DEBUG
- checkNoDoubleDefs(tree.symbol.owner)
- treeCopy.Template(tree, parents, emptyValDef, addBridges(body, currentOwner))
+ case Template(parents, self, body) =>
+ assert(!currentOwner.isImplClass)
+ //Console.println("checking no dble defs " + tree)//DEBUG
+ checkNoDoubleDefs(tree.symbol.owner)
+ treeCopy.Template(tree, parents, emptyValDef, addBridges(body, currentOwner))
- case Match(selector, cases) =>
- Match(Typed(selector, TypeTree(selector.tpe)), cases)
+ case Match(selector, cases) =>
+ Match(Typed(selector, TypeTree(selector.tpe)), cases)
- case Literal(ct) if ct.tag == ClassTag
- && ct.typeValue.typeSymbol != definitions.UnitClass =>
- treeCopy.Literal(tree, Constant(erasure(ct.typeValue)))
+ case Literal(ct) if ct.tag == ClassTag
+ && ct.typeValue.typeSymbol != definitions.UnitClass =>
+ treeCopy.Literal(tree, Constant(erasure(ct.typeValue)))
- case _ =>
- tree
- }
+ case _ =>
+ tree
+ }
+ tree1 match {
+ case EmptyTree | TypeTree() =>
+ tree1 setType erasure(tree1.tpe)
+ case DefDef(mods, name, tparams, vparamss, tpt, rhs) =>
+ val result = super.transform(tree1) setType null
+ tpt.tpe = erasure(tree.symbol.tpe).resultType
+ result
+ case _ =>
+ case class LoopControl(count: Int, ex : AssertionError) extends Throwable(ex.getMessage) with ControlThrowable
- override def transform(tree: Tree): Tree =
- if (tree.symbol == ArrayClass && !tree.isType) tree // !!! needed?
- else {
- val tree1 = preErase(tree)
- // println("preErase: "+ tree +" = "+ tree1)
- val res = tree1 match {
- case EmptyTree | TypeTree() =>
- tree1 setType erasure(tree1.tpe)
- case DefDef(mods, name, tparams, vparamss, tpt, rhs) =>
- val result = super.transform(tree1) setType null
- tpt.tpe = erasure(tree1.symbol.tpe).resultType
- result
- case _ =>
- super.transform(tree1) setType null
- }
- // println("xform: "+ res)
- res
+ try super.transform(tree1) setType null
+ catch {
+ case LoopControl(n, ex) if n <= 5 =>
+ Console.println(tree1)
+ throw LoopControl(n + 1, ex)
+ }
}
+ }
}
/** The main transform function: Pretransfom the tree, and then
@@ -1088,4 +1083,4 @@ abstract class Erasure extends AddInterfaces with typechecker.Analyzer with ast.
}
}
}
-} \ No newline at end of file
+}
diff --git a/src/compiler/scala/tools/nsc/transform/Mixin.scala b/src/compiler/scala/tools/nsc/transform/Mixin.scala
index d1b3142c8a..c228ee0e46 100644
--- a/src/compiler/scala/tools/nsc/transform/Mixin.scala
+++ b/src/compiler/scala/tools/nsc/transform/Mixin.scala
@@ -231,27 +231,12 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
for (member <- impl.info.decls.toList) {
if (isForwarded(member)) {
val imember = member.overriddenSymbol(iface)
- // atPhase(currentRun.erasurePhase){
- // println(""+(clazz, iface, clazz.typeParams, iface.typeParams, imember, clazz.thisType.baseType(iface), clazz.thisType.baseType(iface).memberInfo(imember), imember.info substSym(iface.typeParams, clazz.typeParams) ))
- // }
- // Console.println("mixin member "+member+":"+member.tpe+member.locationString+" "+imember+" "+imember.overridingSymbol(clazz)+" to "+clazz+" with scope "+clazz.info.decls)//DEBUG
+ //Console.println("mixin member "+member+":"+member.tpe+member.locationString+" "+imember+" "+imember.overridingSymbol(clazz)+" to "+clazz+" with scope "+clazz.info.decls)//DEBUG
if (imember.overridingSymbol(clazz) == NoSymbol &&
clazz.info.findMember(member.name, 0, lateDEFERRED, false).alternatives.contains(imember)) {
- val newSym = atPhase(currentRun.erasurePhase){
- val res = imember.cloneSymbol(clazz)
- // since we used the member (imember) from the interface that represents the trait that's being mixed in,
- // have to instantiate the interface type params (that may occur in imember's info) as they are seen from the class
- // we can't use the member that we get from the implementation class, as it's a clone that was made after erasure,
- // and thus it does not know its info at the beginning of erasure anymore
- // optimize: no need if iface has no typeparams
- if(iface.typeParams nonEmpty) res.setInfo(clazz.thisType.baseType(iface).memberInfo(imember))
- res
- } // clone before erasure got rid of type info we'll need to generate a javaSig
- // now we'll have the type info at (the beginning of) erasure in our history,
- newSym.updateInfo(imember.info.cloneInfo(newSym)) // and now newSym has the info that's been transformed to fit this period (no need for asSeenFrom as phase.erasedTypes)
val member1 = addMember(
clazz,
- newSym setPos clazz.pos resetFlag (DEFERRED | lateDEFERRED))
+ member.cloneSymbol(clazz) setPos clazz.pos resetFlag (DEFERRED | lateDEFERRED))
member1.asInstanceOf[TermSymbol] setAlias member;
}
}
diff --git a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
index 1350ab3bb4..d9b60b9ca1 100644
--- a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
+++ b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
@@ -612,12 +612,12 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
if (settings.debug.value) log("normalizeMember: " + sym.fullName)
if (sym.isMethod && !atPhase(currentRun.typerPhase)(sym.typeParams.isEmpty)) {
var (stps, tps) = splitParams(sym.info.typeParams)
- val unusedStvars = stps filterNot (specializedTypeVars(sym.info).toList contains)
+ val unusedStvars = stps -- specializedTypeVars(sym.info).toList
if (unusedStvars.nonEmpty && currentRun.compiles(sym) && !sym.isSynthetic) {
reporter.warning(sym.pos, "%s %s unused or used in non-specializable positions."
.format(unusedStvars.mkString("", ", ", ""), if (unusedStvars.length == 1) "is" else "are"))
unusedStvars foreach (_.removeAnnotation(SpecializedClass))
- stps = stps filterNot (unusedStvars contains)
+ stps = stps -- unusedStvars
tps = tps ::: unusedStvars
}
val res = sym :: (for (env <- specializations(stps) if needsSpecialization(env, sym)) yield {
@@ -644,8 +644,8 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
} else List(sym)
}
- /** Specialize member `m' w.r.t. to the outer environment and the type
- * parameters of the innermost enclosing class.
+ /** Specialize member `m' w.r.t. to the outer environment and the type parameters of
+ * the innermost enclosing class.
*
* Turns 'private' into 'protected' for members that need specialization.
*
@@ -714,7 +714,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
def checkOverriddenTParams(overridden: Symbol) {
if (currentRun.compiles(overriding))
- for ((baseTvar, derivedTvar) <- overridden.info.typeParams.zip(overriding.info.typeParams);
+ for (val (baseTvar, derivedTvar) <- overridden.info.typeParams.zip(overriding.info.typeParams);
val missing = missingSpecializations(baseTvar, derivedTvar)
if missing.nonEmpty)
reporter.error(derivedTvar.pos,
diff --git a/src/compiler/scala/tools/nsc/transform/TailCalls.scala b/src/compiler/scala/tools/nsc/transform/TailCalls.scala
index c7a3e6a778..9b54dd9428 100644
--- a/src/compiler/scala/tools/nsc/transform/TailCalls.scala
+++ b/src/compiler/scala/tools/nsc/transform/TailCalls.scala
@@ -102,7 +102,7 @@ abstract class TailCalls extends Transform
var tailPos = false
/** The reason this method could not be optimized. */
- var tailrecFailReason = "reason indeterminate"
+ var tailrecFailReason = "it contains a recursive call not in tail position"
/** Is the label accessed? */
var accessed = false
@@ -153,13 +153,6 @@ abstract class TailCalls extends Transform
/** A possibly polymorphic apply to be considered for tail call transformation.
*/
def rewriteApply(target: Tree, fun: Tree, targs: List[Tree], args: List[Tree]) = {
- def receiver = fun match {
- case Select(qual, _) => Some(qual)
- case _ => None
- }
-
- def receiverIsSame = receiver exists (enclosingType.widen =:= _.tpe.widen)
- def receiverIsSuper = receiver exists (enclosingType.widen <:< _.tpe.widen)
def isRecursiveCall = ctx.currentMethod eq fun.symbol
def isMandatory = ctx.currentMethod hasAnnotation TailrecClass
def isEligible = ctx.currentMethod.isEffectivelyFinal
@@ -167,6 +160,9 @@ abstract class TailCalls extends Transform
def matchesTypeArgs = ctx.tparams sameElements (targs map (_.tpe.typeSymbol))
def defaultTree = treeCopy.Apply(tree, target, transformArgs)
+ def sameTypeOfThis(receiver: Tree) =
+ receiver.tpe.widen =:= enclosingType.widen
+
/** Records failure reason in Context for reporting.
*/
def cannotRewrite(reason: String) = {
@@ -175,10 +171,6 @@ abstract class TailCalls extends Transform
defaultTree
}
- def notRecursiveReason() =
- if (receiverIsSuper) "it contains a recursive call targetting a supertype"
- else "it contains a recursive call not in tail position"
-
def rewriteTailCall(receiver: Tree, otherArgs: List[Tree]): Tree = {
log("Rewriting tail recursive method call at: " + fun.pos)
@@ -186,16 +178,15 @@ abstract class TailCalls extends Transform
typed { atPos(fun.pos)(Apply(Ident(ctx.label), receiver :: otherArgs)) }
}
- if (!isRecursiveCall) cannotRewrite(notRecursiveReason())
+ if (!isRecursiveCall) defaultTree
else if (!isEligible) cannotRewrite("it is neither private nor final so can be overridden")
else if (!ctx.tailPos) cannotRewrite("it contains a recursive call not in tail position")
else if (!matchesTypeArgs) cannotRewrite("it is called recursively with different type arguments")
- else receiver match {
- case Some(qual) =>
- if (forMSIL) cannotRewrite("it cannot be optimized on MSIL")
- else if (!receiverIsSame) cannotRewrite("it changes type of 'this' on a polymorphic recursive call")
- else rewriteTailCall(qual, transformArgs)
- case _ => rewriteTailCall(This(currentClass), transformArgs)
+ else fun match {
+ case Select(_, _) if forMSIL => cannotRewrite("it cannot be optimized on MSIL")
+ case Select(qual, _) if !sameTypeOfThis(qual) => cannotRewrite("it changes type of 'this' on a polymorphic recursive call")
+ case Select(qual, _) => rewriteTailCall(qual, transformArgs)
+ case _ => rewriteTailCall(This(currentClass), transformArgs)
}
}
diff --git a/src/compiler/scala/tools/nsc/transform/UnCurry.scala b/src/compiler/scala/tools/nsc/transform/UnCurry.scala
index 8e3722dd99..0270323133 100644
--- a/src/compiler/scala/tools/nsc/transform/UnCurry.scala
+++ b/src/compiler/scala/tools/nsc/transform/UnCurry.scala
@@ -62,8 +62,6 @@ abstract class UnCurry extends InfoTransform with TypingTransformers {
case MethodType(params, ExistentialType(tparams, restpe @ MethodType(_, _))) =>
assert(false, "unexpected curried method types with intervening existential")
tp0
- case MethodType(h :: t, restpe) if h.isImplicit =>
- apply(MethodType(h.cloneSymbol.resetFlag(IMPLICIT) :: t, restpe))
case PolyType(List(), restpe) => // nullary method type
apply(MethodType(List(), restpe))
case PolyType(tparams, restpe) => // polymorphic nullary method type, since it didn't occur in a higher-kinded position
@@ -400,7 +398,7 @@ abstract class UnCurry extends InfoTransform with TypingTransformers {
val predef = gen.mkAttributedRef(PredefModule)
val meth =
if ((elemtp <:< AnyRefClass.tpe) && !isPhantomClass(elemtp.typeSymbol))
- TypeApply(Select(predef, "wrapRefArray"), List(TypeTree(elemtp)))
+ Select(predef, "wrapRefArray")
else if (isValueClass(elemtp.typeSymbol))
Select(predef, "wrap"+elemtp.typeSymbol.name+"Array")
else
diff --git a/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala b/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala
index 80f833f03d..63e5a9fb25 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala
@@ -74,7 +74,7 @@ trait Analyzer extends AnyRef
val runsRightAfter = Some("packageobjects")
def newPhase(_prev: Phase): StdPhase = new StdPhase(_prev) {
override def keepsTypeParams = false
- resetTyper() // this does not in fact to the reset for each compilation run!
+ resetTyper()
override def run {
val start = startTimer(typerNanos)
currentRun.units foreach applyPhase
diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
index 7452ac678b..3abaf4f337 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
@@ -478,7 +478,7 @@ self: Analyzer =>
// filter out failures from type inference, don't want to remove them from undetParams!
// we must be conservative in leaving type params in undetparams
- val AdjustedTypeArgs(okParams, okArgs) = adjustTypeArgs(undetParams, targs) // prototype == WildcardType: want to remove all inferred Nothing's
+ val (okParams, okArgs, _) = adjustTypeArgs(undetParams, targs) // prototype == WildcardType: want to remove all inferred Nothing's
val subst = new TreeTypeSubstituter(okParams, okArgs)
subst traverse itree2
@@ -810,7 +810,7 @@ self: Analyzer =>
def mot(tp0: Type): Tree = {
val tp1 = tp0.normalize
tp1 match {
- case ThisType(_) | SingleType(_, _) if !(tp1 exists {tp => tp.typeSymbol.isExistentiallyBound}) => // can't generate a reference to a value that's abstracted over by an existential
+ case ThisType(_) | SingleType(_, _) =>
manifestFactoryCall("singleType", tp, gen.mkAttributedQualifier(tp1))
case ConstantType(value) =>
manifestOfType(tp1.deconst, full)
diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala
index ed5f3b0e9a..a2594a060b 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala
@@ -246,9 +246,6 @@ trait Infer {
/** Check that <code>sym</code> is defined and accessible as a member of
* tree <code>site</code> with type <code>pre</code> in current context.
- *
- * Note: pre is not refchecked -- moreover, refchecking the resulting tree may not refcheck pre,
- * since pre may not occur in its type (callers should wrap the result in a TypeTreeWithDeferredRefCheck)
*/
def checkAccessible(tree: Tree, sym: Symbol, pre: Type, site: Tree): Tree =
if (sym.isError) {
@@ -522,64 +519,48 @@ trait Infer {
tvars map (tvar => WildcardType)
}
- object AdjustedTypeArgs {
- type Result = collection.mutable.LinkedHashMap[Symbol, Option[Type]]
-
- def unapply(m: Result): Some[(List[Symbol], List[Type])] = Some(toLists(
- m collect {case (p, Some(a)) => (p, a)} unzip ))
-
- object Undets {
- def unapply(m: Result): Some[(List[Symbol], List[Type], List[Symbol])] = Some(toLists{
- val (ok, nok) = m.map{case (p, a) => (p, a.getOrElse(null))}.partition(_._2 ne null)
- val (okArgs, okTparams) = ok.unzip
- (okArgs, okTparams, nok.keys)
- })
- }
-
- object AllArgsAndUndets {
- def unapply(m: Result): Some[(List[Symbol], List[Type], List[Type], List[Symbol])] = Some(toLists{
- val (ok, nok) = m.map{case (p, a) => (p, a.getOrElse(null))}.partition(_._2 ne null)
- val (okArgs, okTparams) = ok.unzip
- (okArgs, okTparams, m.values.map(_.getOrElse(NothingClass.tpe)), nok.keys)
- })
- }
-
- @inline private def toLists[A1, A2](pxs: (Iterable[A1], Iterable[A2])) = (pxs._1.toList, pxs._2.toList)
- @inline private def toLists[A1, A2, A3](pxs: (Iterable[A1], Iterable[A2], Iterable[A3])) = (pxs._1.toList, pxs._2.toList, pxs._3.toList)
- @inline private def toLists[A1, A2, A3, A4](pxs: (Iterable[A1], Iterable[A2], Iterable[A3], Iterable[A4])) = (pxs._1.toList, pxs._2.toList, pxs._3.toList, pxs._4.toList)
- }
-
/** Retract arguments that were inferred to Nothing because inference failed. Correct types for repeated params.
*
* We detect Nothing-due-to-failure by only retracting a parameter if either:
* - it occurs in an invariant/contravariant position in `restpe`
* - `restpe == WildcardType`
*
- * Retracted parameters are mapped to None.
- * TODO:
- * - make sure the performance hit of storing these in a map is acceptable (it's going to be a small map in 90% of the cases, I think)
- * - refactor further up the callstack so that we don't have to do this post-factum adjustment?
+ * Retracted parameters are collected in `uninstantiated`.
*
* Rewrite for repeated param types: Map T* entries to Seq[T].
- * @return map from tparams to inferred arg, if inference was successful, tparams that map to None are considered left undetermined
- * type parameters that are inferred as `scala.Nothing' and that are not covariant in <code>restpe</code> are taken to be undetermined
+ * @return (okTparams, okArgs, leftUndet)
+ * * okTparams, okArgs: lists of tparam symbols and their inferred types
+ * * leftUndet a list of remaining uninstantiated type parameters after inference
+ * (type parameters mapped by the constraint solver to `scala.Nothing'
+ * and not covariant in <code>restpe</code> are taken to be
+ * uninstantiated. Maps all those type arguments to their
+ * corresponding type parameters).
*/
- def adjustTypeArgs(tparams: List[Symbol], targs: List[Type], restpe: Type = WildcardType): AdjustedTypeArgs.Result = {
+ def adjustTypeArgs(tparams: List[Symbol], targs: List[Type], restpe: Type = WildcardType): (List[Symbol], List[Type], List[Symbol]) = {
@inline def notCovariantIn(tparam: Symbol, restpe: Type) =
(varianceInType(restpe)(tparam) & COVARIANT) == 0 // tparam occurred non-covariantly (in invariant or contravariant position)
- (tparams, targs).zipped.map{ (tparam, targ) =>
+ val leftUndet = new ListBuffer[Symbol]
+ val okParams = new ListBuffer[Symbol]
+ val okArgs = new ListBuffer[Type]
+
+ (tparams, targs).zipped foreach { (tparam, targ) =>
if (targ.typeSymbol == NothingClass &&
(isWildcard(restpe) || notCovariantIn(tparam, restpe))) {
- tparam -> None
+ leftUndet += tparam
+ // don't add anything to okArgs, it'll be filtered out later anyway
+ // used `tparam.tpeHK` as dummy before
} else {
- tparam -> Some(
+ okParams += tparam
+ okArgs += (
if (targ.typeSymbol == RepeatedParamClass) targ.baseType(SeqClass)
else if (targ.typeSymbol == JavaRepeatedParamClass) targ.baseType(ArrayClass)
else targ.widen
)
}
- }(collection.breakOut)
+ }
+
+ (okParams.toList, okArgs.toList, leftUndet.toList)
}
/** Return inferred type arguments, given type parameters, formal parameters,
@@ -595,12 +576,18 @@ trait Infer {
* @param restp the result type of the method
* @param argtpes the argument types of the application
* @param pt the expected return type of the application
- * @return @see adjustTypeArgs
+ * @return (okTparams, okArgs, leftUndet)
+ * * okTparams, okArgs: lists of tparam symbols and their inferred types
+ * * leftUndet a list of remaining uninstantiated type parameters after inference
+ * (type parameters mapped by the constraint solver to `scala.Nothing'
+ * and not covariant in <code>restpe</code> are taken to be
+ * uninstantiated. Maps all those type arguments to their
+ * corresponding type parameters).
* @throws NoInstance
*/
def methTypeArgs(tparams: List[Symbol], formals: List[Type], restpe: Type,
- argtpes: List[Type], pt: Type): AdjustedTypeArgs.Result = {
+ argtpes: List[Type], pt: Type): (List[Symbol], List[Type], List[Symbol]) = {
val tvars = tparams map freshVar
if (inferInfo)
println("methTypeArgs tparams = "+tparams+
@@ -774,7 +761,7 @@ trait Infer {
isCompatibleArgs(argtpes, formals) && isWeaklyCompatible(restpe, pt)
} else {
try {
- val AdjustedTypeArgs.Undets(okparams, okargs, leftUndet) = methTypeArgs(undetparams, formals, restpe, argtpes, pt)
+ val (okparams, okargs, leftUndet) = methTypeArgs(undetparams, formals, restpe, argtpes, pt)
// #2665: must use weak conformance, not regular one (follow the monomorphic case above)
(exprTypeArgs(leftUndet, restpe.instantiateTypeParams(okparams, okargs), pt, isWeaklyCompatible) ne null) &&
isWithinBounds(NoPrefix, NoSymbol, okparams, okargs)
@@ -1050,20 +1037,12 @@ trait Infer {
*/
def checkKindBounds(tparams: List[Symbol], targs: List[Type], pre: Type, owner: Symbol): List[String] = {
def transform(tp: Type, clazz: Symbol): Type = tp.asSeenFrom(pre, clazz) // instantiate type params that come from outside the abstract type we're currently checking
- def transformedBounds(p: Symbol, o: Symbol) = transform(p.info.instantiateTypeParams(tparams, targs).bounds, o)
// check that the type parameters <arg>hkargs</arg> to a higher-kinded type conform to the expected params <arg>hkparams</arg>
- def checkKindBoundsHK(hkargs: List[Symbol], arg: Symbol, param: Symbol, paramowner: Symbol, underHKParams: List[Symbol], withHKArgs: List[Symbol]): (List[(Symbol, Symbol)], List[(Symbol, Symbol)], List[(Symbol, Symbol)]) = {
- def bindHKParams(tp: Type) = tp.substSym(underHKParams, withHKArgs)
+ def checkKindBoundsHK(hkargs: List[Symbol], arg: Symbol, param: Symbol, paramowner: Symbol): (List[(Symbol, Symbol)], List[(Symbol, Symbol)], List[(Symbol, Symbol)]) = {
// @M sometimes hkargs != arg.typeParams, the symbol and the type may have very different type parameters
val hkparams = param.typeParams
- if(printTypings) {
- println("checkKindBoundsHK expected: "+ param +" with params "+ hkparams +" by definition in "+ paramowner)
- println("checkKindBoundsHK supplied: "+ arg +" with params "+ hkargs +" from "+ owner)
- println("checkKindBoundsHK under params: "+ underHKParams +" with args "+ withHKArgs)
- }
-
if(hkargs.length != hkparams.length) {
if(arg == AnyClass || arg == NothingClass) (Nil, Nil, Nil) // Any and Nothing are kind-overloaded
else (List((arg, param)), Nil, Nil)
@@ -1086,16 +1065,10 @@ trait Infer {
// substSym(hkparams, hkargs) --> these types are going to be compared as types of kind *
// --> their arguments use different symbols, but are conceptually the same
// (could also replace the types by polytypes, but can't just strip the symbols, as ordering is lost then)
- if (!(bindHKParams(transformedBounds(hkparam, paramowner)) <:< transform(hkarg.info.bounds, owner)))
+ if (!(transform(hkparam.info.instantiateTypeParams(tparams, targs).bounds.substSym(hkparams, hkargs), paramowner) <:< transform(hkarg.info.bounds, owner)))
stricterBound(hkarg, hkparam)
-
- if(printTypings) {
- println("checkKindBoundsHK base case: "+ hkparam +" declared bounds: "+ transformedBounds(hkparam, paramowner) +" after instantiating earlier hkparams: "+ bindHKParams(transformedBounds(hkparam, paramowner)))
- println("checkKindBoundsHK base case: "+ hkarg +" has bounds: "+ transform(hkarg.info.bounds, owner))
- }
} else {
- if(printTypings) println("checkKindBoundsHK recursing to compare params of "+ hkparam +" with "+ hkarg)
- val (am, vm, sb) = checkKindBoundsHK(hkarg.typeParams, hkarg, hkparam, paramowner, underHKParams ++ hkparam.typeParams, withHKArgs ++ hkarg.typeParams)
+ val (am, vm, sb) = checkKindBoundsHK(hkarg.typeParams, hkarg, hkparam, paramowner)
arityMismatches(am)
varianceMismatches(vm)
stricterBounds(sb)
@@ -1123,11 +1096,11 @@ trait Infer {
val errors = new ListBuffer[String]
(tparams zip targs).foreach{ case (tparam, targ) if (targ.isHigherKinded || !tparam.typeParams.isEmpty) =>
- // @M must use the typeParams of the type targ, not the typeParams of the symbol of targ!!
- val tparamsHO = targ.typeParams
+ // @M must use the typeParams of the type targ, not the typeParams of the symbol of targ!!
+ val tparamsHO = targ.typeParams
val (arityMismatches, varianceMismatches, stricterBounds) =
- checkKindBoundsHK(tparamsHO, targ.typeSymbolDirect, tparam, tparam.owner, tparam.typeParams, tparamsHO) // NOTE: *not* targ.typeSymbol, which normalizes
+ checkKindBoundsHK(tparamsHO, targ.typeSymbolDirect, tparam, tparam.owner) // NOTE: *not* targ.typeSymbol, which normalizes
if (!(arityMismatches.isEmpty && varianceMismatches.isEmpty && stricterBounds.isEmpty)){
errors += (targ+"'s type parameters do not match "+tparam+"'s expected parameters: "+
(for ((a, p) <- arityMismatches)
@@ -1182,16 +1155,14 @@ trait Infer {
" tparams = "+tparams+"\n"+
" pt = "+pt)
val targs = exprTypeArgs(tparams, tree.tpe, pt)
+ val (okParams, okArgs, leftUndet) = // TODO AM: is this pattern match too expensive? should we push it down into the else of the if below?
+ if (keepNothings || (targs eq null)) (tparams, targs, List()) //@M: adjustTypeArgs fails if targs==null, neg/t0226
+ else adjustTypeArgs(tparams, targs)
- if (keepNothings || (targs eq null)) { //@M: adjustTypeArgs fails if targs==null, neg/t0226
- substExpr(tree, tparams, targs, pt)
- List()
- } else {
- val AdjustedTypeArgs.Undets(okParams, okArgs, leftUndet) = adjustTypeArgs(tparams, targs)
- if (inferInfo) println("inferred expr instance for "+ tree +" --> (okParams, okArgs, leftUndet)= "+(okParams, okArgs, leftUndet))
- substExpr(tree, okParams, okArgs, pt)
- leftUndet
- }
+ if (inferInfo) println("inferred expr instance for "+ tree +" --> (okParams, okArgs, leftUndet)= "+(okParams, okArgs, leftUndet))
+
+ substExpr(tree, okParams, okArgs, pt)
+ leftUndet
}
/** Substitite free type variables `undetparams' of polymorphic argument
@@ -1213,15 +1184,14 @@ trait Infer {
}
}
- /** Substitute free type variables <code>undetparams</code> of application
+ /** Substitite free type variables <code>undetparams</code> of application
* <code>fn(args)</code>, given prototype <code>pt</code>.
*
* @param fn ...
* @param undetparams ...
* @param args ...
* @param pt ...
- * @return The type parameters that remain uninstantiated,
- * and that thus have not been substituted.
+ * @return Return the list of type parameters that remain uninstantiated.
*/
def inferMethodInstance(fn: Tree, undetparams: List[Symbol],
args: List[Tree], pt0: Type): List[Symbol] = fn.tpe match {
@@ -1236,8 +1206,8 @@ trait Infer {
val formals = formalTypes(params0 map (_.tpe), args.length)
val argtpes = actualTypes(args map (_.tpe.deconst), formals.length)
val restpe = fn.tpe.resultType(argtpes)
- val AdjustedTypeArgs.AllArgsAndUndets(okparams, okargs, allargs, leftUndet) = methTypeArgs(undetparams, formals, restpe, argtpes, pt)
- checkBounds(fn.pos, NoPrefix, NoSymbol, undetparams, allargs, "inferred ")
+ val (okparams, okargs, leftUndet) = methTypeArgs(undetparams, formals, restpe, argtpes, pt)
+ checkBounds(fn.pos, NoPrefix, NoSymbol, okparams, okargs, "inferred ")
val treeSubst = new TreeTypeSubstituter(okparams, okargs)
treeSubst.traverse(fn)
treeSubst.traverseTrees(args)
diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala
index 756863f8f9..d56b8ed944 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala
@@ -287,11 +287,13 @@ trait Namers { self: Analyzer =>
* class definition tree.
* @return the companion object symbol.
*/
- def ensureCompanionObject(tree: ClassDef, creator: => Tree): Symbol = {
- val m: Symbol = context.scope.lookup(tree.name.toTermName).filter(! _.isSourceMethod)
- if (m.isModule && inCurrentScope(m) && currentRun.compiles(m)) m
- else enterSyntheticSym(creator)
- }
+ def ensureCompanionObject(tree: ClassDef, creator: => Tree): Symbol = {
+ val m: Symbol = context.scope.lookup(tree.name.toTermName).filter(! _.isSourceMethod)
+ if (m.isModule && inCurrentScope(m) && currentRun.compiles(m)) m
+ else
+ /*util.trace("enter synthetic companion object for "+currentRun.compiles(m)+":")*/(
+ enterSyntheticSym(creator))
+ }
private def enterSymFinishWith(tree: Tree, tparams: List[TypeDef]) {
val sym = tree.symbol
@@ -348,9 +350,6 @@ trait Namers { self: Analyzer =>
tree.symbol = enterClassSymbol(tree)
finishWith(tparams)
if (mods.isCase) {
- if (treeInfo.firstConstructorArgs(impl.body).size > MaxFunctionArity)
- context.error(tree.pos, "Implementation restriction: case classes cannot have more than " + MaxFunctionArity + " parameters.")
-
val m = ensureCompanionObject(tree, caseModuleDef(tree))
caseClassOfModuleClass(m.moduleClass) = tree
}
@@ -990,8 +989,6 @@ trait Namers { self: Analyzer =>
val module = companionModuleOf(meth.owner, context)
module.initialize // call type completer (typedTemplate), adds the
// module's templateNamer to classAndNamerOfModule
- if (!classAndNamerOfModule.contains(module))
- return // fix #3649 (prevent crash in erroneous source code)
val (cdef, nmr) = classAndNamerOfModule(module)
moduleNamer = Some(cdef, nmr)
(cdef, nmr)
@@ -1067,7 +1064,24 @@ trait Namers { self: Analyzer =>
case tp =>
tp
}
- polyType(tparamSyms, tp)
+
+ def verifyOverriding(other: Symbol): Boolean = {
+ if(other.unsafeTypeParams.length != tparamSyms.length) {
+ context.error(tpsym.pos,
+ "The kind of "+tpsym.keyString+" "+tpsym.varianceString + tpsym.nameString+
+ " does not conform to the expected kind of " + other.defString + other.locationString + ".")
+ false
+ } else true
+ }
+
+ // @M: make sure overriding in refinements respects rudimentary kinding
+ // have to do this early, as otherwise we might get crashes: (see neg/bug1275.scala)
+ // suppose some parameterized type member is overridden by a type member w/o params,
+ // then appliedType will be called on a type that does not expect type args --> crash
+ if (tpsym.owner.isRefinementClass && // only needed in refinements
+ !tpsym.allOverriddenSymbols.forall{verifyOverriding(_)})
+ ErrorType
+ else polyType(tparamSyms, tp)
}
/** Given a case class
diff --git a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala
index 91fe113019..f1d0537f46 100644
--- a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala
@@ -341,7 +341,7 @@ trait NamesDefaults { self: Analyzer =>
*
* Example: given
* def foo(x: Int = 2, y: String = "def")
- * foo(y = "lt")
+ * foo(1)
* the argument list (y = "lt") is transformed to (y = "lt", x = foo$default$1())
*/
def addDefaults(givenArgs: List[Tree], qual: Option[Tree], targs: List[Tree],
diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
index ca642d3931..4b5de16a31 100644
--- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
@@ -66,15 +66,6 @@ abstract class RefChecks extends InfoTransform {
}
}
- val toScalaRepeatedParam = new TypeMap {
- def apply(tp: Type): Type = tp match {
- case tp @ TypeRef(pre, JavaRepeatedParamClass, args) =>
- typeRef(pre, RepeatedParamClass, args)
- case _ =>
- mapOver(tp)
- }
- }
-
class RefCheckTransformer(unit: CompilationUnit) extends Transformer {
var localTyper: analyzer.Typer = typer;
@@ -399,8 +390,9 @@ abstract class RefChecks extends InfoTransform {
}
printMixinOverrideErrors()
- // Verifying a concrete class has nothing unimplemented.
- if (clazz.isClass && !clazz.isTrait && !(clazz hasFlag ABSTRACT)) {
+ // 2. Check that only abstract classes have deferred members
+ if (clazz.isClass && !clazz.isTrait) {
+ def isClazzAbstract = clazz hasFlag ABSTRACT
val abstractErrors = new ListBuffer[String]
def abstractErrorMessage =
// a little formatting polish
@@ -420,56 +412,30 @@ abstract class RefChecks extends InfoTransform {
def javaErasedOverridingSym(sym: Symbol): Symbol =
clazz.tpe.nonPrivateMemberAdmitting(sym.name, BRIDGE).filter(other =>
- !other.isDeferred && other.isJavaDefined && {
- def uncurryAndErase(tp: Type) = erasure.erasure(uncurry.transformInfo(sym, tp)) // #3622: erasure operates on uncurried types -- note on passing sym in both cases: only sym.isType is relevant for uncurry.transformInfo
- val tp1 = uncurryAndErase(clazz.thisType.memberType(sym))
- val tp2 = uncurryAndErase(clazz.thisType.memberType(other))
+ !other.isDeferred &&
+ (other hasFlag JAVA) && {
+ val tp1 = erasure.erasure(clazz.thisType.memberType(sym))
+ val tp2 = erasure.erasure(clazz.thisType.memberType(other))
atPhase(currentRun.erasurePhase.next)(tp1 matches tp2)
})
def ignoreDeferred(member: Symbol) =
isAbstractTypeWithoutFBound(member) ||
- (member.isJavaDefined && javaErasedOverridingSym(member) != NoSymbol)
-
- // 2. Check that only abstract classes have deferred members
- def checkNoAbstractMembers() = {
- // Avoid spurious duplicates: first gather any missing members.
- def memberList = clazz.tpe.nonPrivateMembersAdmitting(VBRIDGE)
- val (missing, rest) = memberList partition (m => m.isDeferred && !ignoreDeferred(m))
- // Group missing members by the underlying symbol.
- val grouped = missing groupBy (analyzer underlying _ name)
-
- for (member <- missing) {
- def undefined(msg: String) = abstractClassError(false, infoString(member) + " is not defined" + msg)
- val underlying = analyzer.underlying(member)
-
- // Give a specific error message for abstract vars based on why it fails:
- // It could be unimplemented, have only one accessor, or be uninitialized.
- if (underlying.isVariable) {
- // If both getter and setter are missing, squelch the setter error.
- val isMultiple = grouped(underlying.name).size > 1
- // TODO: messages shouldn't be spread over two files, and varNotice is not a clear name
- if (member.isSetter && isMultiple) ()
- else undefined(
- if (member.isSetter) "\n(Note that an abstract var requires a setter in addition to the getter)"
- else if (member.isGetter && !isMultiple) "\n(Note that an abstract var requires a getter in addition to the setter)"
- else analyzer.varNotice(member)
- )
- }
- else undefined("")
+ ((member hasFlag JAVA) && javaErasedOverridingSym(member) != NoSymbol)
+
+ for (member <- clazz.tpe.nonPrivateMembersAdmitting(VBRIDGE))
+ if (member.isDeferred && !isClazzAbstract && !ignoreDeferred(member)) {
+ abstractClassError(
+ false, infoString(member) + " is not defined" + analyzer.varNotice(member))
+ } else if ((member hasFlag ABSOVERRIDE) && member.isIncompleteIn(clazz)) {
+ val other = member.superSymbol(clazz);
+ abstractClassError(true,
+ infoString(member) + " is marked `abstract' and `override'" +
+ (if (other != NoSymbol)
+ " and overrides incomplete superclass member " + infoString(other)
+ else ", but no concrete implementation could be found in a base class"))
}
- // Check the remainder for invalid absoverride.
- for (member <- rest ; if ((member hasFlag ABSOVERRIDE) && member.isIncompleteIn(clazz))) {
- val other = member.superSymbol(clazz)
- val explanation =
- if (other != NoSymbol) " and overrides incomplete superclass member " + infoString(other)
- else ", but no concrete implementation could be found in a base class"
-
- abstractClassError(true, infoString(member) + " is marked `abstract' and `override'" + explanation)
- }
- }
-
// 3. Check that concrete classes do not have deferred definitions
// that are not implemented in a subclass.
// Note that this is not the same as (2); In a situation like
@@ -492,9 +458,7 @@ abstract class RefChecks extends InfoTransform {
if (!parents.isEmpty && parents.head.typeSymbol.hasFlag(ABSTRACT))
checkNoAbstractDecls(parents.head.typeSymbol)
}
-
- checkNoAbstractMembers()
- if (abstractErrors.isEmpty)
+ if (abstractErrors.isEmpty && !isClazzAbstract)
checkNoAbstractDecls(clazz)
if (abstractErrors.nonEmpty)
@@ -1021,7 +985,7 @@ abstract class RefChecks extends InfoTransform {
private def checkAnnotations(tpes: List[Type], pos: Position) = tpes foreach (tp => checkTypeRef(tp, pos))
private def doTypeTraversal(tree: Tree)(f: Type => Unit) = if (!inPattern) tree.tpe foreach f
- private def applyRefchecksToAnnotations(tree: Tree): Unit = {
+ private def applyRefchecksToAnnotations(tree: Tree) = {
def applyChecks(annots: List[AnnotationInfo]) = {
checkAnnotations(annots map (_.atp), tree.pos)
transformTrees(annots flatMap (_.args))
@@ -1029,18 +993,10 @@ abstract class RefChecks extends InfoTransform {
tree match {
case m: MemberDef => applyChecks(m.symbol.annotations)
- case tpt@TypeTree() =>
- if(tpt.original != null) {
- tpt.original foreach {
- case dc@TypeTreeWithDeferredRefCheck() => applyRefchecksToAnnotations(dc.check()) // #2416
- case _ =>
- }
- }
-
- doTypeTraversal(tree) {
- case AnnotatedType(annots, _, _) => applyChecks(annots)
- case _ =>
- }
+ case TypeTree() => doTypeTraversal(tree) {
+ case AnnotatedType(annots, _, _) => applyChecks(annots)
+ case _ =>
+ }
case _ =>
}
}
@@ -1150,6 +1106,7 @@ abstract class RefChecks extends InfoTransform {
// type bounds (bug #935), issues deprecation warnings for symbols used
// inside annotations.
applyRefchecksToAnnotations(tree)
+
var result: Tree = tree match {
case DefDef(mods, name, tparams, vparams, tpt, EmptyTree) if tree.symbol.hasAnnotation(NativeAttr) =>
tree.symbol.resetFlag(DEFERRED)
@@ -1170,17 +1127,7 @@ abstract class RefChecks extends InfoTransform {
if (bridges.nonEmpty) treeCopy.Template(tree, parents, self, body ::: bridges)
else tree
- case dc@TypeTreeWithDeferredRefCheck() => assert(false, "adapt should have turned dc: TypeTreeWithDeferredRefCheck into tpt: TypeTree, with tpt.original == dc"); dc
- case tpt@TypeTree() =>
- if(tpt.original != null) {
- tpt.original foreach {
- case dc@TypeTreeWithDeferredRefCheck() =>
- transform(dc.check()) // #2416 -- only call transform to do refchecks, but discard results
- // tpt has the right type if the deferred checks are ok
- case _ =>
- }
- }
-
+ case TypeTree() =>
val existentialParams = new ListBuffer[Symbol]
doTypeTraversal(tree) { // check all bounds, except those that are
// existential type parameters
diff --git a/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala b/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala
index 541ef35b4b..ea2cfd6204 100644
--- a/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala
@@ -272,13 +272,12 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
case _ => Nil
}
-
assert(clazz != NoSymbol, sym)
if (settings.debug.value) log("Decided for host class: " + clazz)
val accName = nme.protName(sym.originalName)
val hasArgs = sym.tpe.paramTypes != Nil
- val memberType = refchecks.toScalaRepeatedParam(sym.tpe) // fix for #2413
+ val memberType = sym.tpe // transform(sym.tpe)
// if the result type depends on the this type of an enclosing class, the accessor
// has to take an object of exactly this type, otherwise it's more general
diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
index 043c41fc10..14f416673b 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
@@ -555,8 +555,6 @@ trait Typers { self: Analyzer =>
* If symbol refers to package object, insert `.package` as second to last selector.
* (exception for some symbols in scala package which are dealiased immediately)
* Call checkAccessible, which sets tree's attributes.
- * Also note that checkAccessible looks up sym on pre without checking that pre is well-formed
- * (illegal type applications in pre will be skipped -- that's why typedSelect wraps the resulting tree in a TreeWithDeferredChecks)
* @return modified tree and new prefix type
*/
private def makeAccessible(tree: Tree, sym: Symbol, pre: Type, site: Tree): (Tree, Type) =
@@ -2183,17 +2181,6 @@ trait Typers { self: Analyzer =>
def isNamedApplyBlock(tree: Tree) =
context.namedApplyBlockInfo exists (_._1 == tree)
- def callToCompanionConstr(context: Context, calledFun: Symbol) = {
- if (calledFun.isConstructor) {
- val methCtx = context.enclMethod
- if (methCtx != NoContext) {
- val contextFun = methCtx.tree.symbol
- contextFun.isPrimaryConstructor && contextFun.owner.isModuleClass &&
- companionModuleOf(calledFun.owner, context).moduleClass == contextFun.owner
- } else false
- } else false
- }
-
def doTypedApply(tree: Tree, fun0: Tree, args: List[Tree], mode: Int, pt: Type): Tree = {
var fun = fun0
if (fun.hasSymbol && (fun.symbol hasFlag OVERLOADED)) {
@@ -2331,10 +2318,7 @@ trait Typers { self: Analyzer =>
case _ => false
}
val (allArgs, missing) = addDefaults(args, qual, targs, previousArgss, params, fun.pos.focus, context)
- val funSym = fun1 match { case Block(_, expr) => expr.symbol }
- if (allArgs.length != args.length && callToCompanionConstr(context, funSym)) {
- errorTree(tree, "module extending its companion class cannot use default constructor arguments")
- } else if (allArgs.length == formals.length) {
+ if (allArgs.length == formals.length) {
// useful when a default doesn't match parameter type, e.g. def f[T](x:T="a"); f[Int]()
val note = "Error occurred in an application involving default arguments."
if (!(context.diagnostic contains note)) context.diagnostic = note :: context.diagnostic
@@ -2564,10 +2548,7 @@ trait Typers { self: Analyzer =>
annotationError
}
- /** Calling constfold right here is necessary because some trees (negated
- * floats and literals in particular) are not yet folded.
- */
- def tryConst(tr: Tree, pt: Type) = typed(constfold(tr), EXPRmode, pt) match {
+ def tryConst(tr: Tree, pt: Type) = typed(tr, EXPRmode, pt) match {
// null cannot be used as constant value for classfile annotations
case l @ Literal(c) if !(l.isErroneous || c.value == null) =>
Some(LiteralAnnotArg(c))
@@ -2580,10 +2561,6 @@ trait Typers { self: Analyzer =>
* an error message is reported and None is returned.
*/
def tree2ConstArg(tree: Tree, pt: Type): Option[ClassfileAnnotArg] = tree match {
- case Apply(Select(New(tpt), nme.CONSTRUCTOR), args) if (pt.typeSymbol == ArrayClass) =>
- error(tree.pos, "Array constants have to be specified using the `Array(...)' factory method")
- None
-
case ann @ Apply(Select(New(tpt), nme.CONSTRUCTOR), args) =>
val annInfo = typedAnnotation(ann, mode, NoSymbol, pt.typeSymbol, true)
if (annInfo.atp.isErroneous) {
@@ -2596,10 +2573,11 @@ trait Typers { self: Analyzer =>
// and Array.apply(x: Int, xs: Int*): Array[Int] (and similar)
case Apply(fun, args) =>
val typedFun = typed(fun, funMode(mode), WildcardType)
- if (typedFun.symbol.owner == ArrayModule.moduleClass && typedFun.symbol.name == nme.apply)
+ if (typedFun.symbol.owner == ArrayModule.moduleClass &&
+ typedFun.symbol.name == nme.apply)
pt match {
- case TypeRef(_, ArrayClass, targ :: _) =>
- trees2ConstArg(args, targ)
+ case TypeRef(_, sym, argts) if (sym == ArrayClass && !argts.isEmpty) =>
+ trees2ConstArg(args, argts.head)
case _ =>
// For classfile annotations, pt can only be T:
// BT = Int, .., String, Class[_], JavaAnnotClass
@@ -2805,6 +2783,16 @@ trait Typers { self: Analyzer =>
res
}
+ class SymInstance(val sym: Symbol, val tp: Type) {
+ override def equals(other: Any): Boolean = other match {
+ case that: SymInstance =>
+ this.sym == that.sym && this.tp =:= that.tp
+ case _ =>
+ false
+ }
+ override def hashCode: Int = sym.hashCode * 41 + tp.hashCode
+ }
+
/** convert skolems to existentials */
def packedType(tree: Tree, owner: Symbol): Type = {
def defines(tree: Tree, sym: Symbol) =
@@ -2840,7 +2828,7 @@ trait Typers { self: Analyzer =>
}
}
// add all local symbols of `tp' to `localSyms'
- // TODO: expand higher-kinded types into individual copies for each instance.
+ // expanding higher-kinded types into individual copies for each instance.
def addLocals(tp: Type) {
val remainingSyms = new ListBuffer[Symbol]
def addIfLocal(sym: Symbol, tp: Type) {
@@ -2949,11 +2937,6 @@ trait Typers { self: Analyzer =>
errorTree(tree, treeSymTypeMsg(fun)+" does not take type parameters.")
}
- private[this] var typingIndent: String = ""
- @inline final def deindentTyping() = if (printTypings) typingIndent = typingIndent.substring(0, typingIndent.length() - 2)
- @inline final def indentTyping() = if (printTypings) typingIndent += " "
- @inline final def printTyping(s: => String) = if (printTypings) println(typingIndent+s)
-
/**
* @param tree ...
* @param mode ...
@@ -3271,7 +3254,7 @@ trait Typers { self: Analyzer =>
def errorInResult(tree: Tree) = treesInResult(tree) exists (_.pos == ex.pos)
if (fun :: tree :: args exists errorInResult) {
- printTyping("second try for: "+fun+" and "+args)
+ if (printTypings) println("second try for: "+fun+" and "+args)
val Select(qual, name) = fun
val args1 = tryTypedArgs(args, argMode(fun, mode), ex)
val qual1 =
@@ -3281,8 +3264,9 @@ trait Typers { self: Analyzer =>
val tree1 = Apply(Select(qual1, name) setPos fun.pos, args1) setPos tree.pos
return typed1(tree1, mode | SNDTRYmode, pt)
}
- } else printTyping("no second try for "+fun+" and "+args+" because error not in result:"+ex.pos+"!="+tree.pos)
-
+ } else if (printTypings) {
+ println("no second try for "+fun+" and "+args+" because error not in result:"+ex.pos+"!="+tree.pos)
+ }
reportTypeError(tree.pos, ex)
setError(tree)
}
@@ -3521,9 +3505,9 @@ trait Typers { self: Analyzer =>
case Select(_, _) => treeCopy.Select(tree, qual, name)
case SelectFromTypeTree(_, _) => treeCopy.SelectFromTypeTree(tree, qual, name)
}
+ //if (name.toString == "Elem") println("typedSelect "+qual+":"+qual.tpe+" "+sym+"/"+tree1+":"+tree1.tpe)
val (tree2, pre2) = makeAccessible(tree1, sym, qual.tpe, qual)
val result = stabilize(tree2, pre2, mode, pt)
-
def isPotentialNullDeference() = {
phase.id <= currentRun.typerPhase.id &&
!sym.isConstructor &&
@@ -3534,20 +3518,7 @@ trait Typers { self: Analyzer =>
if (settings.Xchecknull.value && isPotentialNullDeference && unit != null)
unit.warning(tree.pos, "potential null pointer dereference: "+tree)
- result match {
- // could checkAccessible (called by makeAccessible) potentially have skipped checking a type application in qual?
- case SelectFromTypeTree(qual@TypeTree(), name) if qual.tpe.typeArgs nonEmpty => // TODO: somehow the new qual is not checked in refchecks
- treeCopy.SelectFromTypeTree(
- result,
- (TypeTreeWithDeferredRefCheck(){ () => val tp = qual.tpe; val sym = tp.typeSymbolDirect
- // will execute during refchecks -- TODO: make private checkTypeRef in refchecks public and call that one?
- checkBounds(qual.pos, tp.prefix, sym.owner, sym.typeParams, tp.typeArgs, "")
- qual // you only get to see the wrapped tree after running this check :-p
- }) setType qual.tpe,
- name)
- case _ =>
- result
- }
+ result
}
}
@@ -3680,7 +3651,6 @@ trait Typers { self: Analyzer =>
else atPos(tree.pos)(Select(qual, name))
// atPos necessary because qualifier might come from startContext
val (tree2, pre2) = makeAccessible(tree1, defSym, pre, qual)
- // assert(pre.typeArgs isEmpty) // no need to add #2416-style check here, right?
stabilize(tree2, pre2, mode, pt)
}
}
@@ -3706,7 +3676,7 @@ trait Typers { self: Analyzer =>
} else {
val tparams = tpt1.symbol.typeParams
if (tparams.length == args.length) {
- // @M: kind-arity checking is done here and in adapt, full kind-checking is in checkKindBounds (in Infer)
+ // @M: kind-arity checking is done here and in adapt, full kind-checking is in checkKindBounds (in Infer)
val args1 =
if(!tpt1.symbol.rawInfo.isComplete)
args mapConserve (typedHigherKindedType(_, mode))
@@ -3717,8 +3687,11 @@ trait Typers { self: Analyzer =>
//@M! the polytype denotes the expected kind
}
val argtypes = args1 map (_.tpe)
-
- (args, tparams).zipped foreach { (arg, tparam) => arg match {
+ val owntype = if (tpt1.symbol.isClass || tpt1.symbol.isNonClassType)
+ // @M! added the latter condition
+ appliedType(tpt1.tpe, argtypes)
+ else tpt1.tpe.instantiateTypeParams(tparams, argtypes)
+ (args, tparams).zipped map { (arg, tparam) => arg match {
// note: can't use args1 in selector, because Bind's got replaced
case Bind(_, _) =>
if (arg.symbol.isAbstractType)
@@ -3728,17 +3701,7 @@ trait Typers { self: Analyzer =>
glb(List(arg.symbol.info.bounds.hi, tparam.info.bounds.hi.subst(tparams, argtypes))))
case _ =>
}}
-
- val result = TypeTree(appliedType(tpt1.tpe, argtypes)) setOriginal(tree) // setPos tree.pos (done by setOriginal)
- if(tpt1.tpe.isInstanceOf[PolyType]) // did the type application (performed by appliedType) involve an unchecked beta-reduction?
- (TypeTreeWithDeferredRefCheck(){ () =>
- // wrap the tree and include the bounds check -- refchecks will perform this check (that the beta reduction was indeed allowed) and unwrap
- // we can't simply use original in refchecks because it does not contains types
- // (and the only typed trees we have have been mangled so they're not quite the original tree anymore)
- checkBounds(result.pos, tpt1.tpe.prefix, tpt1.symbol.owner, tpt1.symbol.typeParams, argtypes, "")
- result // you only get to see the wrapped tree after running this check :-p
- }).setType(result.tpe)
- else result
+ TypeTree(owntype) setOriginal(tree) // setPos tree.pos
} else if (tparams.length == 0) {
errorTree(tree, tpt1.tpe+" does not take type parameters")
} else {
@@ -3789,17 +3752,12 @@ trait Typers { self: Analyzer =>
docComments(sym) = comment
comment.defineVariables(sym)
val typer1 = newTyper(context.makeNewScope(tree, context.owner))
- for (useCase <- comment.useCases) {
+ for (useCase <- comment.useCases)
typer1.silent(_.typedUseCase(useCase)) match {
case ex: TypeError =>
unit.warning(useCase.pos, ex.msg)
case _ =>
}
- for (useCaseSym <- useCase.defined) {
- if (sym.name != useCaseSym.name)
- unit.warning(useCase.pos, "@usecase " + useCaseSym.name.decode + " does not match commented symbol: " + sym.name.decode)
- }
- }
}
typed(defn, mode, pt)
@@ -4050,7 +4008,7 @@ trait Typers { self: Analyzer =>
case SelectFromTypeTree(qual, selector) =>
val qual1 = typedType(qual, mode)
if (qual1.tpe.isVolatile) error(tree.pos, "illegal type selection from volatile type "+qual.tpe)
- typedSelect(qual1, selector)
+ typedSelect(typedType(qual, mode), selector)
case CompoundTypeTree(templ) =>
typedCompoundTypeTree(templ)
@@ -4066,7 +4024,6 @@ trait Typers { self: Analyzer =>
case etpt @ ExistentialTypeTree(_, _) =>
newTyper(context.makeNewScope(tree, context.owner)).typedExistentialTypeTree(etpt, mode)
- case dc@TypeTreeWithDeferredRefCheck() => dc // TODO: should we re-type the wrapped tree? then we need to change TypeTreeWithDeferredRefCheck's representation to include the wrapped tree explicitly (instead of in its closure)
case tpt @ TypeTree() =>
if (tpt.original != null)
tree setType typedType(tpt.original, mode).tpe
@@ -4089,7 +4046,8 @@ trait Typers { self: Analyzer =>
* @param pt ...
* @return ...
*/
- def typed(tree: Tree, mode: Int, pt: Type): Tree = { indentTyping()
+ def typed(tree: Tree, mode: Int, pt: Type): Tree = {
+
def dropExistential(tp: Type): Type = tp match {
case ExistentialType(tparams, tpe) =>
if (settings.debug.value) println("drop ex "+tree+" "+tp)
@@ -4115,15 +4073,15 @@ trait Typers { self: Analyzer =>
tree.tpe = null
if (tree.hasSymbol) tree.symbol = NoSymbol
}
- printTyping("typing "+tree+", pt = "+pt+", undetparams = "+context.undetparams+", implicits-enabled = "+context.implicitsEnabled+", silent = "+context.reportGeneralErrors) //DEBUG
+ if (printTypings) println("typing "+tree+", pt = "+pt+", undetparams = "+context.undetparams+", implicits-enabled = "+context.implicitsEnabled+", silent = "+context.reportGeneralErrors) //DEBUG
var tree1 = if (tree.tpe ne null) tree else typed1(tree, mode, dropExistential(pt))
- printTyping("typed "+tree1+":"+tree1.tpe+(if (isSingleType(tree1.tpe)) " with underlying "+tree1.tpe.widen else "")+", undetparams = "+context.undetparams+", pt = "+pt) //DEBUG
+ if (printTypings) println("typed "+tree1+":"+tree1.tpe+(if (isSingleType(tree1.tpe)) " with underlying "+tree1.tpe.widen else "")+", undetparams = "+context.undetparams+", pt = "+pt) //DEBUG
tree1.tpe = addAnnotations(tree1, tree1.tpe)
val result = if (tree1.isEmpty) tree1 else adapt(tree1, mode, pt, tree)
- printTyping("adapted "+tree1+":"+tree1.tpe.widen+" to "+pt+", "+context.undetparams) //DEBUG
+ if (printTypings) println("adapted "+tree1+":"+tree1.tpe.widen+" to "+pt+", "+context.undetparams) //DEBUG
// for (t <- tree1.tpe) assert(t != WildcardType)
// if ((mode & TYPEmode) != 0) println("type: "+tree1+" has type "+tree1.tpe)
if (phase.id <= currentRun.typerPhase.id) signalDone(context.asInstanceOf[analyzer.Context], tree, result)
@@ -4131,7 +4089,7 @@ trait Typers { self: Analyzer =>
} catch {
case ex: TypeError =>
tree.tpe = null
- printTyping("caught "+ex+" in typed: "+tree) //DEBUG
+ if (printTypings) println("caught "+ex+" in typed: "+tree) //DEBUG
reportTypeError(tree.pos, ex)
setError(tree)
case ex: Exception =>
@@ -4143,7 +4101,6 @@ trait Typers { self: Analyzer =>
throw ex
}
finally {
- deindentTyping()
if (Statistics.enabled) {
val t = currentTime()
microsByType(pendingTreeTypes.head) += ((t - typerTime) / 1000).toInt
diff --git a/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala b/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala
index 735cb4a3c4..5bbda13acd 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala
@@ -130,31 +130,21 @@ trait Unapplies extends ast.TreeDSL
}
}
- /** The module corresponding to a case class; overrides toString to show the module's name
+ /** The module corresponding to a case class; without any member definitions
*/
def caseModuleDef(cdef: ClassDef): ModuleDef = {
- // > MaxFunctionArity is caught in Namers, but for nice error reporting instead of
- // an abrupt crash we trim the list here.
- def primaries = constrParamss(cdef).head take MaxFunctionArity map (_.tpt)
- def inheritFromFun = !cdef.mods.isAbstract && cdef.tparams.isEmpty && constrParamss(cdef).length == 1
- def createFun = gen.scalaFunctionConstr(primaries, toIdent(cdef), abstractFun = true)
+ def inheritFromFun = !(cdef.mods hasFlag ABSTRACT) && cdef.tparams.isEmpty && constrParamss(cdef).length == 1
+ def createFun = gen.scalaFunctionConstr(constrParamss(cdef).head map (_.tpt), toIdent(cdef), abstractFun = true)
def parents = if (inheritFromFun) List(createFun) else Nil
- def toString = DefDef(
- Modifiers(OVERRIDE | FINAL),
- nme.toString_,
- Nil,
- List(Nil),
- TypeTree(),
- Literal(Constant(cdef.name.decode)))
-
- companionModuleDef(cdef, parents ::: List(gen.scalaScalaObjectConstr), List(toString))
+
+ companionModuleDef(cdef, parents ::: List(gen.scalaScalaObjectConstr))
}
- def companionModuleDef(cdef: ClassDef, parents: List[Tree], body: List[Tree] = Nil): ModuleDef = atPos(cdef.pos.focus) {
+ def companionModuleDef(cdef: ClassDef, parents: List[Tree]): ModuleDef = atPos(cdef.pos.focus) {
ModuleDef(
Modifiers(cdef.mods.flags & AccessFlags | SYNTHETIC, cdef.mods.privateWithin),
cdef.name.toTermName,
- Template(parents, emptyValDef, NoMods, Nil, List(Nil), body, cdef.impl.pos.focus))
+ Template(parents, emptyValDef, NoMods, Nil, List(Nil), Nil, cdef.impl.pos.focus))
}
private val caseMods = Modifiers(SYNTHETIC | CASE)
diff --git a/src/compiler/scala/tools/nsc/util/HashSet.scala b/src/compiler/scala/tools/nsc/util/HashSet.scala
index 8e0c2e2e59..aa6e19538c 100644
--- a/src/compiler/scala/tools/nsc/util/HashSet.scala
+++ b/src/compiler/scala/tools/nsc/util/HashSet.scala
@@ -11,17 +11,19 @@ class HashSet[T >: Null <: AnyRef](val label: String, initialCapacity: Int) exte
def this(label: String) = this(label, 16)
def this() = this(16)
+ private var capacity = initialCapacity
private var used = 0
- private var table = new Array[AnyRef](initialCapacity)
+ private var table = new Array[AnyRef](capacity)
// System.err.println("Created: " + this)
def size: Int = used
def clear() {
+ capacity = initialCapacity
used = 0
- table = new Array[AnyRef](initialCapacity)
+ table = new Array[AnyRef](capacity)
}
- private def index(x: Int): Int = math.abs(x % table.length)
+ private def index(x: Int): Int = math.abs(x % capacity)
def findEntryOrUpdate(x: T): T = {
var h = index(x.##)
@@ -35,7 +37,7 @@ class HashSet[T >: Null <: AnyRef](val label: String, initialCapacity: Int) exte
}
table(h) = x
used += 1
- if (used > (table.length >> 2)) growTable()
+ if (used > (capacity >> 2)) growTable()
x
}
@@ -59,14 +61,14 @@ class HashSet[T >: Null <: AnyRef](val label: String, initialCapacity: Int) exte
}
table(h) = x
used += 1
- if (used > (table.length >> 2)) growTable()
+ if (used > (capacity >> 2)) growTable()
}
def iterator = new Iterator[T] {
private var i = 0
def hasNext: Boolean = {
- while (i < table.length && (table(i) eq null)) i += 1
- i < table.length
+ while (i < capacity && (table(i) eq null)) i += 1
+ i < capacity
}
def next: T =
if (hasNext) { i += 1; table(i - 1).asInstanceOf[T] }
@@ -86,11 +88,12 @@ class HashSet[T >: Null <: AnyRef](val label: String, initialCapacity: Int) exte
private def growTable() {
val oldtable = table
val growthFactor =
- if (table.length <= initialCapacity) 8
- else if (table.length <= (initialCapacity * 8)) 4
+ if (capacity <= initialCapacity) 8
+ else if (capacity <= (initialCapacity * 8)) 4
else 2
- table = new Array[AnyRef](table.length * growthFactor)
+ capacity *= growthFactor
+ table = new Array[AnyRef](capacity)
var i = 0
while (i < oldtable.length) {
val entry = oldtable(i)
@@ -98,5 +101,5 @@ class HashSet[T >: Null <: AnyRef](val label: String, initialCapacity: Int) exte
i += 1
}
}
- override def toString() = "HashSet %s(%d / %d)".format(label, used, table.length)
+ override def toString() = "HashSet %s(%d / %d)".format(label, used, capacity)
}
diff --git a/src/compiler/scala/tools/nsc/util/InterruptReq.scala b/src/compiler/scala/tools/nsc/util/InterruptReq.scala
deleted file mode 100644
index aa7804acbe..0000000000
--- a/src/compiler/scala/tools/nsc/util/InterruptReq.scala
+++ /dev/null
@@ -1,28 +0,0 @@
-package scala.tools.nsc
-package util
-
-/** A class of work items to be used in interrupt requests.
- */
-abstract class InterruptReq {
- /** The result type of the operation
- */
- type R
-
- /** The operation to be performed */
- protected val todo: () => R
-
- /** The result provided */
- private var result: Option[R] = None
-
- /** To be called from interrupted server to execute demanded task */
- def execute(): Unit = synchronized {
- result = Some(todo())
- notify()
- }
-
- /** To be called from interrupting client to get result fo interrupt */
- def getResult(): R = synchronized {
- while (result.isEmpty) wait()
- result.get
- }
-}
diff --git a/src/compiler/scala/tools/nsc/util/WorkScheduler.scala b/src/compiler/scala/tools/nsc/util/WorkScheduler.scala
index 5f33ea4aaa..e309b19b76 100644
--- a/src/compiler/scala/tools/nsc/util/WorkScheduler.scala
+++ b/src/compiler/scala/tools/nsc/util/WorkScheduler.scala
@@ -9,21 +9,22 @@ class WorkScheduler {
private var todo = new Queue[Action]
private var throwables = new Queue[Throwable]
- private var interruptReqs = new Queue[InterruptReq]
- /** Called from server: block until one of todo list, throwables or interruptReqs is nonempty */
+ /** Called from server: block until todo list is nonempty */
def waitForMoreWork() = synchronized {
- while (todo.isEmpty && throwables.isEmpty && interruptReqs.isEmpty) { wait() }
+ while (todo.isEmpty) { wait() }
}
- /** called from Server: test whether one of todo list, throwables, or InterruptReqs is nonempty */
+ /** Called from server: test whether todo list is nonempty */
def moreWork(): Boolean = synchronized {
- todo.nonEmpty || throwables.nonEmpty || interruptReqs.nonEmpty
+ todo.nonEmpty
}
/** Called from server: get first action in todo list, and pop it off */
def nextWorkItem(): Option[Action] = synchronized {
- if (todo.isEmpty) None else Some(todo.dequeue())
+ if (!todo.isEmpty) {
+ Some(todo.dequeue())
+ } else None
}
/** Called from server: return optional exception posted by client
@@ -40,22 +41,6 @@ class WorkScheduler {
}
}
- def pollInterrupt(): Option[InterruptReq] = synchronized {
- if (interruptReqs.isEmpty) None else Some(interruptReqs.dequeue())
- }
-
- /** Called from client: have interrupt executed by server and return result */
- def doQuickly[A](op: () => A): A = {
- val ir = new InterruptReq {
- type R = A
- val todo = op
- }
- synchronized {
- interruptReqs enqueue ir
- }
- ir.getResult()
- }
-
/** Called from client: have action executed by server */
def postWorkItem(action: Action) = synchronized {
todo enqueue action
@@ -75,4 +60,3 @@ class WorkScheduler {
postWorkItem { () => }
}
}
-
diff --git a/src/compiler/scala/tools/nsc/util/package.scala b/src/compiler/scala/tools/nsc/util/package.scala
index c38b2c5031..92d4eab54f 100644
--- a/src/compiler/scala/tools/nsc/util/package.scala
+++ b/src/compiler/scala/tools/nsc/util/package.scala
@@ -11,28 +11,6 @@ package object util {
/** Apply a function and return the passed value */
def returning[T](x: T)(f: T => Unit): T = { f(x) ; x }
- /** All living threads. */
- def allThreads(): List[Thread] = {
- val num = Thread.activeCount()
- val tarray = new Array[Thread](num)
- val got = Thread.enumerate(tarray)
-
- tarray take got toList
- }
-
- /** Execute code and then wait for all Threads created during its
- * execution to complete.
- */
- def waitingForThreads[T](body: => T) = {
- val ts1 = allThreads()
- val result = body
- val ts2 = allThreads()
- val newThreads = (ts2.toSet -- ts1) filterNot (_.isDaemon())
-
- newThreads foreach (_.join())
- result
- }
-
/** Generate a string using a routine that wants to write on a stream. */
def stringFromWriter(writer: PrintWriter => Unit): String = {
val stringWriter = new StringWriter()
diff --git a/src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala b/src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala
index b9afcda3ec..0c124c9c19 100644
--- a/src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala
+++ b/src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala
@@ -14,7 +14,6 @@ abstract class CPSAnnotationChecker extends CPSUtils {
import definitions._
//override val verbose = true
- @inline override final def vprintln(x: =>Any): Unit = if (verbose) println(x)
/**
* Checks whether @cps annotations conform
diff --git a/src/continuations/plugin/scala/tools/selectivecps/CPSUtils.scala b/src/continuations/plugin/scala/tools/selectivecps/CPSUtils.scala
index d1a35df04b..57cba6e829 100644
--- a/src/continuations/plugin/scala/tools/selectivecps/CPSUtils.scala
+++ b/src/continuations/plugin/scala/tools/selectivecps/CPSUtils.scala
@@ -11,7 +11,8 @@ trait CPSUtils {
var cpsEnabled = false
val verbose: Boolean = System.getProperty("cpsVerbose", "false") == "true"
- def vprintln(x: =>Any): Unit = if (verbose) println(x)
+ @inline final def vprintln(x: =>Any): Unit = if (verbose) println(x)
+
lazy val MarkerCPSSym = definitions.getClass("scala.util.continuations.cpsSym")
lazy val MarkerCPSTypes = definitions.getClass("scala.util.continuations.cpsParam")
diff --git a/src/library/scala/Array.scala b/src/library/scala/Array.scala
index 250fd09602..9712990d73 100644
--- a/src/library/scala/Array.scala
+++ b/src/library/scala/Array.scala
@@ -363,18 +363,13 @@ object Array extends FallbackArrayBuilding {
*/
def iterate[T: ClassManifest](start: T, len: Int)(f: T => T): Array[T] = {
val b = newBuilder[T]
-
- if (len > 0) {
- b.sizeHint(len)
- var acc = start
- var i = 1
+ b.sizeHint(len)
+ var acc = start
+ var i = 0
+ while (i < len) {
b += acc
-
- while (i < len) {
- acc = f(acc)
- i += 1
- b += acc
- }
+ acc = f(acc)
+ i += 1
}
b.result
}
diff --git a/src/library/scala/Enumeration.scala b/src/library/scala/Enumeration.scala
index 871de3714d..5d1a0997ed 100644
--- a/src/library/scala/Enumeration.scala
+++ b/src/library/scala/Enumeration.scala
@@ -6,11 +6,23 @@
** |/ **
\* */
+
+
package scala
import scala.collection.SetLike
-import scala.collection.{ mutable, immutable, generic }
-import java.lang.reflect.{ Modifier, Method => JMethod, Field => JField }
+import scala.collection.mutable.{Builder, AddingBuilder, Map, HashMap}
+import scala.collection.immutable.{Set, BitSet}
+import scala.collection.generic.CanBuildFrom
+
+private object Enumeration {
+
+ /* This map caches enumeration instances so that enumeration
+ _values_ resolve to reference-equal objects when they are
+ deserialized, rather than to fresh copies. */
+ private val emap: Map[Class[_], Enumeration] = new HashMap
+
+}
/** <p>
* Defines a finite set of values specific to the enumeration. Typically
@@ -40,7 +52,7 @@ import java.lang.reflect.{ Modifier, Method => JMethod, Field => JField }
*
* <b>def</b> isWorkingDay(d: WeekDay) = ! (d == Sat || d == Sun)
*
- * WeekDay.values filter isWorkingDay foreach println
+ * WeekDay.iterator filter isWorkingDay foreach println
* }</pre>
*
* @param initial The initial value from which to count the integers that
@@ -52,23 +64,48 @@ import java.lang.reflect.{ Modifier, Method => JMethod, Field => JField }
*/
@serializable
@SerialVersionUID(8476000850333817230L)
-abstract class Enumeration(initial: Int, names: String*) {
- thisenum =>
+abstract class Enumeration(initial: Int, names: String*) { thisenum =>
def this() = this(0, null)
def this(names: String*) = this(0, names: _*)
+ Enumeration.synchronized {
+ Enumeration.emap.get(getClass) match {
+ case None =>
+ Enumeration.emap += (getClass -> this)
+ case Some(_) =>
+ /* do nothing */
+ }
+ }
+
/* Note that `readResolve` cannot be private, since otherwise
the JVM does not invoke it when deserializing subclasses. */
- protected def readResolve(): AnyRef = thisenum.getClass.getField("MODULE$").get()
+ protected def readResolve(): AnyRef = Enumeration.synchronized {
+ Enumeration.emap.get(getClass) match {
+ case None =>
+ Enumeration.emap += (getClass -> this)
+ this
+ case Some(existing) =>
+ existing
+ }
+ }
/** The name of this enumeration.
*/
- override def toString = (getClass.getName stripSuffix "$" split '.' last) split '$' last
+ override def toString = {
+ val name = this.getClass.getName
+ var string =
+ if (name endsWith "$") name.substring(0, name.length - 1) else name
+ val idx1 = string.lastIndexOf('.' : Int)
+ if (idx1 != -1) string = string.substring(idx1 + 1)
+ val idx2 = string.indexOf('$')
+ if (idx2 != -1) string = string.substring(idx2 + 1)
+ string
+ }
/** The mapping from the integer used to identify values to the actual
* values. */
- private val vmap: mutable.Map[Int, Value] = new mutable.HashMap
+ private val vmap: Map[Int, Value] = new HashMap
/** The cache listing all values of this enumeration. */
@transient private var vset: ValueSet = null
@@ -76,13 +113,13 @@ abstract class Enumeration(initial: Int, names: String*) {
/** The mapping from the integer used to identify values to their
* names. */
- private val nmap: mutable.Map[Int, String] = new mutable.HashMap
+ private val nmap: Map[Int, String] = new HashMap
/** The values of this enumeration as a set.
*/
def values: ValueSet = {
if (!vsetDefined) {
- vset = new ValueSet(immutable.BitSet.empty ++ (vmap.values map (_.id)))
+ vset = new ValueSet(BitSet.empty ++ (vmap.values map (_.id)))
vsetDefined = true
}
vset
@@ -93,8 +130,6 @@ abstract class Enumeration(initial: Int, names: String*) {
/** The string to use to name the next created value. */
protected var nextName = names.iterator
- private def nextNameOrElse(orElse: => String) =
- if (nextName.hasNext) nextName.next else orElse
/** The highest integer amongst those used to identify values in this
* enumeration. */
@@ -136,7 +171,8 @@ abstract class Enumeration(initial: Int, names: String*) {
* unique amongst all values of the enumeration.
* @return ..
*/
- protected final def Value(i: Int): Value = Value(i, nextNameOrElse(null))
+ protected final def Value(i: Int): Value =
+ Value(i, if (nextName.hasNext) nextName.next else null)
/** Creates a fresh value, part of this enumeration, called <code>name</code>.
*
@@ -154,27 +190,32 @@ abstract class Enumeration(initial: Int, names: String*) {
*/
protected final def Value(i: Int, name: String): Value = new Val(i, name)
- private def populateNameMap() {
- // The list of possible Value methods: 0-args which return a conforming type
- val methods = getClass.getMethods filter (m => m.getParameterTypes.isEmpty && classOf[Value].isAssignableFrom(m.getReturnType))
-
- methods foreach { m =>
- val name = m.getName
- // invoke method to obtain actual `Value` instance
- val value = m.invoke(this).asInstanceOf[Value]
- // verify that outer points to the correct Enumeration: ticket #3616.
- if (value.outerEnum eq thisenum) {
- val id = Int.unbox(classOf[Val] getMethod "id" invoke value)
- nmap += ((id, name))
- }
- }
- }
-
/* Obtains the name for the value with id `i`. If no name is cached
* in `nmap`, it populates `nmap` using reflection.
*/
private def nameOf(i: Int): String = synchronized {
- nmap.getOrElse(i, { populateNameMap() ; nmap(i) })
+ def isValDef(m: java.lang.reflect.Method) =
+ getClass.getDeclaredFields.exists(fd => fd.getName == m.getName &&
+ fd.getType == m.getReturnType)
+ nmap.get(i) match {
+ case Some(name) => name
+ case None =>
+ val methods = getClass.getMethods
+ for (m <- methods
+ if (classOf[Value].isAssignableFrom(m.getReturnType) &&
+ !java.lang.reflect.Modifier.isFinal(m.getModifiers) &&
+ m.getParameterTypes.isEmpty &&
+ isValDef(m))) {
+ val name = m.getName
+ // invoke method to obtain actual `Value` instance
+ val value = m.invoke(this)
+ // invoke `id` method
+ val idMeth = classOf[Val].getMethod("id")
+ val id: Int = idMeth.invoke(value).asInstanceOf[java.lang.Integer].intValue()
+ nmap += (id -> name)
+ }
+ nmap(i)
+ }
}
/** The type of the enumerated values. */
@@ -183,14 +224,12 @@ abstract class Enumeration(initial: Int, names: String*) {
abstract class Value extends Ordered[Value] {
/** the id and bit location of this enumeration value */
def id: Int
- /** a marker so we can tell whose values belong to whom come reflective-naming time */
- private[Enumeration] val outerEnum = thisenum
-
override def compare(that: Value): Int = this.id - that.id
- override def equals(other: Any) = other match {
- case that: Enumeration#Value => (outerEnum eq that.outerEnum) && (id == that.id)
- case _ => false
- }
+ override def equals(other: Any): Boolean =
+ other match {
+ case that: thisenum.Value => compare(that) == 0
+ case _ => false
+ }
override def hashCode: Int = id.##
/** this enumeration value as an <code>Int</code> bit mask.
@@ -219,25 +258,29 @@ abstract class Enumeration(initial: Int, names: String*) {
@serializable
@SerialVersionUID(0 - 3501153230598116017L)
protected class Val(i: Int, name: String) extends Value {
- def this(i: Int) = this(i, nextNameOrElse(i.toString))
- def this(name: String) = this(nextId, name)
- def this() = this(nextId)
-
- assert(!vmap.isDefinedAt(i), "Duplicate id: " + i)
+ def this(i: Int) =
+ this(i, if (nextName.hasNext) nextName.next else i.toString())
+ def this(name: String) = this(nextId, name)
+ def this() =
+ this(nextId, if (nextName.hasNext) nextName.next else nextId.toString())
+ assert(!vmap.isDefinedAt(i))
vmap(i) = this
vsetDefined = false
nextId = i + 1
if (nextId > topId) topId = nextId
def id = i
override def toString() =
- if (name != null) name
- else try thisenum.nameOf(i)
- catch { case _: NoSuchElementException => "<Invalid enum: no field for #" + i + ">" }
-
+ if (name eq null) Enumeration.this.nameOf(i)
+ else name
protected def readResolve(): AnyRef = {
- val enum = thisenum.readResolve().asInstanceOf[Enumeration]
- if (enum.vmap == null) this
- else enum.vmap(i)
+ val enum = Enumeration.synchronized {
+ Enumeration.emap.get(Enumeration.this.getClass) match {
+ case None => Enumeration.this
+ case Some(existing) => existing
+ }
+ }
+ if (enum.vmap ne null) enum.vmap(i)
+ else this
}
}
@@ -245,24 +288,21 @@ abstract class Enumeration(initial: Int, names: String*) {
* Iterating through this set will yield values in increasing order of their ids.
* @param ids The set of ids of values, organized as a BitSet.
*/
- class ValueSet private[Enumeration] (val ids: immutable.BitSet) extends Set[Value] with SetLike[Value, ValueSet] {
+ class ValueSet private[Enumeration] (val ids: BitSet) extends Set[Value] with SetLike[Value, ValueSet] {
override def empty = ValueSet.empty
def contains(v: Value) = ids contains (v.id)
def + (value: Value) = new ValueSet(ids + value.id)
def - (value: Value) = new ValueSet(ids - value.id)
- def iterator = ids.iterator map thisenum.apply
- override def stringPrefix = thisenum + ".ValueSet"
+ def iterator = ids.iterator map Enumeration.this.apply
+ override def stringPrefix = Enumeration.this + ".ValueSet"
}
/** A factory object for value sets */
object ValueSet {
- import mutable.{ Builder, AddingBuilder }
- import generic.CanBuildFrom
-
/** The empty value set */
- val empty = new ValueSet(immutable.BitSet.empty)
+ val empty = new ValueSet(BitSet.empty)
/** A value set consisting of given elements */
- def apply(elems: Value*): ValueSet = empty ++ elems
+ def apply(elems: Value*): ValueSet = elems.foldLeft(empty)(_ + _)
/** A builder object for value sets */
def newBuilder: Builder[Value, ValueSet] = new AddingBuilder(empty)
/** The implicit builder for value sets */
diff --git a/src/library/scala/Option.scala b/src/library/scala/Option.scala
index c3cfb14b73..8992024353 100644
--- a/src/library/scala/Option.scala
+++ b/src/library/scala/Option.scala
@@ -6,9 +6,12 @@
** |/ **
\* */
+
+
package scala
-object Option {
+object Option
+{
/** An implicit conversion that converts an option to an iterable value
*/
implicit def option2Iterable[A](xo: Option[A]): Iterable[A] = xo.toList
@@ -20,11 +23,6 @@ object Option {
* @return Some(value) if value != null, None if value == null
*/
def apply[A](x: A): Option[A] = if (x == null) None else Some(x)
-
- /** An Option factory which returns None in a manner consistent with
- * the collections hierarchy.
- */
- def empty[A] : Option[A] = None
}
/** This class represents optional values. Instances of <code>Option</code>
diff --git a/src/library/scala/collection/IterableLike.scala b/src/library/scala/collection/IterableLike.scala
index 538fd09c0e..da18c712f5 100644
--- a/src/library/scala/collection/IterableLike.scala
+++ b/src/library/scala/collection/IterableLike.scala
@@ -158,8 +158,7 @@ self =>
* @param step the distance between the first elements of successive
* groups (defaults to 1)
* @return An iterator producing ${coll}s of size `size`, except the
- * last and the only element will be truncated if there are
- * fewer elements than size.
+ * last will be truncated if the elements don't divide evenly.
*/
def sliding[B >: A](size: Int): Iterator[Repr] = sliding(size, 1)
def sliding[B >: A](size: Int, step: Int): Iterator[Repr] =
diff --git a/src/library/scala/collection/Parallel.scala b/src/library/scala/collection/Parallel.scala
deleted file mode 100644
index e500817745..0000000000
--- a/src/library/scala/collection/Parallel.scala
+++ /dev/null
@@ -1,17 +0,0 @@
-package scala.collection
-
-
-
-
-
-
-/** A marker trait for objects with parallelised operations.
- *
- * @since 2.8
- * @author prokopec
- */
-trait Parallel
-
-
-
-
diff --git a/src/library/scala/collection/Parallelizable.scala b/src/library/scala/collection/Parallelizable.scala
deleted file mode 100644
index 405c005c55..0000000000
--- a/src/library/scala/collection/Parallelizable.scala
+++ /dev/null
@@ -1,38 +0,0 @@
-package scala.collection
-
-
-
-import parallel.ParallelIterableLike
-
-
-
-/** This trait describes collections which can be turned into parallel collections
- * by invoking the method `par`. Parallelizable collections may be parametrized with
- * a target type different than their own.
- */
-trait Parallelizable[+ParRepr <: Parallel] {
-
- /** Returns a parallel implementation of a collection.
- */
- def par: ParRepr
-
-}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/src/library/scala/collection/Sequentializable.scala b/src/library/scala/collection/Sequentializable.scala
deleted file mode 100644
index 61fb24571a..0000000000
--- a/src/library/scala/collection/Sequentializable.scala
+++ /dev/null
@@ -1,15 +0,0 @@
-package scala.collection
-
-
-
-
-trait Sequentializable[+T, +Repr] {
-
- /** A view of this parallel collection, but with all
- * of the operations implemented sequentially (i.e. in a single-threaded manner).
- *
- * @return a sequential view of the collection.
- */
- def seq: Repr
-
-} \ No newline at end of file
diff --git a/src/library/scala/collection/SetLike.scala b/src/library/scala/collection/SetLike.scala
index fbbd77d8aa..2e9a1ec2a2 100644
--- a/src/library/scala/collection/SetLike.scala
+++ b/src/library/scala/collection/SetLike.scala
@@ -11,6 +11,7 @@ package scala.collection
import generic._
import mutable.{Builder, AddingBuilder}
+import PartialFunction._
/** A template trait for sets.
*
diff --git a/src/library/scala/collection/TraversableLike.scala b/src/library/scala/collection/TraversableLike.scala
index 6f851fb5e7..2169dcdd02 100644
--- a/src/library/scala/collection/TraversableLike.scala
+++ b/src/library/scala/collection/TraversableLike.scala
@@ -698,7 +698,7 @@ trait TraversableLike[+A, +Repr] extends HasNewBuilder[A, Repr]
def toTraversable: Traversable[A] = thisCollection
def toIterator: Iterator[A] = toStream.iterator
- def toStream: Stream[A] = toBuffer.toStream
+ def toStream: Stream[A] = Stream.empty[A] ++ thisCollection
/** Converts this $coll to a string.
* @return a string representation of this collection. By default this
diff --git a/src/library/scala/collection/TraversableOnce.scala b/src/library/scala/collection/TraversableOnce.scala
index de4eb6fc22..b6c0ce146e 100644
--- a/src/library/scala/collection/TraversableOnce.scala
+++ b/src/library/scala/collection/TraversableOnce.scala
@@ -277,7 +277,7 @@ trait TraversableOnce[+A] {
* @tparam B the result type of the `+` operator.
* @return the sum of all elements of this $coll with respect to the `+` operator in `num`.
*
- * @usecase def sum: A
+ * @usecase def sum: Int
*
* @return the sum of all elements in this $coll of numbers of type `Int`.
* Instead of `Int`, any other type `T` with an implicit `Numeric[T]` implementation
diff --git a/src/library/scala/collection/generic/CanCombineFrom.scala b/src/library/scala/collection/generic/CanCombineFrom.scala
deleted file mode 100644
index b56dab9794..0000000000
--- a/src/library/scala/collection/generic/CanCombineFrom.scala
+++ /dev/null
@@ -1,23 +0,0 @@
-package scala.collection
-package generic
-
-import scala.collection.parallel._
-
-/**
- * A base trait for parallel builder factories.
- *
- * @tparam From the type of the underlying collection that requests a builder to be created
- * @tparam Elem the element type of the collection to be created
- * @tparam To the type of the collection to be created
- */
-trait CanCombineFrom[-From, -Elem, +To] extends CanBuildFrom[From, Elem, To] with Parallel {
- def apply(from: From): Combiner[Elem, To]
- def apply(): Combiner[Elem, To]
-}
-
-
-
-
-
-
-
diff --git a/src/library/scala/collection/generic/GenericParallelCompanion.scala b/src/library/scala/collection/generic/GenericParallelCompanion.scala
deleted file mode 100644
index e5ba36f846..0000000000
--- a/src/library/scala/collection/generic/GenericParallelCompanion.scala
+++ /dev/null
@@ -1,29 +0,0 @@
-package scala.collection.generic
-
-
-import scala.collection.parallel.Combiner
-import scala.collection.parallel.ParallelIterable
-import scala.collection.parallel.ParallelMap
-
-
-
-/** A template class for companion objects of parallel collection classes.
- * They should be mixed in together with `GenericCompanion` type.
- * @tparam CC the type constructor representing the collection class
- * @since 2.8
- */
-trait GenericParallelCompanion[+CC[X] <: ParallelIterable[X]] {
- /** The default builder for $Coll objects.
- */
- def newBuilder[A]: Combiner[A, CC[A]]
-
- /** The parallel builder for $Coll objects.
- */
- def newCombiner[A]: Combiner[A, CC[A]]
-}
-
-trait GenericParallelMapCompanion[+CC[P, Q] <: ParallelMap[P, Q]] {
- def newCombiner[P, Q]: Combiner[(P, Q), CC[P, Q]]
-}
-
-
diff --git a/src/library/scala/collection/generic/GenericParallelTemplate.scala b/src/library/scala/collection/generic/GenericParallelTemplate.scala
deleted file mode 100644
index e98c13fa36..0000000000
--- a/src/library/scala/collection/generic/GenericParallelTemplate.scala
+++ /dev/null
@@ -1,66 +0,0 @@
-package scala.collection.generic
-
-
-
-import scala.collection.parallel.Combiner
-import scala.collection.parallel.ParallelIterable
-import scala.collection.parallel.ParallelMap
-import scala.collection.parallel.TaskSupport
-
-
-import annotation.unchecked.uncheckedVariance
-
-
-
-
-
-
-/** A template trait for collections having a companion.
- *
- * @tparam A the element type of the collection
- * @tparam CC the type constructor representing the collection class
- * @since 2.8
- * @author prokopec
- */
-trait GenericParallelTemplate[+A, +CC[X] <: ParallelIterable[X]]
-extends GenericTraversableTemplate[A, CC]
- with HasNewCombiner[A, CC[A] @uncheckedVariance]
- with TaskSupport
-{
- def companion: GenericCompanion[CC] with GenericParallelCompanion[CC]
-
- protected[this] override def newBuilder: collection.mutable.Builder[A, CC[A]] = newCombiner
-
- protected[this] override def newCombiner: Combiner[A, CC[A]] = {
- val cb = companion.newCombiner[A]
- cb.environment = environment
- cb
- }
-
- override def genericBuilder[B]: Combiner[B, CC[B]] = genericCombiner[B]
-
- def genericCombiner[B]: Combiner[B, CC[B]] = {
- val cb = companion.newCombiner[B]
- cb.environment = environment
- cb
- }
-
-}
-
-
-trait GenericParallelMapTemplate[K, +V, +CC[X, Y] <: ParallelMap[X, Y]]
-extends TaskSupport
-{
- def mapCompanion: GenericParallelMapCompanion[CC]
-
- def genericMapCombiner[P, Q]: Combiner[(P, Q), CC[P, Q]] = {
- val cb = mapCompanion.newCombiner[P, Q]
- cb.environment = environment
- cb
- }
-}
-
-
-
-
-
diff --git a/src/library/scala/collection/generic/HasNewCombiner.scala b/src/library/scala/collection/generic/HasNewCombiner.scala
deleted file mode 100644
index 2c24b437d8..0000000000
--- a/src/library/scala/collection/generic/HasNewCombiner.scala
+++ /dev/null
@@ -1,26 +0,0 @@
-package scala.collection.generic
-
-
-
-import scala.collection.parallel.Combiner
-
-
-
-trait HasNewCombiner[+T, +Repr] {
- protected[this] def newCombiner: Combiner[T, Repr]
-}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/src/library/scala/collection/generic/ParallelFactory.scala b/src/library/scala/collection/generic/ParallelFactory.scala
deleted file mode 100644
index fd33631640..0000000000
--- a/src/library/scala/collection/generic/ParallelFactory.scala
+++ /dev/null
@@ -1,43 +0,0 @@
-package scala.collection.generic
-
-
-import scala.collection.parallel.ParallelIterable
-import scala.collection.parallel.Combiner
-
-
-
-/** A template class for companion objects of `ParallelIterable` and subclasses thereof.
- * This class extends `TraversableFactory` and provides a set of operations to create `$Coll` objects.
- *
- * @define $coll parallel collection
- * @define $Coll ParallelIterable
- */
-abstract class ParallelFactory[CC[X] <: ParallelIterable[X] with GenericParallelTemplate[X, CC]]
-extends TraversableFactory[CC]
- with GenericParallelCompanion[CC] {
-
- type EPC[T, C] = collection.parallel.EnvironmentPassingCombiner[T, C]
-
- /**
- * A generic implementation of the `CanCombineFrom` trait, which forwards all calls to
- * `apply(from)` to the `genericParallelBuilder` method of the $coll `from`, and calls to `apply()`
- * to this factory.
- */
- class GenericCanCombineFrom[A] extends GenericCanBuildFrom[A] with CanCombineFrom[CC[_], A, CC[A]] {
- override def apply(from: Coll) = from.genericCombiner
- override def apply() = newBuilder[A]
- }
-}
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/src/library/scala/collection/generic/ParallelMapFactory.scala b/src/library/scala/collection/generic/ParallelMapFactory.scala
deleted file mode 100644
index 8f779b4029..0000000000
--- a/src/library/scala/collection/generic/ParallelMapFactory.scala
+++ /dev/null
@@ -1,42 +0,0 @@
-package scala.collection.generic
-
-
-
-import scala.collection.parallel.ParallelMap
-import scala.collection.parallel.ParallelMapLike
-import scala.collection.parallel.Combiner
-import scala.collection.mutable.Builder
-
-
-
-
-/** A template class for companion objects of `ParallelMap` and subclasses thereof.
- * This class extends `TraversableFactory` and provides a set of operations to create `$Coll` objects.
- *
- * @define $coll parallel map
- * @define $Coll ParallelMap
- */
-abstract class ParallelMapFactory[CC[X, Y] <: ParallelMap[X, Y] with ParallelMapLike[X, Y, CC[X, Y], _]]
-extends MapFactory[CC]
- with GenericParallelMapCompanion[CC] {
-
- type MapColl = CC[_, _]
-
- /** The default builder for $Coll objects.
- * @tparam K the type of the keys
- * @tparam V the type of the associated values
- */
- override def newBuilder[K, V]: Builder[(K, V), CC[K, V]] = newCombiner[K, V]
-
- /** The default combiner for $Coll objects.
- * @tparam K the type of the keys
- * @tparam V the type of the associated values
- */
- def newCombiner[K, V]: Combiner[(K, V), CC[K, V]]
-
- class CanCombineFromMap[K, V] extends CanCombineFrom[CC[_, _], (K, V), CC[K, V]] {
- def apply(from: MapColl) = from.genericMapCombiner[K, V].asInstanceOf[Combiner[(K, V), CC[K, V]]]
- def apply() = newCombiner[K, V]
- }
-
-}
diff --git a/src/library/scala/collection/generic/Signalling.scala b/src/library/scala/collection/generic/Signalling.scala
deleted file mode 100644
index 1dac4297b7..0000000000
--- a/src/library/scala/collection/generic/Signalling.scala
+++ /dev/null
@@ -1,192 +0,0 @@
-package scala.collection.generic
-
-
-import java.util.concurrent.atomic.AtomicInteger
-
-
-
-
-
-/**
- * A message interface serves as a unique interface to the
- * part of the collection capable of receiving messages from
- * a different task.
- *
- * One example of use of this is the `find` method, which can use the
- * signalling interface to inform worker threads that an element has
- * been found and no further search is necessary.
- *
- * @author prokopec
- *
- * @define abortflag
- * Abort flag being true means that a worker can abort and produce whatever result,
- * since its result will not affect the final result of computation. An example
- * of operations using this are `find`, `forall` and `exists` methods.
- *
- * @define indexflag
- * The index flag holds an integer which carries some operation-specific meaning. For
- * instance, `takeWhile` operation sets the index flag to the position of the element
- * where the predicate fails. Other workers may check this index against the indices
- * they are working on and return if this index is smaller than their index. Examples
- * of operations using this are `takeWhile`, `dropWhile`, `span` and `indexOf`.
- */
-trait Signalling {
- /**
- * Checks whether an abort signal has been issued.
- *
- * $abortflag
- * @return the state of the abort
- */
- def isAborted: Boolean
-
- /**
- * Sends an abort signal to other workers.
- *
- * $abortflag
- */
- def abort: Unit
-
- /**
- * Returns the value of the index flag.
- *
- * $indexflag
- * @return the value of the index flag
- */
- def indexFlag: Int
-
- /**
- * Sets the value of the index flag.
- *
- * $indexflag
- * @param f the value to which the index flag is set.
- */
- def setIndexFlag(f: Int)
-
- /**
- * Sets the value of the index flag if argument is greater than current value.
- * This method does this atomically.
- *
- * $indexflag
- * @param f the value to which the index flag is set
- */
- def setIndexFlagIfGreater(f: Int)
-
- /**
- * Sets the value of the index flag if argument is lesser than current value.
- * This method does this atomically.
- *
- * $indexflag
- * @param f the value to which the index flag is set
- */
- def setIndexFlagIfLesser(f: Int)
-
- /**
- * A read only tag specific to the signalling object. It is used to give
- * specific workers information on the part of the collection being operated on.
- */
- def tag: Int
-}
-
-
-/**
- * This signalling implementation returns default values and ignores received signals.
- */
-class DefaultSignalling extends Signalling {
- def isAborted = false
- def abort {}
-
- def indexFlag = -1
- def setIndexFlag(f: Int) {}
- def setIndexFlagIfGreater(f: Int) {}
- def setIndexFlagIfLesser(f: Int) {}
-
- def tag = -1
-}
-
-
-/**
- * An object that returns default values and ignores received signals.
- */
-object IdleSignalling extends DefaultSignalling
-
-
-/**
- * A mixin trait that implements abort flag behaviour using volatile variables.
- */
-trait VolatileAbort extends Signalling {
- @volatile private var abortflag = false
- abstract override def isAborted = abortflag
- abstract override def abort = abortflag = true
-}
-
-
-/**
- * A mixin trait that implements index flag behaviour using atomic integers.
- * The `setIndex` operation is wait-free, while conditional set operations `setIndexIfGreater`
- * and `setIndexIfLesser` are lock-free and support only monotonic changes.
- */
-trait AtomicIndexFlag extends Signalling {
- private val intflag: AtomicInteger = new AtomicInteger(-1)
- abstract override def indexFlag = intflag.get
- abstract override def setIndexFlag(f: Int) = intflag.set(f)
- abstract override def setIndexFlagIfGreater(f: Int) = {
- var loop = true
- do {
- val old = intflag.get
- if (f <= old) loop = false
- else if (intflag.compareAndSet(old, f)) loop = false
- } while (loop);
- }
- abstract override def setIndexFlagIfLesser(f: Int) = {
- var loop = true
- do {
- val old = intflag.get
- if (f >= old) loop = false
- else if (intflag.compareAndSet(old, f)) loop = false
- } while (loop);
- }
-}
-
-
-/**
- * An implementation of the signalling interface using delegates.
- */
-trait DelegatedSignalling extends Signalling {
- /**
- * A delegate that method calls are redirected to.
- */
- var signalDelegate: Signalling
-
- def isAborted = signalDelegate.isAborted
- def abort = signalDelegate.abort
-
- def indexFlag = signalDelegate.indexFlag
- def setIndexFlag(f: Int) = signalDelegate.setIndexFlag(f)
- def setIndexFlagIfGreater(f: Int) = signalDelegate.setIndexFlagIfGreater(f)
- def setIndexFlagIfLesser(f: Int) = signalDelegate.setIndexFlagIfLesser(f)
-
- def tag = signalDelegate.tag
-}
-
-
-/**
- * Class implementing delegated signalling.
- */
-class DelegatedContext(var signalDelegate: Signalling) extends DelegatedSignalling
-
-
-/**
- * Class implementing delegated signalling, but having its own distinct `tag`.
- */
-class TaggedDelegatedContext(deleg: Signalling, override val tag: Int) extends DelegatedContext(deleg)
-
-
-
-
-
-
-
-
-
-
-
diff --git a/src/library/scala/collection/generic/Sizing.scala b/src/library/scala/collection/generic/Sizing.scala
deleted file mode 100644
index bf801302ae..0000000000
--- a/src/library/scala/collection/generic/Sizing.scala
+++ /dev/null
@@ -1,9 +0,0 @@
-package scala.collection.generic
-
-
-
-/** A trait for objects which have a size.
- */
-trait Sizing {
- def size: Int
-} \ No newline at end of file
diff --git a/src/library/scala/collection/generic/TraversableFactory.scala b/src/library/scala/collection/generic/TraversableFactory.scala
index c6f5ce4dde..d8541d2714 100644
--- a/src/library/scala/collection/generic/TraversableFactory.scala
+++ b/src/library/scala/collection/generic/TraversableFactory.scala
@@ -224,17 +224,13 @@ abstract class TraversableFactory[CC[X] <: Traversable[X] with GenericTraversabl
*/
def iterate[A](start: A, len: Int)(f: A => A): CC[A] = {
val b = newBuilder[A]
- if (len > 0) {
- b.sizeHint(len)
- var acc = start
- var i = 1
+ b.sizeHint(len)
+ var acc = start
+ var i = 0
+ while (i < len) {
b += acc
-
- while (i < len) {
- acc = f(acc)
- i += 1
- b += acc
- }
+ acc = f(acc)
+ i += 1
}
b.result
}
diff --git a/src/library/scala/collection/immutable/HashMap.scala b/src/library/scala/collection/immutable/HashMap.scala
index f40905428e..01ef597d24 100644
--- a/src/library/scala/collection/immutable/HashMap.scala
+++ b/src/library/scala/collection/immutable/HashMap.scala
@@ -14,10 +14,6 @@ package immutable
import generic._
import annotation.unchecked.uncheckedVariance
-
-import parallel.immutable.ParallelHashTrie
-
-
/** This class implements immutable maps using a hash trie.
*
* '''Note:''' the builder of a hash map returns specialized representations EmptyMap,Map1,..., Map4
@@ -36,7 +32,7 @@ import parallel.immutable.ParallelHashTrie
* @define willNotTerminateInf
*/
@serializable @SerialVersionUID(2L)
-class HashMap[A, +B] extends Map[A,B] with MapLike[A, B, HashMap[A, B]] with Parallelizable[ParallelHashTrie[A, B]] {
+class HashMap[A, +B] extends Map[A,B] with MapLike[A, B, HashMap[A, B]] {
override def size: Int = 0
@@ -75,20 +71,13 @@ class HashMap[A, +B] extends Map[A,B] with MapLike[A, B, HashMap[A, B]] with Par
protected def get0(key: A, hash: Int, level: Int): Option[B] = None
- def updated0[B1 >: B](key: A, hash: Int, level: Int, value: B1, kv: (A, B1)): HashMap[A, B1] =
+ protected def updated0[B1 >: B](key: A, hash: Int, level: Int, value: B1, kv: (A, B1)): HashMap[A, B1] =
new HashMap.HashMap1(key, hash, value, kv)
protected def removed0(key: A, hash: Int, level: Int): HashMap[A, B] = this
- protected def writeReplace(): AnyRef = new HashMap.SerializationProxy(this)
-
- def split: Seq[HashMap[A, B]] = Seq(this)
- def merge[B1 >: B](that: HashMap[A, B1]): HashMap[A, B1] = merge0(that, 0)
-
- protected def merge0[B1 >: B](that: HashMap[A, B1], level: Int): HashMap[A, B1] = that
-
- def par = ParallelHashTrie.fromTrie(this)
+ protected def writeReplace(): AnyRef = new HashMap.SerializationProxy(this)
}
@@ -110,60 +99,19 @@ object HashMap extends ImmutableMapFactory[HashMap] {
// TODO: add HashMap2, HashMap3, ...
- // statistics - will remove in future
- var bothsingle = 0
- var bothtries = 0
- var onetrie = 0
-
-
- class HashMap1[A,+B](private[HashMap] var key: A, private[HashMap] var hash: Int, private[HashMap] var value: (B @uncheckedVariance), private[HashMap] var kv: (A,B @uncheckedVariance)) extends HashMap[A,B] {
+ class HashMap1[A,+B](private var key: A, private[HashMap] var hash: Int, private var value: (B @uncheckedVariance), private var kv: (A,B @uncheckedVariance)) extends HashMap[A,B] {
override def size = 1
override def get0(key: A, hash: Int, level: Int): Option[B] =
if (hash == this.hash && key == this.key) Some(value) else None
- // override def updated0[B1 >: B](key: A, hash: Int, level: Int, value: B1, kv: (A, B1)): HashMap[A, B1] =
- // if (hash == this.hash && key == this.key) new HashMap1(key, hash, value, kv)
- // else {
- // var thatindex = (hash >>> level) & 0x1f
- // var thisindex = (this.hash >>> level) & 0x1f
- // if (hash != this.hash) {
- // //new HashTrieMap[A,B1](level+5, this, new HashMap1(key, hash, value, kv))
- // val m = new HashTrieMap[A,B1](0,new Array[HashMap[A,B1]](0),0) // TODO: could save array alloc
- // m.updated0(this.key, this.hash, level, this.value, this.kv).updated0(key, hash, level, value, kv) // TODO and it will
- // } else {
- // // 32-bit hash collision (rare, but not impossible)
- // new HashMapCollision1(hash, ListMap.empty.updated(this.key,this.value).updated(key,value))
- // }
- // }
-
override def updated0[B1 >: B](key: A, hash: Int, level: Int, value: B1, kv: (A, B1)): HashMap[A, B1] =
if (hash == this.hash && key == this.key) new HashMap1(key, hash, value, kv)
else {
- var thatindex = (hash >>> level) & 0x1f
- var thisindex = (this.hash >>> level) & 0x1f
if (hash != this.hash) {
- // they have different hashes, but may collide at this level - find a level at which they don't
- var lvl = level
- var top: HashTrieMap[A, B1] = null
- var prev: HashTrieMap[A, B1] = null
- while (thisindex == thatindex) {
- val newlevel = new HashTrieMap[A, B1](1 << thisindex, new Array[HashMap[A, B1]](1), 2)
- if (prev ne null) prev.elems(0) = newlevel else top = newlevel
- prev = newlevel
- lvl += 5
- thatindex = (hash >>> lvl) & 0x1f
- thisindex = (this.hash >>> lvl) & 0x1f
- }
- val bottelems = new Array[HashMap[A,B1]](2)
- val ind = if (thisindex < thatindex) 1 else 0
- bottelems(1 - ind) = this
- bottelems(ind) = new HashMap1[A, B1](key, hash, value, kv)
- val bottom = new HashTrieMap[A,B1]((1 << thisindex) | (1 << thatindex), bottelems, 2)
- if (prev ne null) {
- prev.elems(0) = bottom
- top
- } else bottom
+ //new HashTrieMap[A,B1](level+5, this, new HashMap1(key, hash, value, kv))
+ val m = new HashTrieMap[A,B1](0,new Array[HashMap[A,B1]](0),0) // TODO: could save array alloc
+ m.updated0(this.key, this.hash, level, this.value, this.kv).updated0(key, hash, level, value, kv)
} else {
// 32-bit hash collision (rare, but not impossible)
new HashMapCollision1(hash, ListMap.empty.updated(this.key,this.value).updated(key,value))
@@ -176,11 +124,6 @@ object HashMap extends ImmutableMapFactory[HashMap] {
override def iterator: Iterator[(A,B)] = Iterator(ensurePair)
override def foreach[U](f: ((A, B)) => U): Unit = f(ensurePair)
private[HashMap] def ensurePair: (A,B) = if (kv ne null) kv else { kv = (key, value); kv }
- protected override def merge0[B1 >: B](that: HashMap[A, B1], level: Int): HashMap[A, B1] = {
- // if (that.isInstanceOf[HashMap1[_, _]]) bothsingle += 1
- // else onetrie += 1
- that.updated0(key, hash, level, value, kv)
- }
}
private class HashMapCollision1[A,+B](private[HashMap] var hash: Int, var kvs: ListMap[A,B @uncheckedVariance]) extends HashMap[A,B] {
@@ -210,21 +153,11 @@ object HashMap extends ImmutableMapFactory[HashMap] {
override def iterator: Iterator[(A,B)] = kvs.iterator
override def foreach[U](f: ((A, B)) => U): Unit = kvs.foreach(f)
- override def split: Seq[HashMap[A, B]] = {
- val (x, y) = kvs.splitAt(kvs.size / 2)
- def newhm(lm: ListMap[A, B @uncheckedVariance]) = new HashMapCollision1(hash, lm)
- List(newhm(x), newhm(y))
- }
- protected override def merge0[B1 >: B](that: HashMap[A, B1], level: Int): HashMap[A, B1] = {
- // this can be made more efficient by passing the entire ListMap at once
- var m = that
- for (p <- kvs) m = m.updated0(p._1, this.hash, level, p._2, p)
- m
- }
}
- class HashTrieMap[A,+B](private[HashMap] var bitmap: Int, private[HashMap] var elems: Array[HashMap[A,B @uncheckedVariance]],
- private[HashMap] var size0: Int) extends HashMap[A,B] {
+
+ class HashTrieMap[A,+B](private var bitmap: Int, private var elems: Array[HashMap[A,B @uncheckedVariance]],
+ private var size0: Int) extends HashMap[A,B] {
/*
def this (level: Int, m1: HashMap1[A,B], m2: HashMap1[A,B]) = {
this(((m1.hash >>> level) & 0x1f) | ((m2.hash >>> level) & 0x1f), {
@@ -275,7 +208,8 @@ object HashMap extends ImmutableMapFactory[HashMap] {
Array.copy(elems, 0, elemsNew, 0, offset)
elemsNew(offset) = new HashMap1(key, hash, value, kv)
Array.copy(elems, offset, elemsNew, offset + 1, elems.length - offset)
- new HashTrieMap(bitmap | mask, elemsNew, size + 1)
+ val bitmapNew = bitmap | mask
+ new HashTrieMap(bitmapNew, elemsNew, size + 1)
}
}
@@ -412,146 +346,6 @@ time { mNew.iterator.foreach( p => ()) }
}
}
- private def printBitmap(bm: Int) {
- var i = 32
- var b = bm
- while (i != 0) {
- print((b & 1) + " ")
- b = b >>> 1
- i -= 1
- }
- println
- }
-
- private def posOf(n: Int, bm: Int) = {
- var left = n
- var i = -1
- var b = bm
- while (left >= 0) {
- i += 1
- if ((b & 1) != 0) left -= 1
- b = b >>> 1
- }
- i
- }
-
- override def split: Seq[HashMap[A, B]] = if (size == 1) Seq(this) else {
- val nodesize = Integer.bitCount(bitmap)
- if (nodesize > 1) {
- // printBitmap(bitmap)
- // println(elems.toList)
-
- // println("subtrees: " + nodesize)
- // println("will split at: " + (nodesize / 2))
- val splitpoint = nodesize / 2
- val bitsplitpoint = posOf(nodesize / 2, bitmap)
- val bm1 = bitmap & (-1 << bitsplitpoint)
- val bm2 = bitmap & (-1 >>> (32 - bitsplitpoint))
- // printBitmap(bm1)
- // printBitmap(bm2)
- val (e1, e2) = elems.splitAt(splitpoint)
- // println(e1.toList)
- // println(e2.toList)
- val hm1 = new HashTrieMap(bm1, e1, e1.foldLeft(0)(_ + _.size))
- val hm2 = new HashTrieMap(bm2, e2, e2.foldLeft(0)(_ + _.size))
-
- List(hm1, hm2)
- } else elems(0).split
- }
-
- protected override def merge0[B1 >: B](that: HashMap[A, B1], level: Int): HashMap[A, B1] = that match {
- case hm: HashMap1[_, _] =>
- // onetrie += 1
- this.updated0(hm.key, hm.hash, level, hm.value.asInstanceOf[B1], hm.kv)
- case hm: HashTrieMap[_, _] =>
- // bothtries += 1
- val that = hm.asInstanceOf[HashTrieMap[A, B1]]
- val thiselems = this.elems
- val thatelems = that.elems
- var thisbm = this.bitmap
- var thatbm = that.bitmap
-
- // determine the necessary size for the array
- val subcount = Integer.bitCount(thisbm | thatbm)
-
- // construct a new array of appropriate size
- val merged = new Array[HashMap[A, B1]](subcount)
-
- // run through both bitmaps and add elements to it
- var i = 0
- var thisi = 0
- var thati = 0
- var totalelems = 0
- while (i < subcount) {
- val thislsb = thisbm ^ (thisbm & (thisbm - 1))
- val thatlsb = thatbm ^ (thatbm & (thatbm - 1))
- // if (this.bitmap == -1660585213) { TODO remove
- // printBitmap(thislsb)
- // printBitmap(thatlsb)
- // println("------------------")
- // }
- if (thislsb == thatlsb) {
- // println("a collision")
- val m = thiselems(thisi).merge0(thatelems(thati), level + 5)
- totalelems += m.size
- merged(i) = m
- thisbm = thisbm & ~thislsb
- thatbm = thatbm & ~thatlsb
- thati += 1
- thisi += 1
- } else {
- // condition below is due to 2 things:
- // 1) no unsigned int compare on JVM
- // 2) 0 (no lsb) should always be greater in comparison
- // also, search for unsigned compare Scala to find Dave's solution
- // and compare a and b defined as below:
- val a = thislsb - 1
- val b = thatlsb - 1
- // ! our case indeed is more specific, but this didn't help:
- // if ((thislsb > 0 && thislsb < thatlsb) || thatlsb == 0 || (thatlsb < 0 && thislsb != 0)) {
- if ((a < b) ^ (a < 0) ^ (b < 0)) {
- // println("an element from this trie")
- val m = thiselems(thisi)
- totalelems += m.size
- merged(i) = m
- thisbm = thisbm & ~thislsb
- thisi += 1
- } else {
- // println("an element from that trie")
- val m = thatelems(thati)
- totalelems += m.size
- merged(i) = m
- thatbm = thatbm & ~thatlsb
- thati += 1
- }
- }
- i += 1
- }
-
- new HashTrieMap[A, B1](this.bitmap | that.bitmap, merged, totalelems)
- case hm: HashMapCollision1[_, _] => that.merge0(this, level)
- case _ => error("section supposed to be unreachable.")
- }
-
- }
-
- private def check[K](x: HashMap[K, _], y: HashMap[K, _], xy: HashMap[K, _]) = { // TODO remove this debugging helper
- var xs = Set[K]()
- for (elem <- x) xs += elem._1
- var ys = Set[K]()
- for (elem <- y) ys += elem._1
- var union = Set[K]()
- for (elem <- xy) union += elem._1
- if ((xs ++ ys) != union) {
- println("Error.")
- println(x.getClass)
- println(y.getClass)
- println(xs)
- println(ys)
- println(xs ++ ys)
- println(union)
- false
- } else true
}
@serializable @SerialVersionUID(2L) private class SerializationProxy[A,B](@transient private var orig: HashMap[A, B]) {
diff --git a/src/library/scala/collection/immutable/IntMap.scala b/src/library/scala/collection/immutable/IntMap.scala
index d4605d3e1f..ba5cd896ac 100644
--- a/src/library/scala/collection/immutable/IntMap.scala
+++ b/src/library/scala/collection/immutable/IntMap.scala
@@ -11,14 +11,6 @@
package scala.collection
package immutable;
-
-
-import scala.collection.generic.CanBuildFrom
-import scala.collection.mutable.Builder
-import scala.collection.mutable.MapBuilder
-
-
-
/** Utility class for integer maps.
* @author David MacIver
*/
@@ -61,12 +53,6 @@ import IntMapUtils._
* @since 2.7
*/
object IntMap {
- /** $mapCanBuildFromInfo */
- implicit def canBuildFrom[A, B] = new CanBuildFrom[IntMap[A], (Int, B), IntMap[B]] {
- def apply(from: IntMap[A]): Builder[(Int, B), IntMap[B]] = apply()
- def apply(): Builder[(Int, B), IntMap[B]] = new MapBuilder[Int, B, IntMap[B]](empty[B])
- }
-
def empty[T] : IntMap[T] = IntMap.Nil;
def singleton[T](key : Int, value : T) : IntMap[T] = IntMap.Tip(key, value);
def apply[T](elems : (Int, T)*) : IntMap[T] =
@@ -161,7 +147,7 @@ import IntMap._
/** Specialised immutable map structure for integer keys, based on
* <a href="http://citeseer.ist.psu.edu/okasaki98fast.html">Fast Mergeable Integer Maps</a>
- * by Okasaki and Gill. Essentially a trie based on binary digits of the integers.
+ * by Okasaki and Gill. Essentially a trie based on binary digits of the the integers.
*
* Note: This class is as of 2.8 largely superseded by HashMap.
*
diff --git a/src/library/scala/collection/immutable/LongMap.scala b/src/library/scala/collection/immutable/LongMap.scala
index dcdc6e948f..691a81d9f0 100644
--- a/src/library/scala/collection/immutable/LongMap.scala
+++ b/src/library/scala/collection/immutable/LongMap.scala
@@ -1,23 +1,6 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
package scala.collection
package immutable
-
-import scala.collection.generic.CanBuildFrom
-import scala.collection.mutable.Builder
-import scala.collection.mutable.MapBuilder
-
-
-
/** Utility class for long maps.
* @author David MacIver
*/
@@ -61,12 +44,6 @@ import LongMapUtils._
* @since 2.7
*/
object LongMap{
- /** $mapCanBuildFromInfo */
- implicit def canBuildFrom[A, B] = new CanBuildFrom[LongMap[A], (Long, B), LongMap[B]] {
- def apply(from: LongMap[A]): Builder[(Long, B), LongMap[B]] = apply()
- def apply(): Builder[(Long, B), LongMap[B]] = new MapBuilder[Long, B, LongMap[B]](empty[B])
- }
-
def empty[T] : LongMap[T] = LongMap.Nil;
def singleton[T](key : Long, value : T) : LongMap[T] = LongMap.Tip(key, value);
def apply[T](elems : (Long, T)*) : LongMap[T] =
@@ -159,7 +136,7 @@ import LongMap._;
/**
* Specialised immutable map structure for long keys, based on
* <a href="http://citeseer.ist.psu.edu/okasaki98fast.html">Fast Mergeable Long Maps</a>
- * by Okasaki and Gill. Essentially a trie based on binary digits of the integers.
+ * by Okasaki and Gill. Essentially a trie based on binary digits of the the integers.
*
* Note: This class is as of 2.8 largely superseded by HashMap.
*
diff --git a/src/library/scala/collection/immutable/NumericRange.scala b/src/library/scala/collection/immutable/NumericRange.scala
index b8bd5bd20e..db44e9ffa0 100644
--- a/src/library/scala/collection/immutable/NumericRange.scala
+++ b/src/library/scala/collection/immutable/NumericRange.scala
@@ -40,8 +40,8 @@ import generic._
abstract class NumericRange[T]
(val start: T, val end: T, val step: T, val isInclusive: Boolean)
(implicit num: Integral[T])
-extends IndexedSeq[T] {
-
+extends IndexedSeq[T]
+{
/** Note that NumericRange must be invariant so that constructs
* such as
*
@@ -122,6 +122,18 @@ extends IndexedSeq[T] {
else start + (fromInt(idx) * step)
}
+ // a well-typed contains method.
+ def containsTyped(x: T): Boolean = {
+ def divides(d: T, by: T) = equiv(d % by, zero)
+
+ limitTest(x) || (
+ if (step > zero)
+ (start <= x) && (x < end) && divides(x - start, step)
+ else
+ (start >= x) && (x > end) && divides(start - x, step)
+ )
+ }
+
// Motivated by the desire for Double ranges with BigDecimal precision,
// we need some way to map a Range and get another Range. This can't be
// done in any fully general way because Ranges are not arbitrary
@@ -153,7 +165,7 @@ extends IndexedSeq[T] {
if (isInclusive) NumericRange.inclusive(start, end, step)
else NumericRange(start, end, step)
- private lazy val underlyingRange: NumericRange[T] = self
+ private val underlyingRange: NumericRange[T] = self
override def foreach[U](f: A => U) { underlyingRange foreach (x => f(fm(x))) }
override def isEmpty = underlyingRange.isEmpty
override def apply(idx: Int): A = fm(underlyingRange(idx))
@@ -161,21 +173,20 @@ extends IndexedSeq[T] {
}
}
- // a well-typed contains method.
- def containsTyped(x: T): Boolean = {
- def divides(d: T, by: T) = equiv(d % by, zero)
-
- limitTest(x) || (
- if (step > zero)
- (start <= x) && (x < end) && divides(x - start, step)
- else
- (start >= x) && (x > end) && divides(start - x, step)
- )
- }
-
+ // The contains situation makes for some interesting code.
+ // I am not aware of any way to avoid a cast somewhere, because
+ // contains must take an Any.
override def contains(x: Any): Boolean =
- try containsTyped(x.asInstanceOf[T])
- catch { case _: ClassCastException => false }
+ try {
+ // if we don't verify that x == typedX, then a range
+ // of e.g. Longs will appear to contain an Int because
+ // the cast will perform the conversion. (As of this writing
+ // it is anticipated that in scala 2.8, 5L != 5 although
+ // this is not yet implemented.)
+ val typedX = x.asInstanceOf[T]
+ containsTyped(typedX) && (x == typedX)
+ }
+ catch { case _: ClassCastException => super.contains(x) }
override lazy val hashCode = super.hashCode()
override def equals(other: Any) = other match {
diff --git a/src/library/scala/collection/immutable/Stream.scala b/src/library/scala/collection/immutable/Stream.scala
index 5475c59809..7e363a7e96 100644
--- a/src/library/scala/collection/immutable/Stream.scala
+++ b/src/library/scala/collection/immutable/Stream.scala
@@ -15,8 +15,6 @@ import generic._
import mutable.{Builder, StringBuilder, LazyBuilder, ListBuffer}
import scala.annotation.tailrec
-
-
/** The class `Stream` implements lazy lists where elements
* are only evaluated when they are needed. Here is an example:
*
@@ -203,14 +201,11 @@ self =>
* @param p the predicate used to filter the stream.
* @return the elements of this stream satisfying <code>p</code>.
*/
- override def filter(p: A => Boolean): Stream[A] = {
+ override final def filter(p: A => Boolean): Stream[A] = {
// optimization: drop leading prefix of elems for which f returns false
- // var rest = this dropWhile (!p(_)) - forget DRY principle - GC can't collect otherwise
- var rest = this
- while (!rest.isEmpty && !p(rest.head)) rest = rest.tail
- // private utility func to avoid `this` on stack (would be needed for the lazy arg)
- if (rest.nonEmpty) Stream.filteredTail(rest, p)
- else Stream.Empty
+ var rest = this dropWhile (!p(_))
+ if (rest.isEmpty) Stream.Empty
+ else new Stream.Cons(rest.head, rest.tail filter p)
}
override final def withFilter(p: A => Boolean): StreamWithFilter = new StreamWithFilter(p)
@@ -218,7 +213,6 @@ self =>
/** A lazier implementation of WithFilter than TraversableLike's.
*/
final class StreamWithFilter(p: A => Boolean) extends WithFilter(p) {
-
override def map[B, That](f: A => B)(implicit bf: CanBuildFrom[Stream[A], B, That]): That = {
def tailMap = asStream[B](tail withFilter p map f)
asThat[That](
@@ -350,8 +344,6 @@ self =>
if (n <= 0 || isEmpty) Stream.Empty
else new Stream.Cons(head, if (n == 1) Stream.empty else tail take (n-1))
- override def splitAt(n: Int): (Stream[A], Stream[A]) = (take(n), drop(n))
-
/** A substream starting at index `from`
* and extending up to (but not including) index `until`.
*
@@ -460,17 +452,9 @@ self =>
flatten1(asTraversable(head))
}
- override def view = new StreamView[A, Stream[A]] {
- protected lazy val underlying = self.repr
- override def iterator = self.iterator
- override def length = self.length
- override def apply(idx: Int) = self.apply(idx)
- }
-
/** Defines the prefix of this object's <code>toString</code> representation as ``Stream''.
*/
override def stringPrefix = "Stream"
-
}
/**
@@ -617,8 +601,8 @@ object Stream extends SeqFactory[Stream] {
if (n <= 0) Empty else new Cons(elem, fill(n-1)(elem))
override def tabulate[A](n: Int)(f: Int => A): Stream[A] = {
- def loop(i: Int): Stream[A] =
- if (i >= n) Empty else new Cons(f(i), loop(i+1))
+ def loop(i: Int) =
+ if (i >= n) Empty else new Cons(f(i), tabulate(i+1)(f))
loop(0)
}
@@ -626,10 +610,6 @@ object Stream extends SeqFactory[Stream] {
if (if (step < 0) start <= end else end <= start) Empty
else new Cons(start, range(start + step, end, step))
- private[immutable] def filteredTail[A](stream: Stream[A], p: A => Boolean) = {
- new Stream.Cons(stream.head, stream.tail filter p)
- }
-
/** A stream containing all elements of a given iterator, in the order they are produced.
* @param it The iterator producing the stream's elements
*/
diff --git a/src/library/scala/collection/immutable/StreamView.scala b/src/library/scala/collection/immutable/StreamView.scala
deleted file mode 100644
index 9a7da3be89..0000000000
--- a/src/library/scala/collection/immutable/StreamView.scala
+++ /dev/null
@@ -1,12 +0,0 @@
-package scala.collection
-package immutable
-
-
-
-import scala.collection.generic.CanBuildFrom
-
-
-
-
-
-trait StreamView[+A, +Coll] extends StreamViewLike[A, Coll, StreamView[A, Coll]]
diff --git a/src/library/scala/collection/immutable/StreamViewLike.scala b/src/library/scala/collection/immutable/StreamViewLike.scala
deleted file mode 100644
index 0b76559a0b..0000000000
--- a/src/library/scala/collection/immutable/StreamViewLike.scala
+++ /dev/null
@@ -1,76 +0,0 @@
-package scala.collection
-package immutable
-
-
-
-import scala.collection.generic.CanBuildFrom
-
-
-
-
-
-trait StreamViewLike[+A,
- +Coll,
- +This <: StreamView[A, Coll] with StreamViewLike[A, Coll, This]]
-extends SeqView[A, Coll]
- with SeqViewLike[A, Coll, This]
-{ self =>
-
- override def force[B >: A, That](implicit bf: CanBuildFrom[Coll, B, That]) = {
- this.iterator.toStream.asInstanceOf[That]
- }
-
- trait Transformed[+B] extends StreamView[B, Coll] with super.Transformed[B]
-
- trait Forced[B] extends Transformed[B] with super.Forced[B]
-
- trait Sliced extends Transformed[A] with super.Sliced
-
- trait Mapped[B] extends Transformed[B] with super.Mapped[B]
-
- trait FlatMapped[B] extends Transformed[B] with super.FlatMapped[B]
-
- trait Appended[B >: A] extends Transformed[B] with super.Appended[B]
-
- trait Filtered extends Transformed[A] with super.Filtered
-
- trait TakenWhile extends Transformed[A] with super.TakenWhile
-
- trait DroppedWhile extends Transformed[A] with super.DroppedWhile
-
- trait Zipped[B] extends Transformed[(A, B)] with super.Zipped[B]
-
- trait ZippedAll[A1 >: A, B] extends Transformed[(A1, B)] with super.ZippedAll[A1, B]
-
- trait Reversed extends Transformed[A] with super.Reversed
-
- trait Patched[B >: A] extends Transformed[B] with super.Patched[B]
-
- trait Prepended[B >: A] extends Transformed[B] with super.Prepended[B]
-
- /** boilerplate */
- protected override def newForced[B](xs: => collection.Seq[B]): Transformed[B] = new Forced[B] { val forced = xs }
- protected override def newAppended[B >: A](that: collection.Traversable[B]): Transformed[B] = new Appended[B] { val rest = that }
- protected override def newMapped[B](f: A => B): Transformed[B] = new Mapped[B] { val mapping = f }
- protected override def newFlatMapped[B](f: A => collection.Traversable[B]): Transformed[B] = new FlatMapped[B] { val mapping = f }
- protected override def newFiltered(p: A => Boolean): Transformed[A] = new Filtered { val pred = p }
- protected override def newSliced(_from: Int, _until: Int): Transformed[A] = new Sliced { val from = _from; val until = _until }
- protected override def newDroppedWhile(p: A => Boolean): Transformed[A] = new DroppedWhile { val pred = p }
- protected override def newTakenWhile(p: A => Boolean): Transformed[A] = new TakenWhile { val pred = p }
- protected override def newZipped[B](that: collection.Iterable[B]): Transformed[(A, B)] = new Zipped[B] { val other = that }
- protected override def newZippedAll[A1 >: A, B](that: collection.Iterable[B], _thisElem: A1, _thatElem: B): Transformed[(A1, B)] = {
- new ZippedAll[A1, B] { val other = that; val thisElem = _thisElem; val thatElem = _thatElem }
- }
- protected override def newReversed: Transformed[A] = new Reversed { }
- protected override def newPatched[B >: A](_from: Int, _patch: collection.Seq[B], _replaced: Int): Transformed[B] = {
- new Patched[B] { val from = _from; val patch = _patch; val replaced = _replaced }
- }
- protected override def newPrepended[B >: A](elem: B): Transformed[B] = new Prepended[B] { protected[this] val fst = elem }
-
-}
-
-
-
-
-
-
diff --git a/src/library/scala/collection/immutable/StringOps.scala b/src/library/scala/collection/immutable/StringOps.scala
index 0d8f5f6b83..8a27a4ad4b 100644
--- a/src/library/scala/collection/immutable/StringOps.scala
+++ b/src/library/scala/collection/immutable/StringOps.scala
@@ -36,16 +36,8 @@ final class StringOps(override val repr: String) extends StringLike[String] {
/** Creates a string builder buffer as builder for this class */
override protected[this] def newBuilder = new StringBuilder
- override def slice(from: Int, until: Int): String = {
- /** Slice must be forgiving on all out of bounds indices and
- * substring is not.
- */
- val start = from max 0
- val end = until min repr.length
-
- if (start >= end) ""
- else repr.substring(start, end)
- }
+ override def slice(from: Int, until: Int): String =
+ repr.substring(from max 0, until min repr.length)
override def toString = repr
}
diff --git a/src/library/scala/collection/immutable/package.scala b/src/library/scala/collection/immutable/package.scala
deleted file mode 100644
index 5ff9fa223d..0000000000
--- a/src/library/scala/collection/immutable/package.scala
+++ /dev/null
@@ -1,81 +0,0 @@
-package scala.collection
-
-
-
-
-
-
-
-
-
-package object immutable {
-
- trait RangeUtils[+Repr <: RangeUtils[Repr]] {
-
- def start: Int
- def end: Int
- def step: Int
- def inclusive: Boolean
- def create(_start: Int, _end: Int, _step: Int, _inclusive: Boolean): Repr
-
- private final def inclusiveLast: Int = {
- val size = end.toLong - start.toLong
- (size / step.toLong * step.toLong + start.toLong).toInt
- }
-
- final def _last: Int = if (!inclusive) {
- if (step == 1 || step == -1) end - step
- else {
- val inclast = inclusiveLast
- if ((end.toLong - start.toLong) % step == 0) inclast - step else inclast
- }
- } else {
- if (step == 1 || step == -1) end
- else inclusiveLast
- }
-
- final def _foreach[U](f: Int => U) = if (_length > 0) {
- var i = start
- val last = _last
- while (i != last) {
- f(i)
- i += step
- }
- }
-
- final def _length: Int = if (!inclusive) {
- if (end > start == step > 0 && start != end) {
- (_last.toLong - start.toLong) / step.toLong + 1
- } else 0
- }.toInt else {
- if (end > start == step > 0 || start == end) {
- (_last.toLong - start.toLong) / step.toLong + 1
- } else 0
- }.toInt
-
- final def _apply(idx: Int): Int = {
- if (idx < 0 || idx >= _length) throw new IndexOutOfBoundsException(idx.toString)
- start + idx * step
- }
-
- private def locationAfterN(n: Int) = if (n > 0) {
- if (step > 0) ((start.toLong + step.toLong * n.toLong) min _last.toLong).toInt
- else ((start.toLong + step.toLong * n.toLong) max _last.toLong).toInt
- } else start
-
- final def _take(n: Int) = if (n > 0 && _length > 0) {
- create(start, locationAfterN(n), step, true)
- } else create(start, start, step, false)
-
- final def _drop(n: Int) = create(locationAfterN(n), end, step, inclusive)
-
- final def _slice(from: Int, until: Int) = _drop(from)._take(until - from)
-
- }
-
-}
-
-
-
-
-
diff --git a/src/library/scala/collection/mutable/ArrayBuffer.scala b/src/library/scala/collection/mutable/ArrayBuffer.scala
index a59a0db2e1..6412a21531 100644
--- a/src/library/scala/collection/mutable/ArrayBuffer.scala
+++ b/src/library/scala/collection/mutable/ArrayBuffer.scala
@@ -12,7 +12,6 @@ package scala.collection
package mutable
import generic._
-import parallel.mutable.ParallelArray
/** An implementation of the `Buffer` class using an array to
* represent the assembled sequence internally. Append, update and random
@@ -47,8 +46,7 @@ class ArrayBuffer[A](override protected val initialSize: Int)
with BufferLike[A, ArrayBuffer[A]]
with IndexedSeqOptimized[A, ArrayBuffer[A]]
with Builder[A, ArrayBuffer[A]]
- with ResizableArray[A]
- with Parallelizable[ParallelArray[A]] {
+ with ResizableArray[A] {
override def companion: GenericCompanion[ArrayBuffer] = ArrayBuffer
@@ -66,8 +64,6 @@ class ArrayBuffer[A](override protected val initialSize: Int)
}
}
- def par = ParallelArray.handoff[A](array.asInstanceOf[Array[A]], size)
-
/** Appends a single element to this buffer and returns
* the identity of the buffer. It takes constant amortized time.
*
diff --git a/src/library/scala/collection/mutable/ArrayOps.scala b/src/library/scala/collection/mutable/ArrayOps.scala
index 3cf6a642d2..d7072c0661 100644
--- a/src/library/scala/collection/mutable/ArrayOps.scala
+++ b/src/library/scala/collection/mutable/ArrayOps.scala
@@ -10,13 +10,9 @@
package scala.collection
package mutable
-import compat.Platform.arraycopy
import scala.reflect.ClassManifest
-import parallel.mutable.ParallelArray
-
-
/** This class serves as a wrapper for `Array`s with all the operations found in
* indexed sequences. Where needed, instances of arrays are implicitly converted
* into this class.
@@ -35,28 +31,19 @@ import parallel.mutable.ParallelArray
* @define mayNotTerminateInf
* @define willNotTerminateInf
*/
-abstract class ArrayOps[T] extends ArrayLike[T, Array[T]] with Parallelizable[ParallelArray[T]] {
+abstract class ArrayOps[T] extends ArrayLike[T, Array[T]] {
private def rowBuilder[U]: Builder[U, Array[U]] =
Array.newBuilder(
ClassManifest.fromClass(
repr.getClass.getComponentType.getComponentType.asInstanceOf[Predef.Class[U]]))
- override def copyToArray[U >: T](xs: Array[U], start: Int, len: Int) {
- var l = len
- if (repr.length < l) l = repr.length
- if (xs.length - start < l) l = xs.length - start max 0
- Array.copy(repr, 0, xs, start, l)
- }
-
override def toArray[U >: T : ClassManifest]: Array[U] =
if (implicitly[ClassManifest[U]].erasure eq repr.getClass.getComponentType)
repr.asInstanceOf[Array[U]]
else
super.toArray[U]
- def par = ParallelArray.handoff(repr)
-
/** Flattens a two-dimensional array by concatenating all its rows
* into a single array.
*
diff --git a/src/library/scala/collection/mutable/BufferLike.scala b/src/library/scala/collection/mutable/BufferLike.scala
index dd4b5f303f..80a8824a3b 100644
--- a/src/library/scala/collection/mutable/BufferLike.scala
+++ b/src/library/scala/collection/mutable/BufferLike.scala
@@ -64,13 +64,16 @@ trait BufferLike[A, +This <: BufferLike[A, This] with Buffer[A]]
with Shrinkable[A]
with Scriptable[A]
with Subtractable[A, This]
+ with Cloneable[This]
with SeqLike[A, This]
{ self : This =>
// Note this does not extend Addable because `+` is being phased out of
// all Seq-derived classes.
- // Abstract methods from Seq:
+ import scala.collection.{Iterable, Traversable}
+
+ // Abstract methods from IndexedSeq:
def apply(n: Int): A
def update(n: Int, newelem: A)
@@ -96,7 +99,7 @@ trait BufferLike[A, +This <: BufferLike[A, This] with Buffer[A]]
* @throws IndexOutofBoundsException if the index `n` is not in the valid range
* `0 <= n <= length`.
*/
- def insertAll(n: Int, elems: collection.Traversable[A])
+ def insertAll(n: Int, elems: Traversable[A])
/** Removes the element at a given index from this buffer.
*
diff --git a/src/library/scala/collection/mutable/BufferProxy.scala b/src/library/scala/collection/mutable/BufferProxy.scala
index 5d2e7fd86d..7adbb8ee3f 100644
--- a/src/library/scala/collection/mutable/BufferProxy.scala
+++ b/src/library/scala/collection/mutable/BufferProxy.scala
@@ -125,20 +125,16 @@ trait BufferProxy[A] extends Buffer[A] with Proxy {
* @param n the index where a new element will be inserted.
* @param iter the iterable object providing all elements to insert.
*/
- def insertAll(n: Int, iter: scala.collection.Iterable[A]) {
- self.insertAll(n, iter)
- }
+ def insertAll(n: Int, iter: scala.collection.Iterable[A]): Unit = self.insertAll(n, iter)
- override def insertAll(n: Int, iter: scala.collection.Traversable[A]) {
- self.insertAll(n, iter)
- }
+ override def insertAll(n: Int, iter: scala.collection.Traversable[A]): Unit = self.insertAll(n, iter)
/** Replace element at index `n` with the new element `newelem`.
*
* @param n the index of the element to replace.
* @param newelem the new element.
*/
- def update(n: Int, newelem: A) { self.update(n, newelem) }
+ def update(n: Int, newelem: A): Unit = self.update(n, newelem)
/** Removes the element on a given index position.
*
diff --git a/src/library/scala/collection/mutable/PriorityQueue.scala b/src/library/scala/collection/mutable/PriorityQueue.scala
index b1ca5fde3c..acdfc03597 100644
--- a/src/library/scala/collection/mutable/PriorityQueue.scala
+++ b/src/library/scala/collection/mutable/PriorityQueue.scala
@@ -37,6 +37,7 @@ class PriorityQueue[A](implicit ord: Ordering[A])
extends Seq[A]
with SeqLike[A, PriorityQueue[A]]
with Growable[A]
+ with Cloneable[PriorityQueue[A]]
with Builder[A, PriorityQueue[A]]
{
import ord._
diff --git a/src/library/scala/collection/mutable/Seq.scala b/src/library/scala/collection/mutable/Seq.scala
index c318dd34cf..eff387353e 100644
--- a/src/library/scala/collection/mutable/Seq.scala
+++ b/src/library/scala/collection/mutable/Seq.scala
@@ -29,6 +29,14 @@ trait Seq[A] extends Iterable[A]
with GenericTraversableTemplate[A, Seq]
with SeqLike[A, Seq[A]] {
override def companion: GenericCompanion[Seq] = Seq
+
+ /** Replaces element at given index with a new value.
+ *
+ * @param idx the index of the element to replace.
+ * @param elem the new value.
+ * @throws IndexOutOfBoundsException if the index is not valid.
+ */
+ def update(idx: Int, elem: A)
}
/** $factoryInfo
diff --git a/src/library/scala/collection/mutable/SeqLike.scala b/src/library/scala/collection/mutable/SeqLike.scala
deleted file mode 100644
index e16aa37fe2..0000000000
--- a/src/library/scala/collection/mutable/SeqLike.scala
+++ /dev/null
@@ -1,31 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala.collection
-package mutable
-
-import generic._
-
-/** A template trait for mutable sequences of type `mutable.Seq[A]`.
- * @tparam A the type of the elements of the set
- * @tparam This the type of the set itself.
- *
- */
-trait SeqLike[A, +This <: SeqLike[A, This] with Seq[A]]
- extends scala.collection.SeqLike[A, This]
- with Cloneable[This] {
- self =>
-
- /** Replaces element at given index with a new value.
- *
- * @param n the index of the element to replace.
- * @param lem the new value.
- * @throws IndexOutofBoundsException if the index is not valid.
- */
- def update(idx: Int, elem: A)
-}
diff --git a/src/library/scala/collection/mutable/Stack.scala b/src/library/scala/collection/mutable/Stack.scala
index c791066398..e678f7fa5c 100644
--- a/src/library/scala/collection/mutable/Stack.scala
+++ b/src/library/scala/collection/mutable/Stack.scala
@@ -62,8 +62,7 @@ class Stack[A] private (var elems: List[A]) extends scala.collection.Seq[A] with
* @param elems the element sequence.
* @return the stack with the new elements on top.
*/
- def push(elem1: A, elem2: A, elems: A*): this.type =
- this.push(elem1).push(elem2).pushAll(elems)
+ def push(elem1: A, elem2: A, elems: A*): this.type = this.push(elem1).push(elem2).pushAll(elems)
/** Push all elements in the given traversable object onto
* the stack. The last element in the traversable object
@@ -135,5 +134,5 @@ class Stack[A] private (var elems: List[A]) extends scala.collection.Seq[A] with
// !!! TODO - integrate
object Stack {
- def apply[A](xs: A*): Stack[A] = new Stack[A] pushAll xs
+ def apply[A](xs: A*): Stack[A] = new Stack[A] ++= xs
}
diff --git a/src/library/scala/collection/parallel/Combiner.scala b/src/library/scala/collection/parallel/Combiner.scala
deleted file mode 100644
index a37f642d42..0000000000
--- a/src/library/scala/collection/parallel/Combiner.scala
+++ /dev/null
@@ -1,66 +0,0 @@
-package scala.collection.parallel
-
-
-import scala.collection.Parallel
-import scala.collection.mutable.Builder
-import scala.collection.generic.Sizing
-
-
-
-/** The base trait for all combiners.
- * A combiner lets one construct collections incrementally just like
- * a regular builder, but also implements an efficient merge operation of two builders
- * via `combine` method. Once the collection is constructed, it may be obtained by invoking
- * the `result` method.
- *
- * @tparam Elem the type of the elements added to the builder
- * @tparam To the type of the collection the builder produces
- *
- * @author prokopec
- */
-trait Combiner[-Elem, +To] extends Builder[Elem, To] with Sizing with Parallel with TaskSupport {
- self: EnvironmentPassingCombiner[Elem, To] =>
-
- type EPC = EnvironmentPassingCombiner[Elem, To]
-
- /** Combines the contents of the receiver builder and the `other` builder,
- * producing a new builder containing both their elements.
- *
- * This method may combine the two builders by copying them into a larger collection,
- * by producing a lazy view that gets evaluated once `result` is invoked, or use
- * a merge operation specific to the data structure in question.
- *
- * Note that both the receiver builder and `other` builder become invalidated
- * after the invocation of this method, and should be cleared (see `clear`)
- * if they are to be used again.
- *
- * Also, combining two combiners `c1` and `c2` for which `c1 eq c2` is `true`, that is,
- * they are the same objects in memories, always does nothing and returns the first combiner.
- *
- * @tparam N the type of elements contained by the `other` builder
- * @tparam NewTo the type of collection produced by the `other` builder
- * @param other the other builder
- * @return the parallel builder containing both the elements of this and the `other` builder
- */
- def combine[N <: Elem, NewTo >: To](other: Combiner[N, NewTo]): Combiner[N, NewTo]
-
-}
-
-
-trait EnvironmentPassingCombiner[-Elem, +To] extends Combiner[Elem, To] {
- abstract override def result = {
- val res = super.result
-// res.environment = environment
- res
- }
-}
-
-
-
-
-
-
-
-
-
-
diff --git a/src/library/scala/collection/parallel/ParallelIterable.scala b/src/library/scala/collection/parallel/ParallelIterable.scala
deleted file mode 100644
index 4882dc19ee..0000000000
--- a/src/library/scala/collection/parallel/ParallelIterable.scala
+++ /dev/null
@@ -1,49 +0,0 @@
-package scala.collection.parallel
-
-
-import scala.collection.generic._
-import scala.collection.parallel.mutable.ParallelArrayCombiner
-import scala.collection.parallel.mutable.ParallelArray
-
-
-/** A template trait for parallel iterable collections.
- *
- * $paralleliterableinfo
- *
- * $sideeffects
- *
- * @tparam T the element type of the collection
- *
- * @author prokopec
- * @since 2.8
- */
-trait ParallelIterable[+T] extends Iterable[T]
- with GenericParallelTemplate[T, ParallelIterable]
- with ParallelIterableLike[T, ParallelIterable[T], Iterable[T]] {
- override def companion: GenericCompanion[ParallelIterable] with GenericParallelCompanion[ParallelIterable] = ParallelIterable
-}
-
-/** $factoryinfo
- */
-object ParallelIterable extends ParallelFactory[ParallelIterable] {
- implicit def canBuildFrom[T]: CanCombineFrom[Coll, T, ParallelIterable[T]] =
- new GenericCanCombineFrom[T]
-
- def newBuilder[T]: Combiner[T, ParallelIterable[T]] = ParallelArrayCombiner[T]
-
- def newCombiner[T]: Combiner[T, ParallelIterable[T]] = ParallelArrayCombiner[T]
-}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/src/library/scala/collection/parallel/ParallelIterableLike.scala b/src/library/scala/collection/parallel/ParallelIterableLike.scala
deleted file mode 100644
index 7ac2713b55..0000000000
--- a/src/library/scala/collection/parallel/ParallelIterableLike.scala
+++ /dev/null
@@ -1,940 +0,0 @@
-package scala.collection.parallel
-
-
-
-
-import scala.collection.mutable.Builder
-import scala.collection.mutable.ListBuffer
-import scala.collection.IterableLike
-import scala.collection.Parallel
-import scala.collection.Parallelizable
-import scala.collection.Sequentializable
-import scala.collection.generic._
-
-
-
-
-// TODO update docs!!
-/** A template trait for parallel collections of type `ParallelIterable[T]`.
- *
- * $paralleliterableinfo
- *
- * $sideeffects
- *
- * @tparam T the element type of the collection
- * @tparam Repr the type of the actual collection containing the elements
- *
- * @define paralleliterableinfo
- * This is a base trait for Scala parallel collections. It defines behaviour
- * common to all parallel collections. The actual parallel operation implementation
- * is found in the `ParallelIterableFJImpl` trait extending this trait. Concrete
- * parallel collections should inherit both this and that trait.
- *
- * Parallel operations are implemented with divide and conquer style algorithms that
- * parallelize well. The basic idea is to split the collection into smaller parts until
- * they are small enough to be operated on sequentially.
- *
- * All of the parallel operations are implemented in terms of several methods. The first is:
- * {{{
- * def split: Seq[Repr]
- * }}}
- * which splits the collection into a sequence of disjunct views. This is typically a
- * very fast operation which simply creates wrappers around the receiver collection.
- * These views can then be split recursively into smaller views and so on. Each of
- * the views is still a parallel collection.
- *
- * The next method is:
- * {{{
- * def combine[OtherRepr >: Repr](other: OtherRepr): OtherRepr
- * }}}
- * which combines this collection with the argument collection and returns a collection
- * containing both the elements of this collection and the argument collection. This behaviour
- * may be implemented by producing a view that iterates over both collections, by aggressively
- * copying all the elements into the new collection or by lazily creating a wrapper over both
- * collections that gets evaluated once it's needed. It is recommended to avoid copying all of
- * the elements for performance reasons, although that cost might be negligible depending on
- * the use case.
- *
- * Methods:
- * {{{
- * def seq: Repr
- * }}}
- * and
- * {{{
- * def par: Repr
- * }}}
- * produce a view of the collection that has sequential or parallel operations, respectively.
- *
- * The method:
- * {{{
- * def threshold(sz: Int, p: Int): Int
- * }}}
- * provides an estimate on the minimum number of elements the collection has before
- * the splitting stops and depends on the number of elements in the collection. A rule of the
- * thumb is the number of elements divided by 8 times the parallelism level. This method may
- * be overridden in concrete implementations if necessary.
- *
- * Finally, method `newParallelBuilder` produces a new parallel builder.
- *
- * Since this trait extends the `Iterable` trait, methods like `size` and `iterator` must also
- * be implemented.
- *
- * Each parallel collection is bound to a specific fork/join pool, on which dormant worker
- * threads are kept. One can change a fork/join pool of a collection any time except during
- * some method being invoked. The fork/join pool contains other information such as the parallelism
- * level, that is, the number of processors used. When a collection is created, it is assigned the
- * default fork/join pool found in the `scala.collection.parallel` package object.
- *
- * Parallel collections may or may not be strict, and they are not ordered in terms of the `foreach`
- * operation (see `Traversable`). In terms of the iterator of the collection, some collections
- * are ordered (for instance, parallel sequences).
- *
- * @author prokopec
- * @since 2.8
- *
- * @define sideeffects
- * The higher-order functions passed to certain operations may contain side-effects. Since implementations
- * of operations may not be sequential, this means that side-effects may not be predictable and may
- * produce data-races, deadlocks or invalidation of state if care is not taken. It is up to the programmer
- * to either avoid using side-effects or to use some form of synchronization when accessing mutable data.
- *
- * @define undefinedorder
- * The order in which the operations on elements are performed is unspecified and may be nondeterministic.
- *
- * @define pbfinfo
- * An implicit value of class `CanCombineFrom` which determines the
- * result class `That` from the current representation type `Repr` and
- * and the new element type `B`. This builder factory can provide a parallel
- * builder for the resulting collection.
- *
- * @define abortsignalling
- * This method will provide sequential views it produces with `abort` signalling capabilities. This means
- * that sequential views may send and read `abort` signals.
- *
- * @define indexsignalling
- * This method will provide sequential views it produces with `indexFlag` signalling capabilities. This means
- * that sequential views may set and read `indexFlag` state.
- */
-trait ParallelIterableLike[+T, +Repr <: Parallel, +SequentialView <: Iterable[T]]
-extends IterableLike[T, Repr]
- with Parallelizable[Repr]
- with Sequentializable[T, SequentialView]
- with Parallel
- with HasNewCombiner[T, Repr]
- with TaskSupport {
- self =>
-
- /** Parallel iterators are split iterators that have additional accessor and
- * transformer methods defined in terms of methods `next` and `hasNext`.
- * When creating a new parallel collection, one might want to override these
- * new methods to make them more efficient.
- *
- * Parallel iterators are augmented with signalling capabilities. This means
- * that a signalling object can be assigned to them as needed.
- *
- * The self-type ensures that signal context passing behaviour gets mixed in
- * a concrete object instance.
- */
- trait ParallelIterator extends ParallelIterableIterator[T, Repr] {
- me: SignalContextPassingIterator[ParallelIterator] =>
- var signalDelegate: Signalling = IdleSignalling
- def repr = self.repr
- def split: Seq[ParallelIterator]
- }
-
- /** A stackable modification that ensures signal contexts get passed along the iterators.
- * A self-type requirement in `ParallelIterator` ensures that this trait gets mixed into
- * concrete iterators.
- */
- trait SignalContextPassingIterator[+IterRepr <: ParallelIterator] extends ParallelIterator {
- // Note: This functionality must be factored out to this inner trait to avoid boilerplate.
- // Also, one could omit the cast below. However, this leads to return type inconsistencies,
- // due to inability to override the return type of _abstract overrides_.
- // Be aware that this stackable modification has to be subclassed, so it shouldn't be rigid
- // on the type of iterators it splits.
- // The alternative is some boilerplate - better to tradeoff some type safety to avoid it here.
- abstract override def split: Seq[IterRepr] = {
- val pits = super.split
- pits foreach { _.signalDelegate = signalDelegate }
- pits.asInstanceOf[Seq[IterRepr]]
- }
- }
-
- /** Convenience for signal context passing iterator.
- */
- type SCPI <: SignalContextPassingIterator[ParallelIterator]
-
- /** Creates a new parallel iterator used to traverse the elements of this parallel collection.
- * This iterator is more specific than the iterator of the returned by `iterator`, and augmented
- * with additional accessor and transformer methods.
- *
- * @return a parallel iterator
- */
- protected def parallelIterator: ParallelIterator
-
- /** Creates a new split iterator used to traverse the elements of this collection.
- *
- * By default, this method is implemented in terms of the protected `parallelIterator` method.
- *
- * @return a split iterator
- */
- def iterator: Splitter[T] = parallelIterator
-
- def par = repr
-
- /** Some minimal number of elements after which this collection should be handled
- * sequentially by different processors.
- *
- * This method depends on the size of the collection and the parallelism level, which
- * are both specified as arguments.
- *
- * @param sz the size based on which to compute the threshold
- * @param p the parallelism level based on which to compute the threshold
- * @return the maximum number of elements for performing operations sequentially
- */
- def threshold(sz: Int, p: Int): Int = thresholdFromSize(sz, p)
-
- /** The `newBuilder` operation returns a parallel builder assigned to this collection's fork/join pool.
- * This method forwards the call to `newCombiner`.
- */
- protected[this] override def newBuilder: collection.mutable.Builder[T, Repr] = newCombiner
-
- /** Optionally reuses existing combiner for better performance. By default it doesn't - subclasses may override this behaviour.
- * The provided combiner `oldc` that can potentially be reused will be either some combiner from the previous computational task, or `None` if there
- * was no previous phase (in which case this method must return `newc`).
- *
- * @param oldc The combiner that is the result of the previous task, or `None` if there was no previous task.
- * @param newc The new, empty combiner that can be used.
- * @return Either `newc` or `oldc`.
- */
- protected def reuse[S, That](oldc: Option[Combiner[S, That]], newc: Combiner[S, That]): Combiner[S, That] = newc
-
- /* convenience task operations wrapper */
- protected implicit def task2ops[R, Tp](tsk: Task[R, Tp]) = new {
- def mapResult[R1](mapping: R => R1): ResultMapping[R, Tp, R1] = new ResultMapping[R, Tp, R1](tsk) {
- def map(r: R): R1 = mapping(r)
- }
-
- def compose[R3, R2, Tp2](t2: Task[R2, Tp2])(resCombiner: (R, R2) => R3) = new SeqComposite[R, R2, R3, Task[R, Tp], Task[R2, Tp2]] {
- val ft = tsk
- val st = t2
- def combineResults(fr: R, sr: R2): R3 = resCombiner(fr, sr)
- }
-
- def parallel[R3, R2, Tp2](t2: Task[R2, Tp2])(resCombiner: (R, R2) => R3) = new ParComposite[R, R2, R3, Task[R, Tp], Task[R2, Tp2]] {
- val ft = tsk
- val st = t2
- def combineResults(fr: R, sr: R2): R3 = resCombiner(fr, sr)
- }
- }
-
- protected def wrap[R](body: => R) = new NonDivisible[R] {
- def leaf(prevr: Option[R]) = result = body
- var result: R = null.asInstanceOf[R]
- }
-
- /* convenience iterator operations wrapper */
- protected implicit def iterator2ops[PI <: ParallelIterator](it: PI) = new {
- def assign(cntx: Signalling): PI = {
- it.signalDelegate = cntx
- it
- }
- }
-
- protected implicit def builder2ops[Elem, To](cb: Builder[Elem, To]) = new {
- def ifIs[Cmb](isbody: Cmb => Unit) = new {
- def otherwise(notbody: => Unit)(implicit m: ClassManifest[Cmb]) {
- if (cb.getClass == m.erasure) isbody(cb.asInstanceOf[Cmb]) else notbody
- }
- }
- }
-
- override def toString = seq.mkString(stringPrefix + "(", ", ", ")")
-
- /** Reduces the elements of this sequence using the specified associative binary operator.
- *
- * $undefinedorder
- *
- * Note this method has a different signature than the `reduceLeft`
- * and `reduceRight` methods of the trait `Traversable`.
- * The result of reducing may only be a supertype of this parallel collection's
- * type parameter `T`.
- *
- * @tparam U A type parameter for the binary operator, a supertype of `T`.
- * @param op A binary operator that must be associative.
- * @return The result of applying reduce operator `op` between all the elements if the collection is nonempty.
- * @throws UnsupportedOperationException
- * if this $coll is empty.
- */
- def reduce[U >: T](op: (U, U) => U): U = {
- executeAndWaitResult(new Reduce(op, parallelIterator))
- }
-
- /** Optionally reduces the elements of this sequence using the specified associative binary operator.
- *
- * $undefinedorder
- *
- * Note this method has a different signature than the `reduceLeftOption`
- * and `reduceRightOption` methods of the trait `Traversable`.
- * The result of reducing may only be a supertype of this parallel collection's
- * type parameter `T`.
- *
- * @tparam U A type parameter for the binary operator, a supertype of `T`.
- * @param op A binary operator that must be associative.
- * @return An option value containing result of applying reduce operator `op` between all
- * the elements if the collection is nonempty, and `None` otherwise.
- */
- def reduceOption[U >: T](op: (U, U) => U): Option[U] = if (isEmpty) None else Some(reduce(op))
-
- /** Folds the elements of this sequence using the specified associative binary operator.
- * The order in which the elements are reduced is unspecified and may be nondeterministic.
- *
- * Note this method has a different signature than the `foldLeft`
- * and `foldRight` methods of the trait `Traversable`.
- * The result of folding may only be a supertype of this parallel collection's
- * type parameter `T`.
- *
- * @tparam U a type parameter for the binary operator, a supertype of `T`.
- * @param z a neutral element for the fold operation, it may be added to the result
- * an arbitrary number of times, not changing the result (e.g. `Nil` for list concatenation,
- * 0 for addition, or 1 for multiplication)
- * @param op a binary operator that must be associative
- * @return the result of applying fold operator `op` between all the elements and `z`
- */
- def fold[U >: T](z: U)(op: (U, U) => U): U = {
- executeAndWaitResult(new Fold(z, op, parallelIterator))
- }
-
- /** Aggregates the results of applying an operator to subsequent elements.
- *
- * This is a more general form of `fold` and `reduce`. It has similar semantics, but does
- * not require the result to be a supertype of the element type. It traverses the elements in
- * different partitions sequentially, using `seqop` to update the result, and then
- * applies `combop` to results from different partitions. The implementation of this
- * operation may operate on an arbitrary number of collection partitions, so `combop`
- * may be invoked arbitrary number of times.
- *
- * For example, one might want to process some elements and then produce a `Set`. In this
- * case, `seqop` would process an element and append it to the list, while `combop`
- * would concatenate two lists from different partitions together. The initial value
- * `z` would be an empty set.
- *
- * {{{
- * pc.aggregate(Set[Int]())(_ += process(_), _ ++ _)
- * }}}
- *
- * Another example is calculating geometric mean from a collection of doubles
- * (one would typically require big doubles for this).
- *
- * @tparam S the type of accumulated results
- * @param z the initial value for the accumulated result of the partition - this
- * will typically be the neutral element for the `seqop` operator (e.g.
- * `Nil` for list concatenation or `0` for summation)
- * @param seqop an operator used to accumulate results within a partition
- * @param combop an associative operator used to combine results from different partitions
- */
- def aggregate[S](z: S)(seqop: (S, T) => S, combop: (S, S) => S): S = {
- executeAndWaitResult(new Aggregate(z, seqop, combop, parallelIterator))
- }
-
- /** Applies a function `f` to all the elements of the receiver.
- *
- * $undefinedorder
- *
- * @tparam U the result type of the function applied to each element, which is always discarded
- * @param f function that's applied to each element
- */
- override def foreach[U](f: T => U): Unit = {
- executeAndWait(new Foreach(f, parallelIterator))
- }
-
- override def count(p: T => Boolean): Int = {
- executeAndWaitResult(new Count(p, parallelIterator))
- }
-
- override def sum[U >: T](implicit num: Numeric[U]): U = {
- executeAndWaitResult(new Sum[U](num, parallelIterator))
- }
-
- override def product[U >: T](implicit num: Numeric[U]): U = {
- executeAndWaitResult(new Product[U](num, parallelIterator))
- }
-
- override def min[U >: T](implicit ord: Ordering[U]): T = {
- executeAndWaitResult(new Min(ord, parallelIterator)).asInstanceOf[T]
- }
-
- override def max[U >: T](implicit ord: Ordering[U]): T = {
- executeAndWaitResult(new Max(ord, parallelIterator)).asInstanceOf[T]
- }
-
- override def map[S, That](f: T => S)(implicit bf: CanBuildFrom[Repr, S, That]): That = bf ifParallel { pbf =>
- executeAndWaitResult(new Map[S, That](f, pbf, parallelIterator) mapResult { _.result })
- } otherwise super.map(f)(bf)
-
- override def collect[S, That](pf: PartialFunction[T, S])(implicit bf: CanBuildFrom[Repr, S, That]): That = bf ifParallel { pbf =>
- executeAndWaitResult(new Collect[S, That](pf, pbf, parallelIterator) mapResult { _.result })
- } otherwise super.collect(pf)(bf)
-
- override def flatMap[S, That](f: T => Traversable[S])(implicit bf: CanBuildFrom[Repr, S, That]): That = bf ifParallel { pbf =>
- executeAndWaitResult(new FlatMap[S, That](f, pbf, parallelIterator) mapResult { _.result })
- } otherwise super.flatMap(f)(bf)
-
- /** Tests whether a predicate holds for all elements of this $coll.
- *
- * $abortsignalling
- *
- * @param p a predicate used to test elements
- * @return true if `p` holds for all elements, false otherwise
- */
- override def forall(pred: T => Boolean): Boolean = {
- executeAndWaitResult(new Forall(pred, parallelIterator assign new DefaultSignalling with VolatileAbort))
- }
-
- /** Tests whether a predicate holds for some element of this $coll.
- *
- * $abortsignalling
- *
- * @param p a predicate used to test elements
- * @return true if `p` holds for some element, false otherwise
- */
- override def exists(pred: T => Boolean): Boolean = {
- executeAndWaitResult(new Exists(pred, parallelIterator assign new DefaultSignalling with VolatileAbort))
- }
-
- /** Finds some element in the collection for which the predicate holds, if such
- * an element exists. The element may not necessarily be the first such element
- * in the iteration order.
- *
- * If there are multiple elements obeying the predicate, the choice is nondeterministic.
- *
- * $abortsignalling
- *
- * @param p predicate used to test the elements
- * @return an option value with the element if such an element exists, or `None` otherwise
- */
- override def find(pred: T => Boolean): Option[T] = {
- executeAndWaitResult(new Find(pred, parallelIterator assign new DefaultSignalling with VolatileAbort))
- }
-
- protected[this] def cbfactory = () => newCombiner
-
- override def filter(pred: T => Boolean): Repr = {
- executeAndWaitResult(new Filter(pred, cbfactory, parallelIterator) mapResult { _.result })
- }
-
- override def filterNot(pred: T => Boolean): Repr = {
- executeAndWaitResult(new FilterNot(pred, cbfactory, parallelIterator) mapResult { _.result })
- }
-
- override def ++[U >: T, That](that: TraversableOnce[U])(implicit bf: CanBuildFrom[Repr, U, That]): That = {
- if (that.isParallel && bf.isParallel) {
- // println("case both are parallel")
- val other = that.asParallelIterable
- val pbf = bf.asParallel
- val copythis = new Copy(() => pbf(repr), parallelIterator)
- val copythat = wrap {
- val othtask = new other.Copy(() => pbf(self.repr), other.parallelIterator)
- othtask.compute
- othtask.result
- }
- val task = (copythis parallel copythat) { _ combine _ } mapResult { _.result }
- executeAndWaitResult(task)
- } else if (bf.isParallel) {
- // println("case parallel builder, `that` not parallel")
- val pbf = bf.asParallel
- val copythis = new Copy(() => pbf(repr), parallelIterator)
- val copythat = wrap {
- val cb = pbf(repr)
- for (elem <- that) cb += elem
- cb
- }
- executeAndWaitResult((copythis parallel copythat) { _ combine _ } mapResult { _.result })
- } else {
- // println("case not a parallel builder")
- val b = bf(repr)
- this.parallelIterator.copy2builder[U, That, Builder[U, That]](b)
- if (that.isInstanceOf[Parallel]) for (elem <- that.asInstanceOf[Iterable[U]].iterator) b += elem
- else for (elem <- that) b += elem
- b.result
- }
- }
-
- override def partition(pred: T => Boolean): (Repr, Repr) = {
- executeAndWaitResult(new Partition(pred, cbfactory, parallelIterator) mapResult { p => (p._1.result, p._2.result) })
- }
-
- override def take(n: Int): Repr = {
- val actualn = if (size > n) n else size
- if (actualn < MIN_FOR_COPY) take_sequential(actualn)
- else executeAndWaitResult(new Take(actualn, cbfactory, parallelIterator) mapResult { _.result })
- }
-
- private def take_sequential(n: Int) = {
- val cb = newCombiner
- cb.sizeHint(n)
- val it = parallelIterator
- var left = n
- while (left > 0) {
- cb += it.next
- left -= 1
- }
- cb.result
- }
-
- override def drop(n: Int): Repr = {
- val actualn = if (size > n) n else size
- if ((size - actualn) < MIN_FOR_COPY) drop_sequential(actualn)
- else executeAndWaitResult(new Drop(actualn, cbfactory, parallelIterator) mapResult { _.result })
- }
-
- private def drop_sequential(n: Int) = {
- val it = parallelIterator drop n
- val cb = newCombiner
- cb.sizeHint(size - n)
- while (it.hasNext) cb += it.next
- cb.result
- }
-
- override def slice(unc_from: Int, unc_until: Int): Repr = {
- val from = unc_from min size max 0
- val until = unc_until min size max from
- if ((until - from) <= MIN_FOR_COPY) slice_sequential(from, until)
- else executeAndWaitResult(new Slice(from, until, cbfactory, parallelIterator) mapResult { _.result })
- }
-
- private def slice_sequential(from: Int, until: Int): Repr = {
- val cb = newCombiner
- var left = until - from
- val it = parallelIterator drop from
- while (left > 0) {
- cb += it.next
- left -= 1
- }
- cb.result
- }
-
- override def splitAt(n: Int): (Repr, Repr) = {
- executeAndWaitResult(new SplitAt(n, cbfactory, parallelIterator) mapResult { p => (p._1.result, p._2.result) })
- }
-
- /** Takes the longest prefix of elements that satisfy the predicate.
- *
- * $indexsignalling
- * The index flag is initially set to maximum integer value.
- *
- * @param pred the predicate used to test the elements
- * @return the longest prefix of this $coll of elements that satisy the predicate `pred`
- */
- override def takeWhile(pred: T => Boolean): Repr = {
- val cntx = new DefaultSignalling with AtomicIndexFlag
- cntx.setIndexFlag(Int.MaxValue)
- executeAndWaitResult(new TakeWhile(0, pred, cbfactory, parallelIterator assign cntx) mapResult { _._1.result })
- }
-
- /** Splits this $coll into a prefix/suffix pair according to a predicate.
- *
- * $indexsignalling
- * The index flag is initially set to maximum integer value.
- *
- * @param pred the predicate used to test the elements
- * @return a pair consisting of the longest prefix of the collection for which all
- * the elements satisfy `pred`, and the rest of the collection
- */
- override def span(pred: T => Boolean): (Repr, Repr) = {
- val cntx = new DefaultSignalling with AtomicIndexFlag
- cntx.setIndexFlag(Int.MaxValue)
- executeAndWaitResult(new Span(0, pred, cbfactory, parallelIterator assign cntx) mapResult {
- p => (p._1.result, p._2.result)
- })
- }
-
- /** Drops all elements in the longest prefix of elements that satisfy the predicate,
- * and returns a collection composed of the remaining elements.
- *
- * $indexsignalling
- * The index flag is initially set to maximum integer value.
- *
- * @param pred the predicate used to test the elements
- * @return a collection composed of all the elements after the longest prefix of elements
- * in this $coll that satisfy the predicate `pred`
- */
- override def dropWhile(pred: T => Boolean): Repr = {
- val cntx = new DefaultSignalling with AtomicIndexFlag
- cntx.setIndexFlag(Int.MaxValue)
- executeAndWaitResult(new Span(0, pred, cbfactory, parallelIterator assign cntx) mapResult { _._2.result })
- }
-
- override def copyToArray[U >: T](xs: Array[U], start: Int, len: Int) = if (len > 0) {
- executeAndWait(new CopyToArray(start, len, xs, parallelIterator))
- }
-
- override def toIterable: Iterable[T] = seq.drop(0).asInstanceOf[Iterable[T]]
-
- override def toArray[U >: T: ClassManifest]: Array[U] = {
- val arr = new Array[U](size)
- copyToArray(arr)
- arr
- }
-
- override def toList: List[T] = seq.toList
-
- override def toIndexedSeq[S >: T]: collection.immutable.IndexedSeq[S] = seq.toIndexedSeq[S]
-
- override def toStream: Stream[T] = seq.toStream
-
- override def toSet[S >: T]: collection.immutable.Set[S] = seq.toSet
-
- override def toSeq: Seq[T] = seq.toSeq
-
- /* tasks */
-
- /** Standard accessor task that iterates over the elements of the collection.
- *
- * @tparam R type of the result of this method (`R` for result).
- * @tparam Tp the representation type of the task at hand.
- */
- protected trait Accessor[R, Tp]
- extends super.Task[R, Tp] {
- val pit: ParallelIterator
- def newSubtask(p: ParallelIterator): Accessor[R, Tp]
- def shouldSplitFurther = pit.remaining > threshold(size, parallelismLevel)
- def split = pit.split.map(newSubtask(_)) // default split procedure
- override def toString = "Accessor(" + pit.toString + ")"
- }
-
- protected[this] trait NonDivisibleTask[R, Tp] extends super.Task[R, Tp] {
- def shouldSplitFurther = false
- def split = throw new UnsupportedOperationException("Does not split.")
- override def toString = "NonDivisibleTask"
- }
-
- protected[this] trait NonDivisible[R] extends NonDivisibleTask[R, NonDivisible[R]]
-
- protected[this] trait Composite[FR, SR, R, First <: super.Task[FR, _], Second <: super.Task[SR, _]]
- extends NonDivisibleTask[R, Composite[FR, SR, R, First, Second]] {
- val ft: First
- val st: Second
- def combineResults(fr: FR, sr: SR): R
- var result: R = null.asInstanceOf[R]
- }
-
- /** Sequentially performs one task after another. */
- protected[this] trait SeqComposite[FR, SR, R, First <: super.Task[FR, _], Second <: super.Task[SR, _]]
- extends Composite[FR, SR, R, First, Second] {
- def leaf(prevr: Option[R]) = {
- ft.compute
- st.compute
- result = combineResults(ft.result, st.result)
- }
- }
-
- /** Performs two tasks in parallel, and waits for both to finish. */
- protected[this] trait ParComposite[FR, SR, R, First <: super.Task[FR, _], Second <: super.Task[SR, _]]
- extends Composite[FR, SR, R, First, Second] {
- def leaf(prevr: Option[R]) = {
- st.start
- ft.compute
- st.sync
- result = combineResults(ft.result, st.result)
- }
- }
-
- protected[this] abstract class ResultMapping[R, Tp, R1](val inner: Task[R, Tp])
- extends NonDivisibleTask[R1, ResultMapping[R, Tp, R1]] {
- var result: R1 = null.asInstanceOf[R1]
- def map(r: R): R1
- def leaf(prevr: Option[R1]) = {
- inner.compute
- result = map(inner.result)
- }
- }
-
- protected trait Transformer[R, Tp] extends Accessor[R, Tp]
-
- protected[this] class Foreach[S](op: T => S, val pit: ParallelIterator) extends Accessor[Unit, Foreach[S]] {
- var result: Unit = ()
- def leaf(prevr: Option[Unit]) = pit.foreach(op)
- def newSubtask(p: ParallelIterator) = new Foreach[S](op, p)
- }
-
- protected[this] class Count(pred: T => Boolean, val pit: ParallelIterator) extends Accessor[Int, Count] {
- var result: Int = 0
- def leaf(prevr: Option[Int]) = result = pit.count(pred)
- def newSubtask(p: ParallelIterator) = new Count(pred, p)
- override def merge(that: Count) = result = result + that.result
- }
-
- protected[this] class Reduce[U >: T](op: (U, U) => U, val pit: ParallelIterator) extends Accessor[U, Reduce[U]] {
- var result: U = null.asInstanceOf[U]
- def leaf(prevr: Option[U]) = result = pit.reduce(op)
- def newSubtask(p: ParallelIterator) = new Reduce(op, p)
- override def merge(that: Reduce[U]) = result = op(result, that.result)
- }
-
- protected[this] class Fold[U >: T](z: U, op: (U, U) => U, val pit: ParallelIterator) extends Accessor[U, Fold[U]] {
- var result: U = null.asInstanceOf[U]
- def leaf(prevr: Option[U]) = result = pit.fold(z)(op)
- def newSubtask(p: ParallelIterator) = new Fold(z, op, p)
- override def merge(that: Fold[U]) = result = op(result, that.result)
- }
-
- protected[this] class Aggregate[S](z: S, seqop: (S, T) => S, combop: (S, S) => S, val pit: ParallelIterator)
- extends Accessor[S, Aggregate[S]] {
- var result: S = null.asInstanceOf[S]
- def leaf(prevr: Option[S]) = result = pit.foldLeft(z)(seqop)
- def newSubtask(p: ParallelIterator) = new Aggregate(z, seqop, combop, p)
- override def merge(that: Aggregate[S]) = result = combop(result, that.result)
- }
-
- protected[this] class Sum[U >: T](num: Numeric[U], val pit: ParallelIterator) extends Accessor[U, Sum[U]] {
- var result: U = null.asInstanceOf[U]
- def leaf(prevr: Option[U]) = result = pit.sum(num)
- def newSubtask(p: ParallelIterator) = new Sum(num, p)
- override def merge(that: Sum[U]) = result = num.plus(result, that.result)
- }
-
- protected[this] class Product[U >: T](num: Numeric[U], val pit: ParallelIterator) extends Accessor[U, Product[U]] {
- var result: U = null.asInstanceOf[U]
- def leaf(prevr: Option[U]) = result = pit.product(num)
- def newSubtask(p: ParallelIterator) = new Product(num, p)
- override def merge(that: Product[U]) = result = num.times(result, that.result)
- }
-
- protected[this] class Min[U >: T](ord: Ordering[U], val pit: ParallelIterator) extends Accessor[U, Min[U]] {
- var result: U = null.asInstanceOf[U]
- def leaf(prevr: Option[U]) = result = pit.min(ord)
- def newSubtask(p: ParallelIterator) = new Min(ord, p)
- override def merge(that: Min[U]) = result = if (ord.lteq(result, that.result)) result else that.result
- }
-
- protected[this] class Max[U >: T](ord: Ordering[U], val pit: ParallelIterator) extends Accessor[U, Max[U]] {
- var result: U = null.asInstanceOf[U]
- def leaf(prevr: Option[U]) = result = pit.max(ord)
- def newSubtask(p: ParallelIterator) = new Max(ord, p)
- override def merge(that: Max[U]) = result = if (ord.gteq(result, that.result)) result else that.result
- }
-
- protected[this] class Map[S, That](f: T => S, pbf: CanCombineFrom[Repr, S, That], val pit: ParallelIterator)
- extends Transformer[Combiner[S, That], Map[S, That]] {
- var result: Combiner[S, That] = null
- def leaf(prev: Option[Combiner[S, That]]) = result = pit.map2combiner(f, reuse(prev, pbf(self.repr)))
- def newSubtask(p: ParallelIterator) = new Map(f, pbf, p)
- override def merge(that: Map[S, That]) = result = result combine that.result
- }
-
- protected[this] class Collect[S, That]
- (pf: PartialFunction[T, S], pbf: CanCombineFrom[Repr, S, That], val pit: ParallelIterator)
- extends Transformer[Combiner[S, That], Collect[S, That]] {
- var result: Combiner[S, That] = null
- def leaf(prev: Option[Combiner[S, That]]) = result = pit.collect2combiner[S, That](pf, pbf) // TODO
- def newSubtask(p: ParallelIterator) = new Collect(pf, pbf, p)
- override def merge(that: Collect[S, That]) = result = result combine that.result
- }
-
- protected[this] class FlatMap[S, That](f: T => Traversable[S], pbf: CanCombineFrom[Repr, S, That], val pit: ParallelIterator)
- extends Transformer[Combiner[S, That], FlatMap[S, That]] {
- var result: Combiner[S, That] = null
- def leaf(prev: Option[Combiner[S, That]]) = result = pit.flatmap2combiner(f, pbf) // TODO
- def newSubtask(p: ParallelIterator) = new FlatMap(f, pbf, p)
- override def merge(that: FlatMap[S, That]) = result = result combine that.result
- }
-
- protected[this] class Forall(pred: T => Boolean, val pit: ParallelIterator) extends Accessor[Boolean, Forall] {
- var result: Boolean = true
- def leaf(prev: Option[Boolean]) = { if (!pit.isAborted) result = pit.forall(pred); if (result == false) pit.abort }
- def newSubtask(p: ParallelIterator) = new Forall(pred, p)
- override def merge(that: Forall) = result = result && that.result
- }
-
- protected[this] class Exists(pred: T => Boolean, val pit: ParallelIterator) extends Accessor[Boolean, Exists] {
- var result: Boolean = false
- def leaf(prev: Option[Boolean]) = { if (!pit.isAborted) result = pit.exists(pred); if (result == true) pit.abort }
- def newSubtask(p: ParallelIterator) = new Exists(pred, p)
- override def merge(that: Exists) = result = result || that.result
- }
-
- protected[this] class Find[U >: T](pred: T => Boolean, val pit: ParallelIterator) extends Accessor[Option[U], Find[U]] {
- var result: Option[U] = None
- def leaf(prev: Option[Option[U]]) = { if (!pit.isAborted) result = pit.find(pred); if (result != None) pit.abort }
- def newSubtask(p: ParallelIterator) = new Find(pred, p)
- override def merge(that: Find[U]) = if (this.result == None) result = that.result
- }
-
- protected[this] class Filter[U >: T, This >: Repr](pred: T => Boolean, cbf: () => Combiner[U, This], val pit: ParallelIterator)
- extends Transformer[Combiner[U, This], Filter[U, This]] {
- var result: Combiner[U, This] = null
- def leaf(prev: Option[Combiner[U, This]]) = result = pit.filter2combiner(pred, reuse(prev, cbf()))
- def newSubtask(p: ParallelIterator) = new Filter(pred, cbf, p)
- override def merge(that: Filter[U, This]) = result = result combine that.result
- }
-
- protected[this] class FilterNot[U >: T, This >: Repr](pred: T => Boolean, cbf: () => Combiner[U, This], val pit: ParallelIterator)
- extends Transformer[Combiner[U, This], FilterNot[U, This]] {
- var result: Combiner[U, This] = null
- def leaf(prev: Option[Combiner[U, This]]) = result = pit.filterNot2combiner(pred, reuse(prev, cbf()))
- def newSubtask(p: ParallelIterator) = new FilterNot(pred, cbf, p)
- override def merge(that: FilterNot[U, This]) = result = result combine that.result
- }
-
- protected class Copy[U >: T, That](cfactory: () => Combiner[U, That], val pit: ParallelIterator)
- extends Transformer[Combiner[U, That], Copy[U, That]] {
- var result: Combiner[U, That] = null
- def leaf(prev: Option[Combiner[U, That]]) = result = pit.copy2builder[U, That, Combiner[U, That]](reuse(prev, cfactory()))
- def newSubtask(p: ParallelIterator) = new Copy[U, That](cfactory, p)
- override def merge(that: Copy[U, That]) = result = result combine that.result
- }
-
- protected[this] class Partition[U >: T, This >: Repr](pred: T => Boolean, cbf: () => Combiner[U, This], val pit: ParallelIterator)
- extends Transformer[(Combiner[U, This], Combiner[U, This]), Partition[U, This]] {
- var result: (Combiner[U, This], Combiner[U, This]) = null
- def leaf(prev: Option[(Combiner[U, This], Combiner[U, This])]) = result = pit.partition2combiners(pred, reuse(prev.map(_._1), cbf()), reuse(prev.map(_._2), cbf()))
- def newSubtask(p: ParallelIterator) = new Partition(pred, cbf, p)
- override def merge(that: Partition[U, This]) = result = (result._1 combine that.result._1, result._2 combine that.result._2)
- }
-
- protected[this] class Take[U >: T, This >: Repr](n: Int, cbf: () => Combiner[U, This], val pit: ParallelIterator)
- extends Transformer[Combiner[U, This], Take[U, This]] {
- var result: Combiner[U, This] = null
- def leaf(prev: Option[Combiner[U, This]]) = result = pit.take2combiner(n, reuse(prev, cbf()))
- def newSubtask(p: ParallelIterator) = throw new UnsupportedOperationException
- override def split = {
- val pits = pit.split
- val sizes = pits.scanLeft(0)(_ + _.remaining)
- for ((p, untilp) <- pits zip sizes; if untilp <= n) yield {
- if (untilp + p.remaining < n) new Take(p.remaining, cbf, p)
- else new Take(n - untilp, cbf, p)
- }
- }
- override def merge(that: Take[U, This]) = result = result combine that.result
- }
-
- protected[this] class Drop[U >: T, This >: Repr](n: Int, cbf: () => Combiner[U, This], val pit: ParallelIterator)
- extends Transformer[Combiner[U, This], Drop[U, This]] {
- var result: Combiner[U, This] = null
- def leaf(prev: Option[Combiner[U, This]]) = result = pit.drop2combiner(n, reuse(prev, cbf()))
- def newSubtask(p: ParallelIterator) = throw new UnsupportedOperationException
- override def split = {
- val pits = pit.split
- val sizes = pits.scanLeft(0)(_ + _.remaining)
- for ((p, withp) <- pits zip sizes.tail; if withp >= n) yield {
- if (withp - p.remaining > n) new Drop(0, cbf, p)
- else new Drop(n - withp + p.remaining, cbf, p)
- }
- }
- override def merge(that: Drop[U, This]) = result = result combine that.result
- }
-
- protected[this] class Slice[U >: T, This >: Repr](from: Int, until: Int, cbf: () => Combiner[U, This], val pit: ParallelIterator)
- extends Transformer[Combiner[U, This], Slice[U, This]] {
- var result: Combiner[U, This] = null
- def leaf(prev: Option[Combiner[U, This]]) = result = pit.slice2combiner(from, until, reuse(prev, cbf()))
- def newSubtask(p: ParallelIterator) = throw new UnsupportedOperationException
- override def split = {
- val pits = pit.split
- val sizes = pits.scanLeft(0)(_ + _.remaining)
- for ((p, untilp) <- pits zip sizes; if untilp + p.remaining >= from || untilp <= until) yield {
- val f = (from max untilp) - untilp
- val u = (until min (untilp + p.remaining)) - untilp
- new Slice(f, u, cbf, p)
- }
- }
- override def merge(that: Slice[U, This]) = result = result combine that.result
- }
-
- protected[this] class SplitAt[U >: T, This >: Repr](at: Int, cbf: () => Combiner[U, This], val pit: ParallelIterator)
- extends Transformer[(Combiner[U, This], Combiner[U, This]), SplitAt[U, This]] {
- var result: (Combiner[U, This], Combiner[U, This]) = null
- def leaf(prev: Option[(Combiner[U, This], Combiner[U, This])]) = result = pit.splitAt2combiners(at, reuse(prev.map(_._1), cbf()), reuse(prev.map(_._2), cbf()))
- def newSubtask(p: ParallelIterator) = throw new UnsupportedOperationException
- override def split = {
- val pits = pit.split
- val sizes = pits.scanLeft(0)(_ + _.remaining)
- for ((p, untilp) <- pits zip sizes) yield new SplitAt((at max untilp min (untilp + p.remaining)) - untilp, cbf, p)
- }
- override def merge(that: SplitAt[U, This]) = result = (result._1 combine that.result._1, result._2 combine that.result._2)
- }
-
- protected[this] class TakeWhile[U >: T, This >: Repr]
- (pos: Int, pred: T => Boolean, cbf: () => Combiner[U, This], val pit: ParallelIterator)
- extends Transformer[(Combiner[U, This], Boolean), TakeWhile[U, This]] {
- var result: (Combiner[U, This], Boolean) = null
- def leaf(prev: Option[(Combiner[U, This], Boolean)]) = if (pos < pit.indexFlag) {
- result = pit.takeWhile2combiner(pred, reuse(prev.map(_._1), cbf()))
- if (!result._2) pit.setIndexFlagIfLesser(pos)
- } else result = (reuse(prev.map(_._1), cbf()), false)
- def newSubtask(p: ParallelIterator) = throw new UnsupportedOperationException
- override def split = {
- val pits = pit.split
- for ((p, untilp) <- pits zip pits.scanLeft(0)(_ + _.remaining)) yield new TakeWhile(pos + untilp, pred, cbf, p)
- }
- override def merge(that: TakeWhile[U, This]) = if (result._2) {
- result = (result._1 combine that.result._1, that.result._2)
- }
- }
-
- protected[this] class Span[U >: T, This >: Repr]
- (pos: Int, pred: T => Boolean, cbf: () => Combiner[U, This], val pit: ParallelIterator)
- extends Transformer[(Combiner[U, This], Combiner[U, This]), Span[U, This]] {
- var result: (Combiner[U, This], Combiner[U, This]) = null
- def leaf(prev: Option[(Combiner[U, This], Combiner[U, This])]) = if (pos < pit.indexFlag) {
- result = pit.span2combiners(pred, reuse(prev.map(_._1), cbf()), reuse(prev.map(_._2), cbf()))
- if (result._2.size > 0) pit.setIndexFlagIfLesser(pos)
- } else {
- result = (reuse(prev.map(_._2), cbf()), pit.copy2builder[U, This, Combiner[U, This]](reuse(prev.map(_._2), cbf())))
- }
- def newSubtask(p: ParallelIterator) = throw new UnsupportedOperationException
- override def split = {
- val pits = pit.split
- for ((p, untilp) <- pits zip pits.scanLeft(0)(_ + _.remaining)) yield new Span(pos + untilp, pred, cbf, p)
- }
- override def merge(that: Span[U, This]) = result = if (result._2.size == 0) {
- (result._1 combine that.result._1, that.result._2)
- } else {
- (result._1, result._2 combine that.result._1 combine that.result._2)
- }
- }
-
- protected[this] class CopyToArray[U >: T, This >: Repr](from: Int, len: Int, array: Array[U], val pit: ParallelIterator)
- extends Accessor[Unit, CopyToArray[U, This]] {
- var result: Unit = ()
- def leaf(prev: Option[Unit]) = pit.copyToArray(array, from, len)
- def newSubtask(p: ParallelIterator) = throw new UnsupportedOperationException
- override def split = {
- val pits = pit.split
- for ((p, untilp) <- pits zip pits.scanLeft(0)(_ + _.remaining); if untilp < len) yield {
- val plen = p.remaining min (len - untilp)
- new CopyToArray[U, This](from + untilp, plen, array, p)
- }
- }
- }
-
-}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/src/library/scala/collection/parallel/ParallelIterableView.scala b/src/library/scala/collection/parallel/ParallelIterableView.scala
deleted file mode 100644
index f40f02eb3b..0000000000
--- a/src/library/scala/collection/parallel/ParallelIterableView.scala
+++ /dev/null
@@ -1,33 +0,0 @@
-package scala.collection.parallel
-
-
-
-
-import scala.collection.Parallel
-import scala.collection.TraversableViewLike
-import scala.collection.IterableView
-
-
-
-
-/** A template view of a non-strict view of a parallel iterable collection.
- *
- * @tparam T ...
- * @tparam Coll ...
- *
- * @since 2.8
- */
-trait ParallelIterableView[+T, +Coll <: Parallel, +CollSeq]
-extends ParallelIterableViewLike[T, Coll, CollSeq, ParallelIterableView[T, Coll, CollSeq], IterableView[T, CollSeq]]
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/src/library/scala/collection/parallel/ParallelIterableViewLike.scala b/src/library/scala/collection/parallel/ParallelIterableViewLike.scala
deleted file mode 100644
index 024eb48d25..0000000000
--- a/src/library/scala/collection/parallel/ParallelIterableViewLike.scala
+++ /dev/null
@@ -1,59 +0,0 @@
-package scala.collection.parallel
-
-
-
-
-import scala.collection.Parallel
-import scala.collection.TraversableViewLike
-import scala.collection.IterableView
-import scala.collection.IterableViewLike
-
-
-
-
-
-/** A template view of a non-strict view of parallel iterable collection.
- *
- * '''Note:''' Regular view traits have type parameters used to carry information
- * about the type of the elements, type of the collection they are derived from and
- * their own actual representation type. Parallel views have an additional parameter
- * which carries information about the type of the sequential version of the view.
- *
- * @tparam T the type of the elements this view can traverse
- * @tparam Coll the type of the collection this view is derived from
- * @tparam CollSeq TODO
- * @tparam This the actual representation type of this view
- * @tparam ThisSeq the type of the sequential representation of this view
- *
- * @since 2.8
- */
-trait ParallelIterableViewLike[+T,
- +Coll <: Parallel,
- +CollSeq,
- +This <: ParallelIterableView[T, Coll, CollSeq] with ParallelIterableViewLike[T, Coll, CollSeq, This, ThisSeq],
- +ThisSeq <: IterableView[T, CollSeq] with IterableViewLike[T, CollSeq, ThisSeq]]
-extends IterableView[T, Coll]
- with IterableViewLike[T, Coll, This]
- with ParallelIterable[T]
- with ParallelIterableLike[T, This, ThisSeq]
-{
- self =>
-
- override protected[this] def newCombiner: Combiner[T, This] = throw new UnsupportedOperationException(this + ".newCombiner");
-
- //type SCPI = SignalContextPassingIterator[ParallelIterator] // complains when overriden further in inh. hier., TODO check it out
- type CPI = SignalContextPassingIterator[ParallelIterator]
-
- trait Transformed[+S] extends ParallelIterableView[S, Coll, CollSeq] with super.Transformed[S]
-
-}
-
-
-
-
-
-
-
-
-
-
diff --git a/src/library/scala/collection/parallel/ParallelMap.scala b/src/library/scala/collection/parallel/ParallelMap.scala
deleted file mode 100644
index 5ce61469bc..0000000000
--- a/src/library/scala/collection/parallel/ParallelMap.scala
+++ /dev/null
@@ -1,71 +0,0 @@
-package scala.collection.parallel
-
-
-
-
-
-import scala.collection.Map
-import scala.collection.mutable.Builder
-import scala.collection.generic.ParallelMapFactory
-import scala.collection.generic.GenericParallelMapTemplate
-import scala.collection.generic.GenericParallelMapCompanion
-import scala.collection.generic.CanCombineFrom
-
-
-
-
-
-
-trait ParallelMap[K, +V]
-extends Map[K, V]
- with GenericParallelMapTemplate[K, V, ParallelMap]
- with ParallelIterable[(K, V)]
- with ParallelMapLike[K, V, ParallelMap[K, V], Map[K, V]]
-{
-self =>
-
- def mapCompanion: GenericParallelMapCompanion[ParallelMap] = ParallelMap
-
- override def empty: ParallelMap[K, V] = new immutable.ParallelHashTrie[K, V]
-
- override def stringPrefix = "ParallelMap"
-}
-
-
-
-object ParallelMap extends ParallelMapFactory[ParallelMap] {
- def empty[K, V]: ParallelMap[K, V] = new immutable.ParallelHashTrie[K, V]
-
- def newCombiner[K, V]: Combiner[(K, V), ParallelMap[K, V]] = immutable.HashTrieCombiner[K, V]
-
- implicit def canBuildFrom[K, V]: CanCombineFrom[Coll, (K, V), ParallelMap[K, V]] = new CanCombineFromMap[K, V]
-
-}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/src/library/scala/collection/parallel/ParallelMapLike.scala b/src/library/scala/collection/parallel/ParallelMapLike.scala
deleted file mode 100644
index 8a0b54525f..0000000000
--- a/src/library/scala/collection/parallel/ParallelMapLike.scala
+++ /dev/null
@@ -1,43 +0,0 @@
-package scala.collection.parallel
-
-
-
-
-import scala.collection.MapLike
-import scala.collection.Map
-import scala.collection.mutable.Builder
-
-
-
-
-
-
-
-
-trait ParallelMapLike[K,
- +V,
- +Repr <: ParallelMapLike[K, V, Repr, SequentialView] with ParallelMap[K, V],
- +SequentialView <: Map[K, V]]
-extends MapLike[K, V, Repr]
- with ParallelIterableLike[(K, V), Repr, SequentialView]
-{ self =>
-
- protected[this] override def newBuilder: Builder[(K, V), Repr] = newCombiner
-
- protected[this] override def newCombiner: Combiner[(K, V), Repr] = error("Must be implemented in concrete classes.")
-
- override def empty: Repr
-
-}
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/src/library/scala/collection/parallel/ParallelSeq.scala b/src/library/scala/collection/parallel/ParallelSeq.scala
deleted file mode 100644
index 71b802cd11..0000000000
--- a/src/library/scala/collection/parallel/ParallelSeq.scala
+++ /dev/null
@@ -1,64 +0,0 @@
-package scala.collection.parallel
-
-
-
-import scala.collection.generic.GenericCompanion
-import scala.collection.generic.GenericParallelCompanion
-import scala.collection.generic.GenericParallelTemplate
-import scala.collection.generic.ParallelFactory
-import scala.collection.generic.CanCombineFrom
-import scala.collection.parallel.mutable.ParallelArrayCombiner
-import scala.collection.parallel.mutable.ParallelArray
-
-
-/** A template trait for parallel sequences.
- *
- * $parallelseqinfo
- *
- * $sideeffects
- */
-trait ParallelSeq[+T] extends Seq[T]
- with ParallelIterable[T]
- with GenericParallelTemplate[T, ParallelSeq]
- with ParallelSeqLike[T, ParallelSeq[T], Seq[T]] {
- override def companion: GenericCompanion[ParallelSeq] with GenericParallelCompanion[ParallelSeq] = ParallelSeq
-
- def apply(i: Int): T
-
- override def toString = super[ParallelIterable].toString
-}
-
-
-object ParallelSeq extends ParallelFactory[ParallelSeq] {
- implicit def canBuildFrom[T]: CanCombineFrom[Coll, T, ParallelSeq[T]] = new GenericCanCombineFrom[T]
-
- def newBuilder[T]: Combiner[T, ParallelSeq[T]] = ParallelArrayCombiner[T]
-
- def newCombiner[T]: Combiner[T, ParallelSeq[T]] = ParallelArrayCombiner[T]
-}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/src/library/scala/collection/parallel/ParallelSeqLike.scala b/src/library/scala/collection/parallel/ParallelSeqLike.scala
deleted file mode 100644
index 18b0c83f23..0000000000
--- a/src/library/scala/collection/parallel/ParallelSeqLike.scala
+++ /dev/null
@@ -1,473 +0,0 @@
-package scala.collection.parallel
-
-
-import scala.collection.Parallel
-import scala.collection.SeqLike
-import scala.collection.generic.DefaultSignalling
-import scala.collection.generic.AtomicIndexFlag
-import scala.collection.generic.CanBuildFrom
-import scala.collection.generic.CanCombineFrom
-import scala.collection.generic.VolatileAbort
-
-
-
-
-// TODO update docs!!
-/** A template trait for sequences of type `ParallelSeq[T]`, representing
- * parallel sequences with element type `T`.
- *
- * $parallelseqinfo
- *
- * @tparam T the type of the elements contained in this collection
- * @tparam Repr the type of the actual collection containing the elements
- *
- * @define parallelseqinfo
- * Parallel sequences inherit the `IndexedSeq` trait. This means they provide
- * efficient indexing and length computations. Like their sequential counterparts
- * they always have a defined order of elements. This means they will produce resulting
- * parallel sequences in the same way sequential sequences do. However, the order
- * in which they iterate over elements to produce results is not defined and is generally
- * nondeterministic. If the higher-order functions given to them produce no sideeffects,
- * then this won't be noticeable.
- *
- * This trait defines a new, more general `split` operation and reimplements the `split`
- * operation of `ParallelIterable` trait using the new `split` operation.
- *
- * @author prokopec
- * @since 2.8
- */
-trait ParallelSeqLike[+T, +Repr <: Parallel, +Sequential <: Seq[T] with SeqLike[T, Sequential]]
-extends scala.collection.SeqLike[T, Repr]
- with ParallelIterableLike[T, Repr, Sequential] {
- self =>
-
- type SuperParallelIterator = super.ParallelIterator
-
- /** An iterator that can be split into arbitrary subsets of iterators.
- * The self-type requirement ensures that the signal context passing behaviour gets mixed in
- * the concrete iterator instance in some concrete collection.
- *
- * '''Note:''' In concrete collection classes, collection implementers might want to override the iterator
- * `reverse2builder` method to ensure higher efficiency.
- */
- trait ParallelIterator extends ParallelSeqIterator[T, Repr] with super.ParallelIterator {
- me: SignalContextPassingIterator[ParallelIterator] =>
- def split: Seq[ParallelIterator]
- def psplit(sizes: Int*): Seq[ParallelIterator]
- }
-
- /** A stackable modification that ensures signal contexts get passed along the iterators.
- * A self-type requirement in `ParallelIterator` ensures that this trait gets mixed into
- * concrete iterators.
- */
- trait SignalContextPassingIterator[+IterRepr <: ParallelIterator]
- extends ParallelIterator with super.SignalContextPassingIterator[IterRepr] {
- // Note: See explanation in `ParallelIterableLike.this.SignalContextPassingIterator`
- // to understand why we do the cast here, and have a type parameter.
- // Bottomline: avoiding boilerplate and fighting against inability to override stackable modifications.
- abstract override def psplit(sizes: Int*): Seq[IterRepr] = {
- val pits = super.psplit(sizes: _*)
- pits foreach { _.signalDelegate = signalDelegate }
- pits.asInstanceOf[Seq[IterRepr]]
- }
- }
-
- /** A convenient shorthand for the signal context passing stackable modification.
- */
- type SCPI <: SignalContextPassingIterator[ParallelIterator]
-
- /** A more refined version of the iterator found in the `ParallelIterable` trait,
- * this iterator can be split into arbitrary subsets of iterators.
- *
- * @return an iterator that can be split into subsets of precise size
- */
- protected def parallelIterator: ParallelIterator
-
- override def iterator: PreciseSplitter[T] = parallelIterator
-
- override def size = length
-
- /** Used to iterate elements using indices */
- protected abstract class Elements(start: Int, val end: Int) extends ParallelIterator with BufferedIterator[T] {
- me: SignalContextPassingIterator[ParallelIterator] =>
-
- private var i = start
-
- def hasNext = i < end
-
- def next: T = if (i < end) {
- val x = self(i)
- i += 1
- x
- } else Iterator.empty.next
-
- def head = self(i)
-
- final def remaining = end - i
-
- def split = psplit(remaining / 2, remaining - remaining / 2)
-
- def psplit(sizes: Int*) = {
- val incr = sizes.scanLeft(0)(_ + _)
- for ((from, until) <- incr.init zip incr.tail) yield {
- new Elements(start + from, (start + until) min end) with SignalContextPassingIterator[ParallelIterator]
- }
- }
-
- override def toString = "Elements(" + start + ", " + end + ")"
- }
-
- /* ParallelSeq methods */
-
- /** Returns the length of the longest segment of elements starting at
- * a given position satisfying some predicate.
- *
- * $indexsignalling
- *
- * The index flag is initially set to maximum integer value.
- *
- * @param p the predicate used to test the elements
- * @param from the starting offset for the search
- * @return the length of the longest segment of elements starting at `from` and
- * satisfying the predicate
- */
- override def segmentLength(p: T => Boolean, from: Int): Int = if (from >= length) 0 else {
- val realfrom = if (from < 0) 0 else from
- val ctx = new DefaultSignalling with AtomicIndexFlag
- ctx.setIndexFlag(Int.MaxValue)
- executeAndWaitResult(new SegmentLength(p, 0, parallelIterator.psplit(realfrom, length - realfrom)(1) assign ctx))._1
- }
-
- override def prefixLength(p: T => Boolean) = segmentLength(p, 0)
-
- /** Finds the first element satisfying some predicate.
- *
- * $indexsignalling
- *
- * The index flag is initially set to maximum integer value.
- *
- * @param p the predicate used to test the elements
- * @param from the starting offset for the search
- * @return the index `>= from` of the first element of this $coll that satisfies the predicate `p`,
- * or `-1`, if none exists
- */
- override def indexWhere(p: T => Boolean, from: Int): Int = if (from >= length) -1 else {
- val realfrom = if (from < 0) 0 else from
- val ctx = new DefaultSignalling with AtomicIndexFlag
- ctx.setIndexFlag(Int.MaxValue)
- executeAndWaitResult(new IndexWhere(p, realfrom, parallelIterator.psplit(realfrom, length - realfrom)(1) assign ctx))
- }
-
- override def indexWhere(p: T => Boolean): Int = indexWhere(p, 0)
-
- override def findIndexOf(p: T => Boolean): Int = indexWhere(p, 0)
-
- override def indexOf[U >: T](elem: U): Int = indexOf(elem, 0)
-
- override def indexOf[U >: T](elem: U, from: Int): Int = indexWhere(elem ==, from)
-
- /** Finds the last element satisfying some predicate.
- *
- * $indexsignalling
- *
- * The index flag is initially set to minimum integer value.
- *
- * @param p the predicate used to test the elements
- * @param end the maximum offset for the search
- * @return the index `<= end` of the first element of this $coll that satisfies the predicate `p`,
- * or `-1`, if none exists
- */
- override def lastIndexWhere(p: T => Boolean, end: Int): Int = if (end < 0) -1 else {
- val until = if (end >= length) length else end + 1
- val ctx = new DefaultSignalling with AtomicIndexFlag
- ctx.setIndexFlag(Int.MinValue)
- executeAndWaitResult(new LastIndexWhere(p, 0, parallelIterator.psplit(until, length - until)(0) assign ctx))
- }
-
- override def reverse: Repr = {
- executeAndWaitResult(new Reverse(() => newCombiner, parallelIterator) mapResult { _.result })
- }
-
- override def reverseMap[S, That](f: T => S)(implicit bf: CanBuildFrom[Repr, S, That]): That = bf ifParallel { pbf =>
- executeAndWaitResult(new ReverseMap[S, That](f, pbf, parallelIterator) mapResult { _.result })
- } otherwise super.reverseMap(f)(bf)
-
- override def startsWith[S](that: Seq[S]): Boolean = startsWith(that, 0)
-
- /** Tests whether this $coll contains the given sequence at a given index.
- *
- * $abortsignalling
- *
- * @tparam U the element type of `that` parallel sequence
- * @param that the parallel sequence this sequence is being searched for
- * @param offset the starting offset for the search
- * @return `true` if there is a sequence `that` starting at `offset` in this sequence, `false` otherwise
- */
- override def startsWith[S](that: Seq[S], offset: Int): Boolean = that ifParallelSeq { pthat =>
- if (offset < 0 || offset >= length) offset == length && pthat.length == 0
- else if (pthat.length == 0) true
- else if (pthat.length > length - offset) false
- else {
- val ctx = new DefaultSignalling with VolatileAbort
- executeAndWaitResult(new SameElements(parallelIterator.psplit(offset, pthat.length)(1) assign ctx, pthat.parallelIterator))
- }
- } otherwise super.startsWith(that, offset)
-
- override def sameElements[U >: T](that: Iterable[U]): Boolean = that ifParallelSeq { pthat =>
- val ctx = new DefaultSignalling with VolatileAbort
- length == pthat.length && executeAndWaitResult(new SameElements(parallelIterator assign ctx, pthat.parallelIterator))
- } otherwise super.sameElements(that)
-
- /** Tests whether this $coll ends with the given parallel sequence
- *
- * $abortsignalling
- *
- * @tparam S the type of the elements of `that` sequence
- * @param that the sequence to test
- * @return `true` if this $coll has `that` as a suffix, `false` otherwise
- */
- override def endsWith[S](that: Seq[S]): Boolean = that ifParallelSeq { pthat =>
- if (that.length == 0) true
- else if (that.length > length) false
- else {
- val ctx = new DefaultSignalling with VolatileAbort
- val tlen = that.length
- executeAndWaitResult(new SameElements(parallelIterator.psplit(length - tlen, tlen)(1) assign ctx, pthat.parallelIterator))
- }
- } otherwise super.endsWith(that)
-
- override def patch[U >: T, That](from: Int, patch: Seq[U], replaced: Int)
- (implicit bf: CanBuildFrom[Repr, U, That]): That = if (patch.isParallelSeq && bf.isParallel) {
- val that = patch.asParallelSeq
- val pbf = bf.asParallel
- val realreplaced = replaced min (length - from)
- val pits = parallelIterator.psplit(from, replaced, length - from - realreplaced)
- val copystart = new Copy[U, That](() => pbf(repr), pits(0))
- val copymiddle = wrap {
- val tsk = new that.Copy[U, That](() => pbf(repr), that.parallelIterator)
- tsk.compute
- tsk.result
- }
- val copyend = new Copy[U, That](() => pbf(repr), pits(2))
- executeAndWaitResult(((copystart parallel copymiddle) { _ combine _ } parallel copyend) { _ combine _ } mapResult { _.result })
- } else patch_sequential(from, patch, replaced)
-
- private def patch_sequential[U >: T, That](from: Int, patch: Seq[U], r: Int)(implicit bf: CanBuildFrom[Repr, U, That]): That = {
- val b = bf(repr)
- val repl = r min (length - from)
- val pits = parallelIterator.psplit(from, repl, length - from - repl)
- b ++= pits(0)
- b ++= patch.iterator
- b ++= pits(2)
- b.result
- }
-
- override def updated[U >: T, That](index: Int, elem: U)(implicit bf: CanBuildFrom[Repr, U, That]): That = bf ifParallel { pbf =>
- executeAndWaitResult(new Updated(index, elem, pbf, parallelIterator) mapResult { _.result })
- } otherwise super.updated(index, elem)
-
- override def +:[U >: T, That](elem: U)(implicit bf: CanBuildFrom[Repr, U, That]): That = {
- patch(0, mutable.ParallelArray(elem), 0)
- }
-
- override def :+[U >: T, That](elem: U)(implicit bf: CanBuildFrom[Repr, U, That]): That = {
- patch(length, mutable.ParallelArray(elem), 0)
- }
-
- override def padTo[U >: T, That](len: Int, elem: U)(implicit bf: CanBuildFrom[Repr, U, That]): That = if (length < len) {
- patch(length, new immutable.Repetition(elem, len - length), 0)
- } else patch(length, Nil, 0)
-
- /** Tests whether every element of this $coll relates to the
- * corresponding element of another parallel sequence by satisfying a test predicate.
- *
- * $abortsignalling
- *
- * @param that the other parallel sequence
- * @param p the test predicate, which relates elements from both sequences
- * @tparam S the type of the elements of `that`
- * @return `true` if both parallel sequences have the same length and
- * `p(x, y)` is `true` for all corresponding elements `x` of this $coll
- * and `y` of `that`, otherwise `false`
- */
- override def corresponds[S](that: Seq[S])(p: (T, S) => Boolean): Boolean = that ifParallelSeq { pthat =>
- val ctx = new DefaultSignalling with VolatileAbort
- length == pthat.length && executeAndWaitResult(new Corresponds(p, parallelIterator assign ctx, pthat.parallelIterator))
- } otherwise super.corresponds(that)(p)
-
- override def toString = seq.mkString(stringPrefix + "(", ", ", ")")
-
- override def view = new ParallelSeqView[T, Repr, Sequential] {
- protected lazy val underlying = self.repr
- def length = self.length
- def apply(idx: Int) = self(idx)
- def seq = self.seq.view
- def parallelIterator = new Elements(0, length) with SCPI {}
- }
-
- override def view(from: Int, until: Int) = view.slice(from, until)
-
- /* tasks */
-
- protected def down(p: SuperParallelIterator) = p.asInstanceOf[ParallelIterator]
-
- protected trait Accessor[R, Tp] extends super.Accessor[R, Tp] {
- val pit: ParallelIterator
- }
-
- protected trait Transformer[R, Tp] extends Accessor[R, Tp] with super.Transformer[R, Tp]
-
- protected[this] class SegmentLength(pred: T => Boolean, from: Int, val pit: ParallelIterator)
- extends Accessor[(Int, Boolean), SegmentLength] {
- var result: (Int, Boolean) = null
- def leaf(prev: Option[(Int, Boolean)]) = if (from < pit.indexFlag) {
- val itsize = pit.remaining
- val seglen = pit.prefixLength(pred)
- result = (seglen, itsize == seglen)
- if (!result._2) pit.setIndexFlagIfLesser(from)
- } else result = (0, false)
- def newSubtask(p: SuperParallelIterator) = throw new UnsupportedOperationException
- override def split = {
- val pits = pit.split
- for ((p, untilp) <- pits zip pits.scanLeft(0)(_ + _.remaining)) yield new SegmentLength(pred, from + untilp, p)
- }
- override def merge(that: SegmentLength) = if (result._2) result = (result._1 + that.result._1, that.result._2)
- }
-
- protected[this] class IndexWhere(pred: T => Boolean, from: Int, val pit: ParallelIterator)
- extends Accessor[Int, IndexWhere] {
- var result: Int = -1
- def leaf(prev: Option[Int]) = if (from < pit.indexFlag) {
- val r = pit.indexWhere(pred)
- if (r != -1) {
- result = from + r
- pit.setIndexFlagIfLesser(from)
- }
- }
- def newSubtask(p: SuperParallelIterator) = throw new UnsupportedOperationException
- override def split = {
- val pits = pit.split
- for ((p, untilp) <- pits zip pits.scanLeft(from)(_ + _.remaining)) yield new IndexWhere(pred, untilp, p)
- }
- override def merge(that: IndexWhere) = result = if (result == -1) that.result else {
- if (that.result != -1) result min that.result else result
- }
- }
-
- protected[this] class LastIndexWhere(pred: T => Boolean, pos: Int, val pit: ParallelIterator)
- extends Accessor[Int, LastIndexWhere] {
- var result: Int = -1
- def leaf(prev: Option[Int]) = if (pos > pit.indexFlag) {
- val r = pit.lastIndexWhere(pred)
- if (r != -1) {
- result = pos + r
- pit.setIndexFlagIfGreater(pos)
- }
- }
- def newSubtask(p: SuperParallelIterator) = throw new UnsupportedOperationException
- override def split = {
- val pits = pit.split
- for ((p, untilp) <- pits zip pits.scanLeft(pos)(_ + _.remaining)) yield new LastIndexWhere(pred, untilp, p)
- }
- override def merge(that: LastIndexWhere) = result = if (result == -1) that.result else {
- if (that.result != -1) result max that.result else result
- }
- }
-
- protected[this] class Reverse[U >: T, This >: Repr](cbf: () => Combiner[U, This], val pit: ParallelIterator)
- extends Transformer[Combiner[U, This], Reverse[U, This]] {
- var result: Combiner[U, This] = null
- def leaf(prev: Option[Combiner[U, This]]) = result = pit.reverse2combiner(reuse(prev, cbf()))
- def newSubtask(p: SuperParallelIterator) = new Reverse(cbf, down(p))
- override def merge(that: Reverse[U, This]) = result = that.result combine result
- }
-
- protected[this] class ReverseMap[S, That](f: T => S, pbf: CanCombineFrom[Repr, S, That], val pit: ParallelIterator)
- extends Transformer[Combiner[S, That], ReverseMap[S, That]] {
- var result: Combiner[S, That] = null
- def leaf(prev: Option[Combiner[S, That]]) = result = pit.reverseMap2combiner(f, pbf) // TODO
- def newSubtask(p: SuperParallelIterator) = new ReverseMap(f, pbf, down(p))
- override def merge(that: ReverseMap[S, That]) = result = that.result combine result
- }
-
- protected[this] class SameElements[U >: T](val pit: ParallelIterator, val otherpit: PreciseSplitter[U])
- extends Accessor[Boolean, SameElements[U]] {
- var result: Boolean = true
- def leaf(prev: Option[Boolean]) = if (!pit.isAborted) {
- result = pit.sameElements(otherpit)
- if (!result) pit.abort
- }
- def newSubtask(p: SuperParallelIterator) = throw new UnsupportedOperationException
- override def split = {
- val fp = pit.remaining / 2
- val sp = pit.remaining - fp
- for ((p, op) <- pit.psplit(fp, sp) zip otherpit.psplit(fp, sp)) yield new SameElements(p, op)
- }
- override def merge(that: SameElements[U]) = result = result && that.result
- }
-
- protected[this] class Updated[U >: T, That](pos: Int, elem: U, pbf: CanCombineFrom[Repr, U, That], val pit: ParallelIterator)
- extends Transformer[Combiner[U, That], Updated[U, That]] {
- var result: Combiner[U, That] = null
- def leaf(prev: Option[Combiner[U, That]]) = result = pit.updated2combiner(pos, elem, pbf) // TODO
- def newSubtask(p: SuperParallelIterator) = throw new UnsupportedOperationException
- override def split = {
- val pits = pit.split
- for ((p, untilp) <- pits zip pits.scanLeft(0)(_ + _.remaining)) yield new Updated(pos - untilp, elem, pbf, p)
- }
- override def merge(that: Updated[U, That]) = result = result combine that.result
- }
-
- protected[this] class Corresponds[S](corr: (T, S) => Boolean, val pit: ParallelIterator, val otherpit: PreciseSplitter[S])
- extends Accessor[Boolean, Corresponds[S]] {
- var result: Boolean = true
- def leaf(prev: Option[Boolean]) = if (!pit.isAborted) {
- result = pit.corresponds(corr)(otherpit)
- if (!result) pit.abort
- }
- def newSubtask(p: SuperParallelIterator) = throw new UnsupportedOperationException
- override def split = {
- val fp = pit.remaining / 2
- val sp = pit.remaining - fp
- for ((p, op) <- pit.psplit(fp, sp) zip otherpit.psplit(fp, sp)) yield new Corresponds(corr, p, op)
- }
- override def merge(that: Corresponds[S]) = result = result && that.result
- }
-
-}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/src/library/scala/collection/parallel/ParallelSeqView.scala b/src/library/scala/collection/parallel/ParallelSeqView.scala
deleted file mode 100644
index 7862e99f44..0000000000
--- a/src/library/scala/collection/parallel/ParallelSeqView.scala
+++ /dev/null
@@ -1,64 +0,0 @@
-package scala.collection.parallel
-
-
-
-
-import scala.collection.TraversableView
-import scala.collection.SeqView
-import scala.collection.Parallel
-import scala.collection.generic.CanCombineFrom
-
-
-
-
-
-/** A template view of a non-strict view of a parallel sequence.
- *
- * @tparam T
- * @tparam Coll
- *
- * @since 2.8
- */
-trait ParallelSeqView[+T, +Coll <: Parallel, +CollSeq]
-extends ParallelSeqViewLike[T, Coll, CollSeq, ParallelSeqView[T, Coll, CollSeq], SeqView[T, CollSeq]]
-
-
-
-object ParallelSeqView {
- abstract class NoCombiner[T] extends Combiner[T, Nothing] {
- self: EnvironmentPassingCombiner[T, Nothing] =>
- def +=(elem: T): this.type = this
- def iterator: Iterator[T] = Iterator.empty
- def result() = throw new UnsupportedOperationException("ParallelSeqView.Combiner.result")
- def size = throw new UnsupportedOperationException("ParallelSeqView.Combiner.size")
- def clear() {}
- def combine[N <: T, NewTo >: Nothing](other: Combiner[N, NewTo]) =
- throw new UnsupportedOperationException("ParallelSeqView.Combiner.result")
- }
-
- type Coll = ParallelSeqView[_, C, _] forSome { type C <: ParallelSeq[_] }
-
- implicit def canBuildFrom[T]: CanCombineFrom[Coll, T, ParallelSeqView[T, ParallelSeq[T], Seq[T]]] =
- new CanCombineFrom[Coll, T, ParallelSeqView[T, ParallelSeq[T], Seq[T]]] {
- def apply(from: Coll) = new NoCombiner[T] with EnvironmentPassingCombiner[T, Nothing]
- def apply() = new NoCombiner[T] with EnvironmentPassingCombiner[T, Nothing]
- }
-}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/src/library/scala/collection/parallel/ParallelSeqViewLike.scala b/src/library/scala/collection/parallel/ParallelSeqViewLike.scala
deleted file mode 100644
index eab4d7ad5f..0000000000
--- a/src/library/scala/collection/parallel/ParallelSeqViewLike.scala
+++ /dev/null
@@ -1,192 +0,0 @@
-package scala.collection.parallel
-
-
-
-
-
-import scala.collection.SeqView
-import scala.collection.SeqViewLike
-import scala.collection.Parallel
-import scala.collection.generic.CanBuildFrom
-import scala.collection.generic.CanCombineFrom
-
-
-
-
-
-
-
-/** A template view of a non-strict view of parallel sequence.
- *
- * @tparam T the type of the elements in this view
- * @tparam Coll type of the collection this view is derived from
- * @tparam CollSeq TODO
- * @tparam This actual representation type of this view
- * @tparam ThisSeq type of the sequential version of this view
- *
- * @since 2.8
- */
-trait ParallelSeqViewLike[+T,
- +Coll <: Parallel,
- +CollSeq,
- +This <: ParallelSeqView[T, Coll, CollSeq] with ParallelSeqViewLike[T, Coll, CollSeq, This, ThisSeq],
- +ThisSeq <: SeqView[T, CollSeq] with SeqViewLike[T, CollSeq, ThisSeq]]
-extends SeqView[T, Coll]
- with SeqViewLike[T, Coll, This]
- with ParallelIterableView[T, Coll, CollSeq]
- with ParallelIterableViewLike[T, Coll, CollSeq, This, ThisSeq]
- with ParallelSeq[T]
- with ParallelSeqLike[T, This, ThisSeq]
-{
- self =>
-
- type SCPI = SignalContextPassingIterator[ParallelIterator]
-
- trait Transformed[+S] extends ParallelSeqView[S, Coll, CollSeq]
- with super[ParallelIterableView].Transformed[S] with super[SeqView].Transformed[S] {
- override def parallelIterator = new Elements(0, length) with SCPI {}
- override def iterator = parallelIterator
- environment = self.environment
- }
-
- trait Forced[S] extends super.Forced[S] with Transformed[S] {
- // cheating here - knowing that `underlying` of `self.seq` is of type `CollSeq`,
- // we use it to obtain a view of the correct type - not the most efficient thing
- // in the universe, but without making `newForced` more accessible, or adding
- // a `forced` method to `SeqView`, this is the best we can do
- def seq = self.seq.take(0).++(forced).asInstanceOf[SeqView[S, CollSeq]]
- }
-
- trait Filtered extends super.Filtered with Transformed[T] {
- def seq = self.seq filter pred
- }
-
- trait Sliced extends super.Sliced with Transformed[T] {
- override def slice(from1: Int, until1: Int): This = newSliced(from1 max 0, until1 max 0).asInstanceOf[This]
- def seq = self.seq.slice(from, until)
- }
-
- trait Appended[U >: T] extends super.Appended[U] with Transformed[U] {
- def seq = self.seq.++(rest).asInstanceOf[SeqView[U, CollSeq]]
- }
-
- trait Mapped[S] extends super.Mapped[S] with Transformed[S]{
- def seq = self.seq.map(mapping).asInstanceOf[SeqView[S, CollSeq]]
- }
-
- trait FlatMapped[S] extends super.FlatMapped[S] with Transformed[S] {
- def seq = self.seq.flatMap(mapping).asInstanceOf[SeqView[S, CollSeq]]
- }
-
- trait TakenWhile extends super.TakenWhile with Transformed[T] {
- def seq = self.seq takeWhile pred
- }
-
- trait DroppedWhile extends super.DroppedWhile with Transformed[T] {
- def seq = self.seq dropWhile pred
- }
-
- trait Zipped[S] extends super.Zipped[S] with Transformed[(T, S)] {
- def seq = (self.seq zip other).asInstanceOf[SeqView[(T, S), CollSeq]]
- }
-
- trait ZippedAll[T1 >: T, S] extends super.ZippedAll[T1, S] with Transformed[(T1, S)] {
- def seq = self.seq.zipAll(other, thisElem, thatElem).asInstanceOf[SeqView[(T1, S), CollSeq]]
- }
-
- trait Reversed extends super.Reversed with Transformed[T] {
- def seq = self.seq.reverse
- }
-
- trait Patched[U >: T] extends super.Patched[U] with Transformed[U] {
- def seq = self.seq.patch(from, patch, replaced).asInstanceOf[SeqView[U, CollSeq]]
- }
-
- trait Prepended[U >: T] extends super.Prepended[U] with Transformed[U] {
- def seq = (fst +: self.seq).asInstanceOf[SeqView[U, CollSeq]]
- }
-
- protected override def newFiltered(p: T => Boolean): Transformed[T] = new Filtered { val pred = p }
- protected override def newSliced(f: Int, u: Int): Transformed[T] = new Sliced { val from = f; val until = u }
- protected override def newAppended[U >: T](that: Traversable[U]): Transformed[U] = new Appended[U] { val rest = that }
- protected override def newMapped[S](f: T => S): Transformed[S] = new Mapped[S] { val mapping = f }
- protected override def newFlatMapped[S](f: T => Traversable[S]): Transformed[S] = new FlatMapped[S] { val mapping = f }
- protected override def newDroppedWhile(p: T => Boolean): Transformed[T] = new DroppedWhile { val pred = p }
- protected override def newTakenWhile(p: T => Boolean): Transformed[T] = new TakenWhile { val pred = p }
- protected override def newZipped[S](that: Iterable[S]): Transformed[(T, S)] = new Zipped[S] { val other = that }
- protected override def newZippedAll[T1 >: T, S](that: Iterable[S], _thisElem: T1, _thatElem: S): Transformed[(T1, S)] = new ZippedAll[T1, S] { val other = that; val thisElem = _thisElem; val thatElem = _thatElem }
- protected override def newReversed: Transformed[T] = new Reversed { }
- protected override def newPatched[U >: T](_from: Int, _patch: Seq[U], _replaced: Int): Transformed[U] = new Patched[U] { val from = _from; val patch = _patch; val replaced = _replaced }
- protected override def newPrepended[U >: T](elem: U): Transformed[U] = new Prepended[U] { protected[this] val fst = elem }
-
- override def filter(p: T => Boolean): This = newFiltered(p).asInstanceOf[This]
- override def filterNot(p: T => Boolean): This = newFiltered(!p(_)).asInstanceOf[This]
- override def partition(p: T => Boolean): (This, This) = (filter(p), filterNot(p))
- override def slice(from: Int, until: Int): This = newSliced(from, until).asInstanceOf[This]
- override def take(n: Int): This = newSliced(0, n).asInstanceOf[This]
- override def drop(n: Int): This = newSliced(n, length).asInstanceOf[This]
- override def splitAt(n: Int): (This, This) = (take(n), drop(n))
- override def ++[U >: T, That](xs: TraversableOnce[U])(implicit bf: CanBuildFrom[This, U, That]): That = newAppended(xs.toTraversable).asInstanceOf[That]
- override def map[S, That](f: T => S)(implicit bf: CanBuildFrom[This, S, That]): That = newMapped(f).asInstanceOf[That]
- override def flatMap[S, That](f: T => Traversable[S])(implicit bf: CanBuildFrom[This, S, That]): That = newFlatMapped(f).asInstanceOf[That]
- override def collect[S, That](pf: PartialFunction[T, S])(implicit bf: CanBuildFrom[This, S, That]): That = filter(pf.isDefinedAt).map(pf)(bf)
- override def takeWhile(p: T => Boolean): This = newTakenWhile(p).asInstanceOf[This]
- override def dropWhile(p: T => Boolean): This = newDroppedWhile(p).asInstanceOf[This]
- override def span(p: T => Boolean): (This, This) = (takeWhile(p), dropWhile(p))
- override def scanLeft[S, That](z: S)(op: (S, T) => S)(implicit bf: CanBuildFrom[This, S, That]): That = newForced(thisSeq.scanLeft(z)(op)).asInstanceOf[That]
- override def scanRight[S, That](z: S)(op: (T, S) => S)(implicit bf: CanBuildFrom[This, S, That]): That = newForced(thisSeq.scanRight(z)(op)).asInstanceOf[That]
- override def groupBy[K](f: T => K): collection.immutable.Map[K, This] = thisSeq.groupBy(f).mapValues(xs => newForced(xs).asInstanceOf[This])
- override def +:[U >: T, That](elem: U)(implicit bf: CanBuildFrom[This, U, That]): That = newPrepended(elem).asInstanceOf[That]
- override def reverse: This = newReversed.asInstanceOf[This]
- override def patch[U >: T, That](from: Int, patch: Seq[U], replaced: Int)(implicit bf: CanBuildFrom[This, U, That]): That = newPatched(from, patch, replaced).asInstanceOf[That]
- override def padTo[U >: T, That](len: Int, elem: U)(implicit bf: CanBuildFrom[This, U, That]): That = patch(length, Seq.fill(len - length)(elem), 0)
- override def reverseMap[S, That](f: T => S)(implicit bf: CanBuildFrom[This, S, That]): That = reverse.map(f)
- override def updated[U >: T, That](index: Int, elem: U)(implicit bf: CanBuildFrom[This, U, That]): That = {
- require(0 <= index && index < length)
- patch(index, List(elem), 1)(bf)
- }
- override def :+[U >: T, That](elem: U)(implicit bf: CanBuildFrom[This, U, That]): That = ++(Iterator.single(elem))(bf)
- override def union[U >: T, That](that: Seq[U])(implicit bf: CanBuildFrom[This, U, That]): That = this ++ that
- override def diff[U >: T](that: Seq[U]): This = newForced(thisSeq diff that).asInstanceOf[This]
- override def intersect[U >: T](that: Seq[U]): This = newForced(thisSeq intersect that).asInstanceOf[This]
- override def sorted[U >: T](implicit ord: Ordering[U]): This = newForced(thisSeq sorted ord).asInstanceOf[This]
-
- override def force[U >: T, That](implicit bf: CanBuildFrom[Coll, U, That]) = bf ifParallel { pbf =>
- executeAndWaitResult(new Force(pbf, parallelIterator) mapResult { _.result })
- } otherwise {
- val b = bf(underlying)
- b ++= this.iterator
- b.result
- }
-
- /* tasks */
-
- protected[this] class Force[U >: T, That](cbf: CanCombineFrom[Coll, U, That], val pit: ParallelIterator)
- extends Transformer[Combiner[U, That], Force[U, That]] {
- var result: Combiner[U, That] = null
- def leaf(prev: Option[Combiner[U, That]]) = result = pit.copy2builder[U, That, Combiner[U, That]](reuse(prev, cbf(self.underlying)))
- def newSubtask(p: SuperParallelIterator) = new Force(cbf, down(p))
- override def merge(that: Force[U, That]) = result = result combine that.result
- }
-
-}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/src/library/scala/collection/parallel/RemainsIterator.scala b/src/library/scala/collection/parallel/RemainsIterator.scala
deleted file mode 100644
index 6ed6d235d2..0000000000
--- a/src/library/scala/collection/parallel/RemainsIterator.scala
+++ /dev/null
@@ -1,438 +0,0 @@
-package scala.collection.parallel
-
-
-
-import scala.collection.Parallel
-import scala.collection.generic.Signalling
-import scala.collection.generic.DelegatedSignalling
-import scala.collection.generic.CanCombineFrom
-import scala.collection.mutable.Builder
-import scala.collection.Iterator.empty
-
-trait RemainsIterator[+T] extends Iterator[T] {
- /** The number of elements this iterator has yet to iterate.
- * This method doesn't change the state of the iterator.
- */
- def remaining: Int
-}
-
-
-/** Augments iterators with additional methods, mostly transformers,
- * assuming they iterate an iterable collection.
- *
- * @param T type of the elements iterated.
- * @param Repr type of the collection iterator iterates.
- */
-trait AugmentedIterableIterator[+T, +Repr <: Parallel] extends RemainsIterator[T] {
-
- def repr: Repr
-
- /* accessors */
-
- override def count(p: T => Boolean): Int = {
- var i = 0
- while (hasNext) if (p(next)) i += 1
- i
- }
-
- def reduce[U >: T](op: (U, U) => U): U = {
- var r: U = next
- while (hasNext) r = op(r, next)
- r
- }
-
- def fold[U >: T](z: U)(op: (U, U) => U): U = {
- var r = z
- while (hasNext) r = op(r, next)
- r
- }
-
- override def sum[U >: T](implicit num: Numeric[U]): U = {
- var r: U = num.zero
- while (hasNext) r = num.plus(r, next)
- r
- }
-
- override def product[U >: T](implicit num: Numeric[U]): U = {
- var r: U = num.one
- while (hasNext) r = num.times(r, next)
- r
- }
-
- override def min[U >: T](implicit ord: Ordering[U]): T = {
- var r = next
- while (hasNext) {
- val curr = next
- if (ord.lteq(curr, r)) r = curr
- }
- r
- }
-
- override def max[U >: T](implicit ord: Ordering[U]): T = {
- var r = next
- while (hasNext) {
- val curr = next
- if (ord.gteq(curr, r)) r = curr
- }
- r
- }
-
- override def copyToArray[U >: T](array: Array[U], from: Int, len: Int) {
- var i = from
- val until = from + len
- while (i < until && hasNext) {
- array(i) = next
- i += 1
- }
- }
-
- /* transformers to combiners */
-
- def map2combiner[S, That](f: T => S, cb: Combiner[S, That]): Combiner[S, That] = {
- //val cb = pbf(repr)
- cb.sizeHint(remaining)
- while (hasNext) cb += f(next)
- cb
- }
-
- def collect2combiner[S, That](pf: PartialFunction[T, S], pbf: CanCombineFrom[Repr, S, That]): Combiner[S, That] = {
- val cb = pbf(repr)
- while (hasNext) {
- val curr = next
- if (pf.isDefinedAt(curr)) cb += pf(curr)
- }
- cb
- }
-
- def flatmap2combiner[S, That](f: T => Traversable[S], pbf: CanCombineFrom[Repr, S, That]): Combiner[S, That] = {
- val cb = pbf(repr)
- while (hasNext) {
- val traversable = f(next)
- if (traversable.isInstanceOf[Iterable[_]]) cb ++= traversable.asInstanceOf[Iterable[S]].iterator
- else cb ++= traversable
- }
- cb
- }
-
- def copy2builder[U >: T, Coll, Bld <: Builder[U, Coll]](b: Bld): Bld = {
- b.sizeHint(remaining)
- while (hasNext) b += next
- b
- }
-
- def filter2combiner[U >: T, This >: Repr](pred: T => Boolean, cb: Combiner[U, This]): Combiner[U, This] = {
- while (hasNext) {
- val curr = next
- if (pred(curr)) cb += curr
- }
- cb
- }
-
- def filterNot2combiner[U >: T, This >: Repr](pred: T => Boolean, cb: Combiner[U, This]): Combiner[U, This] = {
- while (hasNext) {
- val curr = next
- if (!pred(curr)) cb += curr
- }
- cb
- }
-
- def partition2combiners[U >: T, This >: Repr](pred: T => Boolean, btrue: Combiner[U, This], bfalse: Combiner[U, This]) = {
- while (hasNext) {
- val curr = next
- if (pred(curr)) btrue += curr
- else bfalse += curr
- }
- (btrue, bfalse)
- }
-
- def take2combiner[U >: T, This >: Repr](n: Int, cb: Combiner[U, This]): Combiner[U, This] = {
- cb.sizeHint(n)
- var left = n
- while (left > 0) {
- cb += next
- left -= 1
- }
- cb
- }
-
- def drop2combiner[U >: T, This >: Repr](n: Int, cb: Combiner[U, This]): Combiner[U, This] = {
- drop(n)
- cb.sizeHint(remaining)
- while (hasNext) cb += next
- cb
- }
-
- def slice2combiner[U >: T, This >: Repr](from: Int, until: Int, cb: Combiner[U, This]): Combiner[U, This] = {
- drop(from)
- var left = until - from
- cb.sizeHint(left)
- while (left > 0) {
- cb += next
- left -= 1
- }
- cb
- }
-
- def splitAt2combiners[U >: T, This >: Repr](at: Int, before: Combiner[U, This], after: Combiner[U, This]) = {
- before.sizeHint(at)
- after.sizeHint(remaining - at)
- var left = at
- while (left > 0) {
- before += next
- left -= 1
- }
- while (hasNext) after += next
- (before, after)
- }
-
- def takeWhile2combiner[U >: T, This >: Repr](p: T => Boolean, cb: Combiner[U, This]) = {
- var loop = true
- while (hasNext && loop) {
- val curr = next
- if (p(curr)) cb += curr
- else loop = false
- }
- (cb, loop)
- }
-
- def span2combiners[U >: T, This >: Repr](p: T => Boolean, before: Combiner[U, This], after: Combiner[U, This]) = {
- var isBefore = true
- while (hasNext && isBefore) {
- val curr = next
- if (p(curr)) before += curr
- else {
- after.sizeHint(remaining + 1)
- after += curr
- isBefore = false
- }
- }
- while (hasNext) after += next
- (before, after)
- }
-}
-
-
-trait AugmentedSeqIterator[+T, +Repr <: Parallel] extends AugmentedIterableIterator[T, Repr] {
-
- /** The exact number of elements this iterator has yet to iterate.
- * This method doesn't change the state of the iterator.
- */
- def remaining: Int
-
- /* accessors */
-
- def prefixLength(pred: T => Boolean): Int = {
- var total = 0
- var loop = true
- while (hasNext && loop) {
- if (pred(next)) total += 1
- else loop = false
- }
- total
- }
-
- override def indexWhere(pred: T => Boolean): Int = {
- var i = 0
- var loop = true
- while (hasNext && loop) {
- if (pred(next)) loop = false
- else i += 1
- }
- if (loop) -1 else i
- }
-
- def lastIndexWhere(pred: T => Boolean): Int = {
- var pos = -1
- var i = 0
- while (hasNext) {
- if (pred(next)) pos = i
- i += 1
- }
- pos
- }
-
- def corresponds[S](corr: (T, S) => Boolean)(that: Iterator[S]): Boolean = {
- while (hasNext && that.hasNext) {
- if (!corr(next, that.next)) return false
- }
- hasNext == that.hasNext
- }
-
- /* transformers */
-
- def reverse2combiner[U >: T, This >: Repr](cb: Combiner[U, This]): Combiner[U, This] = {
- cb.sizeHint(remaining)
- var lst = List[T]()
- while (hasNext) lst ::= next
- while (lst != Nil) {
- cb += lst.head
- lst = lst.tail
- }
- cb
- }
-
- def reverseMap2combiner[S, That](f: T => S, cbf: CanCombineFrom[Repr, S, That]): Combiner[S, That] = {
- val cb = cbf(repr)
- cb.sizeHint(remaining)
- var lst = List[S]()
- while (hasNext) lst ::= f(next)
- while (lst != Nil) {
- cb += lst.head
- lst = lst.tail
- }
- cb
- }
-
- def updated2combiner[U >: T, That](index: Int, elem: U, cbf: CanCombineFrom[Repr, U, That]): Combiner[U, That] = {
- val cb = cbf(repr)
- cb.sizeHint(remaining)
- var j = 0
- while (hasNext) {
- if (j == index) {
- cb += elem
- next
- } else cb += next
- j += 1
- }
- cb
- }
-
-}
-
-
-
-trait ParallelIterableIterator[+T, +Repr <: Parallel]
-extends AugmentedIterableIterator[T, Repr]
- with Splitter[T]
- with Signalling
- with DelegatedSignalling
-{
- def split: Seq[ParallelIterableIterator[T, Repr]]
-
- /** The number of elements this iterator has yet to traverse. This method
- * doesn't change the state of the iterator.
- *
- * This method is used to provide size hints to builders and combiners, and
- * to approximate positions of iterators within a data structure.
- *
- * '''Note''': This method may be implemented to return an upper bound on the number of elements
- * in the iterator, instead of the exact number of elements to iterate.
- *
- * In that case, 2 considerations must be taken into account:
- *
- * 1) classes that inherit `ParallelIterable` must reimplement methods `take`, `drop`, `slice`, `splitAt` and `copyToArray`.
- *
- * 2) if an iterator provides an upper bound on the number of elements, then after splitting the sum
- * of `remaining` values of split iterators must be less than or equal to this upper bound.
- */
- def remaining: Int
-}
-
-
-trait ParallelSeqIterator[+T, +Repr <: Parallel]
-extends ParallelIterableIterator[T, Repr]
- with AugmentedSeqIterator[T, Repr]
- with PreciseSplitter[T]
-{
- def split: Seq[ParallelSeqIterator[T, Repr]]
- def psplit(sizes: Int*): Seq[ParallelSeqIterator[T, Repr]]
-
- /** The number of elements this iterator has yet to traverse. This method
- * doesn't change the state of the iterator. Unlike the version of this method in the supertrait,
- * method `remaining` in `ParallelSeqLike.this.ParallelIterator` must return an exact number
- * of elements remaining in the iterator.
- *
- * @return an exact number of elements this iterator has yet to iterate
- */
- def remaining: Int
-}
-
-
-trait DelegatedIterator[+T, +Delegate <: Iterator[T]] extends RemainsIterator[T] {
- val delegate: Delegate
- def next = delegate.next
- def hasNext = delegate.hasNext
-}
-
-
-trait Counting[+T] extends RemainsIterator[T] {
- val initialSize: Int
- def remaining = initialSize - traversed
- var traversed = 0
- abstract override def next = {
- val n = super.next
- traversed += 1
- n
- }
-}
-
-
-/** A mixin for iterators that traverse only filtered elements of a delegate.
- */
-trait FilteredIterator[+T, +Delegate <: Iterator[T]] extends DelegatedIterator[T, Delegate] {
- protected[this] val pred: T => Boolean
-
- private[this] var hd: T = _
- private var hdDefined = false
-
- override def hasNext: Boolean = hdDefined || {
- do {
- if (!delegate.hasNext) return false
- hd = delegate.next
- } while (!pred(hd))
- hdDefined = true
- true
- }
-
- override def next = if (hasNext) { hdDefined = false; hd } else empty.next
-}
-
-
-/** A mixin for iterators that traverse elements of the delegate iterator, and of another collection.
- */
-trait AppendedIterator[+T, +Delegate <: Iterator[T]] extends DelegatedIterator[T, Delegate] {
- // `rest` should never alias `delegate`
- protected[this] val rest: Iterator[T]
-
- private[this] var current: Iterator[T] = delegate
-
- override def hasNext = (current.hasNext) || (current == delegate && rest.hasNext)
-
- override def next = {
- if (!current.hasNext) current = rest
- current.next
- }
-
-}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/src/library/scala/collection/parallel/Splitter.scala b/src/library/scala/collection/parallel/Splitter.scala
deleted file mode 100644
index b3cad6d67a..0000000000
--- a/src/library/scala/collection/parallel/Splitter.scala
+++ /dev/null
@@ -1,86 +0,0 @@
-package scala.collection.parallel
-
-
-import scala.collection.Seq
-
-
-/** A splitter (or a split iterator) can be split into more splitters that traverse over
- * disjoint subsets of elements.
- *
- * @tparam T type of the elements this parallel iterator traverses
- *
- * @since 2.8.1
- * @author prokopec
- */
-trait Splitter[+T] extends Iterator[T] {
-
- /** Splits the iterator into a sequence of disjunct views.
- *
- * Returns a sequence of split iterators, each iterating over some subset of the
- * elements in the collection. These subsets are disjoint and should be approximately
- * equal in size. These subsets are not empty, unless the iterator is empty in which
- * case this method returns a sequence with a single empty iterator. If the iterator has
- * more than two elements, this method will return two or more iterators.
- *
- * Implementors are advised to keep this partition relatively small - two iterators are
- * already enough when partitioning the collection, although there may be a few more.
- *
- * '''Note:''' this method actually invalidates the current iterator.
- *
- * @return a sequence of disjunct iterators of the collection
- */
- def split: Seq[Splitter[T]]
-
-}
-
-
-/** A precise splitter (or a precise split iterator) can be split into arbitrary number of splitters
- * that traverse disjoint subsets of arbitrary sizes.
- *
- * Implementors might want to override the parameterless `split` method for efficiency.
- *
- * @tparam T type of the elements this parallel iterator traverses
- *
- * @since 2.8.1
- * @author prokopec
- */
-trait PreciseSplitter[+T] extends Splitter[T] {
-
- /** Splits the iterator into disjunct views.
- *
- * This overloaded version of the `split` method is specific to precise parallel iterators.
- * It returns a sequence of parallel iterators, each iterating some subset of the
- * elements in this iterator. The sizes of the subiterators in the partition is equal to
- * the size in the corresponding argument, as long as there are enough elements in this
- * iterator to split it that way.
- *
- * If there aren't enough elements, a zero element iterator is appended for each additional argument.
- * If there are additional elements, an additional iterator is appended at the end to compensate.
- *
- * For example, say we have a parallel iterator `ps` with 100 elements. Invoking:
- * {{{
- * ps.split(50, 25, 25, 10, 5)
- * }}}
- * will return a sequence of five iterators, last two views being empty. On the other hand, calling:
- * {{{
- * ps.split(50, 40)
- * }}}
- * will return a sequence of three iterators, last of them containing ten elements.
- *
- * '''Note:''' this method actually invalidates the current iterator.
- *
- * Unlike the case with `split` found in parallel iterable iterators, views returned by this method can be empty.
- *
- * @param sizes the sizes used to split this split iterator into iterators that traverse disjunct subsets
- * @return a sequence of disjunct subsequence iterators of this parallel iterator
- */
- def psplit(sizes: Int*): Seq[PreciseSplitter[T]]
-
- def split: Seq[PreciseSplitter[T]]
-
-}
-
-
-
-
-
diff --git a/src/library/scala/collection/parallel/TaskSupport.scala b/src/library/scala/collection/parallel/TaskSupport.scala
deleted file mode 100644
index 8a072b22aa..0000000000
--- a/src/library/scala/collection/parallel/TaskSupport.scala
+++ /dev/null
@@ -1,27 +0,0 @@
-package scala.collection.parallel
-
-
-
-
-
-
-
-trait TaskSupport extends AdaptiveWorkStealingForkJoinTasks
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/src/library/scala/collection/parallel/Tasks.scala b/src/library/scala/collection/parallel/Tasks.scala
deleted file mode 100644
index 3ef60f8c7a..0000000000
--- a/src/library/scala/collection/parallel/Tasks.scala
+++ /dev/null
@@ -1,230 +0,0 @@
-package scala.collection.parallel
-
-
-
-
-import scala.concurrent.forkjoin._
-
-
-
-
-
-
-
-
-
-
-/** A trait that declares task execution capabilities used
- * by parallel collections. Parallel collections inherit a subtrait
- * of this trait.
- *
- * One implementation trait of `TaskExecution` is `ForkJoinTaskExecution`.
- */
-trait Tasks {
-
- /** A task abstraction which allows starting a task with `start`,
- * waiting for it to finish with `sync` and attempting to cancel
- * the task with `tryCancel`.
- * It also defines a method `leaf` which must be called once the
- * the task is started and defines what this task actually does.
- * Method `split` allows splitting this task into smaller subtasks,
- * and method `shouldSplitFurther` decides if the task should be
- * partitioned further.
- * Method `merge` allows merging the results of the 2 tasks. It updates
- * the result of the receiver.
- * Finally, it defines the task result of type `U`.
- */
- trait Task[R, +Tp] {
- def repr = this.asInstanceOf[Tp]
- /** Code that gets called after the task gets started - it may spawn other tasks instead of calling `leaf`. */
- def compute
- /** Body of the task - non-divisible unit of work done by this task. Optionally is provided with the result from the previous task
- * or `None` if there was no previous task.
- */
- def leaf(result: Option[R])
- /** Start task. */
- def start
- /** Wait for task to finish. */
- def sync
- /** Try to cancel the task.
- * @return `true` if cancellation is successful.
- */
- def tryCancel: Boolean
- /** A result that can be accessed once the task is completed. */
- def result: R
- /** Decides whether or not this task should be split further. */
- def shouldSplitFurther: Boolean
- /** Splits this task into a list of smaller tasks. */
- protected[this] def split: Seq[Task[R, Tp]]
- /** Read of results of `that` task and merge them into results of this one. */
- protected[this] def merge(that: Tp) {}
- }
-
- type TaskType[R, +Tp] <: Task[R, Tp]
- type ExecutionEnvironment
-
- var environment: ExecutionEnvironment
-
- /** Executes a task and waits for it to finish. */
- def executeAndWait[R, Tp](task: TaskType[R, Tp])
-
- /** Executes a result task, waits for it to finish, then returns its result. */
- def executeAndWaitResult[R, Tp](task: TaskType[R, Tp]): R
-
- /** Retrieves the parallelism level of the task execution environment. */
- def parallelismLevel: Int
-
-}
-
-
-/** This trait implements scheduling by employing
- * an adaptive work stealing technique.
- */
-trait AdaptiveWorkStealingTasks extends Tasks {
-
- trait Task[R, Tp] extends super.Task[R, Tp] {
- var next: Task[R, Tp] = null
- var shouldWaitFor = true
- var result: R
-
- def split: Seq[Task[R, Tp]]
-
- /** The actual leaf computation. */
- def leaf(result: Option[R]): Unit
-
- def compute = if (shouldSplitFurther) internal else leaf(None)
-
- def internal = {
- var last = spawnSubtasks
-
- last.leaf(None)
- result = last.result
-
- while (last.next != null) {
- val lastresult = Option(last.result)
- last = last.next
- if (last.tryCancel) last.leaf(lastresult) else last.sync
- merge(last.repr)
- }
- }
-
- def spawnSubtasks = {
- var last: Task[R, Tp] = null
- var head: Task[R, Tp] = this
- do {
- val subtasks = head.split
- head = subtasks.head
- for (t <- subtasks.tail) {
- t.next = last
- last = t
- t.start
- }
- } while (head.shouldSplitFurther);
- head.next = last
- head
- }
-
- def printChain = {
- var curr = this
- var chain = "chain: "
- while (curr != null) {
- chain += curr + " ---> "
- curr = curr.next
- }
- println(chain)
- }
- }
-
-}
-
-
-/**
- * A trait describing objects that provide a fork/join pool.
- */
-trait HavingForkJoinPool {
- def forkJoinPool: ForkJoinPool
-}
-
-
-
-/** An implementation trait for parallel tasks based on the fork/join framework.
- *
- * @define fjdispatch
- * If the current thread is a fork/join worker thread, the task's `fork` method will
- * be invoked. Otherwise, the task will be executed on the fork/join pool.
- */
-trait ForkJoinTasks extends Tasks with HavingForkJoinPool {
-
- trait Task[R, +Tp] extends RecursiveAction with super.Task[R, Tp] {
- def start = fork
- def sync = join
- def tryCancel = tryUnfork
- var result: R
- }
-
- type TaskType[R, +Tp] = Task[R, Tp]
- type ExecutionEnvironment = ForkJoinPool
-
- /** The fork/join pool of this collection.
- */
- def forkJoinPool: ForkJoinPool = environment
- var environment = ForkJoinTasks.defaultForkJoinPool
-
- /** Executes a task on a fork/join pool and waits for it to finish.
- *
- * $fjdispatch
- */
- def executeAndWait[R, Tp](fjtask: Task[R, Tp]) {
- if (currentThread.isInstanceOf[ForkJoinWorkerThread]) {
- fjtask.fork
- } else {
- forkJoinPool.execute(fjtask)
- }
- fjtask.join
- }
-
- /** Executes a task on a fork/join pool and waits for it to finish.
- * Returns its result when it does.
- *
- * $fjdispatch
- *
- * @return the result of the task
- */
- def executeAndWaitResult[R, Tp](fjtask: Task[R, Tp]): R = {
- if (currentThread.isInstanceOf[ForkJoinWorkerThread]) {
- fjtask.fork
- } else {
- forkJoinPool.execute(fjtask)
- }
- fjtask.join
- fjtask.result
- }
-
- def parallelismLevel = forkJoinPool.getParallelism
-
-}
-
-object ForkJoinTasks {
- val defaultForkJoinPool = new ForkJoinPool
- defaultForkJoinPool.setParallelism(Runtime.getRuntime.availableProcessors)
- defaultForkJoinPool.setMaximumPoolSize(Runtime.getRuntime.availableProcessors)
-}
-
-
-/* Some boilerplate due to no deep mixin composition. Not sure if it can be done differently without them.
- */
-trait AdaptiveWorkStealingForkJoinTasks extends ForkJoinTasks with AdaptiveWorkStealingTasks {
-
- trait Task[R, Tp] extends super[ForkJoinTasks].Task[R, Tp] with super[AdaptiveWorkStealingTasks].Task[R, Tp]
-
-}
-
-
-
-
-
-
-
-
-
-
diff --git a/src/library/scala/collection/parallel/immutable/ParallelHashTrie.scala b/src/library/scala/collection/parallel/immutable/ParallelHashTrie.scala
deleted file mode 100644
index a9e08913ea..0000000000
--- a/src/library/scala/collection/parallel/immutable/ParallelHashTrie.scala
+++ /dev/null
@@ -1,248 +0,0 @@
-package scala.collection.parallel.immutable
-
-
-
-
-
-
-
-import scala.collection.parallel.ParallelMap
-import scala.collection.parallel.ParallelMapLike
-import scala.collection.parallel.Combiner
-import scala.collection.parallel.EnvironmentPassingCombiner
-import scala.collection.generic.ParallelMapFactory
-import scala.collection.generic.CanCombineFrom
-import scala.collection.generic.GenericParallelMapTemplate
-import scala.collection.generic.GenericParallelMapCompanion
-import scala.collection.immutable.HashMap
-
-
-
-
-
-
-/** Parallel hash trie map.
- *
- * @author prokopec
- */
-class ParallelHashTrie[K, +V] private[immutable] (private[this] val trie: HashMap[K, V])
-extends ParallelMap[K, V]
- with GenericParallelMapTemplate[K, V, ParallelHashTrie]
- with ParallelMapLike[K, V, ParallelHashTrie[K, V], HashMap[K, V]]
-{
-self =>
-
- def this() = this(HashMap.empty[K, V])
-
- override def mapCompanion: GenericParallelMapCompanion[ParallelHashTrie] = ParallelHashTrie
-
- override def empty: ParallelHashTrie[K, V] = new ParallelHashTrie[K, V]
-
- def parallelIterator = new ParallelHashTrieIterator(trie) with SCPI
-
- def seq = trie
-
- def -(k: K) = new ParallelHashTrie(trie - k)
-
- def +[U >: V](kv: (K, U)) = new ParallelHashTrie(trie + kv)
-
- def get(k: K) = trie.get(k)
-
- override def size = trie.size
-
- protected override def reuse[S, That](oldc: Option[Combiner[S, That]], newc: Combiner[S, That]) = oldc match {
- case Some(old) => old
- case None => newc
- }
-
- type SCPI = SignalContextPassingIterator[ParallelHashTrieIterator]
-
- class ParallelHashTrieIterator(val ht: HashMap[K, V])
- extends super.ParallelIterator {
- self: SignalContextPassingIterator[ParallelHashTrieIterator] =>
- // println("created iterator " + ht)
- var i = 0
- lazy val triter = ht.iterator
- def split: Seq[ParallelIterator] = {
- // println("splitting " + ht + " into " + ht.split.map(new ParallelHashTrieIterator(_) with SCPI).map(_.toList))
- ht.split.map(new ParallelHashTrieIterator(_) with SCPI)
- }
- def next: (K, V) = {
- // println("taking next after " + i + ", in " + ht)
- i += 1
- triter.next
- }
- def hasNext: Boolean = {
- // println("hasNext: " + i + ", " + ht.size + ", " + ht)
- i < ht.size
- }
- def remaining = ht.size - i
- }
-
-}
-
-
-object ParallelHashTrie extends ParallelMapFactory[ParallelHashTrie] {
- def empty[K, V]: ParallelHashTrie[K, V] = new ParallelHashTrie[K, V]
-
- def newCombiner[K, V]: Combiner[(K, V), ParallelHashTrie[K, V]] = HashTrieCombiner[K, V]
-
- implicit def canBuildFrom[K, V]: CanCombineFrom[Coll, (K, V), ParallelHashTrie[K, V]] = {
- new CanCombineFromMap[K, V]
- }
-
- def fromTrie[K, V](t: HashMap[K, V]) = new ParallelHashTrie(t)
-
- var totalcombines = new java.util.concurrent.atomic.AtomicInteger(0)
-}
-
-
-trait HashTrieCombiner[K, V]
-extends Combiner[(K, V), ParallelHashTrie[K, V]] {
-self: EnvironmentPassingCombiner[(K, V), ParallelHashTrie[K, V]] =>
- import HashTrieCombiner._
- var heads = new Array[Unrolled[K, V]](rootsize)
- var lasts = new Array[Unrolled[K, V]](rootsize)
- var size: Int = 0
-
- def clear = {
- heads = new Array[Unrolled[K, V]](rootsize)
- lasts = new Array[Unrolled[K, V]](rootsize)
- }
-
- def +=(elem: (K, V)) = {
- size += 1
- val hc = elem._1.##
- val pos = hc & 0x1f
- if (lasts(pos) eq null) {
- // initialize bucket
- heads(pos) = new Unrolled[K, V]
- lasts(pos) = heads(pos)
- }
- // add to bucket
- lasts(pos) = lasts(pos).add(elem)
- this
- }
-
- def combine[N <: (K, V), NewTo >: ParallelHashTrie[K, V]](other: Combiner[N, NewTo]): Combiner[N, NewTo] = if (this ne other) {
- // ParallelHashTrie.totalcombines.incrementAndGet
- if (other.isInstanceOf[HashTrieCombiner[_, _]]) {
- val that = other.asInstanceOf[HashTrieCombiner[K, V]]
- var i = 0
- while (i < rootsize) {
- if (lasts(i) eq null) {
- heads(i) = that.heads(i)
- lasts(i) = that.lasts(i)
- } else {
- lasts(i).next = that.heads(i)
- if (that.lasts(i) ne null) lasts(i) = that.lasts(i)
- }
- i += 1
- }
- size = size + that.size
- this
- } else error("Unexpected combiner type.")
- } else this
-
- def result = {
- val buckets = heads.filter(_ != null)
- val root = new Array[HashMap[K, V]](buckets.length)
-
- executeAndWait(new CreateTrie(buckets, root, 0, buckets.length))
-
- var bitmap = 0
- var i = 0
- while (i < rootsize) {
- if (heads(i) ne null) bitmap |= 1 << i
- i += 1
- }
- val sz = root.foldLeft(0)(_ + _.size)
-
- if (sz == 0) new ParallelHashTrie[K, V]
- else if (sz == 1) new ParallelHashTrie[K, V](root(0))
- else {
- val trie = new HashMap.HashTrieMap(bitmap, root, sz)
- new ParallelHashTrie[K, V](trie)
- }
- }
-
- /* tasks */
-
- class CreateTrie(buckets: Array[Unrolled[K, V]], root: Array[HashMap[K, V]], offset: Int, howmany: Int) extends super.Task[Unit, CreateTrie] {
- var result = ()
- def leaf(prev: Option[Unit]) = {
- var i = offset
- val until = offset + howmany
- while (i < until) {
- root(i) = createTrie(buckets(i))
- i += 1
- }
- }
- private def createTrie(elems: Unrolled[K, V]): HashMap[K, V] = {
- var trie = new HashMap[K, V]
-
- var unrolled = elems
- var i = 0
- while (unrolled ne null) {
- val chunkarr = unrolled.array
- val chunksz = unrolled.size
- while (i < chunksz) {
- val kv = chunkarr(i)
- val hc = kv._1.##
- trie = trie.updated0(kv._1, hc, rootbits, kv._2, kv)
- i += 1
- }
- i = 0
- unrolled = unrolled.next
- }
-
- trie
- }
- def split = {
- val fp = howmany / 2
- List(new CreateTrie(buckets, root, offset, fp), new CreateTrie(buckets, root, offset + fp, howmany - fp))
- }
- def shouldSplitFurther = howmany > collection.parallel.thresholdFromSize(root.length, parallelismLevel)
- }
-
-}
-
-
-object HashTrieCombiner {
- def apply[K, V] = new HashTrieCombiner[K, V] with EnvironmentPassingCombiner[(K, V), ParallelHashTrie[K, V]] {}
-
- private[immutable] val rootbits = 5
- private[immutable] val rootsize = 1 << 5
- private[immutable] val unrolledsize = 16
-
- private[immutable] class Unrolled[K, V] {
- var size = 0
- var array = new Array[(K, V)](unrolledsize)
- var next: Unrolled[K, V] = null
- // adds and returns itself or the new unrolled if full
- def add(elem: (K, V)): Unrolled[K, V] = if (size < unrolledsize) {
- array(size) = elem
- size += 1
- this
- } else {
- next = new Unrolled[K, V]
- next.add(elem)
- }
- override def toString = "Unrolled(" + array.mkString(", ") + ")"
- }
-}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/src/library/scala/collection/parallel/immutable/ParallelIterable.scala.disabled b/src/library/scala/collection/parallel/immutable/ParallelIterable.scala.disabled
deleted file mode 100644
index 25a3217258..0000000000
--- a/src/library/scala/collection/parallel/immutable/ParallelIterable.scala.disabled
+++ /dev/null
@@ -1,53 +0,0 @@
-package scala.collection.parallel.immutable
-
-
-import scala.collection.generic._
-
-import scala.collection.parallel.ParallelIterableLike
-import scala.collection.parallel.Combiner
-
-
-// TODO uncomment when we add parallel vectors
-
-///** A template trait for immutable parallel iterable collections.
-// *
-// * $paralleliterableinfo
-// *
-// * $sideeffects
-// *
-// * @tparam A the element type of the collection
-// *
-// * @author prokopec
-// * @since 2.8
-// */
-//trait ParallelIterable[A] extends collection.immutable.Iterable[A]
-// with collection.parallel.ParallelIterable[A]
-// with GenericParallelTemplate[A, ParallelIterable]
-// with ParallelIterableLike[A, ParallelIterable[A], Iterable[A]] {
-// override def companion: GenericCompanion[ParallelIterable] with GenericParallelCompanion[ParallelIterable] = ParallelIterable
-//}
-//
-///** $factoryinfo
-// */
-//object ParallelIterable extends ParallelFactory[ParallelIterable] {
-// implicit def canBuildFrom[A]: CanBuildFromParallel[Coll, A, ParallelIterable[A]] =
-// new GenericCanBuildFromParallel[A]
-//
-// def newBuilder[A]: Combiner[A, ParallelIterable[A]] = null // TODO
-//
-// def newCombiner[A]: Combiner[A, ParallelIterable[A]] = null // TODO
-//}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/src/library/scala/collection/parallel/immutable/ParallelRange.scala b/src/library/scala/collection/parallel/immutable/ParallelRange.scala
deleted file mode 100644
index 85a33c7431..0000000000
--- a/src/library/scala/collection/parallel/immutable/ParallelRange.scala
+++ /dev/null
@@ -1,88 +0,0 @@
-package scala.collection.parallel.immutable
-
-
-
-import scala.collection.immutable.Range
-import scala.collection.immutable.RangeUtils
-import scala.collection.parallel.ParallelSeq
-import scala.collection.parallel.Combiner
-import scala.collection.generic.CanCombineFrom
-
-
-
-class ParallelRange(val start: Int, val end: Int, val step: Int, val inclusive: Boolean)
-extends ParallelSeq[Int]
- with RangeUtils[ParallelRange] {
- self =>
-
- def seq = new Range(start, end, step)
-
- def length = _length
-
- def apply(idx: Int) = _apply(idx)
-
- def create(_start: Int, _end: Int, _step: Int, _inclusive: Boolean) = new ParallelRange(_start, _end, _step, _inclusive)
-
- def parallelIterator = new ParallelRangeIterator with SCPI
-
- override def toString = seq.toString // TODO
-
- type SCPI = SignalContextPassingIterator[ParallelRangeIterator]
-
- class ParallelRangeIterator
- (var start: Int = self.start, val end: Int = self.end, val step: Int = self.step, val inclusive: Boolean = self.inclusive)
- extends ParallelIterator with RangeUtils[ParallelRangeIterator] {
- me: SignalContextPassingIterator[ParallelRangeIterator] =>
- def remaining = _length
- def next = { val r = start; start += step; r }
- def hasNext = remaining > 0
- def split: Seq[ParallelIterator] = psplit(remaining / 2, remaining - remaining / 2)
- def psplit(sizes: Int*): Seq[ParallelIterator] = {
- val incr = sizes.scanLeft(0)(_ + _)
- for ((from, until) <- incr.init zip incr.tail) yield _slice(from, until)
- }
- def create(_start: Int, _end: Int, _step: Int, _inclusive: Boolean) = {
- new ParallelRangeIterator(_start, _end, _step, _inclusive) with SCPI
- }
-
- override def toString = "ParallelRangeIterator(" + start + ", " + end + ", " + step + ", incl: " + inclusive + ")"
-
- /* accessors */
-
- override def foreach[U](f: Int => U): Unit = {
- _foreach(f)
- start = end + step
- }
-
- override def reduce[U >: Int](op: (U, U) => U): U = {
- var sum = next
- for (elem <- this) sum += elem
- sum
- }
-
- /* transformers */
-
- override def map2combiner[S, That](f: Int => S, cb: Combiner[S, That]): Combiner[S, That] = {
- //val cb = pbf(self.repr)
- val sz = remaining
- cb.sizeHint(sz)
- if (sz > 0) {
- val last = _last
- while (start != last) {
- f(start)
- start += step
- }
- }
- cb
- }
-
- }
-
-}
-
-
-
-
-
-
-
diff --git a/src/library/scala/collection/parallel/immutable/ParallelSeq.scala.disabled b/src/library/scala/collection/parallel/immutable/ParallelSeq.scala.disabled
deleted file mode 100644
index ddae095f0d..0000000000
--- a/src/library/scala/collection/parallel/immutable/ParallelSeq.scala.disabled
+++ /dev/null
@@ -1,44 +0,0 @@
-package scala.collection.parallel.immutable
-
-
-import scala.collection.generic.GenericParallelTemplate
-import scala.collection.generic.GenericCompanion
-import scala.collection.generic.GenericParallelCompanion
-import scala.collection.generic.CanCombineFrom
-import scala.collection.generic.ParallelFactory
-import scala.collection.parallel.ParallelSeqLike
-import scala.collection.parallel.Combiner
-
-
-
-// TODO uncomment when we add parallel vectors
-
-///** An immutable variant of `ParallelSeq`.
-// *
-// * @define Coll mutable.ParallelSeq
-// * @define coll mutable parallel sequence
-// */
-//trait ParallelSeq[A] extends collection.immutable.IndexedSeq[A]
-// with ParallelIterable[A]
-// with collection.parallel.ParallelSeq[A]
-// with GenericParallelTemplate[A, ParallelSeq]
-// with ParallelSeqLike[A, ParallelSeq[A], Seq[A]] {
-// override def companion: GenericCompanion[ParallelSeq] with GenericParallelCompanion[ParallelSeq] = ParallelSeq
-//
-//}
-//
-//
-///** $factoryInfo
-// * @define Coll mutable.ParallelSeq
-// * @define coll mutable parallel sequence
-// */
-//object ParallelSeq extends ParallelFactory[ParallelSeq] {
-// implicit def canBuildFrom[T]: CanBuildFromParallel[Coll, T, ParallelSeq[T]] = new GenericCanBuildFromParallel[T]
-//
-// def newBuilder[A]: Combiner[A, ParallelSeq[A]] = null // TODO
-//
-// def newCombiner[A]: Combiner[A, ParallelSeq[A]] = null // TODO
-//}
-
-
-
diff --git a/src/library/scala/collection/parallel/immutable/package.scala b/src/library/scala/collection/parallel/immutable/package.scala
deleted file mode 100644
index 054786afaf..0000000000
--- a/src/library/scala/collection/parallel/immutable/package.scala
+++ /dev/null
@@ -1,56 +0,0 @@
-package scala.collection.parallel
-
-
-
-
-
-
-
-
-
-
-
-package object immutable {
-
- /** A (parallel) sequence consisting of `length` elements `elem`. Used in the `padTo` method.
- *
- * @tparam T type of the elements
- * @param elem the element in the repetition
- * @param length the length of the collection
- */
- private[parallel] class Repetition[T](elem: T, val length: Int) extends ParallelSeq[T] {
- self =>
-
- def apply(idx: Int) = if (0 <= idx && idx < length) elem else throw new IndexOutOfBoundsException
- def seq = throw new UnsupportedOperationException
- def update(idx: Int, elem: T) = throw new UnsupportedOperationException
-
- type SCPI = SignalContextPassingIterator[ParallelIterator]
-
- class ParallelIterator(var i: Int = 0, val until: Int = length, elem: T = self.elem) extends super.ParallelIterator {
- me: SignalContextPassingIterator[ParallelIterator] =>
- def remaining = until - i
- def hasNext = i < until
- def next = { i += 1; elem }
- def psplit(sizes: Int*) = {
- val incr = sizes.scanLeft(0)(_ + _)
- for ((start, end) <- incr.init zip incr.tail) yield new ParallelIterator(i + start, (i + end) min until, elem) with SCPI
- }
- def split = psplit(remaining / 2, remaining - remaining / 2)
- }
-
- def parallelIterator = new ParallelIterator with SCPI
-
- }
-
-}
-
-
-
-
-
-
-
-
-
-
diff --git a/src/library/scala/collection/parallel/mutable/LazyCombiner.scala b/src/library/scala/collection/parallel/mutable/LazyCombiner.scala
deleted file mode 100644
index bd17d24ea8..0000000000
--- a/src/library/scala/collection/parallel/mutable/LazyCombiner.scala
+++ /dev/null
@@ -1,43 +0,0 @@
-package scala.collection.parallel.mutable
-
-
-
-
-import scala.collection.generic.Growable
-import scala.collection.generic.Sizing
-import scala.collection.mutable.ArrayBuffer
-import scala.collection.parallel.Combiner
-
-
-
-
-/** Implements combining contents of two combiners
- * by postponing the operation until `result` method is called. It chains
- * the leaf results together instead of evaluating the actual collection.
- *
- * @tparam Elem the type of the elements in the combiner
- * @tparam To the type of the collection the combiner produces
- * @tparam Buff the type of the buffers that contain leaf results and this combiner chains together
- */
-trait LazyCombiner[Elem, +To, Buff <: Growable[Elem] with Sizing] extends Combiner[Elem, To]
-{
- self: collection.parallel.EnvironmentPassingCombiner[Elem, To] =>
- val chain: ArrayBuffer[Buff]
- val lastbuff = chain.last
- def +=(elem: Elem) = { lastbuff += elem; this }
- def result: To = allocateAndCopy
- def clear = { chain.clear }
- def combine[N <: Elem, NewTo >: To](other: Combiner[N, NewTo]): Combiner[N, NewTo] = if (this ne other) {
- if (other.isInstanceOf[LazyCombiner[_, _, _]]) {
- val that = other.asInstanceOf[LazyCombiner[Elem, To, Buff]]
- newLazyCombiner(chain ++= that.chain)
- } else throw new UnsupportedOperationException("Cannot combine with combiner of different type.")
- } else this
- def size = chain.foldLeft(0)(_ + _.size)
-
- /** Method that allocates the data structure and copies elements into it using
- * `size` and `chain` members.
- */
- def allocateAndCopy: To
- def newLazyCombiner(buffchain: ArrayBuffer[Buff]): LazyCombiner[Elem, To, Buff]
-}
diff --git a/src/library/scala/collection/parallel/mutable/ParallelArray.scala b/src/library/scala/collection/parallel/mutable/ParallelArray.scala
deleted file mode 100644
index c16cc6da15..0000000000
--- a/src/library/scala/collection/parallel/mutable/ParallelArray.scala
+++ /dev/null
@@ -1,605 +0,0 @@
-package scala.collection.parallel.mutable
-
-
-
-import scala.collection.generic.GenericParallelTemplate
-import scala.collection.generic.GenericCompanion
-import scala.collection.generic.GenericParallelCompanion
-import scala.collection.generic.CanCombineFrom
-import scala.collection.generic.CanBuildFrom
-import scala.collection.generic.ParallelFactory
-import scala.collection.generic.Sizing
-import scala.collection.parallel.Combiner
-import scala.collection.parallel.ParallelSeqLike
-import scala.collection.parallel.CHECK_RATE
-import scala.collection.mutable.ArraySeq
-import scala.collection.mutable.Builder
-import scala.collection.Sequentializable
-
-
-
-
-/** Parallel sequence holding elements in a linear array.
- *
- * `ParallelArray` is a parallel sequence with a predefined size. The size of the array
- * cannot be changed after it's been created.
- *
- * `ParallelArray` internally keeps an array containing the elements. This means that
- * bulk operations based on traversal are fast, but those returning a parallel array as a result
- * are slightly slower. The reason for this is that `ParallelArray` uses lazy builders that
- * create the internal data array only after the size of the array is known. The fragments
- * are then copied into the resulting data array in parallel using fast array copy operations.
- * Operations for which the resulting array size is known in advance are optimised to use this
- * information.
- *
- * @tparam T type of the elements in the array
- *
- * @define Coll ParallelArray
- * @define coll parallel array
- */
-class ParallelArray[T] private[mutable] (val arrayseq: ArraySeq[T])
-extends ParallelSeq[T]
- with GenericParallelTemplate[T, ParallelArray]
- with ParallelSeqLike[T, ParallelArray[T], ArraySeq[T]]
-{
- self =>
-
- private val array: Array[Any] = arrayseq.array.asInstanceOf[Array[Any]]
-
- override def companion: GenericCompanion[ParallelArray] with GenericParallelCompanion[ParallelArray] = ParallelArray
-
- def this(sz: Int) = this {
- require(sz >= 0)
- new ArraySeq[T](sz)
- }
-
- def apply(i: Int) = array(i).asInstanceOf[T]
-
- def update(i: Int, elem: T) = array(i) = elem
-
- def length = arrayseq.length
-
- def seq = arrayseq
-
- type SCPI = SignalContextPassingIterator[ParallelArrayIterator]
-
- def parallelIterator: ParallelArrayIterator = {
- val pit = new ParallelArrayIterator with SCPI
- pit
- }
-
- class ParallelArrayIterator(var i: Int = 0, val until: Int = length, val arr: Array[Any] = array)
- extends super.ParallelIterator {
- me: SignalContextPassingIterator[ParallelArrayIterator] =>
-
- def hasNext = i < until
-
- def next = {
- val elem = arr(i)
- i += 1
- elem.asInstanceOf[T]
- }
-
- def remaining = until - i
-
- def psplit(sizesIncomplete: Int*): Seq[ParallelIterator] = {
- var traversed = i
- val total = sizesIncomplete.reduceLeft(_ + _)
- val left = remaining
- val sizes = if (total >= left) sizesIncomplete else sizesIncomplete :+ (left - total)
- for (sz <- sizes) yield if (traversed < until) {
- val start = traversed
- val end = (traversed + sz) min until
- traversed = end
- new ParallelArrayIterator(start, end, arr) with SCPI
- } else {
- new ParallelArrayIterator(traversed, traversed, arr) with SCPI
- }
- }
-
- override def split: Seq[ParallelIterator] = {
- val left = remaining
- if (left >= 2) {
- val splitpoint = left / 2
- Seq(new ParallelArrayIterator(i, i + splitpoint, arr) with SCPI,
- new ParallelArrayIterator(i + splitpoint, until, arr) with SCPI)
- } else {
- Seq(this)
- }
- }
-
- override def toString = "ParallelArrayIterator(" + i + ", " + until + ")"
-
- /* overrides for efficiency */
-
- /* accessors */
-
- override def foreach[U](f: T => U) = {
- foreach_quick(f, arr, until, i)
- i = until
- }
-
- private def foreach_quick[U](f: T => U, a: Array[Any], ntil: Int, from: Int) = {
- var j = from
- while (j < ntil) {
- f(a(j).asInstanceOf[T])
- j += 1
- }
- }
-
- override def count(p: T => Boolean) = {
- val c = count_quick(p, arr, until, i)
- i = until
- c
- }
-
- private def count_quick(p: T => Boolean, a: Array[Any], ntil: Int, from: Int) = {
- var cnt = 0
- var j = from
- while (j < ntil) {
- if (p(a(j).asInstanceOf[T])) cnt += 1
- j += 1
- }
- cnt
- }
-
- override def foldLeft[S](z: S)(op: (S, T) => S): S = {
- val r = foldLeft_quick(arr, until, op, z)
- i = until
- r
- }
-
- private def foldLeft_quick[S](a: Array[Any], ntil: Int, op: (S, T) => S, z: S): S = {
- var j = i
- var sum = z
- while (j < ntil) {
- sum = op(sum, a(j).asInstanceOf[T])
- j += 1
- }
- sum
- }
-
- def aggregate[S](z: S)(seqop: (S, T) => S, combop: (S, S) => S): S = foldLeft[S](z)(seqop)
-
- override def sum[U >: T](implicit num: Numeric[U]): U = {
- var s = sum_quick(num, arr, until, i, num.zero)
- i = until
- s
- }
-
- private def sum_quick[U >: T](num: Numeric[U], a: Array[Any], ntil: Int, from: Int, zero: U): U = {
- var j = from
- var sum = zero
- while (j < ntil) {
- sum = num.plus(sum, a(j).asInstanceOf[T])
- j += 1
- }
- sum
- }
-
- override def product[U >: T](implicit num: Numeric[U]): U = {
- var p = product_quick(num, arr, until, i, num.one)
- i = until
- p
- }
-
- private def product_quick[U >: T](num: Numeric[U], a: Array[Any], ntil: Int, from: Int, one: U): U = {
- var j = from
- var prod = one
- while (j < ntil) {
- prod = num.times(prod, a(j).asInstanceOf[T])
- j += 1
- }
- prod
- }
-
- override def forall(p: T => Boolean): Boolean = {
- if (isAborted) return false
-
- var all = true
- while (i < until) {
- val nextuntil = if (i + CHECK_RATE > until) until else i + CHECK_RATE
-
- all = forall_quick(p, array, nextuntil, i)
- if (all) i = nextuntil
- else {
- i = until
- abort
- }
-
- if (isAborted) return false
- }
- all
- }
-
- // it's faster to use a separate small method
- private def forall_quick(p: T => Boolean, a: Array[Any], nextuntil: Int, start: Int): Boolean = {
- var j = start
- while (j < nextuntil) {
- if (p(a(j).asInstanceOf[T])) j += 1
- else return false
- }
- return true
- }
-
- override def exists(p: T => Boolean): Boolean = {
- if (isAborted) return true
-
- var some = false
- while (i < until) {
- val nextuntil = if (i + CHECK_RATE > until) until else i + CHECK_RATE
-
- some = exists_quick(p, array, nextuntil, i)
- if (some) {
- i = until
- abort
- } else i = nextuntil
-
- if (isAborted) return true
- }
- some
- }
-
- // faster to use separate small method
- private def exists_quick(p: T => Boolean, a: Array[Any], nextuntil: Int, start: Int): Boolean = {
- var j = start
- while (j < nextuntil) {
- if (p(a(j).asInstanceOf[T])) return true
- else j += 1
- }
- return false
- }
-
- override def find(p: T => Boolean): Option[T] = {
- if (isAborted) return None
-
- var r: Option[T] = None
- while (i < until) {
- val nextuntil = if ((i + CHECK_RATE) < until) (i + CHECK_RATE) else until
-
- r = find_quick(p, array, nextuntil, i)
-
- if (r != None) {
- i = until
- abort
- } else i = nextuntil
-
- if (isAborted) return r
- }
- r
- }
-
- private def find_quick(p: T => Boolean, a: Array[Any], nextuntil: Int, start: Int): Option[T] = {
- var j = start
- while (j < nextuntil) {
- val elem = a(j).asInstanceOf[T]
- if (p(elem)) return Some(elem)
- else j += 1
- }
- return None
- }
-
- override def drop(n: Int): ParallelArrayIterator = {
- i += n
- this
- }
-
- override def copyToArray[U >: T](array: Array[U], from: Int, len: Int) {
- val totallen = (self.length - i) min len min (array.length - from)
- Array.copy(arr, i, array, from, totallen)
- i += totallen
- }
-
- override def prefixLength(pred: T => Boolean): Int = {
- val r = prefixLength_quick(pred, arr, until, i)
- i += r + 1
- r
- }
-
- private def prefixLength_quick(pred: T => Boolean, a: Array[Any], ntil: Int, startpos: Int): Int = {
- var j = startpos
- var endpos = ntil
- while (j < endpos) {
- if (pred(a(j).asInstanceOf[T])) j += 1
- else endpos = j
- }
- endpos - startpos
- }
-
- override def indexWhere(pred: T => Boolean): Int = {
- val r = indexWhere_quick(pred, arr, until, i)
- val ret = if (r != -1) r - i else r
- i = until
- ret
- }
-
- private def indexWhere_quick(pred: T => Boolean, a: Array[Any], ntil: Int, from: Int): Int = {
- var j = from
- var pos = -1
- while (j < ntil) {
- if (pred(a(j).asInstanceOf[T])) {
- pos = j
- j = ntil
- } else j += 1
- }
- pos
- }
-
- override def lastIndexWhere(pred: T => Boolean): Int = {
- val r = lastIndexWhere_quick(pred, arr, i, until)
- val ret = if (r != -1) r - i else r
- i = until
- ret
- }
-
- private def lastIndexWhere_quick(pred: T => Boolean, a: Array[Any], from: Int, ntil: Int): Int = {
- var pos = -1
- var j = ntil - 1
- while (j >= from) {
- if (pred(a(j).asInstanceOf[T])) {
- pos = j
- j = -1
- } else j -= 1
- }
- pos
- }
-
- override def sameElements(that: Iterator[_]): Boolean = {
- var same = true
- while (i < until && that.hasNext) {
- if (arr(i) != that.next) {
- i = until
- same = false
- }
- i += 1
- }
- same
- }
-
- /* transformers */
-
- override def map2combiner[S, That](f: T => S, cb: Combiner[S, That]): Combiner[S, That] = {
- //val cb = cbf(self.repr)
- cb.sizeHint(remaining)
- map2combiner_quick(f, arr, cb, until, i)
- i = until
- cb
- }
-
- private def map2combiner_quick[S, That](f: T => S, a: Array[Any], cb: Builder[S, That], ntil: Int, from: Int) {
- var j = from
- while (j < ntil) {
- cb += f(a(j).asInstanceOf[T])
- j += 1
- }
- }
-
- override def collect2combiner[S, That](pf: PartialFunction[T, S], pbf: CanCombineFrom[ParallelArray[T], S, That]): Combiner[S, That] = {
- val cb = pbf(self.repr)
- collect2combiner_quick(pf, arr, cb, until, i)
- i = until
- cb
- }
-
- private def collect2combiner_quick[S, That](pf: PartialFunction[T, S], a: Array[Any], cb: Builder[S, That], ntil: Int, from: Int) {
- var j = from
- while (j < ntil) {
- val curr = a(j).asInstanceOf[T]
- if (pf.isDefinedAt(curr)) cb += pf(curr)
- j += 1
- }
- }
-
- override def flatmap2combiner[S, That](f: T => Traversable[S], pbf: CanCombineFrom[ParallelArray[T], S, That]): Combiner[S, That] = {
- val cb = pbf(self.repr)
- while (i < until) {
- val traversable = f(arr(i).asInstanceOf[T])
- if (traversable.isInstanceOf[Iterable[_]]) cb ++= traversable.asInstanceOf[Iterable[S]].iterator
- else cb ++= traversable
- i += 1
- }
- cb
- }
-
- override def filter2combiner[U >: T, This >: ParallelArray[T]](pred: T => Boolean, cb: Combiner[U, This]) = {
- filter2combiner_quick(pred, cb, arr, until, i)
- i = until
- cb
- }
-
- private def filter2combiner_quick[U >: T, This >: ParallelArray[T]](pred: T => Boolean, cb: Builder[U, This], a: Array[Any], ntil: Int, from: Int) {
- var j = i
- while(j < ntil) {
- var curr = a(j).asInstanceOf[T]
- if (pred(curr)) cb += curr
- j += 1
- }
- }
-
- override def filterNot2combiner[U >: T, This >: ParallelArray[T]](pred: T => Boolean, cb: Combiner[U, This]) = {
- filterNot2combiner_quick(pred, cb, arr, until, i)
- i = until
- cb
- }
-
- private def filterNot2combiner_quick[U >: T, This >: ParallelArray[T]](pred: T => Boolean, cb: Builder[U, This], a: Array[Any], ntil: Int, from: Int) {
- var j = i
- while(j < ntil) {
- var curr = a(j).asInstanceOf[T]
- if (!pred(curr)) cb += curr
- j += 1
- }
- }
-
- override def copy2builder[U >: T, Coll, Bld <: Builder[U, Coll]](cb: Bld): Bld = {
- cb.sizeHint(remaining)
- cb.ifIs[ParallelArrayCombiner[T]] { pac =>
- val targetarr: Array[Any] = pac.lastbuff.internalArray.asInstanceOf[Array[Any]]
- Array.copy(arr, i, targetarr, pac.lastbuff.size, until - i)
- pac.lastbuff.setInternalSize(remaining)
- } otherwise {
- copy2builder_quick(cb, arr, until, i)
- i = until
- }
- cb
- }
-
- private def copy2builder_quick[U >: T, Coll](b: Builder[U, Coll], a: Array[Any], ntil: Int, from: Int) {
- var j = from
- while (j < ntil) {
- b += a(j).asInstanceOf[T]
- j += 1
- }
- }
-
- override def partition2combiners[U >: T, This >: ParallelArray[T]](pred: T => Boolean, btrue: Combiner[U, This], bfalse: Combiner[U, This]) = {
- partition2combiners_quick(pred, btrue, bfalse, arr, until, i)
- i = until
- (btrue, bfalse)
- }
-
- private def partition2combiners_quick[U >: T, This >: ParallelArray[T]](p: T => Boolean, btrue: Builder[U, This], bfalse: Builder[U, This], a: Array[Any], ntil: Int, from: Int) {
- var j = from
- while (j < ntil) {
- val curr = a(j).asInstanceOf[T]
- if (p(curr)) btrue += curr else bfalse += curr
- j += 1
- }
- }
-
- override def take2combiner[U >: T, This >: ParallelArray[T]](n: Int, cb: Combiner[U, This]) = {
- cb.sizeHint(n)
- val ntil = i + n
- val a = arr
- while (i < ntil) {
- cb += a(i).asInstanceOf[T]
- i += 1
- }
- cb
- }
-
- override def drop2combiner[U >: T, This >: ParallelArray[T]](n: Int, cb: Combiner[U, This]) = {
- drop(n)
- cb.sizeHint(remaining)
- while (i < until) {
- cb += arr(i).asInstanceOf[T]
- i += 1
- }
- cb
- }
-
- override def reverse2combiner[U >: T, This >: ParallelArray[T]](cb: Combiner[U, This]): Combiner[U, This] = {
- cb.ifIs[ParallelArrayCombiner[T]] { pac =>
- val sz = remaining
- pac.sizeHint(sz)
- val targetarr: Array[Any] = pac.lastbuff.internalArray.asInstanceOf[Array[Any]]
- reverse2combiner_quick(targetarr, arr, i, until)
- pac.lastbuff.setInternalSize(sz)
- pac
- } otherwise super.reverse2combiner(cb)
- cb
- }
-
- private def reverse2combiner_quick(targ: Array[Any], a: Array[Any], from: Int, ntil: Int) {
- var j = from
- var k = ntil - from - 1
- while (j < ntil) {
- targ(k) = a(j)
- j += 1
- k -= 1
- }
- }
-
- }
-
- /* operations */
-
- private def buildsArray[S, That](c: Builder[S, That]) = c.isInstanceOf[ParallelArrayCombiner[_]]
-
- override def map[S, That](f: T => S)(implicit bf: CanBuildFrom[ParallelArray[T], S, That]) = if (buildsArray(bf(repr))) {
- // reserve array
- val targetarr = new Array[Any](length)
-
- // fill it in parallel
- executeAndWait(new Map[S](f, targetarr, 0, length))
-
- // wrap it into a parallel array
- (new ParallelArray[S](new ExposedArraySeq[S](targetarr.asInstanceOf[Array[AnyRef]], length))).asInstanceOf[That]
- } else super.map(f)(bf)
-
- /* tasks */
-
- class Map[S](f: T => S, targetarr: Array[Any], offset: Int, howmany: Int) extends super.Task[Unit, Map[S]] {
- var result = ();
- def leaf(prev: Option[Unit]) = {
- val tarr = targetarr
- val sarr = array
- var i = offset
- val until = offset + howmany
- while (i < until) {
- tarr(i) = f(sarr(i).asInstanceOf[T])
- i += 1
- }
- }
- def split = {
- val fp = howmany / 2
- List(new Map(f, targetarr, offset, fp), new Map(f, targetarr, offset + fp, howmany - fp))
- }
- def shouldSplitFurther = howmany > collection.parallel.thresholdFromSize(length, parallelismLevel)
- }
-
-}
-
-
-
-
-
-object ParallelArray extends ParallelFactory[ParallelArray] {
- implicit def canBuildFrom[T]: CanCombineFrom[Coll, T, ParallelArray[T]] = new GenericCanCombineFrom[T]
- def newBuilder[T]: Combiner[T, ParallelArray[T]] = newCombiner
- def newCombiner[T]: Combiner[T, ParallelArray[T]] = ParallelArrayCombiner[T]
-
- /** Creates a new parallel array by wrapping the specified array.
- */
- def handoff[T](arr: Array[T]): ParallelArray[T] = wrapOrRebuild(arr, arr.length)
-
- /** Creates a new parallel array by wrapping a part of the specified array.
- */
- def handoff[T](arr: Array[T], sz: Int): ParallelArray[T] = wrapOrRebuild(arr, sz)
-
- private def wrapOrRebuild[T](arr: AnyRef, sz: Int) = arr match {
- case arr: Array[AnyRef] => new ParallelArray[T](new ExposedArraySeq[T](arr, sz))
- case _ => new ParallelArray[T](new ExposedArraySeq[T](runtime.ScalaRunTime.toObjectArray(arr), sz))
- }
-
- def createFromCopy[T <: AnyRef : ClassManifest](arr: Array[T]): ParallelArray[T] = {
- val newarr = new Array[T](arr.length)
- Array.copy(arr, 0, newarr, 0, arr.length)
- handoff(newarr)
- }
-
-}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/src/library/scala/collection/parallel/mutable/ParallelArrayCombiner.scala b/src/library/scala/collection/parallel/mutable/ParallelArrayCombiner.scala
deleted file mode 100644
index 2991344be2..0000000000
--- a/src/library/scala/collection/parallel/mutable/ParallelArrayCombiner.scala
+++ /dev/null
@@ -1,105 +0,0 @@
-package scala.collection.parallel.mutable
-
-
-
-
-
-import scala.collection.generic.Sizing
-import scala.collection.mutable.ArraySeq
-import scala.collection.mutable.ArrayBuffer
-import scala.collection.parallel.TaskSupport
-import scala.collection.parallel.EnvironmentPassingCombiner
-
-
-
-
-
-
-
-trait ParallelArrayCombiner[T]
-extends LazyCombiner[T, ParallelArray[T], ExposedArrayBuffer[T]]
- with TaskSupport {
- self: EnvironmentPassingCombiner[T, ParallelArray[T]] =>
-
- override def sizeHint(sz: Int) = if (chain.length == 1) chain(0).sizeHint(sz)
-
- def newLazyCombiner(c: ArrayBuffer[ExposedArrayBuffer[T]]) = ParallelArrayCombiner(c)
-
- def allocateAndCopy = if (chain.size > 1) {
- val arrayseq = new ArraySeq[T](size)
- val array = arrayseq.array.asInstanceOf[Array[Any]]
-
- executeAndWait(new CopyChainToArray(array, 0, size))
-
- new ParallelArray(arrayseq)
- } else { // optimisation if there is only 1 array
- val pa = new ParallelArray(new ExposedArraySeq[T](chain(0).internalArray, size))
- pa
- }
-
- override def toString = "ParallelArrayCombiner(" + size + "): " + chain
-
- /* tasks */
-
- class CopyChainToArray(array: Array[Any], offset: Int, howmany: Int) extends super.Task[Unit, CopyChainToArray] {
- var result = ()
- def leaf(prev: Option[Unit]) = if (howmany > 0) {
- var totalleft = howmany
- val (stbuff, stind) = findStart(offset)
- var buffind = stbuff
- var ind = stind
- var arrayIndex = offset
- while (totalleft > 0) {
- val currbuff = chain(buffind)
- val chunksize = if (totalleft < (currbuff.size - ind)) totalleft else currbuff.size - ind
- val until = ind + chunksize
-
- copyChunk(currbuff.internalArray, ind, array, arrayIndex, until)
- arrayIndex += chunksize
- ind += chunksize
-
- totalleft -= chunksize
- buffind += 1
- ind = 0
- }
- }
- private def copyChunk(buffarr: Array[AnyRef], buffStart: Int, ra: Array[Any], arrayStart: Int, until: Int) {
- Array.copy(buffarr, buffStart, ra, arrayStart, until - buffStart)
- }
- private def findStart(pos: Int) = {
- var left = pos
- var buffind = 0
- while (left >= chain(buffind).size) {
- left -= chain(buffind).size
- buffind += 1
- }
- (buffind, left)
- }
- def split = {
- val fp = howmany / 2
- List(new CopyChainToArray(array, offset, fp), new CopyChainToArray(array, offset + fp, howmany - fp))
- }
- def shouldSplitFurther = howmany > collection.parallel.thresholdFromSize(size, parallelismLevel)
- }
-
-}
-
-
-object ParallelArrayCombiner {
- def apply[T](c: ArrayBuffer[ExposedArrayBuffer[T]]): ParallelArrayCombiner[T] = {
- new { val chain = c } with ParallelArrayCombiner[T] with EnvironmentPassingCombiner[T, ParallelArray[T]]
- }
- def apply[T]: ParallelArrayCombiner[T] = apply(new ArrayBuffer[ExposedArrayBuffer[T]] += new ExposedArrayBuffer[T])
-}
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/src/library/scala/collection/parallel/mutable/ParallelIterable.scala b/src/library/scala/collection/parallel/mutable/ParallelIterable.scala
deleted file mode 100644
index bd0a46bc43..0000000000
--- a/src/library/scala/collection/parallel/mutable/ParallelIterable.scala
+++ /dev/null
@@ -1,51 +0,0 @@
-package scala.collection.parallel.mutable
-
-
-import scala.collection.generic._
-
-import scala.collection.parallel.ParallelIterableLike
-import scala.collection.parallel.Combiner
-
-
-/** A template trait for parallel iterable collections.
- *
- * $paralleliterableinfo
- *
- * $sideeffects
- *
- * @tparam T the element type of the collection
- *
- * @author prokopec
- * @since 2.8
- */
-trait ParallelIterable[T] extends collection.mutable.Iterable[T]
- with collection.parallel.ParallelIterable[T]
- with GenericParallelTemplate[T, ParallelIterable]
- with ParallelIterableLike[T, ParallelIterable[T], Iterable[T]] {
- override def companion: GenericCompanion[ParallelIterable] with GenericParallelCompanion[ParallelIterable] = ParallelIterable
-}
-
-/** $factoryinfo
- */
-object ParallelIterable extends ParallelFactory[ParallelIterable] {
- implicit def canBuildFrom[T]: CanCombineFrom[Coll, T, ParallelIterable[T]] =
- new GenericCanCombineFrom[T]
-
- def newBuilder[T]: Combiner[T, ParallelIterable[T]] = ParallelArrayCombiner[T]
-
- def newCombiner[T]: Combiner[T, ParallelIterable[T]] = ParallelArrayCombiner[T]
-}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/src/library/scala/collection/parallel/mutable/ParallelSeq.scala b/src/library/scala/collection/parallel/mutable/ParallelSeq.scala
deleted file mode 100644
index 636ba1ac3d..0000000000
--- a/src/library/scala/collection/parallel/mutable/ParallelSeq.scala
+++ /dev/null
@@ -1,61 +0,0 @@
-package scala.collection.parallel.mutable
-
-
-import scala.collection.generic.GenericParallelTemplate
-import scala.collection.generic.GenericCompanion
-import scala.collection.generic.GenericParallelCompanion
-import scala.collection.generic.CanCombineFrom
-import scala.collection.generic.ParallelFactory
-import scala.collection.parallel.ParallelSeqLike
-import scala.collection.parallel.Combiner
-
-
-
-
-
-
-
-/** A mutable variant of `ParallelSeq`.
- *
- * @define Coll mutable.ParallelSeq
- * @define coll mutable parallel sequence
- */
-trait ParallelSeq[T] extends collection.mutable.Seq[T]
- with ParallelIterable[T]
- with collection.parallel.ParallelSeq[T]
- with GenericParallelTemplate[T, ParallelSeq]
- with ParallelSeqLike[T, ParallelSeq[T], Seq[T]] {
- self =>
- override def companion: GenericCompanion[ParallelSeq] with GenericParallelCompanion[ParallelSeq] = ParallelSeq
-
- def update(i: Int, elem: T): Unit
-
-}
-
-
-/** $factoryInfo
- * @define Coll mutable.ParallelSeq
- * @define coll mutable parallel sequence
- */
-object ParallelSeq extends ParallelFactory[ParallelSeq] {
- implicit def canBuildFrom[T]: CanCombineFrom[Coll, T, ParallelSeq[T]] = new GenericCanCombineFrom[T]
-
- def newBuilder[T]: Combiner[T, ParallelSeq[T]] = ParallelArrayCombiner[T]
-
- def newCombiner[T]: Combiner[T, ParallelSeq[T]] = ParallelArrayCombiner[T]
-}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/src/library/scala/collection/parallel/mutable/package.scala b/src/library/scala/collection/parallel/mutable/package.scala
deleted file mode 100644
index f670c7b7c5..0000000000
--- a/src/library/scala/collection/parallel/mutable/package.scala
+++ /dev/null
@@ -1,32 +0,0 @@
-package scala.collection.parallel
-
-
-
-import scala.collection.mutable.ArrayBuffer
-import scala.collection.mutable.ArraySeq
-import scala.collection.generic.Sizing
-
-
-
-package object mutable {
-
- /* hack-arounds */
-
- private[mutable] class ExposedArrayBuffer[T] extends ArrayBuffer[T] with Sizing {
- def internalArray = array
- def setInternalSize(s: Int) = size0 = s
- override def sizeHint(len: Int) = { // delete once we start using 2.8.RC1+
- if (len > size && len >= 1) {
- val newarray = new Array[AnyRef](len)
- Array.copy(array, 0, newarray, 0, size0)
- array = newarray
- }
- }
- }
-
- private[mutable] class ExposedArraySeq[T](arr: Array[AnyRef], sz: Int) extends ArraySeq[T](sz) {
- override val array = arr
- override val length = sz
- }
-
-} \ No newline at end of file
diff --git a/src/library/scala/collection/parallel/package.scala b/src/library/scala/collection/parallel/package.scala
deleted file mode 100644
index cddf098966..0000000000
--- a/src/library/scala/collection/parallel/package.scala
+++ /dev/null
@@ -1,70 +0,0 @@
-package scala.collection
-
-
-import java.lang.Thread._
-
-import scala.collection.generic.CanBuildFrom
-import scala.collection.generic.CanCombineFrom
-
-
-/** Package object for parallel collections.
- */
-package object parallel {
- val MIN_FOR_COPY = -1 // TODO: set to 5000
- val CHECK_RATE = 512
-
- /** Computes threshold from the size of the collection and the parallelism level.
- */
- def thresholdFromSize(sz: Int, parallelismLevel: Int) = {
- val p = parallelismLevel
- if (p > 1) 1 + sz / (8 * p)
- else sz
- }
-
- /** An implicit conversion providing arrays with a `par` method, which
- * returns a parallel array.
- *
- * @tparam T type of the elements in the array, which is a subtype of AnyRef
- * @param array the array to be parallelized
- * @return a `Parallelizable` object with a `par` method
- */
- implicit def array2ParallelArray[T <: AnyRef](array: Array[T]) = new Parallelizable[mutable.ParallelArray[T]] {
- def par = mutable.ParallelArray.handoff[T](array)
- }
-
- implicit def factory2ops[From, Elem, To](bf: CanBuildFrom[From, Elem, To]) = new {
- def isParallel = bf.isInstanceOf[Parallel]
- def asParallel = bf.asInstanceOf[CanCombineFrom[From, Elem, To]]
- def ifParallel[R](isbody: CanCombineFrom[From, Elem, To] => R) = new {
- def otherwise(notbody: => R) = if (isParallel) isbody(asParallel) else notbody
- }
- }
-
- implicit def traversable2ops[T](t: TraversableOnce[T]) = new {
- def isParallel = t.isInstanceOf[Parallel]
- def isParallelIterable = t.isInstanceOf[ParallelIterable[_]]
- def asParallelIterable = t.asInstanceOf[ParallelIterable[T]]
- def isParallelSeq = t.isInstanceOf[ParallelSeq[_]]
- def asParallelSeq = t.asInstanceOf[ParallelSeq[T]]
- def ifParallelSeq[R](isbody: ParallelSeq[T] => R) = new {
- def otherwise(notbody: => R) = if (isParallel) isbody(asParallelSeq) else notbody
- }
- }
-
-}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/src/library/scala/concurrent/SyncVar.scala b/src/library/scala/concurrent/SyncVar.scala
index 5b55be1326..46dc415e1f 100644
--- a/src/library/scala/concurrent/SyncVar.scala
+++ b/src/library/scala/concurrent/SyncVar.scala
@@ -6,10 +6,12 @@
** |/ **
\* */
+
+
package scala.concurrent
-/** A class to provide safe concurrent access to a mutable cell.
- * All methods are synchronized.
+
+/** The class <code>SyncVar</code> ...
*
* @author Martin Odersky, Stepan Koltsov
* @version 1.0, 10/03/2003
@@ -27,17 +29,24 @@ class SyncVar[A] {
def get(timeout: Long): Option[A] = synchronized {
if (!isDefined) {
- try wait(timeout)
- catch { case _: InterruptedException => () }
+ try {
+ wait(timeout)
+ } catch {
+ case _: InterruptedException =>
+ }
}
- if (exception.isDefined) throw exception.get
- else if (isDefined) Some(value)
- else None
+ if (exception.isEmpty) {
+ if (isDefined) Some(value) else None
+ } else
+ throw exception.get
}
def take() = synchronized {
- try get
- finally unset()
+ try {
+ get
+ } finally {
+ unset()
+ }
}
def set(x: A) = synchronized {
@@ -47,6 +56,12 @@ class SyncVar[A] {
notifyAll()
}
+ private def setException(e: Throwable) = synchronized {
+ exception = Some(e)
+ isDefined = true
+ notifyAll()
+ }
+
def put(x: A) = synchronized {
while (isDefined) wait()
set(x)
@@ -60,5 +75,5 @@ class SyncVar[A] {
isDefined = false
notifyAll()
}
-}
+}
diff --git a/src/library/scala/io/Source.scala b/src/library/scala/io/Source.scala
index cb7403e255..b5313ef61b 100644
--- a/src/library/scala/io/Source.scala
+++ b/src/library/scala/io/Source.scala
@@ -114,13 +114,11 @@ object Source {
/** Create a <code>Source</code> from array of bytes, assuming
* one byte per character (ISO-8859-1 encoding.)
*/
- def fromRawBytes(bytes: Array[Byte]): Source =
- fromString(new String(bytes, Codec.ISO8859.name))
+ def fromRawBytes(bytes: Array[Byte]): Source = fromString(new String(bytes, Codec.ISO8859.name))
/** creates <code>Source</code> from file with given file: URI
*/
- def fromURI(uri: URI)(implicit codec: Codec): BufferedSource =
- fromFile(new JFile(uri))(codec)
+ def fromURI(uri: URI)(implicit codec: Codec): BufferedSource = fromFile(new JFile(uri))(codec)
/** same as fromURL(new URL(s))(Codec(enc))
*/
@@ -198,8 +196,7 @@ abstract class Source extends Iterator[Char] {
*
*/
@deprecated("Use a collections method such as getLines().toIndexedSeq for random access.")
- def getLine(line: Int): String = lineNum(line)
- private def lineNum(line: Int): String = getLines() drop (line - 1) next
+ def getLine(line: Int): String = getLines() drop (line - 1) next
class LineIterator() extends Iterator[String] {
private[this] val sb = new StringBuilder
@@ -299,10 +296,10 @@ abstract class Source extends Iterator[Char] {
* @param out PrintStream to use
*/
def report(pos: Int, msg: String, out: PrintStream) {
- val line = Position line pos
- val col = Position column pos
+ val line = Position line pos
+ val col = Position column pos
- out println "%s:%d:%d: %s%s%s^".format(descr, line, col, msg, lineNum(line), spaces(col - 1))
+ out println "%s:%d:%d: %s%s%s^".format(descr, line, col, msg, getLine(line), spaces(col - 1))
}
/**
@@ -343,9 +340,8 @@ abstract class Source extends Iterator[Char] {
}
/** The close() method closes the underlying resource. */
- def close() {
+ def close(): Unit =
if (closeFunction != null) closeFunction()
- }
/** The reset() method creates a fresh copy of this Source. */
def reset(): Source =
diff --git a/src/library/scala/math/BigInt.scala b/src/library/scala/math/BigInt.scala
index a21057c400..79b377bc6c 100644
--- a/src/library/scala/math/BigInt.scala
+++ b/src/library/scala/math/BigInt.scala
@@ -339,9 +339,9 @@ class BigInt(val bigInteger: BigInteger) extends ScalaNumber with ScalaNumericCo
def floatValue = this.bigInteger.floatValue
/** Converts this BigInt to a <tt>double</tt>.
- * if this BigInt has too great a magnitude to represent as a double,
- * it will be converted to <code>Double.NEGATIVE_INFINITY</code> or
- * <code>Double.POSITIVE_INFINITY</code> as appropriate.
+ * if this BigInt has too great a magnitude to represent as a float,
+ * it will be converted to <code>Float.NEGATIVE_INFINITY</code> or
+ * <code>Float.POSITIVE_INFINITY</code> as appropriate.
*/
def doubleValue = this.bigInteger.doubleValue
diff --git a/src/library/scala/math/Ordering.scala b/src/library/scala/math/Ordering.scala
index fdfc4915d9..4527e18338 100644
--- a/src/library/scala/math/Ordering.scala
+++ b/src/library/scala/math/Ordering.scala
@@ -204,17 +204,15 @@ object Ordering extends LowPriorityOrderingImplicits {
}
implicit object String extends StringOrdering
- trait OptionOrdering[T] extends Ordering[Option[T]] {
- def optionOrdering: Ordering[T]
- def compare(x: Option[T], y: Option[T]) = (x, y) match {
- case (None, None) => 0
- case (None, _) => -1
- case (_, None) => 1
- case (Some(x), Some(y)) => optionOrdering.compare(x, y)
+ implicit def Option[T](implicit ord: Ordering[T]) : Ordering[Option[T]] =
+ new Ordering[Option[T]] {
+ def compare(x : Option[T], y : Option[T]) = (x, y) match {
+ case (None, None) => 0
+ case (None, _) => -1
+ case (_, None) => 1
+ case (Some(x), Some(y)) => ord.compare(x, y)
+ }
}
- }
- implicit def Option[T](implicit ord: Ordering[T]): Ordering[Option[T]] =
- new OptionOrdering[T] { val optionOrdering = ord }
implicit def Iterable[T](implicit ord: Ordering[T]): Ordering[Iterable[T]] =
new Ordering[Iterable[T]] {
diff --git a/src/library/scala/reflect/generic/Symbols.scala b/src/library/scala/reflect/generic/Symbols.scala
index f1226c7e19..2f5e0624ab 100755
--- a/src/library/scala/reflect/generic/Symbols.scala
+++ b/src/library/scala/reflect/generic/Symbols.scala
@@ -119,7 +119,6 @@ trait Symbols { self: Universe =>
def isTrait: Boolean = isClass && hasFlag(TRAIT) // refined later for virtual classes.
final def hasDefault = isParameter && hasFlag(DEFAULTPARAM)
final def isAbstractClass = isClass && hasFlag(ABSTRACT)
- // XXX This is unlikely to be correct: it's not looking for the ABSOVERRIDE flag?
final def isAbstractOverride = isTerm && hasFlag(ABSTRACT) && hasFlag(OVERRIDE)
final def isBridge = hasFlag(BRIDGE)
final def isCase = hasFlag(CASE)
diff --git a/src/library/scala/runtime/AnyValCompanion.scala b/src/library/scala/runtime/AnyValCompanion.scala
index 0fba1cfd60..0a6f93805a 100644
--- a/src/library/scala/runtime/AnyValCompanion.scala
+++ b/src/library/scala/runtime/AnyValCompanion.scala
@@ -6,7 +6,7 @@
** |/ **
\* */
-
+// $Id$
package scala.runtime
diff --git a/src/library/scala/runtime/ScalaRunTime.scala b/src/library/scala/runtime/ScalaRunTime.scala
index a8cb2340ff..a1d15c4b7d 100644
--- a/src/library/scala/runtime/ScalaRunTime.scala
+++ b/src/library/scala/runtime/ScalaRunTime.scala
@@ -235,12 +235,6 @@ object ScalaRunTime {
*
*/
def stringOf(arg: Any): String = {
- import collection.{SortedSet, SortedMap}
- def mapTraversable(x: Traversable[_], f: Any => String) = x match {
- case ss: SortedSet[_] => ss.map(f)
- case ss: SortedMap[_, _] => ss.map(f)
- case _ => x.map(f)
- }
def inner(arg: Any): String = arg match {
case null => "null"
// Node extends NodeSeq extends Seq[Node] strikes again
@@ -258,7 +252,7 @@ object ScalaRunTime {
// exception if you call iterator. What a world.
// And they can't be infinite either.
if (x.getClass.getName startsWith "scala.tools.nsc.io") x.toString
- else (mapTraversable(x, inner)) mkString (x.stringPrefix + "(", ", ", ")")
+ else (x map inner) mkString (x.stringPrefix + "(", ", ", ")")
case x => x toString
}
val s = inner(arg)
diff --git a/src/library/scala/util/Random.scala b/src/library/scala/util/Random.scala
index 53e721dcda..69cb4bb48d 100644
--- a/src/library/scala/util/Random.scala
+++ b/src/library/scala/util/Random.scala
@@ -17,8 +17,6 @@ import collection.immutable.List
*
*/
class Random(val self: java.util.Random) {
- import collection.mutable.ArrayBuffer
- import collection.generic.CanBuildFrom
/** Creates a new random number generator using a single long seed. */
def this(seed: Long) = this(new java.util.Random(seed))
@@ -99,6 +97,27 @@ class Random(val self: java.util.Random) {
}
def setSeed(seed: Long) { self.setSeed(seed) }
+}
+
+/** The object <code>Random</code> offers a default implementation
+ * of scala.util.Random and random-related convenience methods.
+ *
+ * @since 2.8
+ */
+object Random extends Random {
+ import collection.mutable.ArrayBuffer
+ import collection.generic.CanBuildFrom
+
+ /** Returns a Stream of pseudorandomly chosen alphanumeric characters,
+ * equally chosen from A-Z, a-z, and 0-9.
+ *
+ * @since 2.8
+ */
+ def alphanumeric: Stream[Char] = {
+ def isAlphaNum(c: Char) = (c >= 'A' && c <= 'Z') || (c >= 'a' && c <= 'z') || (c >= '0' && c <= '9')
+
+ Stream continually nextPrintableChar filter isAlphaNum
+ }
/** Returns a new collection of the same type in a randomly chosen order.
*
@@ -121,25 +140,4 @@ class Random(val self: java.util.Random) {
bf(xs) ++= buf result
}
-
-}
-
-/** The object <code>Random</code> offers a default implementation
- * of scala.util.Random and random-related convenience methods.
- *
- * @since 2.8
- */
-object Random extends Random {
-
- /** Returns a Stream of pseudorandomly chosen alphanumeric characters,
- * equally chosen from A-Z, a-z, and 0-9.
- *
- * @since 2.8
- */
- def alphanumeric: Stream[Char] = {
- def isAlphaNum(c: Char) = (c >= 'A' && c <= 'Z') || (c >= 'a' && c <= 'z') || (c >= '0' && c <= '9')
-
- Stream continually nextPrintableChar filter isAlphaNum
- }
-
}
diff --git a/src/library/scala/xml/parsing/MarkupParser.scala b/src/library/scala/xml/parsing/MarkupParser.scala
index 4f6b89c07b..24e0d78c6f 100644
--- a/src/library/scala/xml/parsing/MarkupParser.scala
+++ b/src/library/scala/xml/parsing/MarkupParser.scala
@@ -310,7 +310,7 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
def xEntityValue(): String = {
val endch = ch
nextch
- while (ch != endch && !eof) {
+ while (ch != endch) {
putChar(ch)
nextch
}
@@ -556,7 +556,7 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
if (ch != '\'' && ch != '"')
reportSyntaxError("quote ' or \" expected");
nextch
- while (ch != endch && !eof) {
+ while (ch != endch) {
putChar(ch)
nextch
}
@@ -572,7 +572,7 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
if (ch!='\'' && ch != '"')
reportSyntaxError("quote ' or \" expected");
nextch
- while (ch != endch && !eof) {
+ while (ch != endch) {
putChar(ch)
//Console.println("hello '"+ch+"'"+isPubIDChar(ch));
if (!isPubIDChar(ch))
diff --git a/src/manual/scala/man1/scaladoc.scala b/src/manual/scala/man1/scaladoc.scala
index 420bb08c4d..77a2b88ce7 100644
--- a/src/manual/scala/man1/scaladoc.scala
+++ b/src/manual/scala/man1/scaladoc.scala
@@ -1,7 +1,6 @@
/* NSC -- new Scala compiler
- * Copyright LAMP/EPFL
+ * Copyright 2005-2010 LAMP/EPFL
* @author Stephane Micheloud
- * @author Gilles Dubochet
*/
package scala.man1
@@ -45,92 +44,82 @@ object scaladoc extends Command {
CmdOption("d") & "(see " & Link(Bold("OPTIONS"), "#options") & ", below).",
// tags are defined in class "scala.tools.nsc.doc.DocGenerator"
- "The recognised format of comments in source is described in the " & Link("online documentation",
- "http://lampsvn.epfl.ch/trac/scala/wiki/Scaladoc"))
+ "Supported tag comments are:",
+
+ BulletList(
+ Mono("@author"), Mono("@deprecated"),
+ Mono("@exception") & " (two arguments)",
+ Mono("@param") & " (two arguments)", Mono("@pre"),
+ Mono("@return"), Mono("@see"), Mono("@since"),
+ Mono("@throws") & " (two arguments)",
+ Mono("@todo"), Mono("@version")),
+
+ "See also online document \"" & Link("How to Write Doc Comments for the Javadoc Tool",
+ "http://java.sun.com/j2se/javadoc/writingdoccomments/") & "\" from Sun.")
val options = Section("OPTIONS",
+ "The generator has a set of standard options that are supported on the " &
+ "current development environment and will be supported in future releases.",
+
Section("Standard Options",
DefinitionList(
Definition(
CmdOption("d", Argument("directory")),
- "Specify where to generate documentation."),
- Definition(
- CmdOption("version"),
- "Print product version and exit."),
+ "Specify where to place generated class files."),
Definition(
- /*CmdOption("?") & "| " &*/ CmdOption("help"),
- "Print a synopsis of available options."))),
-
- Section("Documentation Options",
- DefinitionList(
+ CmdOption("access:<access>"),
+ "Show only public, protected/public (default) or all classes " &
+ "and members (" & Mono("public") & ",protected,private)"),
Definition(
- CmdOption("doc-title", Argument("title")),
- "Define the overall title of the documentation, typically the name of the library being documented."),
+ CmdOption("windowtitle", Argument("windowtitle")),
+ "Specify window title of generated HTML documentation"),
Definition(
- CmdOption("doc-version", Argument("version")),
- "Define the overall version number of the documentation, typically the version of the library being documented."),
- Definition(
- CmdOption("doc-source-url", Argument("url")),
- "Define a URL to be concatenated with source locations for link to source files."))),
-
- Section("Compiler Options",
- DefinitionList(
+ CmdOption("doctitle", Argument("doctitle")),
+ "Include title for the overview page"),
Definition(
- CmdOption("verbose"),
- "Output messages about what the compiler is doing"),
+ CmdOption("stylesheetfile", Argument("stylesheetfile")),
+ "File to change style of the generated documentation"),
Definition(
- CmdOption("deprecation"),
- SeqPara(
- "Indicate whether source should be compiled with deprecation " &
- "information; defaults to " & Mono("off") & " (" &
- "accepted values are: " & Mono("on") & ", " & Mono("off") &
- ", " & Mono("yes") & " and " & Mono("no") & ")",
- "Available since Scala version 2.2.1")),
+ CmdOption("header", Argument("pageheader")),
+ "Include header text for each page"),
Definition(
- CmdOption("classpath", Argument("path")),
- SeqPara(
- "Specify where to find user class files (on Unix-based systems " &
- "a colon-separated list of paths, on Windows-based systems, a " &
- "semicolon-separate list of paths). This does not override the " &
- "built-in (" & Mono("\"boot\"") & ") search path.",
- "The default class path is the current directory. Setting the " &
- Mono("CLASSPATH") & " variable or using the " & Mono("-classpath") & " " &
- "command-line option overrides that default, so if you want to " &
- "include the current directory in the search path, you must " &
- "include " & Mono("\".\"") & " in the new settings.")),
+ CmdOption("footer", Argument("pagefooter")),
+ "Include footer text for each page"),
Definition(
- CmdOption("sourcepath", Argument("path")),
- "Specify where to find input source files."),
+ CmdOption("top", Argument("pagetop")),
+ "Include top text for each page"),
Definition(
- CmdOption("bootclasspath", Argument("path")),
- "Override location of bootstrap class files (where to find the " &
- "standard built-in classes, such as \"" & Mono("scala.List") & "\")."),
+ CmdOption("bottom", Argument("pagebottom")),
+ "Include bottom text for each page"),
Definition(
- CmdOption("extdirs", Argument("dirs")),
- "Override location of installed extensions."),
+ CmdOption("version"),
+ "Print product version and exit."),
Definition(
- CmdOption("encoding", Argument("encoding")),
- SeqPara(
- "Specify character encoding used by source files.",
- "The default value is platform-specific (Linux: " & Mono("\"UTF8\"") &
- ", Windows: " & Mono("\"Cp1252\"") & "). Executing the following " &
- "code in the Scala interpreter will return the default value " &
- "on your system:",
- MBold(" scala> ") &
- Mono("new java.io.InputStreamReader(System.in).getEncoding"))))))
+ /*CmdOption("?") & "| " &*/ CmdOption("help"),
+ "Print a synopsis of standard options."))))
+
+ val examples = Section("EXAMPLES",
+
+ DefinitionList(
+ Definition(
+ "Generate documentation for a Scala program",
+ CmdLine("HelloWorld.scala")),
+ Definition(
+ "Generation documentation for a Scala program to the destination " &
+ "directory " & Bold("classes"),
+ CmdLine(CmdOption("d", "api") & "HelloWorld.scala")),
+ Definition(
+ "Generate documentation for all Scala files found in the source " &
+ "directory " & Bold("src") & " to the destination directory " &
+ Bold("api"),
+ CmdLine(CmdOption("d", "api") & "src/*.scala"))))
val exitStatus = Section("EXIT STATUS",
MBold(command) & " returns a zero exist status if it succeeds to process " &
"the specified input files. Non zero is returned in case of failure.")
- override val authors = Section("AUTHORS",
-
- "This version of Scaladoc was written by Gilles Dubochet with contributions by Pedro Furlanetto and Johannes Rudolph. " &
- "It is based on the original Scaladoc (Sean McDirmid, Geoffrey Washburn, Vincent Cremet and Stéphane Michleoud), " &
- "on vScaladoc (David Bernard), as well as on an unreleased version of Scaladoc 2 (Manohar Jonnalagedda).")
-
val seeAlso = Section("SEE ALSO",
Link(Bold("fsc") & "(1)", "fsc.html") & ", " &
@@ -141,17 +130,20 @@ object scaladoc extends Command {
def manpage = new Document {
title = command
- date = "2 June 2010"
- author = "Gilles Dubochet"
- version = "2.0"
+ date = "May 1, 2007"
+ author = "Stephane Micheloud"
+ version = "0.4"
sections = List(
name,
synopsis,
parameters,
description,
options,
+ examples,
exitStatus,
authors,
+ bugs,
+ copyright,
seeAlso)
}
}
diff --git a/src/partest/scala/tools/partest/PartestTask.scala b/src/partest/scala/tools/partest/PartestTask.scala
index 55b4a2a637..230a6f73ec 100644
--- a/src/partest/scala/tools/partest/PartestTask.scala
+++ b/src/partest/scala/tools/partest/PartestTask.scala
@@ -182,10 +182,8 @@ class PartestTask extends Task with CompilationPathProperty {
private def getScalapFiles = getFiles(scalapFiles)
override def execute() {
- if (isPartestDebug || debug) {
+ if (isPartestDebug)
setProp("partest.debug", "true")
- nest.NestUI._verbose = true
- }
srcDir foreach (x => setProp("partest.srcdir", x))
diff --git a/src/partest/scala/tools/partest/nest/Diff.java b/src/partest/scala/tools/partest/nest/Diff.java
index f69fc6858b..f780712b6b 100644
--- a/src/partest/scala/tools/partest/nest/Diff.java
+++ b/src/partest/scala/tools/partest/nest/Diff.java
@@ -49,7 +49,7 @@ public class Diff {
an edit script, if desired.
*/
public Diff(Object[] a,Object[] b) {
- Hashtable<Object, Integer> h = new Hashtable<Object, Integer>(a.length + b.length);
+ Hashtable h = new Hashtable(a.length + b.length);
filevec[0] = new file_data(a,h);
filevec[1] = new file_data(b,h);
}
@@ -744,7 +744,7 @@ public class Diff {
nondiscarded_lines = j;
}
- file_data(Object[] data, Hashtable<Object, Integer> h) {
+ file_data(Object[] data,Hashtable h) {
buffered_lines = data.length;
equivs = new int[buffered_lines];
@@ -752,9 +752,9 @@ public class Diff {
realindexes = new int[buffered_lines];
for (int i = 0; i < data.length; ++i) {
- Integer ir = h.get(data[i]);
+ Integer ir = (Integer)h.get(data[i]);
if (ir == null)
- h.put(data[i], new Integer(equivs[i] = equiv_max++));
+ h.put(data[i],new Integer(equivs[i] = equiv_max++));
else
equivs[i] = ir.intValue();
}
diff --git a/src/partest/scala/tools/partest/nest/DiffPrint.java b/src/partest/scala/tools/partest/nest/DiffPrint.java
index 31f9a1bc79..eeb0dd5a09 100644
--- a/src/partest/scala/tools/partest/nest/DiffPrint.java
+++ b/src/partest/scala/tools/partest/nest/DiffPrint.java
@@ -505,7 +505,7 @@ public class DiffPrint {
*/
static String[] slurp(String file) throws IOException {
BufferedReader rdr = new BufferedReader(new FileReader(file));
- Vector<String> s = new Vector<String>();
+ Vector s = new Vector();
for (;;) {
String line = rdr.readLine();
if (line == null) break;
diff --git a/src/partest/scala/tools/partest/nest/Worker.scala b/src/partest/scala/tools/partest/nest/Worker.scala
index 931bc5cc13..7797c1095a 100644
--- a/src/partest/scala/tools/partest/nest/Worker.scala
+++ b/src/partest/scala/tools/partest/nest/Worker.scala
@@ -374,7 +374,7 @@ class Worker(val fileManager: FileManager) extends Actor {
def isInGroup(f: File, num: Int) = SFile(f).stripExtension endsWith ("_" + num)
val groups = (0 to 9).toList map (num => testFiles filter (f => isInGroup(f, num)))
- val noGroupSuffix = testFiles filterNot (groups.flatten contains)
+ val noGroupSuffix = testFiles -- groups.flatten
def compileGroup(g: List[File]) {
val (scalaFiles, javaFiles) = g partition isScala
diff --git a/src/swing/scala/swing/Font.scala.disabled b/src/swing/scala/swing/Font.scala
index 6eebd667bd..a58c8967d7 100644
--- a/src/swing/scala/swing/Font.scala.disabled
+++ b/src/swing/scala/swing/Font.scala
@@ -1,36 +1,36 @@
package scala.swing
-/*object Font {
- def apply(fontFormat: Int, fontFile: java.io.File) = java.awt.Font.createFont(fontFormat, fontFile)
- def apply(fontFormat: Int, fontStream: java.io.InputStream) = java.awt.Font.createFont(fontFormat, fontStream)
+/*object Font {
+ def apply(fontFormat: Int, fontFile: java.io.File) = java.awt.Font.createFont(fontFormat, fontFile)
+ def apply(fontFormat: Int, fontStream: java.io.InputStream) = java.awt.Font.createFont(fontFormat, fontStream)
def decode(str: String) = java.awt.Font.decode(str)
-
+
/* TODO: finish implementation
/**
* See [java.awt.Font.getFont].
*/
- def get(attributes: Map[_ <: java.text.AttributedCharacterIterator.Attribute, _]) =
+ def get(attributes: Map[_ <: java.text.AttributedCharacterIterator.Attribute, _]) =
java.awt.Font.getFont(ImmutableMapWrapper(attributes))
-
+
import java.{util => ju}
private case class ImmutableMapWrapper[A, B](underlying : Map[A, B])(m : ClassManifest[A]) extends ju.AbstractMap[A, B] {
self =>
override def size = underlying.size
- override def put(k : A, v : B) =
+ override def put(k : A, v : B) =
throw new UnsupportedOperationException("This is a wrapper that does not support mutation")
- override def remove(k : AnyRef) =
+ override def remove(k : AnyRef) =
throw new UnsupportedOperationException("This is a wrapper that does not support mutation")
-
+
override def entrySet : ju.Set[ju.Map.Entry[A, B]] = new ju.AbstractSet[ju.Map.Entry[A, B]] {
def size = self.size
def iterator = new ju.Iterator[ju.Map.Entry[A, B]] {
val ui = underlying.iterator
var prev : Option[A] = None
-
+
def hasNext = ui.hasNext
-
+
def next = {
val (k, v) = ui.next
prev = Some(k)
@@ -44,7 +44,7 @@ package scala.swing
}
}
}
-
+
def remove = prev match {
case Some(k) => val v = self.remove(k.asInstanceOf[AnyRef]) ; prev = None ; v
case _ => throw new IllegalStateException("next must be called at least once before remove")
@@ -53,7 +53,7 @@ package scala.swing
}
}
*/
-
+
/**
* See [java.awt.Font.getFont].
*/
@@ -62,9 +62,9 @@ package scala.swing
* See [java.awt.Font.getFont].
*/
def get(nm: String, font: Font) = java.awt.Font.getFont(nm, font)
-
+
def Insets(x: Int, y: Int, width: Int, height: Int) = new Insets(x, y, width, height)
def Rectangle(x: Int, y: Int, width: Int, height: Int) = new Insets(x, y, width, height)
def Point(x: Int, y: Int) = new Point(x, y)
- def Dimension(x: Int, y: Int) = new Dimension(x, y)
+ def Dimension(x: Int, y: Int) = new Dimension(x, y)
}*/ \ No newline at end of file
diff --git a/test/benchmarks/bench b/test/benchmarks/bench
deleted file mode 100755
index b441b283f8..0000000000
--- a/test/benchmarks/bench
+++ /dev/null
@@ -1,57 +0,0 @@
-
-#################################################################
-#
-# A simple script used to rebuild benchmarks using fsc and then run them.
-# If you need to rebuild, use:
-#
-# ./bench <arguments>
-#
-# Omitting <arguments> will print more information.
-# If you don't want to rebuild:
-#
-# ./bench skip <same-args-as-above>
-#
-#################################################################
-
-
-TOP_DIR=$PWD
-SCALA_BUILD_DIR=../../build/pack
-SRC_DIR=src/
-TARGET_DIR=target
-CLASS_DIR=$TARGET_DIR/classes
-
-FSC=$SCALA_BUILD_DIR/bin/fsc
-SCALA_LIBS_PATH=$SCALA_BUILD_DIR/lib
-CLASSPATH=$SCALA_LIBS_PATH/scala-library.jar:lib/jsr166_and_extra.jar
-
-ARTIFACT=benchmarks.jar
-ARTIFACT_PATH=$TARGET_DIR/$ARTIFACT
-
-
-if [ "$1" != "skip" ]
-then
- # fetch source file list
- find $SRC_DIR -name *.scala -print > source.list
-
- # recompile with fsc
- $FSC -cp $CLASSPATH -d $CLASS_DIR @source.list
-
- # jar it up
- rm $ARTIFACT_PATH
- cd $CLASS_DIR
- jar cf $ARTIFACT .
- mv $ARTIFACT $TOP_DIR/$ARTIFACT_PATH
- cd $TOP_DIR
-fi
-
-# run a benchmark
-RUNCOMMAND="java -Xms256m -Xmx512m -server -cp $CLASSPATH:$ARTIFACT_PATH scala.collection.parallel.Benchmarking "
-if [ "$1" != skip ]
-then
- $RUNCOMMAND "$@"
-else
- $RUNCOMMAND $2 $3 $4 $5 $6 $7 $8
-fi
-
-
-
diff --git a/test/benchmarks/lib/jsr166_and_extra.jar.desired.sha1 b/test/benchmarks/lib/jsr166_and_extra.jar.desired.sha1
deleted file mode 100644
index c879571eae..0000000000
--- a/test/benchmarks/lib/jsr166_and_extra.jar.desired.sha1
+++ /dev/null
@@ -1 +0,0 @@
-0392ecdeb306263c471ce51fa368223388b82b61 ?jsr166_and_extra.jar
diff --git a/test/benchmarks/source.list b/test/benchmarks/source.list
deleted file mode 100644
index 88d2b257b2..0000000000
--- a/test/benchmarks/source.list
+++ /dev/null
@@ -1,68 +0,0 @@
-src/scala/collection/parallel/Benchmarking.scala
-src/scala/collection/parallel/benchmarks/parallel_view/SeqViewBenches.scala
-src/scala/collection/parallel/benchmarks/arrays/ObjectAccess.scala
-src/scala/collection/parallel/benchmarks/arrays/IntAccess.scala
-src/scala/collection/parallel/benchmarks/arrays/Resetting.scala
-src/scala/collection/parallel/benchmarks/arrays/Arrays.scala
-src/scala/collection/parallel/benchmarks/arrays/UnknownManif.scala
-src/scala/collection/parallel/benchmarks/arrays/Dummy.scala
-src/scala/collection/parallel/benchmarks/parallel_array/FlatMapLight.scala
-src/scala/collection/parallel/benchmarks/parallel_array/FilterLight.scala
-src/scala/collection/parallel/benchmarks/parallel_array/CountHeavy.scala
-src/scala/collection/parallel/benchmarks/parallel_array/PartitionLight.scala
-src/scala/collection/parallel/benchmarks/parallel_array/SliceFew.scala
-src/scala/collection/parallel/benchmarks/parallel_array/SplitHalf.scala
-src/scala/collection/parallel/benchmarks/parallel_array/PadToDouble.scala
-src/scala/collection/parallel/benchmarks/parallel_array/IntersectHalf.scala
-src/scala/collection/parallel/benchmarks/parallel_array/ForallLight.scala
-src/scala/collection/parallel/benchmarks/parallel_array/AggregateLight.scala
-src/scala/collection/parallel/benchmarks/parallel_array/SumLight.scala
-src/scala/collection/parallel/benchmarks/parallel_array/MinLight.scala
-src/scala/collection/parallel/benchmarks/parallel_array/CountList.scala
-src/scala/collection/parallel/benchmarks/parallel_array/PatchHalf.scala
-src/scala/collection/parallel/benchmarks/parallel_array/DiffHalf.scala
-src/scala/collection/parallel/benchmarks/parallel_array/TakeMany.scala
-src/scala/collection/parallel/benchmarks/parallel_array/PartialMapLight.scala
-src/scala/collection/parallel/benchmarks/parallel_array/Reverse.scala
-src/scala/collection/parallel/benchmarks/parallel_array/SpanLight.scala
-src/scala/collection/parallel/benchmarks/parallel_array/PlusPlus.scala
-src/scala/collection/parallel/benchmarks/parallel_array/ReduceNew.scala
-src/scala/collection/parallel/benchmarks/parallel_array/Resettable.scala
-src/scala/collection/parallel/benchmarks/parallel_array/ReducePrime.scala
-src/scala/collection/parallel/benchmarks/parallel_array/DropMany.scala
-src/scala/collection/parallel/benchmarks/parallel_array/ReduceList.scala
-src/scala/collection/parallel/benchmarks/parallel_array/ForeachLight.scala
-src/scala/collection/parallel/benchmarks/parallel_array/MatrixMultiplication.scala
-src/scala/collection/parallel/benchmarks/parallel_array/SliceMedium.scala
-src/scala/collection/parallel/benchmarks/parallel_array/ReverseMap.scala
-src/scala/collection/parallel/benchmarks/parallel_array/Companion.scala
-src/scala/collection/parallel/benchmarks/parallel_array/CountLight.scala
-src/scala/collection/parallel/benchmarks/parallel_array/IndexWhere.scala
-src/scala/collection/parallel/benchmarks/parallel_array/LastIndexWhere.scala
-src/scala/collection/parallel/benchmarks/parallel_array/ReduceLight.scala
-src/scala/collection/parallel/benchmarks/parallel_array/CopyToArray.scala
-src/scala/collection/parallel/benchmarks/parallel_array/MapLight.scala
-src/scala/collection/parallel/benchmarks/parallel_array/RemoveDuplicates.scala
-src/scala/collection/parallel/benchmarks/parallel_array/SliceMany.scala
-src/scala/collection/parallel/benchmarks/parallel_array/TakeWhileLight.scala
-src/scala/collection/parallel/benchmarks/parallel_array/SegmentLength.scala
-src/scala/collection/parallel/benchmarks/parallel_array/ForeachHeavy.scala
-src/scala/collection/parallel/benchmarks/parallel_array/FindLight.scala
-src/scala/collection/parallel/benchmarks/parallel_array/ForallQuickStop.scala
-src/scala/collection/parallel/benchmarks/parallel_array/ForallStop80k.scala
-src/scala/collection/parallel/benchmarks/parallel_array/SameElementsLong.scala
-src/scala/collection/parallel/benchmarks/parallel_array/SequentialOps.scala
-src/scala/collection/parallel/benchmarks/parallel_array/ForallHeavy.scala
-src/scala/collection/parallel/benchmarks/parallel_array/ExistsLight.scala
-src/scala/collection/parallel/benchmarks/parallel_array/ReduceHeavy.scala
-src/scala/collection/parallel/benchmarks/parallel_array/Corresponds.scala
-src/scala/collection/parallel/benchmarks/generic/Operators.scala
-src/scala/collection/parallel/benchmarks/generic/ParallelBenches.scala
-src/scala/collection/parallel/benchmarks/generic/Dummy.scala
-src/scala/collection/parallel/benchmarks/parallel_range/RangeBenches.scala
-src/scala/collection/parallel/benchmarks/Bench.scala
-src/scala/collection/parallel/benchmarks/hashtries/Foreach.scala
-src/scala/collection/parallel/benchmarks/hashtries/Iterate.scala
-src/scala/collection/parallel/benchmarks/hashtries/Construct.scala
-src/scala/collection/parallel/benchmarks/hashtries/IntInit.scala
-src/scala/collection/parallel/benchmarks/hashtries/Lookup.scala
diff --git a/test/benchmarks/src/scala/collection/parallel/Benchmarking.scala b/test/benchmarks/src/scala/collection/parallel/Benchmarking.scala
deleted file mode 100644
index 0054893b8a..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/Benchmarking.scala
+++ /dev/null
@@ -1,186 +0,0 @@
-package scala.collection.parallel
-
-
-import scala.collection.mutable.LinkedHashSet
-
-import benchmarks._
-
-
-/**
- * All benchmarks are registered here.
- *
- * @author prokopec
- */
-trait BenchmarkRegister {
-
- val benchcreators = LinkedHashSet[BenchCompanion]()
-
- def register(companion: BenchCompanion) = benchcreators += companion
-
- // parallel array benchmarks
- register(parallel_array.ReduceLight)
- register(parallel_array.ReduceNew)
- register(parallel_array.ReduceList)
- register(parallel_array.ReducePrime)
- register(parallel_array.ReduceHeavy)
- register(parallel_array.CountLight)
- register(parallel_array.CountList)
- register(parallel_array.CountHeavy)
- register(parallel_array.ForeachLight)
- register(parallel_array.ForeachHeavy)
- register(parallel_array.SumLight)
- register(parallel_array.MinLight)
- register(parallel_array.MapLight)
- register(parallel_array.FilterLight)
- register(parallel_array.PartitionLight)
- register(parallel_array.PartialMapLight)
- register(parallel_array.FlatMapLight)
- register(parallel_array.PlusPlus)
- register(parallel_array.ForallLight)
- register(parallel_array.ForallQuickStop)
- register(parallel_array.ForallStop80k)
- register(parallel_array.ForallHeavy)
- register(parallel_array.ExistsLight)
- register(parallel_array.FindLight)
- register(parallel_array.TakeMany)
- register(parallel_array.DropMany)
- register(parallel_array.SliceMany)
- register(parallel_array.SliceMedium)
- register(parallel_array.SliceFew)
- register(parallel_array.SplitHalf)
- register(parallel_array.TakeWhileLight)
- register(parallel_array.SpanLight)
- register(parallel_array.CopyToArray)
- register(parallel_array.SegmentLength)
- register(parallel_array.IndexWhere)
- register(parallel_array.LastIndexWhere)
- register(parallel_array.Reverse)
- register(parallel_array.ReverseMap)
- register(parallel_array.SameElementsLong)
- register(parallel_array.Corresponds)
- register(parallel_array.DiffHalf)
- register(parallel_array.IntersectHalf)
- register(parallel_array.RemoveDuplicates)
- register(parallel_array.PatchHalf)
- register(parallel_array.PadToDouble)
- register(parallel_array.AggregateLight)
- register(parallel_array.MatrixMultiplication)
-
- // parallel views
- register(parallel_view.DummyViewBenchList.Reduce)
- register(parallel_view.DummyViewBenchList.MediumReduce)
- register(parallel_view.DummyViewBenchList.ModifyThenReduce)
- register(parallel_view.DummyViewBenchList.ModifyThenForce)
-
- // parallel ranges
- register(parallel_range.RangeBenches.Reduce)
- register(parallel_range.RangeBenches.ReduceMedium)
- register(parallel_range.RangeBenches.ForeachAdd)
- register(parallel_range.RangeBenches.ForeachAddCollatz)
- register(parallel_range.RangeBenches.ForeachModify)
- register(parallel_range.RangeBenches.ForeachModifyMedium)
- register(parallel_range.RangeBenches.ForeachModifyHeavy)
- register(parallel_range.RangeBenches.MapLight)
- register(parallel_range.RangeBenches.MapMedium)
-
- // array benchmarks
- register(arrays.ObjectAccess)
- register(arrays.IntAccess)
-
- // hash benchmarks
- register(hashtries.Foreach)
- register(hashtries.Iterate)
- register(hashtries.Construct)
- register(hashtries.Lookup)
- register(hashtries.Combine)
- register(hashtries.MultipleCombine)
-
- // parallel hash trie benchmarks
- register(hashtries.RefParallelHashTrieBenches.Reduce)
- register(hashtries.RefParallelHashTrieBenches.ReduceMedium)
- register(hashtries.RefParallelHashTrieBenches.Map)
- register(hashtries.RefParallelHashTrieBenches.Map2)
-}
-
-
-/**
- * Serves as an entrypoint to run all the benchmarks.
- */
-object Benchmarking extends BenchmarkRegister {
-
- def printHelp {
- println("Must enter at least four arguments: <collection> <benchmark> <size of the collection> <type>")
- println(" Example: ParallelArray reduce-light 50000 par")
- println(" Example: ParallelArray -all 50000 par")
- println
- println("General synthax: <collection> <benchmark> <size> <type> <parallelism-level>")
- println(" <collection> - name of the collection to test, `-all` runs benchmarks for all collections")
- println(" <benchmark> - name of the specific benchmark, `-all` runs all benchmarks for the chosen collections")
- println(" <size> - the size (number of elements) of the collection, or `-default` for default size per benchmark")
- println(" <type> - `seq` for benchmarking sequential version of the functionality")
- println(" `par` for benchmarking parallel version of the functionality")
- println(" `<something-else>` for running comparison benchmarks")
- println(" `-all` for running sequential, parallel and comparison benchmarks")
- println(" <parallelism-level> - the level of parallelism used (default 2)")
- }
-
- def otherOptions(args: Array[String]) {
- if (args.length == 0) printHelp
- else args(0) match {
- case "-list" => // lists all benchmarks
- for (bc <- benchcreators) println(bc.fullname)
- case _ => printHelp
- }
- }
-
- def main(args: Array[String]) {
- if (args.length < 4) {
- otherOptions(args)
- return
- }
-
- val collname = args(0)
- val benchname = args(1)
- val size = if (args(2) == "-default") -1 else args(2).toInt
- val tpe = args(3)
- val parlevel = if (args.length >= 5) args(4).toInt else 2
-
- // find all benchmarks to run
- val benches = benchcreators.filter(comp => {
- (collname, benchname) match {
- case ("-all", "-all") => true
- case ("-all", bn) if (benchname != "-all") => bn == comp.benchName
- case (cn, "-all") if (collname != "-all") => cn == comp.collectionName
- case (cn, bn) => cn == comp.collectionName && bn == comp.benchName
- }
- }).flatMap(comp => {
- val collsz = if (size != -1) size else comp.defaultSize
- if (tpe != "-all") List(comp.apply(collsz, parlevel, tpe))
- else for (benchtype <- "seq" :: "par" :: comp.comparisons)
- yield comp.apply(collsz, parlevel, benchtype)
- })
-
- println("Running benchmarks...")
- for (b <- benches) b.executeBenchmark
- }
-
-}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/Bench.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/Bench.scala
deleted file mode 100644
index 10e6201709..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/Bench.scala
+++ /dev/null
@@ -1,122 +0,0 @@
-package scala.collection.parallel.benchmarks
-
-
-import scala.collection._
-import scala.testing.Benchmark
-
-
-
-trait BenchCompanion {
- def benchName: String
- def collectionName: String
- def fullname = collectionName + "." + benchName
- def defaultSize = 100000
- def comparisons = List[String]()
- def apply(sz: Int, parallelism: Int, what: String): Bench
-}
-
-
-/**
- * An interface for all benchmark classes.
- * A benchmark runs some functionality a prespecified number of times.
- */
-trait Bench extends Benchmark {
- val size: Int
-
- val parallelism: Int
-
- val runWhat: String
-
- /**
- * Name of the benchmark. Convention is for it to start with the name of the collection being
- * tested, continuing '.' and ending with the name of the specific functionality being benchmarked.
- * @return
- */
- def name: String = companion.fullname
- def collectionName: String = companion.collectionName
- def benchName: String = companion.benchName
-
- def companion: BenchCompanion
-
- def runseq: Unit
-
- def runpar: Unit
-
- /**
- * Describes the number of runs of the test.
- */
- val runs = 10
-
- /**
- * Returns the number of repetitions for this benchmark.
- */
- def repetitionsPerRun = 500
-
- /**
- * Resets the benchmark object. Typically, this means recreating
- * the collection being tested.
- */
- def reset: Unit
-
- /**
- * Returns a map of available comparison tests.
- */
- def comparisons: List[String] = companion.comparisons
-
- def comparison(name: String): Option[() => Unit] = comparisonMap.get(name)
-
- def comparisonMap: Map[String, () => Unit]
-
- def run = runWhat match {
- case "seq" => for (i <- 0 until repetitionsPerRun) runseq
- case "par" => for (i <- 0 until repetitionsPerRun) runpar
- case _ => comparison(runWhat) match {
- case Some(fun) => for (i <- 0 until repetitionsPerRun) fun()
- case None => throw new IllegalArgumentException("Unknown bench option: `" + runWhat +
- "`, need `seq`, `par` or one of: " + comparisons.mkString("`", "`, `", "`"))
- }
- }
-
- /**
- * Prints results of the benchmark. May be overidden in benchmarks.
- */
- def printResults {}
-
- def executeBenchmark = {
- println("-----------------------")
- print(name + ", " + runWhat + ", par.=" + parallelism + ", sz=" + niceSize + ": ")
-
- val times = runBenchmark(runs)
-
- for (t <- times) print(t + " ")
- println
- printResults
- }
-
- private def niceSize = if (size < 1000 || size % 1000 != 0) size.toString else size / 1000 + "k"
-}
-
-
-trait HavingResult[T] extends Bench {
- var runresult: T = null.asInstanceOf[T]
-
- abstract override def printResults {
- println("result: " + (if (runresult != null) runresult else "<not set>"))
- super.printResults
- }
-}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/arrays/Arrays.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/arrays/Arrays.scala
deleted file mode 100644
index fd3b4aab08..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/arrays/Arrays.scala
+++ /dev/null
@@ -1,63 +0,0 @@
-package scala.collection.parallel.benchmarks.arrays
-
-
-
-
-
-
-
-object Arrays {
-
- @inline def genericApply[T](xs: Array[T], idx: Int): T = xs.asInstanceOf[AnyRef] match {
- case x: Array[AnyRef] => x(idx).asInstanceOf[T]
- case _ => genericApplyNotAnyRef(xs, idx)
- }
-
- @noinline private def genericApplyNotAnyRef[T](xs: Array[T], idx: Int): T = xs.asInstanceOf[AnyRef] match {
- case x: Array[Int] => x(idx).asInstanceOf[T]
- case x: Array[Double] => x(idx).asInstanceOf[T]
- case x: Array[Long] => x(idx).asInstanceOf[T]
- case x: Array[Float] => x(idx).asInstanceOf[T]
- case x: Array[Char] => x(idx).asInstanceOf[T]
- case x: Array[Byte] => x(idx).asInstanceOf[T]
- case x: Array[Short] => x(idx).asInstanceOf[T]
- case x: Array[Boolean] => x(idx).asInstanceOf[T]
- case x: Array[Unit] => x(idx).asInstanceOf[T]
- case null => throw new NullPointerException
- }
-
- @inline def apply(xs: AnyRef, idx: Int): Any = xs match {
- case x: Array[AnyRef] => x(idx).asInstanceOf[Any]
- case _ => applyNotAnyRef(xs, idx)
- }
-
- @noinline private def applyNotAnyRef(xs: AnyRef, idx: Int): Any = xs match {
- case x: Array[Int] => x(idx).asInstanceOf[Any]
- case x: Array[Double] => x(idx).asInstanceOf[Any]
- case x: Array[Long] => x(idx).asInstanceOf[Any]
- case x: Array[Float] => x(idx).asInstanceOf[Any]
- case x: Array[Char] => x(idx).asInstanceOf[Any]
- case x: Array[Byte] => x(idx).asInstanceOf[Any]
- case x: Array[Short] => x(idx).asInstanceOf[Any]
- case x: Array[Boolean] => x(idx).asInstanceOf[Any]
- case x: Array[Unit] => x(idx).asInstanceOf[Any]
- case null => throw new NullPointerException
- }
-
-}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/arrays/Dummy.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/arrays/Dummy.scala
deleted file mode 100644
index 56af7b9d85..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/arrays/Dummy.scala
+++ /dev/null
@@ -1,22 +0,0 @@
-package scala.collection.parallel.benchmarks.arrays
-
-
-
-
-case class Dummy(in: Int) {
- def op = {}
-}
-
-object Dummy {
- def dummyOp(a: Int) = { if (a < 0) -1 }
- def dummyOp(d: Dummy) = { if (d.in < 0) d.op }
-}
-
-
-
-
-
-
-
-
-
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/arrays/IntAccess.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/arrays/IntAccess.scala
deleted file mode 100644
index 948ecb419e..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/arrays/IntAccess.scala
+++ /dev/null
@@ -1,68 +0,0 @@
-package scala.collection.parallel.benchmarks.arrays
-
-
-import scala.collection.parallel.benchmarks._
-
-
-
-object IntAccess extends BenchCompanion {
- def collectionName = "array";
- def benchName = "access-int";
- def apply(sz: Int, p: Int, what: String) = new IntAccess(sz, p, what)
- override def comparisons = List("any", "cast", "manif", "unknown")
- override def defaultSize = 100000
-}
-
-
-class IntAccess(sz: Int, p: Int, what: String)
-extends Resetting(n => n, sz, p, what) with UnknownManif[Int] {
- def companion = IntAccess
-
- def runseq {}
- def runpar {}
-
- def runany = {
- var i = 0
- while (i < sz) {
- val d = anyarray(i).asInstanceOf[Int]
- i += 1
- }
- }
-
- def runcast = {
- var i = 0
- while (i < sz) {
- val d = Arrays.apply(castarray, i).asInstanceOf[Int]
- i += 1
- }
- }
-
- def runmanif = {
- var i = 0
- while (i < sz) {
- val d = manifarray(i)
- if (op(d)) i += 1
- i += 1
- }
- }
-
- def op(a: Int) = a < 0
-
- def comparisonMap = collection.Map("any" -> runany _, "cast" -> runcast _,
- "manif" -> runmanif _, "unknown" -> rununknown _)
-
-}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/arrays/ObjectAccess.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/arrays/ObjectAccess.scala
deleted file mode 100644
index 3cc38f1b58..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/arrays/ObjectAccess.scala
+++ /dev/null
@@ -1,75 +0,0 @@
-package scala.collection.parallel.benchmarks.arrays
-
-
-import scala.collection.parallel.benchmarks._
-
-
-
-object ObjectAccess extends BenchCompanion {
- def collectionName = "array";
- def benchName = "access-obj";
- def apply(sz: Int, p: Int, what: String) = new ObjectAccess(sz, p, what)
- override def comparisons = List("any", "cast", "gencast", "manif", "unknown")
- override def defaultSize = 100000
-}
-
-
-class ObjectAccess(sz: Int, p: Int, what: String)
-extends Resetting(Dummy(_), sz, p, what) with UnknownManif[Dummy] {
- def companion = ObjectAccess
-
- def runseq {}
- def runpar {}
-
- def runany = {
- var i = 0
- while (i < sz) {
- val d = anyarray(i).asInstanceOf[Dummy]
- Dummy.dummyOp(d)
- i += 1
- }
- }
-
- def runcast = {
- var i = 0
- while (i < sz) {
- val d = Arrays.apply(castarray, i).asInstanceOf[Dummy]
- i += 1
- }
- }
-
- def rungenericcast = {
- var i = 0
- while (i < sz) {
- val d = Arrays.genericApply(gencastarray, i)
- i += 1
- }
- }
-
- def runmanif = {
- var i = 0
- while (i < sz) {
- val d = manifarray(i)
- if (d.in < 0) i += 1
- i += 1
- }
- }
-
- def comparisonMap = collection.Map("any" -> runany _, "cast" -> runcast _, "gencast" -> rungenericcast _,
- "manif" -> runmanif _, "unknown" -> rununknown _)
-
-}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/arrays/Resetting.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/arrays/Resetting.scala
deleted file mode 100644
index 9e6102fb94..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/arrays/Resetting.scala
+++ /dev/null
@@ -1,39 +0,0 @@
-package scala.collection.parallel.benchmarks.arrays
-
-
-import scala.collection.parallel.benchmarks._
-
-
-abstract class Resetting[T: Manifest](elemcreate: Int => T, sz: Int, p: Int, what: String)
-extends Bench {
- val size = sz
- val parallelism = p
- val runWhat = what
-
- var anyarray: Array[Any] = null
- var castarray: AnyRef = null
- var gencastarray: Array[T] = null
- var manifarray: Array[T] = null
-
- reset
-
- def reset = what match {
- case "any" =>
- anyarray = new Array[Any](sz)
- for (i <- 0 until sz) anyarray(i) = elemcreate(i)
- case "cast" =>
- val arr = new Array[T](sz)
- for (i <- 0 until sz) arr(i) = elemcreate(i)
- castarray = arr
- case "gencast" =>
- gencastarray = new Array[T](sz)
- for (i <- 0 until sz) gencastarray(i) = elemcreate(i)
- case "manif" =>
- manifarray = new Array[T](sz)
- for (i <- 0 until sz) manifarray(i) = elemcreate(i)
- case "unknown" =>
- manifarray = new Array[T](sz)
- for (i <- 0 until sz) manifarray(i) = elemcreate(i)
- case _ =>
- }
-}
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/arrays/UnknownManif.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/arrays/UnknownManif.scala
deleted file mode 100644
index d7196c0277..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/arrays/UnknownManif.scala
+++ /dev/null
@@ -1,38 +0,0 @@
-package scala.collection.parallel.benchmarks.arrays
-
-
-
-
-trait UnknownManif[T] {
- def manifarray: Array[T]
- def size: Int
-
- def rununknown {
- val arr = manifarray
- val sz = size
- var i = 0
- while (i < sz) {
- val d = arr(i)
- op(d)
- i += 1
- }
- }
-
- def op(d: Any) {}
-}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/generic/Dummy.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/generic/Dummy.scala
deleted file mode 100644
index 2b2ad81af6..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/generic/Dummy.scala
+++ /dev/null
@@ -1,59 +0,0 @@
-package scala.collection.parallel.benchmarks.generic
-
-
-
-
-class Dummy(val in: Int) {
- var num = in
- override def toString = in.toString
- override def hashCode = in
-}
-
-
-object DummyOperators extends Operators[Dummy] {
- val reducer = (a: Dummy, b: Dummy) => {
- var i = 0
- if (a.in > b.in) {
- a.num = a.in + b.in + i
- a
- } else {
- b.num = a.in + b.in + i
- b
- }
- }
- private def rec(a: Int, b: Int): Int = {
- val result = if (b == 0) a else {
- rec(b, a - b * (a / b))
- }
- result + 1000
- }
- val mediumreducer = (a: Dummy, b: Dummy) => {
- var i = 0
- var sum = a.num + b.num
- b.num = rec(a.num, b.num)
- b
- }
- val filterer = (a: Dummy) => {
- a.in % 2 == 0
- }
- val mapper = (a: Dummy) => {
- a.num = a.in % 2
- a
- }
- val heavymapper = (a: Dummy) => {
- var i = -100
- while (i < 0) {
- if (a.in < i) a.num += 1
- i += 1
- }
- a
- }
- val taker = (a: Dummy) => {
- a.in >= 0
- }
-}
-
-
-
-
-
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/generic/Operators.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/generic/Operators.scala
deleted file mode 100644
index 1268f94bac..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/generic/Operators.scala
+++ /dev/null
@@ -1,52 +0,0 @@
-package scala.collection.parallel.benchmarks.generic
-
-
-
-
-
-
-trait Operators[T] {
-
- def reducer: (T, T) => T
- def mediumreducer: (T, T) => T
- def filterer: T => Boolean
- def mapper: T => T
- def mapper2: T => T = error("unsupported")
- def heavymapper: T => T
- def taker: T => Boolean
-
-}
-
-
-
-trait IntOperators extends Operators[Int] {
-
- val reducer: (Int, Int) => Int = _ + _
- val mediumreducer: (Int, Int) => Int = (a: Int, b: Int) => {
- val result = if (b == 0) a else {
- mediumreducer.apply(b, a - b * (a / b))
- }
- result + 1000
- }
- val filterer: Int => Boolean = _ % 2 == 0
- val mapper: Int => Int = _ * 2
- val heavymapper: Int => Int = (n: Int) => {
- var i = -10
- var sum = 0
- while (i < 0) {
- sum += -i
- i += 1
- }
- n + sum
- }
- val taker: Int => Boolean = _ < 10000
-
-}
-
-
-
-
-
-
-
-
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/generic/ParallelBenches.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/generic/ParallelBenches.scala
deleted file mode 100644
index 3db33ebaed..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/generic/ParallelBenches.scala
+++ /dev/null
@@ -1,227 +0,0 @@
-package scala.collection.parallel
-package benchmarks
-package generic
-
-
-
-
-
-
-
-trait ParallelIterableBench[T, Coll <: ParallelIterable[T]] extends collection.parallel.benchmarks.Bench {
-self =>
-
- protected var seqcoll: Iterable[T] = null
- protected var parcoll: Coll = null.asInstanceOf[Coll]
-
- reset
-
- def reset = runWhat match {
- case "seq" => this.seqcoll = createSequential(size, parallelism)
- case "par" => this.parcoll = createParallel(size, parallelism)
- case _ =>
- }
-
- def nameOfCollection: String
- def operators: Operators[T]
- def createSequential(sz: Int, p: Int): Iterable[T]
- def createParallel(sz: Int, p: Int): Coll
-
- trait IterableBenchCompanion extends BenchCompanion {
- def collectionName = self.nameOfCollection
- }
-
- trait IterableBench extends ParallelIterableBench[T, Coll] {
- def nameOfCollection = self.nameOfCollection
- def operators = self.operators
- def createSequential(sz: Int, p: Int) = self.createSequential(size, parallelism)
- def createParallel(sz: Int, p: Int) = self.createParallel(size, parallelism)
- def forkJoinPool: scala.concurrent.forkjoin.ForkJoinPool = self.forkJoinPool
- }
-
- def forkJoinPool: scala.concurrent.forkjoin.ForkJoinPool
-
- override def printResults {
- println(" --- Fork join pool state --- ")
- println("Parallelism: " + forkJoinPool.getParallelism)
- println("Active threads: " + forkJoinPool.getActiveThreadCount)
- println("Work stealings: " + forkJoinPool.getStealCount)
- }
-
-}
-
-
-trait ParallelSeqBench[T, Coll <: ParallelSeq[T]] extends ParallelIterableBench[T, Coll] {
- self =>
-
- def createSequential(sz: Int, p: Int): Seq[T]
-
- trait SeqBenchCompanion extends BenchCompanion {
- def collectionName = self.nameOfCollection
- }
-
- trait SeqBench extends IterableBench with ParallelSeqBench[T, Coll] {
- override def createSequential(sz: Int, p: Int) = self.createSequential(size, parallelism)
- }
-
-}
-
-
-trait NotBenchmark {
- lazy val runWhat = "";
- val size = -1
- val parallelism = -1
- def runpar {}
- def runseq {}
- def companion = throw new UnsupportedOperationException
-}
-
-
-/**
- * Standard benchmarks for collections.
- */
-trait StandardParallelIterableBench[T, Coll <: ParallelIterable[T]] extends ParallelIterableBench[T, Coll] {
-
- object Reduce extends IterableBenchCompanion {
- override def defaultSize = 50000
- def benchName = "reduce";
- def apply(sz: Int, p: Int, w: String) = new Reduce(sz, p, w)
- }
-
- class Reduce(val size: Int, val parallelism: Int, val runWhat: String)
- extends IterableBench with StandardParallelIterableBench[T, Coll] {
- def comparisonMap = collection.Map()
- def runseq = this.seqcoll.reduceLeft(operators.reducer)
- def runpar = this.parcoll.reduce(operators.reducer)
- def companion = Reduce
- }
-
- object ReduceMedium extends IterableBenchCompanion {
- override def defaultSize = 5000
- def benchName = "reduce-medium";
- def apply(sz: Int, p: Int, w: String) = new ReduceMedium(sz, p, w)
- }
-
- class ReduceMedium(val size: Int, val parallelism: Int, val runWhat: String)
- extends IterableBench with StandardParallelIterableBench[T, Coll] {
- def comparisonMap = collection.Map()
- def runseq = this.seqcoll.reduceLeft(operators.mediumreducer)
- def runpar = this.parcoll.reduce(operators.mediumreducer)
- def companion = ReduceMedium
- }
-
- object Map extends IterableBenchCompanion {
- override def defaultSize = 5000
- def benchName = "map";
- def apply(sz: Int, p: Int, w: String) = new Map(sz, p, w)
- }
-
- class Map(val size: Int, val parallelism: Int, val runWhat: String)
- extends IterableBench with StandardParallelIterableBench[T, Coll] {
- def comparisonMap = collection.Map()
- def runseq = this.seqcoll.map(operators.mapper)
- def runpar = this.parcoll.map(operators.mapper)
- def companion = Map
- }
-
-}
-
-
-
-/**
- * Benchmarks for sequence views.
- */
-trait ParallelSeqViewBench[T, Coll <: ParallelSeqView[T, ParallelSeq[T], CollSeq], CollSeq] extends ParallelSeqBench[T, Coll] {
-
- object Reduce extends IterableBenchCompanion {
- override def defaultSize = 50000
- def benchName = "reduce";
- def apply(sz: Int, p: Int, w: String) = new Reduce(sz, p, w)
- }
-
- class Reduce(val size: Int, val parallelism: Int, val runWhat: String)
- extends SeqBench with ParallelSeqViewBench[T, Coll, CollSeq] {
- def comparisonMap = collection.Map()
- def runseq = this.seqcoll.reduceLeft(operators.reducer)
- def runpar = this.parcoll.reduce(operators.reducer)
- def companion = Reduce
- }
-
- object MediumReduce extends IterableBenchCompanion {
- override def defaultSize = 50000
- def benchName = "reduce-medium";
- def apply(sz: Int, p: Int, w: String) = new MediumReduce(sz, p, w)
- }
-
- class MediumReduce(val size: Int, val parallelism: Int, val runWhat: String)
- extends SeqBench with ParallelSeqViewBench[T, Coll, CollSeq] {
- def comparisonMap = collection.Map()
- def runseq = this.seqcoll.reduceLeft(operators.mediumreducer)
- def runpar = this.parcoll.reduce(operators.mediumreducer)
- def companion = Reduce
- }
-
- object ModifyThenReduce extends SeqBenchCompanion {
- override def defaultSize = 20000
- def benchName = "modify-then-reduce";
- def apply(sz: Int, p: Int, w: String) = new ModifyThenReduce(sz, p, w)
- }
-
- class ModifyThenReduce(val size: Int, val parallelism: Int, val runWhat: String)
- extends SeqBench with ParallelSeqViewBench[T, Coll, CollSeq] {
- val toadd = createSequential(size, parallelism)
- def comparisonMap = collection.Map()
- def runseq = {
- val modified = (seqcoll ++ toadd).drop(size).map(operators.mapper).++(toadd).take(size)
- modified.reduceLeft(operators.reducer)
- }
- def runpar = {
- val modified = (parcoll ++ toadd).drop(size).map(operators.mapper).++(toadd).take(size)
- modified.reduce(operators.reducer)
- }
- def companion = ModifyThenReduce
- }
-
- object ModifyThenForce extends SeqBenchCompanion {
- override def defaultSize = 20000
- def benchName = "modify-then-force";
- def apply(sz: Int, p: Int, w: String) = new ModifyThenForce(sz, p, w)
- }
-
- class ModifyThenForce(val size: Int, val parallelism: Int, val runWhat: String)
- extends SeqBench with ParallelSeqViewBench[T, Coll, CollSeq] {
- val toadd = createSequential(size, parallelism)
- def comparisonMap = collection.Map()
- def runseq = (seqcoll ++ toadd).drop(size).map(operators.mapper).++(toadd).take(size)
- def runpar = {
- val r: ParallelSeqView[T, ParallelSeq[T], Seq[T]] = (parcoll ++ toadd).drop(size).map(operators.mapper).++(toadd).take(size)
- r.force
- }
- def companion = ModifyThenForce
- }
-
-}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/hashtries/Combine.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/hashtries/Combine.scala
deleted file mode 100644
index 3a070fb6ff..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/hashtries/Combine.scala
+++ /dev/null
@@ -1,66 +0,0 @@
-package scala.collection.parallel.benchmarks
-package hashtries
-
-
-
-
-import collection.immutable.{HashMap => HashTrie}
-import collection.mutable.HashMap
-
-
-
-
-
-
-class Combine(val size: Int, val parallelism: Int, val runWhat: String) extends Bench with IntInit {
- var thattrie = new HashTrie[Int, Int]
- for (i <- size until 2 * size) thattrie += ((i, i))
- val thatmap = new HashMap[Int, Int]
- for (i <- size until 2 * size) thatmap += ((i, i))
-
- def runpar = throw new UnsupportedOperationException
- def runseq = runhashtrie
- def runhashtrie = {
- hashtrie merge thattrie
- // println
- // println("both tries: " + HashTrie.bothtries)
- // println("one trie, one item: " + HashTrie.onetrie)
- // println("both single: " + HashTrie.bothsingle)
- // System exit 1
- }
- def rundestructive = {
- hashtrie merge thattrie
- }
- def runappendtrie = hashtrie ++ thattrie
- def runhashmap = hashmap ++ thatmap
- def companion = Combine
- def comparisonMap = Map("hashtrie" -> runhashtrie _, "hashmap" -> runhashmap _, "destruct" -> rundestructive _, "appendtrie" -> runappendtrie _)
- override def reset = runWhat match {
- case "appendtrie" => initHashTrie
- case "destruct" => initHashTrie
- case _ => super.reset
- }
-}
-
-
-object Combine extends BenchCompanion {
- def collectionName = "HashTrie"
- def benchName = "combine";
- def apply(sz: Int, p: Int, what: String) = new Combine(sz, p, what)
- override def defaultSize = 5000
-}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/hashtries/Construct.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/hashtries/Construct.scala
deleted file mode 100644
index 7c15df1fe2..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/hashtries/Construct.scala
+++ /dev/null
@@ -1,54 +0,0 @@
-package scala.collection.parallel.benchmarks
-package hashtries
-
-
-
-
-import collection.immutable.{HashMap => HashTrie}
-import collection.mutable.HashMap
-
-
-
-
-
-
-class Construct(val size: Int, val parallelism: Int, val runWhat: String) extends Bench {
- def reset {}
-
- def runpar = throw new UnsupportedOperationException
- def runseq = throw new UnsupportedOperationException
- def runhashmap = {
- val hashmap = new HashMap[Int, Int]
- for (i <- 0 until size) hashmap += ((i, i))
- }
- def runhashtrie = {
- var hashtrie = new HashTrie[Int, Int]
- for (i <- 0 until size) hashtrie += ((i, i))
- }
-
- def companion = Construct
- def comparisonMap = Map("hashmap" -> runhashmap _, "hashtrie" -> runhashtrie _)
-}
-
-
-object Construct extends BenchCompanion {
- def collectionName = "HashTrie"
- def benchName = "construct";
- def apply(sz: Int, p: Int, what: String) = new Construct(sz, p, what)
- override def defaultSize = 5000
-}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/hashtries/Foreach.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/hashtries/Foreach.scala
deleted file mode 100644
index f53ea02e36..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/hashtries/Foreach.scala
+++ /dev/null
@@ -1,45 +0,0 @@
-package scala.collection.parallel.benchmarks
-package hashtries
-
-
-
-
-import collection.immutable.{HashMap => HashTrie}
-import collection.mutable.HashMap
-
-
-
-
-
-
-class Foreach(val size: Int, val parallelism: Int, val runWhat: String) extends Bench with IntInit {
- def runpar = throw new UnsupportedOperationException
- def runseq = runhashtrie
- def runhashmap = hashmap.foreach(n => ())
- def runhashtrie = hashtrie.foreach(n => ())
- def companion = Foreach
- def comparisonMap = Map("hashmap" -> runhashmap _, "hashtrie" -> runhashtrie _)
-}
-
-
-object Foreach extends BenchCompanion {
- def collectionName = "HashTrie"
- def benchName = "foreach-light";
- def apply(sz: Int, p: Int, what: String) = new Foreach(sz, p, what)
- override def defaultSize = 25000
-}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/hashtries/IntInit.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/hashtries/IntInit.scala
deleted file mode 100644
index dbbe64e290..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/hashtries/IntInit.scala
+++ /dev/null
@@ -1,31 +0,0 @@
-package scala.collection.parallel.benchmarks
-package hashtries
-
-
-
-
-import collection.immutable.{HashMap => HashTrie}
-import collection.mutable.HashMap
-
-
-
-trait IntInit extends Bench {
- var hashmap: HashMap[Int, Int] = null
- var hashtrie: HashTrie[Int, Int] = null
-
- reset
- def reset = runWhat match {
- case "hashmap" => initHashMap
- case "hashtrie" => initHashTrie
- case "seq" => initHashTrie
- }
- def initHashTrie = {
- hashtrie = new HashTrie
- for (i <- 0 until size) hashtrie += ((i, i))
- }
- def initHashMap = {
- hashmap = new HashMap
- for (i <- 0 until size) hashmap += ((i, i))
- }
-
-}
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/hashtries/Iterate.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/hashtries/Iterate.scala
deleted file mode 100644
index d27aa200b8..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/hashtries/Iterate.scala
+++ /dev/null
@@ -1,51 +0,0 @@
-package scala.collection.parallel.benchmarks
-package hashtries
-
-
-
-
-import collection.immutable.{HashMap => HashTrie}
-import collection.mutable.HashMap
-
-
-
-
-
-
-class Iterate(val size: Int, val parallelism: Int, val runWhat: String) extends Bench with IntInit {
- def runpar = throw new UnsupportedOperationException
- def runseq = throw new UnsupportedOperationException
- def runhashmap = {
- val it = hashmap.iterator
- while (it.hasNext) it.next
- }
- def runhashtrie = {
- val it = hashtrie.iterator
- while (it.hasNext) it.next
- }
- def companion = Iterate
- def comparisonMap = Map("hashmap" -> runhashmap _, "hashtrie" -> runhashtrie _)
-}
-
-
-object Iterate extends BenchCompanion {
- def collectionName = "HashTrie"
- def benchName = "iterate-light";
- def apply(sz: Int, p: Int, what: String) = new Iterate(sz, p, what)
- override def defaultSize = 25000
-}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/hashtries/Lookup.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/hashtries/Lookup.scala
deleted file mode 100644
index 4ee8c17118..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/hashtries/Lookup.scala
+++ /dev/null
@@ -1,57 +0,0 @@
-package scala.collection.parallel.benchmarks
-package hashtries
-
-
-
-
-import collection.immutable.{HashMap => HashTrie}
-import collection.mutable.HashMap
-
-
-
-
-
-
-class Lookup(val size: Int, val parallelism: Int, val runWhat: String) extends Bench with IntInit {
- def runpar = throw new UnsupportedOperationException
- def runseq = throw new UnsupportedOperationException
- def runhashmap = {
- var i = 0
- while (i < size) {
- hashmap(i)
- i += 1
- }
- }
- def runhashtrie = {
- var i = 0
- while (i < size) {
- hashtrie(i)
- i += 1
- }
- }
- def companion = Iterate
- def comparisonMap = Map("hashmap" -> runhashmap _, "hashtrie" -> runhashtrie _)
-}
-
-
-object Lookup extends BenchCompanion {
- def collectionName = "HashTrie"
- def benchName = "lookup";
- def apply(sz: Int, p: Int, what: String) = new Lookup(sz, p, what)
- override def defaultSize = 25000
-}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/hashtries/MultipleCombine.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/hashtries/MultipleCombine.scala
deleted file mode 100644
index 033c211849..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/hashtries/MultipleCombine.scala
+++ /dev/null
@@ -1,87 +0,0 @@
-package scala.collection.parallel.benchmarks
-package hashtries
-
-
-
-
-import collection.immutable.{HashMap => HashTrie}
-import collection.mutable.HashMap
-
-
-
-
-
-
-class MultipleCombine(val size: Int, val parallelism: Int, val runWhat: String) extends Bench with IntInit {
- var combines = 10
-
- var thattries = new Array[HashTrie[Int, Int]](combines)
- def initTries = for (r <- 0 until combines) {
- var thattrie = new HashTrie[Int, Int]
- for (i <- ((r + 1) * size) until ((r + 2) * size)) thattrie += ((i, i))
- thattries(r) = thattrie
- }
- initTries
-
- val thatmaps = new Array[HashMap[Int, Int]](10)
- def initMaps = for (r <- 0 until combines) {
- var thatmap = new HashMap[Int, Int]
- for (i <- ((r + 1) * size) until ((r + 2) * size)) thatmap += ((i, i))
- thatmaps(r) = thatmap
- }
- initMaps
-
- override def repetitionsPerRun = 25
- def runpar = throw new UnsupportedOperationException
- def runseq = runhashtrie
- def runhashtrie = {
- initHashTrie
- var trie = hashtrie
- for (r <- 0 until combines) trie = trie merge thattries(r)
- }
- def runappendtrie = {
- initHashTrie
- var trie = hashtrie
- for (r <- 0 until combines) trie = trie ++ thattries(r)
- }
- def runhashmap = {
- initHashMap
- var map = hashmap
- for (r <- 0 until combines) map = map ++ thatmaps(r)
- }
- def rundestructive = {
- initHashTrie
- var trie = hashtrie
- for (r <- 0 until combines) trie = trie merge thattries(r)
- }
- def companion = MultipleCombine
- def comparisonMap = Map("hashtrie" -> runhashtrie _, "hashmap" -> runhashmap _, "appendtrie" -> runappendtrie _, "destruct" -> rundestructive _)
- override def reset = runWhat match {
- case "appendtrie" => initHashTrie
- case "destruct" => initHashTrie
- case _ => super.reset
- }
-}
-
-
-object MultipleCombine extends BenchCompanion {
- def collectionName = "HashTrie"
- def benchName = "multi-combine";
- def apply(sz: Int, p: Int, what: String) = new MultipleCombine(sz, p, what)
- override def defaultSize = 5000
-}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/hashtries/ParallelHashTries.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/hashtries/ParallelHashTries.scala
deleted file mode 100644
index c617f69161..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/hashtries/ParallelHashTries.scala
+++ /dev/null
@@ -1,121 +0,0 @@
-package scala.collection.parallel.benchmarks.hashtries
-
-
-
-
-import scala.collection.parallel.benchmarks.generic.StandardParallelIterableBench
-import scala.collection.parallel.benchmarks.generic.NotBenchmark
-import scala.collection.parallel.benchmarks.generic.Dummy
-import scala.collection.parallel.benchmarks.generic.Operators
-import scala.collection.parallel.immutable.ParallelHashTrie
-
-
-
-
-
-trait ParallelHashTrieBenches[K, V] extends StandardParallelIterableBench[(K, V), ParallelHashTrie[K, V]] {
-
- def nameOfCollection = "ParallelHashTrie"
- def comparisonMap = collection.Map()
- val forkJoinPool = new scala.concurrent.forkjoin.ForkJoinPool
-
- object Map2 extends IterableBenchCompanion {
- override def defaultSize = 5000
- def benchName = "map2";
- def apply(sz: Int, p: Int, w: String) = new Map2(sz, p, w)
- }
-
- class Map2(val size: Int, val parallelism: Int, val runWhat: String)
- extends IterableBench with StandardParallelIterableBench[(K, V), ParallelHashTrie[K, V]] {
- var result: Int = 0
- def comparisonMap = collection.Map()
- def runseq = {
- val r = this.seqcoll.asInstanceOf[collection.immutable.HashMap[K, V]].map(operators.mapper2)
- result = r.size
- }
- def runpar = {
- result = this.parcoll.map(operators.mapper2).size
- //println(collection.parallel.immutable.ParallelHashTrie.totalcombines)
- //System.exit(1)
- }
- def companion = Map2
- override def repetitionsPerRun = 50
- override def printResults {
- println("Total combines: " + collection.parallel.immutable.ParallelHashTrie.totalcombines)
- println("Size of last result: " + result)
- }
- }
-
-}
-
-
-
-
-
-object RefParallelHashTrieBenches extends ParallelHashTrieBenches[Dummy, Dummy] with NotBenchmark {
-
- type DPair = (Dummy, Dummy)
-
- object operators extends Operators[DPair] {
- def gcd(a: Int, b: Int): Int = {
- val result = if (b == 0) a else {
- gcd(b, a - b * (a / b))
- }
- result + 1000
- }
- def heavy(a: Int): Int = {
- var i = 0
- var sum = a
- while (i < 3000) {
- i += 1
- sum += a + i
- }
- sum
- }
- val reducer = (x: DPair, y: DPair) => {
- //y._2.num = x._2.in + y._2.in
- y
- }
- val mediumreducer = (x: DPair, y: DPair) => {
- y._2.num = gcd(x._2.in, y._2.in)
- y
- }
- val filterer = (p: DPair) => {
- p._1.num % 2 == 0
- }
- val mapper = (p: DPair) => {
- val a = p._1
- a.num = a.in % 2
- (a, p._2)
- }
- override val mapper2 = (p: DPair) => {
- val a = 1 //heavy(p._1.in)
- (new Dummy(p._1.in * -2 + a), p._2)
- }
- val heavymapper = (p: DPair) => {
- val a = p._1
- var i = -100
- while (i < 0) {
- if (a.in < i) a.num += 1
- i += 1
- }
- (a, p._2)
- }
- val taker = (p: DPair) => true
- }
-
- def createSequential(sz: Int, p: Int) = {
- var ht = new collection.immutable.HashMap[Dummy, Dummy]
- for (i <- 0 until sz) ht += ((new Dummy(i), new Dummy(i)))
- ht
- }
-
- def createParallel(sz: Int, p: Int) = {
- var pht = new ParallelHashTrie[Dummy, Dummy]
- for (i <- 0 until sz) pht += ((new Dummy(i), new Dummy(i)))
- forkJoinPool.setParallelism(p)
- pht.environment = forkJoinPool
- pht
- }
-
-}
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/AggregateLight.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/AggregateLight.scala
deleted file mode 100644
index 540c7550c7..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/AggregateLight.scala
+++ /dev/null
@@ -1,39 +0,0 @@
-package scala.collection.parallel.benchmarks.parallel_array
-
-
-import scala.collection.parallel.benchmarks._
-import scala.collection.parallel.mutable.ParallelArray
-import extra166y.{ParallelArray => JSR166Array}
-
-
-object AggregateLight extends Companion {
- def benchName = "aggregate-light";
- def apply(sz: Int, parallelism: Int, what: String) = new AggregateLight(sz, parallelism, what)
- override def comparisons = List()
- override def defaultSize = 200000
-
- val seqop = (a: Cont, b: Cont) => b
- val combop = (a: Cont, b: Cont) => a
-}
-
-
-class AggregateLight(sz: Int, p: Int, what: String)
-extends Resettable[Cont](sz, p, what, new Cont(_), new Array[Any](_), classOf[Cont]) {
- def companion = AggregateLight
- override def repetitionsPerRun = 350
- override val runs = 20
-
- def runpar = pa.aggregate(new Cont(0))(companion.seqop, companion.combop)
- def runseq = sequentialReduce(companion.seqop, sz, new Cont(0))
- override def comparisonMap = collection.Map()
-}
-
-
-
-
-
-
-
-
-
-
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/Companion.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/Companion.scala
deleted file mode 100644
index 744351a39b..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/Companion.scala
+++ /dev/null
@@ -1,9 +0,0 @@
-package scala.collection.parallel.benchmarks.parallel_array
-
-
-import scala.collection.parallel.benchmarks._
-
-
-trait Companion extends BenchCompanion {
- def collectionName = "ParallelArray"
-}
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/CopyToArray.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/CopyToArray.scala
deleted file mode 100644
index 0f743eeb96..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/CopyToArray.scala
+++ /dev/null
@@ -1,21 +0,0 @@
-package scala.collection.parallel.benchmarks.parallel_array
-
-
-
-
-object CopyToArray extends Companion {
- def benchName = "copytoarray";
- def apply(sz: Int, parallelism: Int, what: String) = new CopyToArray(sz, parallelism, what)
- override def comparisons = List()
- override def defaultSize = 200000
-}
-
-class CopyToArray(sz: Int, p: Int, what: String)
-extends Resettable(sz, p, what, new Cont(_), new Array[Any](_), classOf[Cont]) {
- def companion = CopyToArray
- val destarr = new Array[Any](sz)
-
- def runpar = pa.copyToArray(destarr, 0, sz)
- def runseq = sequentialCopyToArray(destarr, 0, sz)
- def comparisonMap = collection.Map()
-}
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/Corresponds.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/Corresponds.scala
deleted file mode 100644
index 2e461460a8..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/Corresponds.scala
+++ /dev/null
@@ -1,47 +0,0 @@
-package scala.collection.parallel.benchmarks
-package parallel_array
-
-
-
-
-
-
-
-class Corresponds(sz: Int, p: Int, what: String)
-extends Resettable[Cont](sz, p, what, (i: Int) => new Cont(i), new Array[Any](_), classOf[Cont])
-with HavingResult[Boolean] {
- def companion = Corresponds
- override def repetitionsPerRun = 400
-
- val same = {
- val p = new collection.parallel.mutable.ParallelArray[Cont](sz)
- for (i <- 0 until sz) p(i) = what match {
- case "seq" => arr(i).asInstanceOf[Cont]
- case "par" => pa(i)
- }
- p
- }
-
- def runpar = runresult = pa.corresponds(same)(corr)
- def runseq = runresult = sequentialCorresponds(same, corr, sz)
- override def comparisonMap = collection.Map()
-
- val corr = (a: Cont, b: Cont) => a.in == b.in
-}
-
-object Corresponds extends Companion {
- def benchName = "corresponds";
- def apply(sz: Int, p: Int, what: String) = new Corresponds(sz, p, what)
- override def comparisons = List()
-}
-
-
-
-
-
-
-
-
-
-
-
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/CountHeavy.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/CountHeavy.scala
deleted file mode 100644
index 722d721288..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/CountHeavy.scala
+++ /dev/null
@@ -1,36 +0,0 @@
-package scala.collection.parallel.benchmarks.parallel_array
-
-
-
-
-object CountHeavy extends Companion {
- def benchName = "count-heavy";
- def apply(sz: Int, parallelism: Int, what: String) = new CountHeavy(sz, parallelism, what)
- override def comparisons = List("jsr")
- override def defaultSize = 16
-
- val pred = (a: Cont) => heavyCheck(a)
- val predjsr = new extra166y.Ops.Predicate[Cont] {
- def op(a: Cont) = heavyCheck(a)
- }
-
- def heavyCheck(a: Cont) = {
- val n = a.in
- (n until (n + 200)).map(checkPrime(_)).reduceLeft(_ && _)
- }
- def checkPrime(n: Int) = {
- var isPrime = true
- for (i <- 2 until (scala.math.sqrt(n).toInt + 1)) if (n % i == 0) isPrime = false
- isPrime
- }
-}
-
-class CountHeavy(sz: Int, p: Int, what: String)
-extends Resettable(sz, p, what, new Cont(_), new Array[Any](_), classOf[Cont]) {
- def companion = CountHeavy
-
- def runpar = pa.count(CountHeavy.pred)
- def runseq = sequentialCount(CountHeavy.pred, sz)
- def runjsr = jsrarr.withFilter(CountHeavy.predjsr).size
- def comparisonMap = collection.Map("jsr" -> runjsr _)
-}
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/CountLight.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/CountLight.scala
deleted file mode 100644
index 87eb07452f..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/CountLight.scala
+++ /dev/null
@@ -1,21 +0,0 @@
-package scala.collection.parallel.benchmarks.parallel_array
-
-
-
-
-object CountLight extends Companion {
- def benchName = "count-light";
- def apply(sz: Int, parallelism: Int, what: String) = new CountLight(sz, parallelism, what)
- override def comparisons = List("jsr")
- override def defaultSize = 200000
-}
-
-class CountLight(sz: Int, p: Int, what: String)
-extends Resettable(sz, p, what, new Cont(_), new Array[Any](_), classOf[Cont]) {
- def companion = CountLight
-
- def runpar = pa.count(Cont.pred)
- def runseq = sequentialCount(Cont.pred, sz)
- def runjsr = jsrarr.withFilter(Cont.predjsr).size
- def comparisonMap = collection.Map("jsr" -> runjsr _)
-}
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/CountList.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/CountList.scala
deleted file mode 100644
index 0d9550d2bd..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/CountList.scala
+++ /dev/null
@@ -1,30 +0,0 @@
-package scala.collection.parallel.benchmarks.parallel_array
-
-
-
-
-object CountList extends Companion {
- def benchName = "count-list";
- def apply(sz: Int, parallelism: Int, what: String) = new CountList(sz, parallelism, what)
- override def comparisons = List("jsr")
- override def defaultSize = 1000
-
- val listCreator = (i: Int) => (0 until (i % 50 + 50)).toList
- val pred = (lst: List[Int]) => check(lst)
- val predjsr = new extra166y.Ops.Predicate[List[Int]] {
- def op(lst: List[Int]) = check(lst)
- }
-
- def check(lst: List[Int]) = lst.foldLeft(0)((sum, n) => sum + n * n) % 2 == 0
-}
-
-class CountList(sz: Int, p: Int, what: String)
-extends Resettable(sz, p, what, CountList.listCreator, new Array[Any](_), classOf[List[Int]]) {
- def companion = CountList
- override def repetitionsPerRun = 250
-
- def runpar = pa.count(CountList.pred)
- def runseq = sequentialCount(CountList.pred, sz)
- def runjsr = jsrarr.withFilter(CountList.predjsr).size
- def comparisonMap = collection.Map("jsr" -> runjsr _)
-}
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/DiffHalf.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/DiffHalf.scala
deleted file mode 100644
index 3d4221d945..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/DiffHalf.scala
+++ /dev/null
@@ -1,48 +0,0 @@
-package scala.collection.parallel.benchmarks
-package parallel_array
-
-
-
-
-
-
-
-class DiffHalf(sz: Int, p: Int, what: String)
-extends Resettable[Cont](sz, p, what, (i: Int) => new Cont(i), new Array[Any](_), classOf[Cont])
-with HavingResult[Int] {
- def companion = DiffHalf
- override def repetitionsPerRun = 400
-
- val similar = {
- val p = new collection.parallel.mutable.ParallelArray[Cont](sz)
- for (i <- 0 until sz) p(i) = what match {
- case "seq" => arr(i).asInstanceOf[Cont]
- case "par" => pa(i)
- }
- p.drop(p.size / 2)
- }
-
- def runpar = runresult = pa.diff(similar).size
- def runseq = runresult = sequentialDiff(similar, sz).size
- override def comparisonMap = collection.Map()
-
- val corr = (a: Cont, b: Cont) => a.in == b.in
-}
-
-object DiffHalf extends Companion {
- def benchName = "diff-half";
- def apply(sz: Int, p: Int, what: String) = new DiffHalf(sz, p, what)
- override def comparisons = List()
- override def defaultSize = 10000
-}
-
-
-
-
-
-
-
-
-
-
-
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/DropMany.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/DropMany.scala
deleted file mode 100644
index d80ba91a29..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/DropMany.scala
+++ /dev/null
@@ -1,47 +0,0 @@
-package scala.collection.parallel.benchmarks.parallel_array
-
-
-import scala.collection.parallel.benchmarks._
-
-
-object DropMany extends Companion {
- def benchName = "drop-many";
- def apply(sz: Int, parallelism: Int, what: String) = new DropMany(sz, parallelism, what)
- override def comparisons = Nil
- override def defaultSize = 50000
-}
-
-class DropMany(sz: Int, p: Int, what: String)
-extends Resettable(sz, p, what, new Cont(_), new Array[Any](_), classOf[Cont])
-with HavingResult[Int] {
- def companion = DropMany
- override def repetitionsPerRun = 400
- runresult = -1
-
- def runpar = runresult = pa.drop(pa.size / 2).size
- def runseq = runresult = sequentialDrop(sz / 2, sz).size
- def comparisonMap = collection.Map()
-}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ExistsLight.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ExistsLight.scala
deleted file mode 100644
index 401ab38e0b..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ExistsLight.scala
+++ /dev/null
@@ -1,49 +0,0 @@
-package scala.collection.parallel.benchmarks.parallel_array
-
-
-import scala.collection.parallel.benchmarks._
-
-
-object ExistsLight extends Companion {
- def benchName = "exists-light";
- def apply(sz: Int, parallelism: Int, what: String) = new ExistsLight(sz, parallelism, what)
- override def comparisons = List("jsr")
- override def defaultSize = 200000
-
- val pred = (a: Cont) => a.in < 0
- val predjsr = new extra166y.Ops.Predicate[Cont] {
- def op(a: Cont) = a.in < 0
- }
-}
-
-class ExistsLight(sz: Int, p: Int, what: String)
-extends Resettable(sz, p, what, new Cont(_), new Array[Any](_), classOf[Cont])
-with HavingResult[Boolean] {
- def companion = ExistsLight
- runresult = false
-
- def runpar = runresult = pa.exists(ExistsLight.pred)
- def runseq = runresult = sequentialExists(ExistsLight.pred, sz)
- def runjsr = runresult = jsrarr.withFilter(ExistsLight.predjsr).size > 0
- def comparisonMap = collection.Map("jsr" -> runjsr _)
-}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/FilterLight.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/FilterLight.scala
deleted file mode 100644
index ee6545efbf..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/FilterLight.scala
+++ /dev/null
@@ -1,64 +0,0 @@
-package scala.collection.parallel.benchmarks.parallel_array
-
-
-import scala.collection.parallel.benchmarks._
-
-
-object FilterLight extends Companion {
- def benchName = "filter-light";
- def apply(sz: Int, parallelism: Int, what: String) = new FilterLight(sz, parallelism, what)
- override def comparisons = List("jsr")
- override def defaultSize = 10000
-
- val pred = (a: Cont) => check(a.in)
- val predjsr = new extra166y.Ops.Predicate[Cont] {
- def op(a: Cont) = check(a.in)
- }
-
- def check(n: Int) = {
- var res = n
-// var i = 1
-// while (i < 10) {
-// res += n % i
-// i += 1
-// }
- res % 2 == 0
- }
-}
-
-class FilterLight(sz: Int, p: Int, what: String)
-extends Resettable(sz, p, what, new Cont(_), new Array[Any](_), classOf[Cont])
-with HavingResult[Int] {
- def companion = FilterLight
- override def repetitionsPerRun = 250
- override val runs = 30
- runresult = -1
-
- def runpar = runresult = pa.filter(FilterLight.pred).size
- def runseq = runresult = sequentialFilter(FilterLight.pred, sz).size
- def runjsr = runresult = { jsrarr.withFilter(FilterLight.predjsr).all.size }
- def comparisonMap = collection.Map("jsr" -> runjsr _)
-}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/FindLight.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/FindLight.scala
deleted file mode 100644
index 11cb6c69fd..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/FindLight.scala
+++ /dev/null
@@ -1,52 +0,0 @@
-package scala.collection.parallel.benchmarks.parallel_array
-
-
-import scala.collection.parallel.benchmarks._
-
-
-object FindLight extends Companion {
- def benchName = "find-light";
- def apply(sz: Int, parallelism: Int, what: String) = new FindLight(sz, parallelism, what)
- override def comparisons = List("jsr")
- override def defaultSize = 200000
-
- val pred = (a: Cont) => a.in < -10
- val predjsr = new extra166y.Ops.Predicate[Cont] {
- def op(a: Cont) = a.in < -10
- }
-}
-
-class FindLight(sz: Int, p: Int, what: String)
-extends Resettable(sz, p, what, new Cont(_), new Array[Any](_), classOf[Cont])
-with HavingResult[Option[Cont]] {
- def companion = FindLight
- runresult = None
-
- def runpar = runresult = pa.find(FindLight.pred)
- def runseq = runresult = sequentialFind(FindLight.pred, sz)
- def runjsr = runresult = { jsrarr.withFilter(FindLight.predjsr).size > 0; None }
- def comparisonMap = collection.Map("jsr" -> runjsr _)
-}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/FlatMapLight.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/FlatMapLight.scala
deleted file mode 100644
index b1f8942d94..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/FlatMapLight.scala
+++ /dev/null
@@ -1,24 +0,0 @@
-package scala.collection.parallel.benchmarks.parallel_array
-
-
-
-
-
-
-object FlatMapLight extends Companion {
- def benchName = "flatmap-light";
- def apply(sz: Int, parallelism: Int, what: String) = new FlatMapLight(sz, parallelism, what)
- override def comparisons = List("jsr")
- override def defaultSize = 10000
-
- def fun = (a: Cont) => { List(1, 2, 3, 4, a.in) }
-}
-
-class FlatMapLight(sz: Int, p: Int, what: String)
-extends Resettable(sz, p, what, new Cont(_), new Array[Any](_), classOf[Cont]) {
- def companion = FlatMapLight
-
- def runpar = pa.flatMap(FlatMapLight.fun)
- def runseq = sequentialFlatMap(FlatMapLight.fun, sz)
- def comparisonMap = collection.Map()
-}
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ForallHeavy.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ForallHeavy.scala
deleted file mode 100644
index c354f65ec9..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ForallHeavy.scala
+++ /dev/null
@@ -1,59 +0,0 @@
-package scala.collection.parallel.benchmarks.parallel_array
-
-
-
-
-object ForallHeavy extends Companion {
- def benchName = "forall-heavy";
- def apply(sz: Int, parallelism: Int, what: String) = new ForallHeavy(sz, parallelism, what)
- override def comparisons = List("jsr")
- override def defaultSize = 16
-
- val pred = (a: Cont) => heavyCheck(a)
- val predjsr = new extra166y.Ops.Predicate[Cont] {
- def op(a: Cont) = heavyCheck(a)
- }
-
- def heavyCheck(a: Cont) = {
- val init = a.in + 1
- var cnt = init
- var i = 0
- while (i < 10000) {
- cnt = -2 * cnt
- cnt /= 2
- i += 1
- }
- cnt += init * 5 + 10
- cnt >= 0
- }
-}
-
-class ForallHeavy(sz: Int, p: Int, what: String)
-extends Resettable(sz, p, what, new Cont(_), new Array[Any](_), classOf[Cont]) {
- def companion = ForallHeavy
-
- def runpar = pa.forall(ForallHeavy.pred)
- def runseq = sequentialForall(ForallHeavy.pred, sz)
- def runjsr = jsrarr.withFilter(ForallHeavy.predjsr).size == sz
- def comparisonMap = collection.Map("jsr" -> runjsr _)
-}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ForallLight.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ForallLight.scala
deleted file mode 100644
index 079f2ccc32..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ForallLight.scala
+++ /dev/null
@@ -1,46 +0,0 @@
-package scala.collection.parallel.benchmarks.parallel_array
-
-
-
-
-object ForallLight extends Companion {
- def benchName = "forall-light";
- def apply(sz: Int, parallelism: Int, what: String) = new ForallLight(sz, parallelism, what)
- override def comparisons = List("jsr")
- override def defaultSize = 200000
-
- val pred = (a: Cont) => a.in >= 0
- val predjsr = new extra166y.Ops.Predicate[Cont] {
- def op(a: Cont) = a.in >= 0
- }
-}
-
-class ForallLight(sz: Int, p: Int, what: String)
-extends Resettable(sz, p, what, new Cont(_), new Array[Any](_), classOf[Cont]) {
- def companion = ForallLight
-
- def runpar = pa.forall(ForallLight.pred)
- def runseq = sequentialForall(ForallLight.pred, sz)
- def runjsr = jsrarr.withFilter(ForallLight.predjsr).size == sz
- def comparisonMap = collection.Map("jsr" -> runjsr _)
-}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ForallQuickStop.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ForallQuickStop.scala
deleted file mode 100644
index 310105dd41..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ForallQuickStop.scala
+++ /dev/null
@@ -1,46 +0,0 @@
-package scala.collection.parallel.benchmarks.parallel_array
-
-
-import scala.collection.parallel.benchmarks._
-
-
-object ForallQuickStop extends Companion {
- def benchName = "forall-quickstop";
- def apply(sz: Int, parallelism: Int, what: String) = new ForallQuickStop(sz, parallelism, what)
- override def defaultSize = 200000
-
- val pred = (a: Cont) => a.in != 50
- val predjsr = new extra166y.Ops.Predicate[Cont] {
- def op(a: Cont) = a.in != 50
- }
-}
-
-class ForallQuickStop(sz: Int, p: Int, what: String)
-extends Resettable(sz, p, what, new Cont(_), new Array[Any](_), classOf[Cont])
-with HavingResult[Boolean] {
- def companion = ForallQuickStop
-
- def runpar = runresult = pa.forall(ForallQuickStop.pred)
- def runseq = runresult = sequentialForall(ForallQuickStop.pred, sz)
- def comparisonMap = collection.Map()
-}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ForallStop80k.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ForallStop80k.scala
deleted file mode 100644
index cbfa6ebb42..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ForallStop80k.scala
+++ /dev/null
@@ -1,46 +0,0 @@
-package scala.collection.parallel.benchmarks.parallel_array
-
-
-import scala.collection.parallel.benchmarks._
-
-
-object ForallStop80k extends Companion {
- def benchName = "forall-stop80k";
- def apply(sz: Int, parallelism: Int, what: String) = new ForallStop80k(sz, parallelism, what)
- override def defaultSize = 100000
-
- val pred = (a: Cont) => a.in != 80000
- val predjsr = new extra166y.Ops.Predicate[Cont] {
- def op(a: Cont) = a.in != 80000
- }
-}
-
-class ForallStop80k(sz: Int, p: Int, what: String)
-extends Resettable(sz, p, what, new Cont(_), new Array[Any](_), classOf[Cont])
-with HavingResult[Boolean] {
- def companion = ForallStop80k
-
- def runpar = runresult = pa.forall(ForallStop80k.pred)
- def runseq = runresult = sequentialForall(ForallStop80k.pred, sz)
- def comparisonMap = collection.Map()
-}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ForeachHeavy.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ForeachHeavy.scala
deleted file mode 100644
index 17ad2f9882..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ForeachHeavy.scala
+++ /dev/null
@@ -1,42 +0,0 @@
-package scala.collection.parallel.benchmarks.parallel_array
-
-
-
-
-object ForeachHeavy extends Companion {
- def benchName = "foreach-heavy";
- def apply(sz: Int, parallelism: Int, what: String) = new ForeachHeavy(sz, parallelism, what)
- override def comparisons = List("jsr")
- override def defaultSize = 16
-
- val fun = (a: Cont) => heavyOperation(a)
- val funjsr = new extra166y.Ops.Procedure[Cont] {
- def op(a: Cont) = heavyOperation(a)
- }
-
- def heavyOperation(a: Cont) {
- checkPrime(a.in + 1000000000)
- }
-
- def checkPrime(n: Int) = {
- var isPrime = true
- var i = 2
- val until = scala.math.sqrt(n).toInt + 1
- while (i < until) {
- if (n % i == 0) isPrime = false
- i += 1
- }
- isPrime
- }
-}
-
-class ForeachHeavy(sz: Int, p: Int, what: String)
-extends Resettable(sz, p, what, new Cont(_), new Array[Any](_), classOf[Cont]) {
- def companion = ForeachHeavy
- override def repetitionsPerRun = 250
-
- def runpar = pa.foreach(ForeachHeavy.fun)
- def runseq = sequentialForeach(ForeachHeavy.fun, sz)
- def runjsr = jsrarr.apply(ForeachHeavy.funjsr)
- def comparisonMap = collection.Map("jsr" -> runjsr _)
-}
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ForeachLight.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ForeachLight.scala
deleted file mode 100644
index 79901148ac..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ForeachLight.scala
+++ /dev/null
@@ -1,26 +0,0 @@
-package scala.collection.parallel.benchmarks.parallel_array
-
-
-
-
-object ForeachLight extends Companion {
- def benchName = "foreach-light";
- def apply(sz: Int, parallelism: Int, what: String) = new ForeachLight(sz, parallelism, what)
- override def comparisons = List("jsr")
- override def defaultSize = 200000
-
- val fun = (a: Cont) => a.num = a.in
- val funjsr = new extra166y.Ops.Procedure[Cont] {
- def op(a: Cont) = a.num = a.in
- }
-}
-
-class ForeachLight(sz: Int, p: Int, what: String)
-extends Resettable(sz, p, what, new Cont(_), new Array[Any](_), classOf[Cont]) {
- def companion = ForeachLight
-
- def runpar = pa.foreach(ForeachLight.fun)
- def runseq = sequentialForeach(ForeachLight.fun, sz)
- def runjsr = jsrarr.apply(ForeachLight.funjsr)
- def comparisonMap = collection.Map("jsr" -> runjsr _)
-}
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/IndexWhere.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/IndexWhere.scala
deleted file mode 100644
index e8a74286ae..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/IndexWhere.scala
+++ /dev/null
@@ -1,47 +0,0 @@
-package scala.collection.parallel.benchmarks
-package parallel_array
-
-
-
-
-
-
-
-class IndexWhere(sz: Int, p: Int, what: String)
-extends Resettable[Cont](sz, p, what, (i: Int) => new Cont(i), new Array[Any](_), classOf[Cont])
-with HavingResult[Int] {
- def companion = IndexWhere
- override def repetitionsPerRun = 400
-
- def runpar = runresult = pa.indexWhere(IndexWhere.pred2, 0)
- def runseq = runresult = sequentialIndexWhere(IndexWhere.pred2, 0, sz)
- override def comparisonMap = collection.Map()
-}
-
-object IndexWhere extends Companion {
- def benchName = "index-where";
- def apply(sz: Int, p: Int, what: String) = new IndexWhere(sz, p, what)
- override def comparisons = List()
-
- val pred = (c: Cont) => {
- var in = c.in
- var i = 2
- while (i < 5) {
- if (in % i == 0) in = 0
- i += 1
- }
- c.in >= 0 && in == -1
- }
- val pred2 = (c: Cont) => c.in == 280000
-}
-
-
-
-
-
-
-
-
-
-
-
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/IntersectHalf.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/IntersectHalf.scala
deleted file mode 100644
index 4d71bf8590..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/IntersectHalf.scala
+++ /dev/null
@@ -1,48 +0,0 @@
-package scala.collection.parallel.benchmarks
-package parallel_array
-
-
-
-
-
-
-
-class IntersectHalf(sz: Int, p: Int, what: String)
-extends Resettable[Cont](sz, p, what, (i: Int) => new Cont(i), new Array[Any](_), classOf[Cont])
-with HavingResult[Int] {
- def companion = IntersectHalf
- override def repetitionsPerRun = 400
-
- val similar = {
- val p = new collection.parallel.mutable.ParallelArray[Cont](sz)
- for (i <- 0 until sz) p(i) = what match {
- case "seq" => arr(i).asInstanceOf[Cont]
- case "par" => pa(i)
- }
- p.drop(p.size / 2)
- }
-
- def runpar = runresult = pa.intersect(similar).size
- def runseq = runresult = sequentialIntersect(similar, sz).size
- override def comparisonMap = collection.Map()
-
- val corr = (a: Cont, b: Cont) => a.in == b.in
-}
-
-object IntersectHalf extends Companion {
- def benchName = "intersect-half";
- def apply(sz: Int, p: Int, what: String) = new IntersectHalf(sz, p, what)
- override def comparisons = List()
- override def defaultSize = 10000
-}
-
-
-
-
-
-
-
-
-
-
-
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/LastIndexWhere.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/LastIndexWhere.scala
deleted file mode 100644
index dbba807390..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/LastIndexWhere.scala
+++ /dev/null
@@ -1,47 +0,0 @@
-package scala.collection.parallel.benchmarks
-package parallel_array
-
-
-
-
-
-
-
-class LastIndexWhere(sz: Int, p: Int, what: String)
-extends Resettable[Cont](sz, p, what, (i: Int) => new Cont(i), new Array[Any](_), classOf[Cont])
-with HavingResult[Int] {
- def companion = LastIndexWhere
- override def repetitionsPerRun = 400
-
- def runpar = runresult = pa.lastIndexWhere(LastIndexWhere.pred2, pa.size - 1)
- def runseq = runresult = sequentialLastIndexWhere(LastIndexWhere.pred2, sz - 1, sz)
- override def comparisonMap = collection.Map()
-}
-
-object LastIndexWhere extends Companion {
- def benchName = "last-index-where";
- def apply(sz: Int, p: Int, what: String) = new LastIndexWhere(sz, p, what)
- override def comparisons = List()
-
- val pred = (c: Cont) => {
- var in = c.in
- var i = 2
- while (i < 5) {
- if (in % i == 0) in = 0
- i += 1
- }
- c.in >= 0 || in == 0
- }
- val pred2 = (c: Cont) => c.in == 500
-}
-
-
-
-
-
-
-
-
-
-
-
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/MapLight.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/MapLight.scala
deleted file mode 100644
index f6a5985cb7..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/MapLight.scala
+++ /dev/null
@@ -1,27 +0,0 @@
-package scala.collection.parallel.benchmarks.parallel_array
-
-
-
-
-object MapLight extends Companion {
- def benchName = "map-light";
- def apply(sz: Int, parallelism: Int, what: String) = new MapLight(sz, parallelism, what)
- override def comparisons = List("jsr")
- override def defaultSize = 100000
-
- def fun = (a: Cont) => { a }
- def funjsr = new extra166y.Ops.Op[Cont, Cont] {
- def op(a: Cont) = { a }
- }
-}
-
-class MapLight(sz: Int, p: Int, what: String)
-extends Resettable(sz, p, what, new Cont(_), new Array[Any](_), classOf[Cont]) {
- def companion = MapLight
-
- def runpar = pa.map(MapLight.fun)
- def runseq = sequentialMap(MapLight.fun, sz)
-// def runseq = sequentialMapOpt(MapLight.fun, sz)
- def runjsr = jsrarr.replaceWithMapping(MapLight.funjsr).all
- def comparisonMap = collection.Map("jsr" -> runjsr _)
-}
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/MatrixMultiplication.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/MatrixMultiplication.scala
deleted file mode 100644
index a8bb5ea1ca..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/MatrixMultiplication.scala
+++ /dev/null
@@ -1,84 +0,0 @@
-package scala.collection.parallel.benchmarks.parallel_array
-
-
-
-import collection.parallel.immutable.ParallelRange
-
-
-object MatrixMultiplication extends Companion {
- def benchName = "matrix-mult";
- def apply(sz: Int, parallelism: Int, what: String) = new MatrixMultiplication(sz, parallelism, what)
- override def comparisons = List()
- override def defaultSize = 100
-}
-
-class MatrixMultiplication(sz: Int, p: Int, what: String)
-extends Resettable(sz, p, what, new Cont(_), new Array[Any](_), classOf[Cont]) {
- def companion = MatrixMultiplication
-
- val a = Matrix.unit[Int](sz)
- val b = Matrix.unit[Int](sz)
- var c = new Matrix[Int](sz)
-
- def runpar = c = a * b //{ c.assignProduct(a, b) } //; println("--------"); c.output }
- def runseq = throw new UnsupportedOperationException
- def comparisonMap = collection.Map()
-
- class Matrix[T](n: Int)(implicit num: Numeric[T], man: Manifest[T]) {
- val array = new Array[T](n * n)
-
- def apply(y: Int, x: Int) = array(y * n + x)
-
- def update(y: Int, x: Int, elem: T) = array(y * n + x) = elem
-
- def *(b: Matrix[T]) = {
- val m = new Matrix[T](n)
- m.assignProduct(this, b)
- m
- }
-
- def assignProduct(a: Matrix[T], b: Matrix[T]) = {
- val range = new ParallelRange(0, n * n, 1, false)
- range.environment = forkjoinpool
- for (i <- range) this(i / n, i % n) = calcProduct(a, b, i / n, i % n);
- }
-
- private def calcProduct(a: Matrix[T], b: Matrix[T], y: Int, x: Int): T = {
- import num._
- var sum = zero
- for (i <- 0 until n) sum += a(y, i) * b(i, x)
- sum
- }
-
- def output = for (y <- 0 until n) {
- for (x <- 0 until n) print(this(y, x))
- println
- }
- }
-
- object Matrix {
- def unit[T](n: Int)(implicit num: Numeric[T], man: Manifest[T]) = {
- val m = new Matrix[T](n)
- for (i <- 0 until n) m(i, i) = num.one
- m
- }
- }
-
-}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/MinLight.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/MinLight.scala
deleted file mode 100644
index 66cd29807a..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/MinLight.scala
+++ /dev/null
@@ -1,28 +0,0 @@
-package scala.collection.parallel.benchmarks.parallel_array
-
-
-
-
-
-/** Tests reduce method using an operator creating an object as a result. */
-class MinLight(sz: Int, p: Int, what: String)
-extends Resettable[Int](sz, p, what, (i: Int) => i, new Array[Any](_), classOf[Int]) {
- def companion = MinLight
- override def repetitionsPerRun = 400
-
- def runpar = pa.min(Ordering[Int])
- def runseq = sequentialMin(sz)
- override def comparisonMap = collection.Map()
-}
-
-object MinLight extends Companion {
- def benchName = "min-light";
- def apply(sz: Int, p: Int, what: String) = new MinLight(sz, p, what)
- override def comparisons = List()
-}
-
-
-
-
-
-
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/PadToDouble.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/PadToDouble.scala
deleted file mode 100644
index 3bbe99516b..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/PadToDouble.scala
+++ /dev/null
@@ -1,53 +0,0 @@
-package scala.collection.parallel.benchmarks
-package parallel_array
-
-
-
-
-
-
-
-class PadToDouble(sz: Int, p: Int, what: String)
-extends Resettable[Cont](sz, p, what, (i: Int) => new Cont(i), new Array[Any](_), classOf[Cont])
-with HavingResult[Int] {
- def companion = PadToDouble
- override def repetitionsPerRun = 400
-
- val similar = {
- val p = new collection.parallel.mutable.ParallelArray[Cont](sz)
- for (i <- 0 until sz) p(i) = what match {
- case "seq" => arr(i).asInstanceOf[Cont]
- case "par" => pa(i)
- }
- p.drop(p.size / 2)
- }
-
- def runpar = runresult = pa.padTo(size * 2, padder).size
- def runseq = runresult = sequentialPadTo(size * 2, padder, size).size
- override def comparisonMap = collection.Map()
-
- val padder = new Cont(0)
-}
-
-
-object PadToDouble extends Companion {
- def benchName = "padto-double";
- def apply(sz: Int, p: Int, what: String) = new PadToDouble(sz, p, what)
- override def comparisons = List()
- override def defaultSize = 25000
-}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/PartialMapLight.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/PartialMapLight.scala
deleted file mode 100644
index e06720ae37..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/PartialMapLight.scala
+++ /dev/null
@@ -1,24 +0,0 @@
-package scala.collection.parallel.benchmarks.parallel_array
-
-
-
-
-object PartialMapLight extends Companion {
- def benchName = "partmap-light";
- def apply(sz: Int, parallelism: Int, what: String) = new PartialMapLight(sz, parallelism, what)
- override def comparisons = List()
- override def defaultSize = 100000
-
- def fun: PartialFunction[Cont, Cont] = {
- case c: Cont if c.in >= 0 => c
- }
-}
-
-class PartialMapLight(sz: Int, p: Int, what: String)
-extends Resettable(sz, p, what, new Cont(_), new Array[Any](_), classOf[Cont]) {
- def companion = PartialMapLight
-
- def runpar = pa.collect(PartialMapLight.fun)
- def runseq = sequentialPartialMap(PartialMapLight.fun, sz)
- def comparisonMap = collection.Map()
-}
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/PartitionLight.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/PartitionLight.scala
deleted file mode 100644
index c0fb0454ec..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/PartitionLight.scala
+++ /dev/null
@@ -1,61 +0,0 @@
-package scala.collection.parallel.benchmarks.parallel_array
-
-
-import scala.collection.parallel.benchmarks._
-
-
-object PartitionLight extends Companion {
- def benchName = "partition-light";
- def apply(sz: Int, parallelism: Int, what: String) = new PartitionLight(sz, parallelism, what)
- override def comparisons = Nil
- override def defaultSize = 20000
-
- val pred = (a: Cont) => check(a.in)
- val predjsr = new extra166y.Ops.Predicate[Cont] {
- def op(a: Cont) = check(a.in)
- }
-
- def check(n: Int) = {
- var res = n
- var i = 1
- while (i < 5) {
- res += n % i
- i += 1
- }
- (res % 2 == 0) && (res % 312 == 0)
- }
-}
-
-class PartitionLight(sz: Int, p: Int, what: String)
-extends Resettable(sz, p, what, new Cont(_), new Array[Any](_), classOf[Cont])
-with HavingResult[Int] {
- def companion = PartitionLight
- runresult = -1
-
- def runpar = runresult = pa.partition(PartitionLight.pred)._1.size
- def runseq = runresult = sequentialPartition(PartitionLight.pred, sz)._1.size
- def comparisonMap = collection.Map()
-}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/PatchHalf.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/PatchHalf.scala
deleted file mode 100644
index e1fc0f9f2a..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/PatchHalf.scala
+++ /dev/null
@@ -1,46 +0,0 @@
-package scala.collection.parallel.benchmarks
-package parallel_array
-
-
-
-
-
-
-
-class PatchHalf(sz: Int, p: Int, what: String)
-extends Resettable[Cont](sz, p, what, (i: Int) => new Cont(i), new Array[Any](_), classOf[Cont])
-with HavingResult[Int] {
- def companion = PatchHalf
- override def repetitionsPerRun = 400
-
- val similar = {
- val p = new collection.parallel.mutable.ParallelArray[Cont](sz)
- for (i <- 0 until sz) p(i) = what match {
- case "seq" => arr(i).asInstanceOf[Cont]
- case "par" => pa(i)
- }
- p.drop(p.size / 2)
- }
-
- def runpar = runresult = pa.patch(size / 2, similar, 0).size
- def runseq = runresult = sequentialPatch(size / 2, similar, 0, size).size
- override def comparisonMap = collection.Map()
-}
-
-object PatchHalf extends Companion {
- def benchName = "patch-half";
- def apply(sz: Int, p: Int, what: String) = new PatchHalf(sz, p, what)
- override def comparisons = List()
- override def defaultSize = 25000
-}
-
-
-
-
-
-
-
-
-
-
-
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/PlusPlus.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/PlusPlus.scala
deleted file mode 100644
index ca500281e3..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/PlusPlus.scala
+++ /dev/null
@@ -1,29 +0,0 @@
-package scala.collection.parallel.benchmarks.parallel_array
-
-
-import scala.collection.parallel.mutable.ParallelArray
-
-
-object PlusPlus extends Companion {
- def benchName = "plusplus";
- def apply(sz: Int, parallelism: Int, what: String) = new PlusPlus(sz, parallelism, what)
- override def comparisons = List()
- override def defaultSize = 50000
-}
-
-class PlusPlus(sz: Int, p: Int, what: String)
-extends Resettable(sz, p, what, new Cont(_), new Array[Any](_), classOf[Cont]) {
- def companion = PlusPlus
-
- val thatarr = new Array[Cont](sz)
- val thatpa = new ParallelArray[Cont](sz)
-
- def runpar = pa ++ thatpa
- def runseq = arr ++ thatarr
- def comparisonMap = collection.Map()
-}
-
-
-
-
-
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ReduceHeavy.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ReduceHeavy.scala
deleted file mode 100644
index 5806dd7831..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ReduceHeavy.scala
+++ /dev/null
@@ -1,22 +0,0 @@
-package scala.collection.parallel.benchmarks.parallel_array
-
-
-
-
-class ReduceHeavy(sz: Int, p: Int, what: String)
-extends Resettable[Cont](sz, p, what, new Cont(_), new Array[Any](_), classOf[Cont]) {
- def companion = ReduceHeavy
- override def repetitionsPerRun = 100
-
- def runseq = sequentialReduce(Cont.opheavy, sz, new Cont(0))
- def runpar = pa.reduce(Cont.opheavy)
- def runjsr = jsrarr.reduce(Cont.reducerheavy, new Cont(0))
- override def comparisonMap = collection.Map("jsr" -> runjsr _)
-}
-
-object ReduceHeavy extends Companion {
- def benchName = "reduce-heavy";
- def apply(sz: Int, p: Int, what: String) = new ReduceHeavy(sz, p, what)
- override def comparisons = List("jsr")
- override def defaultSize = 16
-}
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ReduceLight.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ReduceLight.scala
deleted file mode 100644
index 890cbf5108..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ReduceLight.scala
+++ /dev/null
@@ -1,50 +0,0 @@
-package scala.collection.parallel.benchmarks.parallel_array
-
-
-import scala.collection.parallel.benchmarks._
-import scala.collection.parallel.mutable.ParallelArray
-import extra166y.{ParallelArray => JSR166Array}
-
-
-object ReduceLight extends Companion {
- def benchName = "reduce-light";
- def apply(sz: Int, parallelism: Int, what: String) = new ReduceLight(sz, parallelism, what)
- override def comparisons = List("jsr")
- override def defaultSize = 200000
-}
-
-
-class ReduceLight(sz: Int, p: Int, what: String)
-extends Resettable[Cont](sz, p, what, new Cont(_), new Array[Any](_), classOf[Cont]) {
- def companion = ReduceLight
- override def repetitionsPerRun = 350
- override val runs = 20
-
- def runpar = {
- pa.reduce(Cont.op)
-// updatePar
- }
-
- def runjsr = {
- jsrarr.reduce(Cont.reducer, new Cont(0))
-// updateJsr
- }
-
- def runseq = {
- sequentialReduce(Cont.op, sz, new Cont(0))
-// updateSeq
- }
-
- override def comparisonMap = collection.Map("jsr" -> runjsr _)
-
-}
-
-
-
-
-
-
-
-
-
-
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ReduceList.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ReduceList.scala
deleted file mode 100644
index db4fb3331f..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ReduceList.scala
+++ /dev/null
@@ -1,53 +0,0 @@
-package scala.collection.parallel.benchmarks.parallel_array
-
-
-
-
-object ReduceList extends Companion {
- def benchName = "reduce-list";
- def apply(sz: Int, p: Int, what: String) = new ReduceList(sz, p, what)
- override def comparisons = List("jsr")
- override def defaultSize = 20000
-}
-
-object ListCreator extends (Int => List[Int]) {
- def apply(idx: Int) = {
- val len = 50 + idx % 100
- (for (i <- 0 until len) yield i).toList
- }
-}
-
-object ListOps {
- val redop = (a: List[Int], b: List[Int]) => combineLists(a, b)
- val reducer = new extra166y.Ops.Reducer[List[Int]] {
- def op(a: List[Int], b: List[Int]) = combineLists(a, b)
- }
- def combineLists(a: List[Int], b: List[Int]) = {
- if (a.foldLeft(0)(_ + _) > b.foldLeft(0)(_ + _)) a else b
- }
-}
-
-class ReduceList(sz: Int, p: Int, what: String)
-extends Resettable[List[Int]](sz, p, what, ListCreator, new Array[Any](_), classOf[List[Int]]) {
- def companion = ReduceList
- override def repetitionsPerRun = 10
- override val runs = 15
-
- def runpar = pa.reduce(ListOps.redop)
- def runseq = sequentialReduce(ListOps.redop, sz, List[Int]())
- def runjsr = jsrarr.reduce(ListOps.reducer, List[Int]())
- override def comparisonMap = collection.Map("jsr" -> runjsr _)
-}
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ReduceNew.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ReduceNew.scala
deleted file mode 100644
index c69f64d329..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ReduceNew.scala
+++ /dev/null
@@ -1,30 +0,0 @@
-package scala.collection.parallel.benchmarks.parallel_array
-
-
-
-
-
-/** Tests reduce method using an operator creating an object as a result. */
-class ReduceNew(sz: Int, p: Int, what: String)
-extends Resettable[Cont](sz, p, what, (i: Int) => new Cont(i),
- new Array[Any](_), classOf[Cont]) {
- def companion = ReduceNew
- override def repetitionsPerRun = 200
-
- def runpar = pa.reduce(Cont.opnew)
- def runseq = sequentialReduce(Cont.opnew, sz, new Cont(0))
- def runjsr = jsrarr.reduce(Cont.reducernew, new Cont(0))
- override def comparisonMap = collection.Map("jsr" -> runjsr _)
-}
-
-object ReduceNew extends Companion {
- def benchName = "reduce-new";
- def apply(sz: Int, p: Int, what: String) = new ReduceNew(sz, p, what)
- override def comparisons = List("jsr")
-}
-
-
-
-
-
-
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ReducePrime.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ReducePrime.scala
deleted file mode 100644
index b6ff69e37b..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ReducePrime.scala
+++ /dev/null
@@ -1,65 +0,0 @@
-package scala.collection.parallel.benchmarks.parallel_array
-
-
-import scala.collection.parallel.benchmarks._
-
-
-object IntWrapCreator extends (Int => IntWrap) {
- def apply(idx: Int) = new IntWrap(shiftaround(idx))
- def shiftaround(idx: Int) = idx * 40192 + 717
-}
-
-case class IntWrap(val num: Int)
-
-object IntOps {
- val op = (a: IntWrap, b: IntWrap) => primereduce(a, b)
- val reducer = new extra166y.Ops.Reducer[IntWrap] {
- def op(a: IntWrap, b: IntWrap) = primereduce(a, b)
- }
-
- def primereduce(a: IntWrap, b: IntWrap) = {
- val check = (checkPrime(a.num), checkPrime(b.num))
- if (a.num > b.num) a else b
- }
-
- def checkPrime(n: Int) = {
- var isPrime = true
- var i = 2
- val until = scala.math.sqrt(n).toInt + 1
- while (i < until) {
- if (n % i == 0) isPrime = false
- i += 1
- }
- isPrime
- }
-}
-
-class ReducePrime(sz: Int, p: Int, what: String)
-extends Resettable[IntWrap](sz, p, what, IntWrapCreator, new Array[Any](_), classOf[IntWrap])
-with HavingResult[IntWrap] {
- def companion = ReducePrime
-
- def runseq = runresult = sequentialReduce(IntOps.op, sz, new IntWrap(0))
- def runpar = runresult = pa.reduce(IntOps.op)
- def runjsr = runresult = jsrarr.reduce(IntOps.reducer, new IntWrap(0))
- override def comparisonMap = collection.Map("jsr" -> runjsr _)
-}
-
-object ReducePrime extends Companion {
- def benchName = "reduce-prime";
- def apply(sz: Int, p: Int, what: String) = new ReducePrime(sz, p, what)
- override def comparisons = List("jsr")
- override def defaultSize = 100
-}
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/RemoveDuplicates.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/RemoveDuplicates.scala
deleted file mode 100644
index a66d2fb1f8..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/RemoveDuplicates.scala
+++ /dev/null
@@ -1,44 +0,0 @@
-package scala.collection.parallel.benchmarks
-package parallel_array
-
-
-
-
-
-
-
-class RemoveDuplicates(sz: Int, p: Int, what: String)
-extends Resettable[Cont](sz, p, what, (i: Int) => new Cont(i), new Array[Any](_), classOf[Cont])
-with HavingResult[Int] {
- def companion = RemoveDuplicates
- override def repetitionsPerRun = 400
-
- def runpar = runresult = pa.distinct.size
- def runseq = runresult = sequentialRemoveDuplicates(size).size
- override def comparisonMap = collection.Map()
-}
-
-object RemoveDuplicates extends Companion {
- def benchName = "remove-duplicates";
- def apply(sz: Int, p: Int, what: String) = new RemoveDuplicates(sz, p, what)
- override def comparisons = List()
- override def defaultSize = 10000
-}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/Resettable.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/Resettable.scala
deleted file mode 100644
index 83168ca979..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/Resettable.scala
+++ /dev/null
@@ -1,127 +0,0 @@
-package scala.collection.parallel.benchmarks.parallel_array
-
-
-import scala.collection.parallel.benchmarks._
-import scala.collection.parallel.mutable.ParallelArray
-import extra166y.{ParallelArray => JSR166Array}
-
-
-class Cont(val in: Int) {
- var num = in
- override def toString = in.toString
-}
-
-object Cont {
- val pred = (a: Cont) => a.in > 100
-
- val predjsr = new extra166y.Ops.Predicate[Cont] {
- def op(a: Cont) = a.in > 100
- }
-
- val op = (a: Cont, b: Cont) => {
- b.num = a.in + b.in
- b
- }
-
- val opnew = (a: Cont, b: Cont) => new Cont(a.in + b.in)
-
- val opheavy = (a: Cont, b: Cont) => {
- heavyComputation(a, b)
- }
-
- val reducer = new extra166y.Ops.Reducer[Cont] {
- def op(a: Cont, b: Cont) = {
- b.num = a.in + b.in
- b
- }
- }
-
- val reducernew = new extra166y.Ops.Reducer[Cont] {
- def op(a: Cont, b: Cont) = new Cont(a.in + b.in)
- }
-
- val reducerheavy = new extra166y.Ops.Reducer[Cont] {
- def op(a: Cont, b: Cont) = heavyComputation(a, b)
- }
-
- def heavyComputation(a: Cont, b: Cont) = {
- val f = a.in
- val s = b.in
- var i = 0
- var res = f * s
- while (i < 50000) {
- if ((i + f) % 3 == 0) res += s
- else res -= f
- i += 1
- }
- b.num = res
- b
- }
-}
-
-abstract class Resettable[T](val size: Int, val parallelism: Int, val runWhat: String,
- elemcreator: Int => T, arrcreator: Int => Array[Any], cls: Class[T])
-extends Bench with SequentialOps[T] {
- val forkjoinpool = new scala.concurrent.forkjoin.ForkJoinPool(parallelism)
- forkjoinpool.setMaximumPoolSize(parallelism)
- val papool = new jsr166y.ForkJoinPool(parallelism)
- papool.setMaximumPoolSize(parallelism)
-
- var pa: ParallelArray[T] = null
- var jsrarr: JSR166Array[T] = null
- reset
-
- def reset = runWhat match {
- case "seq" =>
- arr = arrcreator(size)
- for (i <- 0 until size) arr(i) = elemcreator(i)
- case "par" =>
- pa = new ParallelArray[T](size)
- pa.environment = forkjoinpool
- for (i <- 0 until size) pa(i) = elemcreator(i)
- case "jsr" =>
- jsrarr = JSR166Array.create(size, cls, papool)
- for (i <- 0 until size) jsrarr.set(i, elemcreator(i))
- case _ => throw new IllegalArgumentException("Unknown type: " + runWhat)
- }
-
- var updateCounter = 0
- def incUpdateCounter {
- updateCounter += 1
- if (updateCounter > size) updateCounter = 0
- }
-
- def updateSeq {
- val tmp = arr(updateCounter)
- arr(updateCounter) = arr(size - updateCounter - 1)
- arr(size - updateCounter - 1) = tmp
- incUpdateCounter
- }
-
- def updatePar {
- val tmp = pa(updateCounter)
- pa(updateCounter) = pa(size - updateCounter - 1)
- pa(size - updateCounter - 1) = tmp
- incUpdateCounter
- }
-
- def updateJsr {
- val tmp = jsrarr.get(updateCounter)
- jsrarr.set(updateCounter, jsrarr.get(size - updateCounter - 1))
- jsrarr.set(size - updateCounter - 1, tmp)
- incUpdateCounter
- }
-
- override def printResults {
- println(" --- Fork join pool state --- ")
- println("Parallelism: " + forkjoinpool.getParallelism)
- println("Active threads: " + forkjoinpool.getActiveThreadCount)
- println("Work stealings: " + forkjoinpool.getStealCount)
- }
-}
-
-
-
-
-
-
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/Reverse.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/Reverse.scala
deleted file mode 100644
index 0d00e60731..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/Reverse.scala
+++ /dev/null
@@ -1,35 +0,0 @@
-package scala.collection.parallel.benchmarks
-package parallel_array
-
-
-
-
-
-
-
-class Reverse(sz: Int, p: Int, what: String)
-extends Resettable[Cont](sz, p, what, (i: Int) => new Cont(i), new Array[Any](_), classOf[Cont]) {
- def companion = Reverse
- override def repetitionsPerRun = 400
-
- def runpar = pa.reverse
- def runseq = sequentialReverse(sz)
- override def comparisonMap = collection.Map()
-}
-
-object Reverse extends Companion {
- def benchName = "reverse";
- def apply(sz: Int, p: Int, what: String) = new Reverse(sz, p, what)
- override def comparisons = List()
-}
-
-
-
-
-
-
-
-
-
-
-
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ReverseMap.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ReverseMap.scala
deleted file mode 100644
index c9f4a02baa..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/ReverseMap.scala
+++ /dev/null
@@ -1,48 +0,0 @@
-package scala.collection.parallel.benchmarks
-package parallel_array
-
-
-
-
-
-
-
-class ReverseMap(sz: Int, p: Int, what: String)
-extends Resettable[Cont](sz, p, what, (i: Int) => new Cont(i), new Array[Any](_), classOf[Cont]) {
- def companion = ReverseMap
- override def repetitionsPerRun = 100
-
- def runpar = pa.reverseMap(compl)
- def runseq = sequentialReverseMap(compl, sz)
- override def comparisonMap = collection.Map()
-
- val id = (c: Cont) => c
- val compl = (c: Cont) => {
- var in = c.in
- var i = 2
- while (i < 6) {
- if (in % i == 0) in = 0
- i += 1
- }
- if (in < 0) null
- else c
- }
-}
-
-object ReverseMap extends Companion {
- def benchName = "reverse-map";
- def apply(sz: Int, p: Int, what: String) = new ReverseMap(sz, p, what)
- override def comparisons = List()
- override def defaultSize = 100000
-}
-
-
-
-
-
-
-
-
-
-
-
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/SameElementsLong.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/SameElementsLong.scala
deleted file mode 100644
index 54f9519a1b..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/SameElementsLong.scala
+++ /dev/null
@@ -1,45 +0,0 @@
-package scala.collection.parallel.benchmarks
-package parallel_array
-
-
-
-
-
-
-
-class SameElementsLong(sz: Int, p: Int, what: String)
-extends Resettable[Cont](sz, p, what, (i: Int) => new Cont(i), new Array[Any](_), classOf[Cont])
-with HavingResult[Boolean] {
- def companion = SameElementsLong
- override def repetitionsPerRun = 400
-
- val same = {
- val p = new collection.parallel.mutable.ParallelArray[Cont](sz)
- for (i <- 0 until sz) p(i) = what match {
- case "seq" => arr(i).asInstanceOf[Cont]
- case "par" => pa(i)
- }
- p
- }
-
- def runpar = runresult = pa.sameElements(same)
- def runseq = runresult = sequentialSameElements(same, sz)
- override def comparisonMap = collection.Map()
-}
-
-object SameElementsLong extends Companion {
- def benchName = "same-elements-long";
- def apply(sz: Int, p: Int, what: String) = new SameElementsLong(sz, p, what)
- override def comparisons = List()
-}
-
-
-
-
-
-
-
-
-
-
-
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/SegmentLength.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/SegmentLength.scala
deleted file mode 100644
index 1f9041c373..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/SegmentLength.scala
+++ /dev/null
@@ -1,42 +0,0 @@
-package scala.collection.parallel.benchmarks
-package parallel_array
-
-
-
-
-
-
-
-class SegmentLength(sz: Int, p: Int, what: String)
-extends Resettable[Cont](sz, p, what, (i: Int) => new Cont(i), new Array[Any](_), classOf[Cont])
-with HavingResult[Int] {
- def companion = SegmentLength
- override def repetitionsPerRun = 400
-
- def runpar = runresult = pa.segmentLength(SegmentLength.pred2, 0)
- def runseq = runresult = sequentialSegmentLength(SegmentLength.pred2, 0, sz)
- override def comparisonMap = collection.Map()
-}
-
-object SegmentLength extends Companion {
- def benchName = "segment-length";
- def apply(sz: Int, p: Int, what: String) = new SegmentLength(sz, p, what)
- override def comparisons = List()
-
- val pred = (c: Cont) => {
- var in = c.in
- var i = 2
- while (i < 5) {
- if (in % i == 0) in = 0
- i += 1
- }
- c.in >= 0 || in == 0
- }
- val pred2 = (c: Cont) => c.in >= 0
-}
-
-
-
-
-
-
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/SequentialOps.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/SequentialOps.scala
deleted file mode 100644
index 36e1d22d7e..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/SequentialOps.scala
+++ /dev/null
@@ -1,547 +0,0 @@
-package scala.collection.parallel.benchmarks.parallel_array
-
-
-
-trait SequentialOps[T] {
-
- var arr: Array[Any] = null
-
- def sequentialReduce(op: (T, T) => T, sz: Int, init: T) = {
- var i = 0
- val until = sz
- var sum = init
- while (i < until) {
- sum = op(sum, arr(i).asInstanceOf[T])
- i += 1
- }
- sum
- }
-
- def sequentialCount(pred: T => Boolean, sz: Int) = {
- var i = 0
- val until = sz
- var sum = 0
- while (i < until) {
- if (pred(arr(i).asInstanceOf[T])) sum += 1
- i += 1
- }
- sum
- }
-
- def sequentialForeach[U](f: T => U, sz: Int) = {
- var i = 0
- val until = sz
- var sum = 0
- while (i < until) {
- f(arr(i).asInstanceOf[T])
- i += 1
- }
- }
-
- def sequentialSum[U >: T](sz: Int)(implicit num: Numeric[U]) = {
- var i = 0
- val until = sz
- var sum = num.zero
- while (i < until) {
- sum = num.plus(sum, arr(i).asInstanceOf[T])
- i += 1
- }
- sum
- }
-
- def sequentialMin[U >: T](sz: Int)(implicit ord: Ordering[U]) = {
- var i = 1
- val until = sz
- var min = arr(0).asInstanceOf[U]
- while (i < until) {
- val elem = arr(i).asInstanceOf[U]
- if (ord.lt(elem, min)) min = elem
- i += 1
- }
- min
- }
-
- def sequentialForall(pred: T => Boolean, sz: Int) = {
- var i = 0
- val until = sz
- var all = true
- while (i < until) {
- if (pred(arr(i).asInstanceOf[T])) i += 1
- else {
- all = false
- i = until
- }
- }
- all
- }
-
- def sequentialExists(pred: T => Boolean, sz: Int) = {
- var i = 0
- val until = sz
- var some = false
- while (i < until) {
- if (pred(arr(i).asInstanceOf[T])) {
- some = true
- i = until
- } else i += 1
- }
- some
- }
-
- def sequentialFind(pred: T => Boolean, sz: Int) = {
- var i = 0
- val until = sz
- var opt: Option[T] = None
- while (i < until) {
- if (pred(arr(i).asInstanceOf[T])) {
- opt = Some(arr(i).asInstanceOf[T])
- i = until
- } else i += 1
- }
- opt
- }
-
- def sequentialFilter(pred: T => Boolean, sz: Int) = {
- var i = 0
- val buff = new collection.mutable.ArrayBuffer[T]
- while (i < sz) {
- val elem = arr(i).asInstanceOf[T]
- if (pred(elem)) buff += elem
- i += 1
- }
- val resarr = new Array[Any](buff.size)
- buff.copyToArray(resarr, 0)
- resarr
- }
-
- def sequentialPartition(pred: T => Boolean, sz: Int) = {
- var i = 0
- val btrue = new collection.mutable.ArrayBuffer[T]
- val bfalse = new collection.mutable.ArrayBuffer[T]
- while (i < sz) {
- val elem = arr(i).asInstanceOf[T]
- if (pred(elem)) btrue += elem
- else bfalse += elem
- i += 1
- }
- val restrue = new Array[Any](btrue.size)
- val resfalse = new Array[Any](bfalse.size)
- btrue.copyToArray(restrue, 0)
- bfalse.copyToArray(resfalse, 0)
- (restrue, resfalse)
- }
-
- def sequentialTakeOpt(n: Int, sz: Int) = {
- var i = 0
- val until = if (n < sz) n else sz
- val res = new Array[Any](until)
- Array.copy(arr, 0, res, 0, until)
-// while (i < until) {
-// res(i) = arr(i)
-// i += 1
-// }
- res
- }
-
- def sequentialTake(n: Int, sz: Int) = {
- var i = 0
- val b = new collection.mutable.ArrayBuffer[T]
- val until = if (n < sz) n else sz
- b.sizeHint(until)
- while (i < until) {
- val elem = arr(i).asInstanceOf[T]
- b += elem
- i += 1
- }
- val res = new Array[Any](n)
- b.copyToArray(res, 0)
- res
- }
-
- def sequentialDrop(n: Int, sz: Int) = {
- var i = n
- val b = new collection.mutable.ArrayBuffer[T]
- b.sizeHint(sz - n)
- while (i < sz) {
- val elem = arr(i).asInstanceOf[T]
- b += elem
- i += 1
- }
- val res = new Array[Any](n)
- b.copyToArray(res, 0)
- res
- }
-
- def sequentialSlice(from: Int, until: Int, sz: Int) = {
- var i = from
- val b = new collection.mutable.ArrayBuffer[T]
- b.sizeHint(until - from)
- while (i < until) {
- val elem = arr(i).asInstanceOf[T]
- b += elem
- i += 1
- }
- val res = new Array[Any](until - from)
- b.copyToArray(res, 0)
- res
- }
-
- def sequentialSplitAtOpt(n: Int, sz: Int) = {
- var i = 0
- val before = new Array[Any](n)
- val after = new Array[Any](sz - n)
- Array.copy(arr, 0, before, 0, n)
- Array.copy(arr, n, after, 0, sz - n)
- (before, after)
- }
-
- def sequentialSplitAt(n: Int, sz: Int) = {
- var i = 0
- val before = new collection.mutable.ArrayBuffer[T]
- before.sizeHint(n)
- val after = new collection.mutable.ArrayBuffer[T]
- after.sizeHint(sz - n)
- while (i < sz) {
- if (i < n) before += arr(i).asInstanceOf[T]
- else after += arr(i).asInstanceOf[T]
- i += 1
- }
- val resbef = new Array[Any](n)
- val resaft = new Array[Any](sz - n)
- before.copyToArray(resbef, 0)
- after.copyToArray(resaft, 0)
- (resbef, resaft)
- }
-
- def sequentialTakeWhile(p: T => Boolean, sz: Int) = {
- var i = 0
- val b = new collection.mutable.ArrayBuffer[T]
- while (i < sz) {
- val elem = arr(i).asInstanceOf[T]
- if (p(elem)) {
- b += elem
- i += 1
- } else i = sz
- }
- val res = new Array[Any](sz)
- b.copyToArray(res, 0)
- res
- }
-
- def sequentialSpan(p: T => Boolean, sz: Int) = {
- val bpref = new collection.mutable.ArrayBuffer[T]
- val brest = new collection.mutable.ArrayBuffer[T]
- var i = 0
- var prefix = true
- var pos = sz
- while (i < sz) {
- val elem = arr(i).asInstanceOf[T]
- if (prefix) {
- if (p(elem)) bpref += elem
- else {
- pos = i
- prefix = false
- brest += elem
- }
- } else brest += elem
- i += 1
- }
- val respref = new Array[Any](pos)
- val resrest = new Array[Any](sz - pos)
- bpref.copyToArray(respref, 0)
- brest.copyToArray(resrest, 0)
- (respref, resrest)
- }
-
- def sequentialMap(f: T => T, sz: Int) = {
- val b = new collection.mutable.ArrayBuffer[T](sz)
-
- var i = 0
- while (i < sz) {
- b += f(arr(i).asInstanceOf[T])
- i += 1
- }
-
- val res = new Array[Any](sz)
- b.copyToArray(res, 0)
- res
- }
-
- def sequentialMapOpt(f: T => T, sz: Int) = {
- val res = new Array[Any](sz)
-
- var i = 0
- while (i < sz) {
- res(i) = f(arr(i).asInstanceOf[T])
- i += 1
- }
-
- res
- }
-
- def sequentialPartialMap(f: PartialFunction[T, T], sz: Int) = {
- val b = new collection.mutable.ArrayBuffer[T](sz)
-
- var i = 0
- while (i < sz) {
- val elem = arr(i).asInstanceOf[T]
- if (f.isDefinedAt(elem)) b += f(elem)
- i += 1
- }
-
- val res = new Array[Any](b.size)
- b.copyToArray(res, 0)
- res
- }
-
- def sequentialFlatMap(f: T => Traversable[Int], sz: Int) = {
- val b = new collection.mutable.ArrayBuffer[Int](sz)
-
- var i = 0
- while (i < sz) {
- val ts = f(arr(i).asInstanceOf[T])
- for (elem <- ts) b += elem
- i += 1
- }
-
- val res = new Array[Any](b.size)
- b.copyToArray(res, 0)
- res
- }
-
- def sequentialCopyToArray(destarr: Array[Any], pos: Int, sz: Int) = {
- Array.copy(arr, 0, destarr, pos, sz)
- }
-
- def sequentialSegmentLength(pred: T => Boolean, from: Int, sz: Int) = {
- var i = from
- var cnt = 0
-
- while (i < sz) {
- if (pred(arr(i).asInstanceOf[T])) {
- cnt += 1
- i += 1
- } else i = sz
- }
-
- cnt
- }
-
- def sequentialIndexWhere(pred: T => Boolean, from: Int, sz: Int) = {
- var i = from
- var pos = -1
-
- while (i < sz) {
- if (pred(arr(i).asInstanceOf[T])) {
- pos = i
- i = sz
- } else i += 1
- }
-
- pos
- }
-
- def sequentialLastIndexWhere(pred: T => Boolean, end: Int, sz: Int) = {
- var i = end
- var pos = -1
-
- while (i >= 0) {
- if (pred(arr(i).asInstanceOf[T])) {
- pos = i
- i = -1
- } else i -= 1
- }
-
- pos
- }
-
- def sequentialReverse(sz: Int) = {
- val res = new Array[Any](sz)
-
- var i = sz - 1
- var j = 0
- while (i >= 0) {
- res(j) = arr(i)
- i -= 1
- j += 1
- }
- res
- }
-
- def sequentialReverseMap(f: T => T, sz: Int) = {
- val res = new Array[Any](sz)
-
- var i = sz - 1
- var j = 0
- while (i >= 0) {
- res(j) = f(arr(i).asInstanceOf[T])
- i -= 1
- j += 1
- }
- res
- }
-
- def sequentialSameElements(sq: Seq[T], sz: Int): Boolean = {
- if (sz != sq.length) false
- else {
- var i = 0
- val jt = sq.iterator
- while (i < sz) {
- if (arr(i) == jt.next) i += 1
- else i = sz + 1
- }
- if (i == sz) true
- else false
- }
- }
-
- def sequentialCorresponds(sq: Seq[T], f: (T, T) => Boolean, sz: Int): Boolean = {
- if (sz != sq.length) false
- else {
- var i = 0
- val jt = sq.iterator
- while (i < sz) {
- if (f(arr(i).asInstanceOf[T], jt.next)) i += 1
- else i = sz + 1
- }
- if (i == sz) true
- else false
- }
- }
-
- def sequentialDiff(sq: Seq[T], sz: Int) = {
- val occmap = occurences(sq)
- val b = new collection.mutable.ArrayBuffer[T]
-
- var i = 0
- while (i < sz) {
- val elem = arr(i).asInstanceOf[T]
- if (occmap(elem) == 0) b += elem
- else occmap(elem) -= 1
- i += 1
- }
-
- val res = new Array[Any](b.size)
- b.copyToArray(res, 0)
- res
- }
-
- def sequentialIntersect(sq: Seq[T], sz: Int) = {
- val occmap = occurences(sq)
- val b = new collection.mutable.ArrayBuffer[T]
-
- var i = 0
- while (i < sz) {
- val elem = arr(i).asInstanceOf[T]
- val num = occmap(elem)
- if (num > 0) {
- b += elem
- occmap(elem) = num - 1
- }
- i += 1
- }
-
- val res = new Array[Any](b.size)
- b.copyToArray(res, 0)
- res
- }
-
- private def occurences(sq: Seq[T]) = {
- val occmap = new collection.mutable.HashMap[T, Int] { override def default(k: T) = 0 }
- for (elem <- sq.iterator) occmap(elem) += 1
- occmap
- }
-
- def sequentialRemoveDuplicates(sz: Int) = {
- val occ = new collection.mutable.HashSet[T]
- val b = new collection.mutable.ArrayBuffer[T]
-
- var i = 0
- while (i < sz) {
- val elem = arr(i).asInstanceOf[T]
- if (!occ.contains(elem)) {
- b += elem
- occ.add(elem)
- }
- i += 1
- }
-
- val res = new Array[Any](b.size)
- b.copyToArray(res, 0)
- res
- }
-
- def sequentialPatch(from: Int, p: Seq[T], replaced: Int, sz: Int) = {
- val b = new collection.mutable.ArrayBuffer[T]
- b.sizeHint(from + (sz - from - replaced) + p.size)
-
- var i = 0
- while (i < from) {
- b += arr(i).asInstanceOf[T]
- i += 1
- }
-
- val jt = p.iterator
- while (jt.hasNext) b += jt.next
-
- val skipto = from + replaced
- while (i < from + replaced) i += 1
-
- while (i < sz) {
- b += arr(i).asInstanceOf[T]
- i += 1
- }
-
- val res = new Array[Any](b.size)
- b.copyToArray(res, 0)
- res
- }
-
- def sequentialPadTo(tosize: Int, elem: T, sz: Int) = {
- val b = new collection.mutable.ArrayBuffer[T]
- b.sizeHint(tosize)
-
- var i = 0
- while (i < sz) {
- b += arr(i).asInstanceOf[T]
- i += 1
- }
-
- while (i < tosize) {
- b += elem
- i += 1
- }
-
- val res = new Array[Any](b.size)
- b.copyToArray(res, 0)
- res
- }
-
-}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/SliceFew.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/SliceFew.scala
deleted file mode 100644
index c22ae47400..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/SliceFew.scala
+++ /dev/null
@@ -1,47 +0,0 @@
-package scala.collection.parallel.benchmarks.parallel_array
-
-
-import scala.collection.parallel.benchmarks._
-
-
-object SliceFew extends Companion {
- def benchName = "slice-few";
- def apply(sz: Int, parallelism: Int, what: String) = new SliceFew(sz, parallelism, what)
- override def comparisons = Nil
- override def defaultSize = 50000
-}
-
-class SliceFew(sz: Int, p: Int, what: String)
-extends Resettable(sz, p, what, new Cont(_), new Array[Any](_), classOf[Cont])
-with HavingResult[Int] {
- def companion = SliceFew
- override def repetitionsPerRun = 200
- runresult = -1
-
- def runpar = runresult = pa.slice(5, 25).size
- def runseq = runresult = sequentialSlice(5, 25, sz).size
- def comparisonMap = collection.Map()
-}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/SliceMany.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/SliceMany.scala
deleted file mode 100644
index 37ad666d93..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/SliceMany.scala
+++ /dev/null
@@ -1,47 +0,0 @@
-package scala.collection.parallel.benchmarks.parallel_array
-
-
-import scala.collection.parallel.benchmarks._
-
-
-object SliceMany extends Companion {
- def benchName = "slice-many";
- def apply(sz: Int, parallelism: Int, what: String) = new SliceMany(sz, parallelism, what)
- override def comparisons = Nil
- override def defaultSize = 50000
-}
-
-class SliceMany(sz: Int, p: Int, what: String)
-extends Resettable(sz, p, what, new Cont(_), new Array[Any](_), classOf[Cont])
-with HavingResult[Int] {
- def companion = SliceMany
- override def repetitionsPerRun = 200
- runresult = -1
-
- def runpar = runresult = pa.slice(pa.size / 4, pa.size * 3 / 4).size
- def runseq = runresult = sequentialSlice(sz / 4, sz * 3 / 4, sz).size
- def comparisonMap = collection.Map()
-}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/SliceMedium.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/SliceMedium.scala
deleted file mode 100644
index 7da94a4a20..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/SliceMedium.scala
+++ /dev/null
@@ -1,47 +0,0 @@
-package scala.collection.parallel.benchmarks.parallel_array
-
-
-import scala.collection.parallel.benchmarks._
-
-
-object SliceMedium extends Companion {
- def benchName = "slice-medium";
- def apply(sz: Int, parallelism: Int, what: String) = new SliceMedium(sz, parallelism, what)
- override def comparisons = Nil
- override def defaultSize = 50000
-}
-
-class SliceMedium(sz: Int, p: Int, what: String)
-extends Resettable(sz, p, what, new Cont(_), new Array[Any](_), classOf[Cont])
-with HavingResult[Int] {
- def companion = SliceMedium
- override def repetitionsPerRun = 200
- runresult = -1
-
- def runpar = runresult = pa.slice(pa.size / 7, pa.size * 4 / 7).size
- def runseq = runresult = sequentialSlice(sz / 7, sz * 4 / 7, sz).size
- def comparisonMap = collection.Map()
-}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/SpanLight.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/SpanLight.scala
deleted file mode 100644
index 4d8b128e1f..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/SpanLight.scala
+++ /dev/null
@@ -1,62 +0,0 @@
-package scala.collection.parallel.benchmarks.parallel_array
-
-
-import scala.collection.parallel.benchmarks._
-
-
-object SpanLight extends Companion {
- def benchName = "span-light";
- def apply(sz: Int, parallelism: Int, what: String) = new SpanLight(sz, parallelism, what)
- override def comparisons = Nil
- override def defaultSize = 20000
-
- val pred = (a: Cont) => check(a.in)
- val predjsr = new extra166y.Ops.Predicate[Cont] {
- def op(a: Cont) = check(a.in)
- }
-
- def check(n: Int) = {
- var res = n
- var i = 1
- while (i < 10) {
- res += n % i
- i += 1
- }
- if (n != 10000) res % 2 == 0 || n != 10000
- else false
- }
-}
-
-class SpanLight(sz: Int, p: Int, what: String)
-extends Resettable(sz, p, what, new Cont(_), new Array[Any](_), classOf[Cont])
-with HavingResult[Int] {
- def companion = SpanLight
- runresult = -1
-
- def runpar = runresult = pa.span(SpanLight.pred)._1.size
- def runseq = runresult = sequentialSpan(SpanLight.pred, sz)._1.size
- def comparisonMap = collection.Map()
-}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/SplitHalf.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/SplitHalf.scala
deleted file mode 100644
index d671e56c3d..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/SplitHalf.scala
+++ /dev/null
@@ -1,47 +0,0 @@
-package scala.collection.parallel.benchmarks.parallel_array
-
-
-import scala.collection.parallel.benchmarks._
-
-
-object SplitHalf extends Companion {
- def benchName = "split-half";
- def apply(sz: Int, parallelism: Int, what: String) = new SplitHalf(sz, parallelism, what)
- override def comparisons = Nil
- override def defaultSize = 50000
-}
-
-class SplitHalf(sz: Int, p: Int, what: String)
-extends Resettable(sz, p, what, new Cont(_), new Array[Any](_), classOf[Cont])
-with HavingResult[Int] {
- def companion = SplitHalf
- override def repetitionsPerRun = 300
- runresult = -1
-
- def runpar = runresult = pa.splitAt(pa.size / 2)._1.size
- def runseq = runresult = sequentialSplitAtOpt(sz / 2, sz)._1.size
- def comparisonMap = collection.Map()
-}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/SumLight.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/SumLight.scala
deleted file mode 100644
index 67ac1c6478..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/SumLight.scala
+++ /dev/null
@@ -1,28 +0,0 @@
-package scala.collection.parallel.benchmarks.parallel_array
-
-
-
-
-
-/** Tests reduce method using an operator creating an object as a result. */
-class SumLight(sz: Int, p: Int, what: String)
-extends Resettable[Int](sz, p, what, (i: Int) => i, new Array[Any](_), classOf[Int]) {
- def companion = SumLight
- override def repetitionsPerRun = 500
-
- def runpar = pa.sum
- def runseq = sequentialSum(sz)
- override def comparisonMap = collection.Map()
-}
-
-object SumLight extends Companion {
- def benchName = "sum-light";
- def apply(sz: Int, p: Int, what: String) = new SumLight(sz, p, what)
- override def comparisons = List()
-}
-
-
-
-
-
-
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/TakeMany.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/TakeMany.scala
deleted file mode 100644
index 2f735c1e45..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/TakeMany.scala
+++ /dev/null
@@ -1,47 +0,0 @@
-package scala.collection.parallel.benchmarks.parallel_array
-
-
-import scala.collection.parallel.benchmarks._
-
-
-object TakeMany extends Companion {
- def benchName = "take-many";
- def apply(sz: Int, parallelism: Int, what: String) = new TakeMany(sz, parallelism, what)
- override def comparisons = Nil
- override def defaultSize = 250000
-}
-
-class TakeMany(sz: Int, p: Int, what: String)
-extends Resettable(sz, p, what, new Cont(_), new Array[Any](_), classOf[Cont])
-with HavingResult[Int] {
- def companion = TakeMany
- override def repetitionsPerRun = 400
- runresult = -1
-
- def runpar = runresult = pa.take(pa.size / 2).size
- def runseq = runresult = sequentialTake(sz / 2, sz).size
- def comparisonMap = collection.Map()
-}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/TakeWhileLight.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/TakeWhileLight.scala
deleted file mode 100644
index 255cb695d7..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/TakeWhileLight.scala
+++ /dev/null
@@ -1,61 +0,0 @@
-package scala.collection.parallel.benchmarks.parallel_array
-
-
-import scala.collection.parallel.benchmarks._
-
-
-object TakeWhileLight extends Companion {
- def benchName = "takewhile-light";
- def apply(sz: Int, parallelism: Int, what: String) = new TakeWhileLight(sz, parallelism, what)
- override def comparisons = Nil
- override def defaultSize = 10000
-
- val pred = (a: Cont) => check(a.in)
- val predjsr = new extra166y.Ops.Predicate[Cont] {
- def op(a: Cont) = check(a.in)
- }
-
- def check(n: Int) = {
- var res = n
- var i = 1
- while (i < 10) {
- res += n % i
- i += 1
- }
- res % 2 == 0 || n > 0
- }
-}
-
-class TakeWhileLight(sz: Int, p: Int, what: String)
-extends Resettable(sz, p, what, new Cont(_), new Array[Any](_), classOf[Cont])
-with HavingResult[Int] {
- def companion = TakeWhileLight
- runresult = -1
-
- def runpar = runresult = pa.takeWhile(TakeWhileLight.pred).size
- def runseq = runresult = sequentialTakeWhile(TakeWhileLight.pred, sz).size
- def comparisonMap = collection.Map()
-}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_range/RangeBenches.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_range/RangeBenches.scala
deleted file mode 100644
index ccc0568b2b..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_range/RangeBenches.scala
+++ /dev/null
@@ -1,208 +0,0 @@
-package scala.collection.parallel.benchmarks.parallel_range
-
-
-
-
-
-import scala.collection.parallel.benchmarks.generic._
-import scala.collection.parallel.immutable.ParallelRange
-
-
-
-
-
-
-object RangeBenches extends StandardParallelIterableBench[Int, ParallelRange] with NotBenchmark {
-
- def nameOfCollection = "ParallelRange"
- def operators = new IntOperators {}
- def comparisonMap = collection.Map()
- val forkJoinPool = new scala.concurrent.forkjoin.ForkJoinPool
- def createSequential(sz: Int, p: Int) = new collection.immutable.Range(0, sz, 1)
- def createParallel(sz: Int, p: Int) = {
- val pr = new collection.parallel.immutable.ParallelRange(0, sz, 1, false)
- forkJoinPool.setParallelism(p)
- pr.environment = forkJoinPool
- pr
- }
-
- object MapLight extends IterableBenchCompanion {
- override def defaultSize = 20000
- def benchName = "map-light";
- def apply(sz: Int, p: Int, w: String) = new MapLight(sz, p, w)
- }
-
- class MapLight(val size: Int, val parallelism: Int, val runWhat: String)
- extends IterableBench with StandardParallelIterableBench[Int, ParallelRange] {
- def calc(n: Int) = n % 2 + 1
-
- def comparisonMap = collection.Map()
- def runseq = for (n <- this.seqcoll) yield calc(n)
- def runpar = for (n <- this.parcoll) yield calc(n)
- def companion = MapLight
- }
-
- object MapMedium extends IterableBenchCompanion {
- override def defaultSize = 5000
- def benchName = "map-medium";
- def apply(sz: Int, p: Int, w: String) = new MapMedium(sz, p, w)
- }
-
- class MapMedium(val size: Int, val parallelism: Int, val runWhat: String)
- extends IterableBench with StandardParallelIterableBench[Int, ParallelRange] {
- def calc(n: Int) = {
- var i = 0
- var sum = n
- while (i < 40) {
- i += 1
- sum += n % i
- }
- sum
- }
-
- def comparisonMap = collection.Map()
- def runseq = for (n <- this.seqcoll) yield calc(n)
- def runpar = for (n <- this.parcoll) yield calc(n)
- def companion = MapMedium
- }
-
- object ForeachModify extends IterableBenchCompanion {
- override def defaultSize = 150000
- def benchName = "foreach-modify";
- def apply(sz: Int, p: Int, w: String) = new ForeachModify(sz, p, w)
- }
-
- class ForeachModify(val size: Int, val parallelism: Int, val runWhat: String)
- extends IterableBench with StandardParallelIterableBench[Int, ParallelRange] {
- val array = new Array[Int](size)
- def modify(n: Int) = array(n) += 1
-
- def comparisonMap = collection.Map()
- def runseq = for (n <- this.seqcoll) modify(n)
- def runpar = for (n <- this.parcoll) modify(n)
- def companion = ForeachModify
- }
-
- object ForeachModifyMedium extends IterableBenchCompanion {
- override def defaultSize = 20000
- def benchName = "foreach-modify-medium";
- def apply(sz: Int, p: Int, w: String) = new ForeachModifyMedium(sz, p, w)
- }
-
- class ForeachModifyMedium(val size: Int, val parallelism: Int, val runWhat: String)
- extends IterableBench with StandardParallelIterableBench[Int, ParallelRange] {
- val array = new Array[Int](size)
- def modify(n: Int) = array(n) = {
- var i = 0
- var sum = 0
- while (i < 15) {
- sum += i % 3
- i += i + 1
- }
- sum
- }
-
- def comparisonMap = collection.Map()
- def runseq = for (n <- this.seqcoll) modify(n)
- def runpar = for (n <- this.parcoll) modify(n)
- def companion = ForeachModifyMedium
- }
-
- object ForeachModifyHeavy extends IterableBenchCompanion {
- override def defaultSize = 1000
- def benchName = "foreach-modify-heavy";
- def apply(sz: Int, p: Int, w: String) = new ForeachModifyHeavy(sz, p, w)
- }
-
- class ForeachModifyHeavy(val size: Int, val parallelism: Int, val runWhat: String)
- extends IterableBench with StandardParallelIterableBench[Int, ParallelRange] {
- val array = new Array[Int](size)
- def modify(n: Int) = array(n) = collatz(10000 + array(n))
-
- def comparisonMap = collection.Map()
- def runseq = for (n <- this.seqcoll) modify(n)
- def runpar = for (n <- this.parcoll) modify(n)
- def companion = ForeachModifyHeavy
- }
-
- object ForeachAdd extends IterableBenchCompanion {
- override def defaultSize = 10000
- def benchName = "foreach-add";
- def apply(sz: Int, p: Int, w: String) = new ForeachAdd(sz, p, w)
- override def comparisons = List("seq-hashmap")
- }
-
- class ForeachAdd(val size: Int, val parallelism: Int, val runWhat: String)
- extends IterableBench with StandardParallelIterableBench[Int, ParallelRange] {
- val cmap = new java.util.concurrent.ConcurrentHashMap[Int, Int]
- val hmap = new java.util.HashMap[Int, Int]
-
- override def reset = runWhat match {
- case "seq-hashmap" => seqcoll = createSequential(size, parallelism)
- case _ => super.reset
- }
-
- def comparisonMap = collection.Map("seq-hashmap" -> runseqhashmap _)
- def runseqhashmap = for (i <- seqcoll) hmap put (i, onesum(i))
- def runseq = for (i <- seqcoll) cmap put (i, onesum(i))
- def runpar = for (i <- parcoll) cmap put (i, onesum(i))
- def companion = ForeachAdd
- }
-
- object ForeachAddCollatz extends IterableBenchCompanion {
- override def defaultSize = 5000
- def benchName = "foreach-add-collatz";
- def apply(sz: Int, p: Int, w: String) = new ForeachAddCollatz(sz, p, w)
- override def comparisons = List("seq-hashmap")
- }
-
- class ForeachAddCollatz(val size: Int, val parallelism: Int, val runWhat: String)
- extends IterableBench with StandardParallelIterableBench[Int, ParallelRange] {
- val cmap = new java.util.concurrent.ConcurrentHashMap[Int, Int]
- val hmap = new java.util.HashMap[Int, Int]
-
- override def reset = runWhat match {
- case "seq-hashmap" => seqcoll = createSequential(size, parallelism)
- case _ => super.reset
- }
-
- def comparisonMap = collection.Map("seq-hashmap" -> runseqhashmap _)
- def runseqhashmap = for (i <- seqcoll) hmap put (i, collatz(i))
- def runseq = for (i <- seqcoll) cmap put (i, collatz(i))
- def runpar = for (i <- parcoll) cmap put (i, collatz(i))
- def companion = ForeachAddCollatz
- }
-
- def collatz(n: Int) = {
- var curr = n
- var sum = 0
- while (curr > 1) {
- sum += curr
- if (curr % 2 == 0) curr = curr / 2
- else curr = curr * 3 + 1
- }
- sum
- }
-
- def onesum(n: Int) = {
- var left = n
- var sum = 0
- while (left > 0) {
- sum += left % 2
- left /= 2
- }
- sum
- }
-
-}
-
-
-
-
-
-
-
-
-
-
-
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_view/SeqViewBenches.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_view/SeqViewBenches.scala
deleted file mode 100644
index f174dec7db..0000000000
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_view/SeqViewBenches.scala
+++ /dev/null
@@ -1,50 +0,0 @@
-package scala.collection.parallel
-package benchmarks.parallel_view
-
-
-
-import scala.collection.parallel.benchmarks.generic._
-import scala.collection.SeqView
-
-
-
-
-
-
-
-
-
-
-trait DummyViewBenches
-extends ParallelSeqViewBench[Dummy, ParallelSeqView[Dummy, ParallelSeq[Dummy], Seq[Dummy]], Seq[Dummy]] {
- def nameOfCollection = "ParallelView"
- def operators = DummyOperators
- def comparisonMap = collection.Map()
- val forkJoinPool = new scala.concurrent.forkjoin.ForkJoinPool
- def createSequential(sz: Int, p: Int) = {
- val s = new Array[Dummy](sz)
- for (i <- 0 until sz) s(i) = new Dummy(i)
- s
- }
- def createParallel(sz: Int, p: Int) = {
- val pa = new collection.parallel.mutable.ParallelArray[Dummy](sz)
- forkJoinPool.setParallelism(p)
- for (i <- 0 until sz) pa(i) = new Dummy(i)
- val v = pa.view
- v.environment = forkJoinPool
- v
- }
-}
-
-
-object DummyViewBenchList extends DummyViewBenches with NotBenchmark
-
-
-
-
-
-
-
-
-
-
diff --git a/test/files/jvm/actor-executor2.scala b/test/files/jvm/actor-executor2.scala
index f8fcaef69f..da64a7fc43 100644
--- a/test/files/jvm/actor-executor2.scala
+++ b/test/files/jvm/actor-executor2.scala
@@ -1,6 +1,6 @@
import scala.actors.{Actor, SchedulerAdapter, Exit}
import Actor._
-import java.util.concurrent.{Executors, RejectedExecutionException}
+import java.util.concurrent.Executors
object One extends AdaptedActor {
def act() {
@@ -57,15 +57,9 @@ object Test {
val scheduler =
new SchedulerAdapter {
def execute(block: => Unit) {
- val task = new Runnable {
+ executor.execute(new Runnable {
def run() { block }
- }
- try {
- executor.execute(task)
- } catch {
- case ree: RejectedExecutionException =>
- task.run() // run task on current thread
- }
+ })
}
}
diff --git a/test/files/jvm/libnatives.jnilib b/test/files/jvm/libnatives.jnilib
index 1fc104883a..daac50e3df 100755..100644
--- a/test/files/jvm/libnatives.jnilib
+++ b/test/files/jvm/libnatives.jnilib
Binary files differ
diff --git a/test/files/jvm/mkLibNatives.sh b/test/files/jvm/mkLibNatives.sh
index 537187eedd..ed80c24c3e 100755
--- a/test/files/jvm/mkLibNatives.sh
+++ b/test/files/jvm/mkLibNatives.sh
@@ -37,7 +37,7 @@ JAVAH_OPTIONS="-jni -force -classpath ${CLASS_DIR} -o ${OBJ_NAME}.h"
CC=gcc
if $darwin; then
- CC_OPTIONS="-c -arch ppc -arch i386 -arch x86_64"
+ CC_OPTIONS="-c -arch ppc -arch i386"
CC_INCLUDES="-I/System/Library/Frameworks/JavaVM.framework/Headers"
LNK_OPTIONS="-dynamiclib -framework JavaVM"
FULL_LIB_NAME=${LIB_NAME}.jnilib
diff --git a/test/files/neg/abstract-vars.check b/test/files/neg/abstract-vars.check
deleted file mode 100644
index 8aa47745f6..0000000000
--- a/test/files/neg/abstract-vars.check
+++ /dev/null
@@ -1,21 +0,0 @@
-abstract-vars.scala:5: error: class Fail1 needs to be abstract, since variable x is not defined
-(Note that variables need to be initialized to be defined)
-class Fail1 extends A {
- ^
-abstract-vars.scala:9: error: class Fail2 needs to be abstract, since variable x in class A of type Int is not defined
-(Note that variables need to be initialized to be defined)
-class Fail2 extends A { }
- ^
-abstract-vars.scala:11: error: class Fail3 needs to be abstract, since variable x in class A of type Int is not defined
-(Note that an abstract var requires a setter in addition to the getter)
-class Fail3 extends A {
- ^
-abstract-vars.scala:14: error: class Fail4 needs to be abstract, since variable x in class A of type Int is not defined
-(Note that an abstract var requires a setter in addition to the getter)
-class Fail4 extends A {
- ^
-abstract-vars.scala:18: error: class Fail5 needs to be abstract, since variable x in class A of type Int is not defined
-(Note that an abstract var requires a getter in addition to the setter)
-class Fail5 extends A {
- ^
-5 errors found
diff --git a/test/files/neg/abstract-vars.scala b/test/files/neg/abstract-vars.scala
deleted file mode 100644
index df6109d3a8..0000000000
--- a/test/files/neg/abstract-vars.scala
+++ /dev/null
@@ -1,29 +0,0 @@
-abstract class A {
- var x: Int
-}
-
-class Fail1 extends A {
- var x: Int
-}
-
-class Fail2 extends A { }
-
-class Fail3 extends A {
- val x: Int = 5
-}
-class Fail4 extends A {
- def x: Int = 5
-}
-
-class Fail5 extends A {
- def x_=(y: Int) = ()
-}
-
-class Success1 extends A {
- val x: Int = 5
- def x_=(y: Int) = ()
-}
-
-class Success2 extends A {
- var x: Int = 5
-}
diff --git a/test/files/neg/bug1275.check b/test/files/neg/bug1275.check
index 40c5d79d27..9f806c0689 100644
--- a/test/files/neg/bug1275.check
+++ b/test/files/neg/bug1275.check
@@ -1,6 +1,4 @@
-bug1275.scala:8: error: type mismatch;
- found : xs.MyType[a]
- required: s
- = xs f // xs: s <: Seq[a]{type MyType <: s }
- ^
+bug1275.scala:13: error: The kind of type MyType does not conform to the expected kind of type MyType[+t] <: TestCovariance.Seq[t] in trait Seq.
+ def span[a, s <: Seq[a] { type MyType <: s } ](xs: s): s = xs f
+ ^
one error found
diff --git a/test/files/neg/bug1275.scala b/test/files/neg/bug1275.scala
index 769156fff2..e9be13c763 100644
--- a/test/files/neg/bug1275.scala
+++ b/test/files/neg/bug1275.scala
@@ -1,12 +1,14 @@
-object Test {
- trait Seq[+t] {
- type MyType[+t] <: Seq[t]
- def f: MyType[t]
- }
-
- def span[a, s <: Seq[a] { type MyType <: s } ](xs: s): s
- = xs f // xs: s <: Seq[a]{type MyType <: s }
- // xs.f : xs.MyType[a] <: Seq[a]
- // ill-formed type in bound for s: Seq[a] { type MyType <: s }
- // refinements aren't checked -- should they?
-} \ No newline at end of file
+// tested using Scala compiler version 2.6.0-RC1 -- (c) 2002-2010 LAMP/EPFL
+
+// prompted by "Covariant return types" mailing list question
+object TestCovariance {
+
+ // see Type constructor polymorphism in http://www.scala-lang.org/docu/changelog.html
+ trait Seq[+t] {
+ type MyType[+t] <: Seq[t]
+
+ def f: MyType[t]
+ }
+
+ def span[a, s <: Seq[a] { type MyType <: s } ](xs: s): s = xs f
+}
diff --git a/test/files/neg/bug1845.check b/test/files/neg/bug1845.check
deleted file mode 100644
index 164f3f60c1..0000000000
--- a/test/files/neg/bug1845.check
+++ /dev/null
@@ -1,4 +0,0 @@
-bug1845.scala:9: error: illegal cyclic reference involving value <import>
- val lexical = new StdLexical
- ^
-one error found
diff --git a/test/files/neg/bug1845.scala b/test/files/neg/bug1845.scala
deleted file mode 100644
index ceb43a0552..0000000000
--- a/test/files/neg/bug1845.scala
+++ /dev/null
@@ -1,10 +0,0 @@
-import scala.util.parsing.combinator.syntactical.TokenParsers
-import scala.util.parsing.combinator.lexical.StdLexical
-import scala.util.parsing.syntax.StdTokens
-
-class MyTokenParsers extends TokenParsers {
- import lexical._
- type Tokens = StdTokens
- type Elem = lexical.Token
- val lexical = new StdLexical
-}
diff --git a/test/files/neg/bug3209.check b/test/files/neg/bug3209.check
deleted file mode 100644
index fa50f4ce1d..0000000000
--- a/test/files/neg/bug3209.check
+++ /dev/null
@@ -1,4 +0,0 @@
-bug3209.scala:2: error: expected start of definition
-package test
-^
-one error found
diff --git a/test/files/neg/bug3209.scala b/test/files/neg/bug3209.scala
deleted file mode 100644
index d893726659..0000000000
--- a/test/files/neg/bug3209.scala
+++ /dev/null
@@ -1,2 +0,0 @@
-@javax.annotation.Generated(Array("test"))
-package test \ No newline at end of file
diff --git a/test/files/neg/bug3631.check b/test/files/neg/bug3631.check
deleted file mode 100644
index 12d94aa4dc..0000000000
--- a/test/files/neg/bug3631.check
+++ /dev/null
@@ -1,4 +0,0 @@
-bug3631.scala:3: error: Implementation restriction: case classes cannot have more than 22 parameters.
-case class X23(x1: Int, x2: Int, x3: Int, x4: Int, x5: Int, x6: Int, x7: Int, x8: Int, x9: Int, x10: Int, x11: Int, x12: Int, x13: Int, x14: Int, x15: Int, x16: Int, x17: Int, x18: Int, x19: Int, x20: Int, x21: Int, x22: Int, x23: Int) { }
- ^
-one error found
diff --git a/test/files/neg/bug3631.scala b/test/files/neg/bug3631.scala
deleted file mode 100644
index bcf91619ee..0000000000
--- a/test/files/neg/bug3631.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-case class X22(x1: Int, x2: Int, x3: Int, x4: Int, x5: Int, x6: Int, x7: Int, x8: Int, x9: Int, x10: Int, x11: Int, x12: Int, x13: Int, x14: Int, x15: Int, x16: Int, x17: Int, x18: Int, x19: Int, x20: Int, x21: Int, x22: Int) { }
-
-case class X23(x1: Int, x2: Int, x3: Int, x4: Int, x5: Int, x6: Int, x7: Int, x8: Int, x9: Int, x10: Int, x11: Int, x12: Int, x13: Int, x14: Int, x15: Int, x16: Int, x17: Int, x18: Int, x19: Int, x20: Int, x21: Int, x22: Int, x23: Int) { } \ No newline at end of file
diff --git a/test/files/neg/bug882.check b/test/files/neg/bug882.check
index 4e3e6d0860..8f47fefd9b 100644
--- a/test/files/neg/bug882.check
+++ b/test/files/neg/bug882.check
@@ -1,4 +1,4 @@
-bug882.scala:2: error: traits cannot have type parameters with context bounds `: ...' nor view bounds `<% ...'
+bug882.scala:2: error: traits cannot have type parameters with context bounds `: ...'
trait SortedSet[A <% Ordered[A]] {
^
one error found
diff --git a/test/files/neg/names-defaults-neg.check b/test/files/neg/names-defaults-neg.check
index b2b00b7050..38bacc0888 100644
--- a/test/files/neg/names-defaults-neg.check
+++ b/test/files/neg/names-defaults-neg.check
@@ -104,20 +104,17 @@ names-defaults-neg.scala:82: error: type mismatch;
Error occurred in an application involving default arguments.
new A2[String]()
^
-names-defaults-neg.scala:86: error: module extending its companion class cannot use default constructor arguments
- object C extends C()
- ^
-names-defaults-neg.scala:120: error: reference to var2 is ambiguous; it is both, a parameter
+names-defaults-neg.scala:115: error: reference to var2 is ambiguous; it is both, a parameter
name of the method and the name of a variable currently in scope.
delay(var2 = 40)
^
-names-defaults-neg.scala:123: error: missing parameter type for expanded function ((x$1) => a = x$1)
+names-defaults-neg.scala:118: error: missing parameter type for expanded function ((x$1) => a = x$1)
val taf2: Int => Unit = testAnnFun(a = _, b = get("+"))
^
-names-defaults-neg.scala:124: error: parameter specified twice: a
+names-defaults-neg.scala:119: error: parameter specified twice: a
val taf3 = testAnnFun(b = _: String, a = get(8))
^
-names-defaults-neg.scala:125: error: wrong number of parameters; expected = 2
+names-defaults-neg.scala:120: error: wrong number of parameters; expected = 2
val taf4: (Int, String) => Unit = testAnnFun(_, b = _)
^
-29 errors found
+28 errors found
diff --git a/test/files/neg/names-defaults-neg.scala b/test/files/neg/names-defaults-neg.scala
index 43883540a0..e73dc71c9b 100644
--- a/test/files/neg/names-defaults-neg.scala
+++ b/test/files/neg/names-defaults-neg.scala
@@ -81,11 +81,6 @@ object Test extends Application {
// correct error message
new A2[String]()
- object t3648 {
- class C(val s: String = "")
- object C extends C()
- }
-
// DEFINITIONS
def test1(a: Int, b: String) = a +": "+ b
def test2(x: Unit) = println("test2")
diff --git a/test/files/neg/t2416.check b/test/files/neg/t2416.check
deleted file mode 100644
index 0899ad09d5..0000000000
--- a/test/files/neg/t2416.check
+++ /dev/null
@@ -1,10 +0,0 @@
-t2416.scala:3: error: type arguments [Int] do not conform to trait A's type parameter bounds [X <: Double]
- def x : A[Int]#B = 10 // no you won't
- ^
-t2416.scala:8: error: type arguments [Boolean] do not conform to type B's type parameter bounds [Y <: Double]
- def x : A#B[Boolean] = 10 // seriously?
- ^
-t2416.scala:13: error: type arguments [String] do not conform to type B's type parameter bounds [Z <: Double]
- type C[Z <: A] = Z#B[String] // nuh-uh!
- ^
-three errors found
diff --git a/test/files/neg/t2416.scala b/test/files/neg/t2416.scala
deleted file mode 100644
index 6bb57a984b..0000000000
--- a/test/files/neg/t2416.scala
+++ /dev/null
@@ -1,14 +0,0 @@
-object t2416a {
- trait A[X <: Double] { type B = X }
- def x : A[Int]#B = 10 // no you won't
-}
-
-object t2416b {
- trait A{type B[Y <: Double] = Int}
- def x : A#B[Boolean] = 10 // seriously?
-}
-
-object t2416c {
- trait A{type B[Z <: Double] = Int}
- type C[Z <: A] = Z#B[String] // nuh-uh!
-} \ No newline at end of file
diff --git a/test/files/neg/t3399.check b/test/files/neg/t3399.check
deleted file mode 100644
index eb6c679704..0000000000
--- a/test/files/neg/t3399.check
+++ /dev/null
@@ -1,4 +0,0 @@
-t3399.scala:23: error: could not find implicit value for parameter e: =:=[Nats.Add[Nats._1,Nats._1],Nats._1]
- implicitly[ Add[_1, _1] =:= _1]
- ^
-one error found
diff --git a/test/files/neg/t3399.scala b/test/files/neg/t3399.scala
deleted file mode 100644
index 3edaa0724f..0000000000
--- a/test/files/neg/t3399.scala
+++ /dev/null
@@ -1,24 +0,0 @@
-object Nats {
- sealed trait Nat {
- // fold right on N, N-1, ..., 1
- type FoldR[Init <: Type, Type, F <: Fold[Nat, Type]] <: Type
- }
- sealed trait _0 extends Nat {
- type FoldR[Init <: Type, Type, F <: Fold[Nat, Type]] = Init
- }
- sealed trait Succ[N <: Nat] extends Nat {
- type FoldR[Init <: Type, Type, F <: Fold[Nat, Type]] =
- F#Apply[Succ[N], N#FoldR[Init, Type, F]]
- }
-
- type Add[A <: Nat, B <: Nat] = A#FoldR[B, Nat, Inc]
- trait Fold[-Elem, Value] {
- type Apply[N <: Elem, Acc <: Value] <: Value
- }
- type Inc = Fold[Any, Nat] {
- type Apply[N <: Any, Acc <: Nat] = Succ[Acc]
- }
-
- type _1 = Succ[_0]
- implicitly[ Add[_1, _1] =:= _1]
-} \ No newline at end of file
diff --git a/test/files/neg/t3507.check b/test/files/neg/t3507.check
deleted file mode 100644
index 1246a20d09..0000000000
--- a/test/files/neg/t3507.check
+++ /dev/null
@@ -1,4 +0,0 @@
-t3507.scala:13: error: could not find implicit value for evidence parameter of type Manifest[object _1.b.c]
- mani/*[object _1.b.c]*/(c) // kaboom in manifestOfType / TreeGen.mkAttributedQualifier
- ^
-one error found
diff --git a/test/files/neg/t3507.scala b/test/files/neg/t3507.scala
deleted file mode 100644
index 9a8c7c5462..0000000000
--- a/test/files/neg/t3507.scala
+++ /dev/null
@@ -1,15 +0,0 @@
-class A {
- object b {
- object c
- }
- def m = b.c
-}
-
-object Test {
- var a: A = new A // mutable
- val c /*: object _1.b.c forSome { val _1: A } */ = a.m // widening using existential
-
- def mani[T: Manifest](x: T) = ()
- mani/*[object _1.b.c]*/(c) // kaboom in manifestOfType / TreeGen.mkAttributedQualifier
- // --> _1 is not in scope here
-} \ No newline at end of file
diff --git a/test/files/neg/t3604.check b/test/files/neg/t3604.check
deleted file mode 100644
index b07c5c9c71..0000000000
--- a/test/files/neg/t3604.check
+++ /dev/null
@@ -1,7 +0,0 @@
-t3604.scala:3: error: in XML literal: expected closing tag of abbr
- <abbr></div>
- ^
-t3604.scala:3: error: start tag was here: abbr>
- <abbr></div>
- ^
-two errors found
diff --git a/test/files/neg/t3604.scala b/test/files/neg/t3604.scala
deleted file mode 100644
index f890a58e58..0000000000
--- a/test/files/neg/t3604.scala
+++ /dev/null
@@ -1,6 +0,0 @@
-object Main {
- <div>
- <abbr></div>
- { "..." }
- </div>
-}
diff --git a/test/files/neg/t3653.check b/test/files/neg/t3653.check
deleted file mode 100644
index ac6e2ca9dc..0000000000
--- a/test/files/neg/t3653.check
+++ /dev/null
@@ -1,7 +0,0 @@
-t3653.scala:3: error: double definition:
-method x:(implicit x: Int)Int and
-method x:(i: Int)Int at line 2
-have same type after erasure: (x: Int)Int
- def x(implicit x: Int) = 5
- ^
-one error found
diff --git a/test/files/neg/t3653.scala b/test/files/neg/t3653.scala
deleted file mode 100644
index 96cf96008a..0000000000
--- a/test/files/neg/t3653.scala
+++ /dev/null
@@ -1,4 +0,0 @@
-class B {
- def x(i: Int) = 3
- def x(implicit x: Int) = 5
-} \ No newline at end of file
diff --git a/test/files/neg/t742.check b/test/files/neg/t742.check
deleted file mode 100644
index f587948ef1..0000000000
--- a/test/files/neg/t742.check
+++ /dev/null
@@ -1,5 +0,0 @@
-t742.scala:5: error: kinds of the type arguments (Crash._1,Crash._2,Any) do not conform to the expected kinds of the type parameters (type m,type n,type z).
-Crash._1's type parameters do not match type m's expected parameters: type s1 has one type parameter, but type n has two
- type p = mul[_1, _2, Any] // mul[_1, _1, Any] needs -Yrecursion
- ^
-one error found
diff --git a/test/files/neg/t742.scala b/test/files/neg/t742.scala
deleted file mode 100644
index bb1c2f85ea..0000000000
--- a/test/files/neg/t742.scala
+++ /dev/null
@@ -1,8 +0,0 @@
-object Crash {
- type mul[m[n[s[_], z], z], n[s[_], z], z] = m[n, z]
- type _1[s1[_], z1] = s1[z1]
- type _2[s1[_], z1] = s1[z1]
- type p = mul[_1, _2, Any] // mul[_1, _1, Any] needs -Yrecursion
- // _1[_2, Zero]
- // _2[Zero]
-} \ No newline at end of file
diff --git a/test/files/neg/tailrec-2.check b/test/files/neg/tailrec-2.check
deleted file mode 100644
index ab6733946d..0000000000
--- a/test/files/neg/tailrec-2.check
+++ /dev/null
@@ -1,4 +0,0 @@
-tailrec-2.scala:6: error: could not optimize @tailrec annotated method: it contains a recursive call targetting a supertype
- @annotation.tailrec final def f[B >: A](mem: List[B]): List[B] = (null: Super[A]).f(mem)
- ^
-one error found
diff --git a/test/files/neg/tailrec-2.scala b/test/files/neg/tailrec-2.scala
deleted file mode 100644
index 4388815a06..0000000000
--- a/test/files/neg/tailrec-2.scala
+++ /dev/null
@@ -1,26 +0,0 @@
-sealed abstract class Super[+A] {
- def f[B >: A](mem: List[B]) : List[B]
-}
-// This one should fail, target is a supertype
-class Bop1[+A](val element: A) extends Super[A] {
- @annotation.tailrec final def f[B >: A](mem: List[B]): List[B] = (null: Super[A]).f(mem)
-}
-// These succeed
-class Bop2[+A](val element: A) extends Super[A] {
- @annotation.tailrec final def f[B >: A](mem: List[B]): List[B] = (null: Bop2[A]).f(mem)
-}
-object Bop3 extends Super[Nothing] {
- @annotation.tailrec final def f[B](mem: List[B]): List[B] = (null: Bop3.type).f(mem)
-}
-class Bop4[+A](val element: A) extends Super[A] {
- @annotation.tailrec final def f[B >: A](mem: List[B]): List[B] = Other.f[A].f(mem)
-}
-
-object Other {
- def f[T] : Bop4[T] = error("")
-}
-
-object Bop {
- def m1[A] : Super[A] = error("")
- def m2[A] : Bop2[A] = error("")
-} \ No newline at end of file
diff --git a/test/files/pos/bug0013.scala b/test/files/pos/bug0013.scala
deleted file mode 100644
index 999a2ab61c..0000000000
--- a/test/files/pos/bug0013.scala
+++ /dev/null
@@ -1,31 +0,0 @@
-// covariant linked list
-abstract class M { self =>
-
- type T
- final type selfType = M {type T <: self.T}
- type actualSelfType >: self.type <: selfType
-
- def next: selfType
-
- // I don't understand why this doesn't compile, but that's a separate matter
- // error: method all2 cannot be accessed in M.this.selfType
- // because its instance type => Stream[M{type T <: M.this.selfType#T}]
- // contains a malformed type: M.this.selfType#T
- // def all2: Stream[M {type T <: self.T}] = Stream.cons(self: actualSelfType, next.all2)
-
-
- // compiles successfully
- // def all3: Stream[M {type T <: self.T}] = all3Impl(self: actualSelfType)
- // private def all3Impl(first: M {type T <: self.T}): Stream[M {type T <: self.T}] = Stream.cons(first, all3Impl(first.next))
-
-
-
- def all4: Stream[M {type T <: self.T}] = Unrelated.all4Impl[T](self: actualSelfType)
-}
-
-object Unrelated {
- def all4Impl[U](first: M {type T <: U}): Stream[M {type T <: U}] = Stream.cons(first, all4Impl(first.next))
-
- // compiles successfully
- // def all4Impl[U](first: M {type T <: U}): Stream[M {type T <: U}] = Stream.cons(first, all4Impl[U](first.next))
-} \ No newline at end of file
diff --git a/test/files/pos/bug0095.scala b/test/files/pos/bug0095.scala
deleted file mode 100644
index 71386cf5c7..0000000000
--- a/test/files/pos/bug0095.scala
+++ /dev/null
@@ -1,15 +0,0 @@
-class ParseResult[+T]
-case class Success[+T](t: T) extends ParseResult[T]
-
-abstract class Nonterminal[Output] {
-
- type SubNonterminal = Nonterminal[T] forSome { type T <: Output }
-
- def parse: ParseResult[Output]
-
- def parse1(nts: List[SubNonterminal]): ParseResult[Output] =
- nts match {
- case nt::nts => nt.parse match { case Success(so) => Success(so) }
- case Nil => throw new Error
- }
-}
diff --git a/test/files/pos/bug1974.scala b/test/files/pos/bug1974.scala
deleted file mode 100644
index a0daa13c21..0000000000
--- a/test/files/pos/bug1974.scala
+++ /dev/null
@@ -1,20 +0,0 @@
-object Broken {
- private var map = Map[Class[_], String]()
-
- def addToMap(c : Class[_], s : String) = map += (c -> s)
- def fetch(c : Class[_]) = map(c)
-}
-
-object Works {
- private var map = Map[Class[_], String]()
-
- def addToMap(c : Class[_], s : String) = map += ((c, s))
- def fetch(c : Class[_]) = map(c)
-}
-
-object Works2 {
- private var map = Map[Class[_], String]()
-
- def addToMap(c : Class[_], s : String) = map += ((c : Class[_]) -> s)
- def fetch(c : Class[_]) = map(c)
-} \ No newline at end of file
diff --git a/test/files/pos/bug261-ab.scala b/test/files/pos/bug261-ab.scala
deleted file mode 100644
index 80699e692c..0000000000
--- a/test/files/pos/bug261-ab.scala
+++ /dev/null
@@ -1,9 +0,0 @@
-trait A { val foo: String = "A" }
-trait B {
- private val foo: String = "B"
- def f = println(foo)
-}
-object Test extends Application with B with A {
- println(foo) // prints "A", as expected
- f // prints "B", as expected
-} \ No newline at end of file
diff --git a/test/files/pos/bug261-ba.scala b/test/files/pos/bug261-ba.scala
deleted file mode 100644
index c66a68d101..0000000000
--- a/test/files/pos/bug261-ba.scala
+++ /dev/null
@@ -1,9 +0,0 @@
-trait B {
- private val foo: String = "B"
- def f = println(foo)
-}
-trait A { val foo: String = "A" }
-object Test extends Application with B with A {
- println(foo) // prints "A", as expected
- f // prints "B", as expected
-} \ No newline at end of file
diff --git a/test/files/pos/bug3234.flags b/test/files/pos/bug3234.flags
deleted file mode 100644
index c9cefdc4b9..0000000000
--- a/test/files/pos/bug3234.flags
+++ /dev/null
@@ -1 +0,0 @@
--Yinline -Xfatal-warnings \ No newline at end of file
diff --git a/test/files/pos/bug3234.scala b/test/files/pos/bug3234.scala
deleted file mode 100644
index 1553f1fa05..0000000000
--- a/test/files/pos/bug3234.scala
+++ /dev/null
@@ -1,19 +0,0 @@
-trait Trait1 {
- // need more work before this one works
- // @inline
- def foo2(n: Int) = n*n
-}
-
-trait Trait2 {
- @inline def foo3(n: Int) = 1
-}
-
-class Base extends Trait1 {
- @inline def foo(n: Int) = n
-}
-
-object Test extends Base with Trait2 {
- def main(args: Array[String]) = {
- println(foo(42) + foo2(11) + foo3(2))
- }
-} \ No newline at end of file
diff --git a/test/files/pos/bug3440.scala b/test/files/pos/bug3440.scala
deleted file mode 100644
index 026abfea1f..0000000000
--- a/test/files/pos/bug3440.scala
+++ /dev/null
@@ -1,18 +0,0 @@
-object test {
- abstract class SampleFormat1 {
- def readerFactory: Any
- }
-
- case object Int8 extends SampleFormat1 {
- def readerFactory = error("")
- }
- case object Int16 extends SampleFormat1 {
- def readerFactory = error("")
- }
-
- (new {}: Any) match {
- case 8 => Int8
- case 16 => Int16
- case _ => error("")
- }
-} \ No newline at end of file
diff --git a/test/files/pos/bug3521/DoubleValue.java b/test/files/pos/bug3521/DoubleValue.java
deleted file mode 100644
index e8c093890b..0000000000
--- a/test/files/pos/bug3521/DoubleValue.java
+++ /dev/null
@@ -1,7 +0,0 @@
-import java.lang.annotation.*;
-
-@Retention(RetentionPolicy.RUNTIME)
-@Target(ElementType.FIELD)
-public @interface DoubleValue {
- double value();
-} \ No newline at end of file
diff --git a/test/files/pos/bug3521/a.scala b/test/files/pos/bug3521/a.scala
deleted file mode 100644
index 94bb451fc3..0000000000
--- a/test/files/pos/bug3521/a.scala
+++ /dev/null
@@ -1,4 +0,0 @@
-class Test {
- @DoubleValue(-0.05)
- var a = 0
-} \ No newline at end of file
diff --git a/test/files/pos/bug3570.scala b/test/files/pos/bug3570.scala
deleted file mode 100644
index 8921f83b2a..0000000000
--- a/test/files/pos/bug3570.scala
+++ /dev/null
@@ -1,7 +0,0 @@
-class test {
- object Break extends Throwable
- def break = throw Break
- def block(x: => Unit) {
- try { x } catch { case e: Break.type => }
- }
-}
diff --git a/test/files/pos/t1263/Test.java b/test/files/pos/t1263/Test.java
index 6ca88c21a1..0eb43e881a 100644
--- a/test/files/pos/t1263/Test.java
+++ b/test/files/pos/t1263/Test.java
@@ -5,7 +5,7 @@ import java.rmi.RemoteException;
import test.Map;
public class Test implements Map<String, String> {
- public Map.MapTo plus(String o) {
+ public Map<String, String>.MapTo plus(String o) {
return null;
}
diff --git a/test/files/pos/t2133.scala b/test/files/pos/t2133.scala
deleted file mode 100644
index c74d0a4bbf..0000000000
--- a/test/files/pos/t2133.scala
+++ /dev/null
@@ -1,18 +0,0 @@
-trait Foo {
- object bar {
- private[this] def fn() = 5
- }
-}
-
-trait Foo2 {
- object bip {
- def fn() = 10
- }
-}
-
-class Bob extends AnyRef with Foo with Foo2 {
- import bip._
- import bar._
-
- def go() = fn()
-}
diff --git a/test/files/pos/t2331.scala b/test/files/pos/t2331.scala
deleted file mode 100644
index 9a15b5c2a9..0000000000
--- a/test/files/pos/t2331.scala
+++ /dev/null
@@ -1,11 +0,0 @@
-trait C {
- def m[T]: T
-}
-
-object Test {
- val o /*: C --> no crash*/ = new C {
- def m[T]: Nothing /*: T --> no crash*/ = error("omitted")
- }
-
- o.m[Nothing]
-} \ No newline at end of file
diff --git a/test/files/pos/t2413/TestJava.java b/test/files/pos/t2413/TestJava.java
deleted file mode 100644
index 252c01fbc0..0000000000
--- a/test/files/pos/t2413/TestJava.java
+++ /dev/null
@@ -1,7 +0,0 @@
-package pack;
-
-public class TestJava {
- protected String repeatParam(String ... items) {
- return "nothing";
- }
-}
diff --git a/test/files/pos/t2413/TestScalac.scala b/test/files/pos/t2413/TestScalac.scala
deleted file mode 100644
index 098e852dd7..0000000000
--- a/test/files/pos/t2413/TestScalac.scala
+++ /dev/null
@@ -1,23 +0,0 @@
-import pack.TestJava
-
-class Foo extends TestJava {
-
- // THIS METHOD YIELDS TO CRASH
-/* def foomethod : Option[String] => Unit = {
- case None =>
- val path = repeatParam("s","a","b","c")
- ()
- case Some(error) =>
- ()
- }
-
- // THIS IS OK
- def foomethod2 : String = repeatParam("s","a");
-
- // THIS IS OK
- val aVal = repeatParam("1","2","3") */
-
- // THIS YIELDS TO CRASH
- for (a <- 1 to 4 ; anotherVal = repeatParam("1","2","3"))
- yield anotherVal
-}
diff --git a/test/files/pos/t3174.scala b/test/files/pos/t3174.scala
deleted file mode 100755
index c3d90a4946..0000000000
--- a/test/files/pos/t3174.scala
+++ /dev/null
@@ -1,14 +0,0 @@
-object test {
- def method() {
- class Foo extends AnyRef {
- object Color {
- object Blue
- }
-
- class Board {
- val grid = Color.Blue
- }
- }
- new Foo
- }
- }
diff --git a/test/files/pos/t3174b.scala b/test/files/pos/t3174b.scala
deleted file mode 100755
index 4df1bfe837..0000000000
--- a/test/files/pos/t3174b.scala
+++ /dev/null
@@ -1,12 +0,0 @@
-trait Foo[X] { def foo : Map[String,Foo[X]] }
-
-object Test {
- def f[T]() : Foo[T] = {
- class Anon extends Foo[T] {
- var foo: Map[String, Foo[T]] = Map[String,Foo[T]]()
- //def foo = Map[String,Foo[T]]()
- //def foo_=(x: Map[String,Foo[T]]) {}
- }
- new Anon
- }
-}
diff --git a/test/files/pos/t3249/Test.java b/test/files/pos/t3249/Test.java
deleted file mode 100644
index 4cc7cb2ab5..0000000000
--- a/test/files/pos/t3249/Test.java
+++ /dev/null
@@ -1,5 +0,0 @@
-public class Test {
- public static void meh() {
- new A<Integer>().f();
- }
-} \ No newline at end of file
diff --git a/test/files/pos/t3249/a.scala b/test/files/pos/t3249/a.scala
deleted file mode 100644
index 0394464549..0000000000
--- a/test/files/pos/t3249/a.scala
+++ /dev/null
@@ -1,11 +0,0 @@
-class A[U] { def f[T] = { class X extends A[T] } }
-
-
-/*
-$ scalac a.scala
-$ javac -cp .:$SCALA_HOME/lib/scala-library.jar -Xprint 'A$X$1'
-
- public class X$1 extends A<java.lang.Object> implements scala.ScalaObject {
- public X$1(A<U> null);
- }
-*/ \ No newline at end of file
diff --git a/test/files/pos/t3374.scala b/test/files/pos/t3374.scala
deleted file mode 100644
index 4c0293181d..0000000000
--- a/test/files/pos/t3374.scala
+++ /dev/null
@@ -1,6 +0,0 @@
-trait Parent {
- type Test[A, H[B <: A]]
-}
-trait Sub extends Parent {
- type Test[AS, HS[B <: AS]] = AS
-} \ No newline at end of file
diff --git a/test/files/pos/t3477.scala b/test/files/pos/t3477.scala
deleted file mode 100644
index 660aa55736..0000000000
--- a/test/files/pos/t3477.scala
+++ /dev/null
@@ -1,7 +0,0 @@
-class J3 {
- def f[K, K1 >: K, V](x: Map[K1, V]): Map[K, V] = error("")
-}
-
-object Test {
- (new J3).f(Map[Int, Int]())
-} \ No newline at end of file
diff --git a/test/files/pos/t3486/JTest.java b/test/files/pos/t3486/JTest.java
deleted file mode 100644
index 0bf388b72d..0000000000
--- a/test/files/pos/t3486/JTest.java
+++ /dev/null
@@ -1,3 +0,0 @@
-public class JTest<A> extends T2<A> {
- public A m( A a ) { return a; }
-} \ No newline at end of file
diff --git a/test/files/pos/t3486/test.scala b/test/files/pos/t3486/test.scala
deleted file mode 100644
index 544232b0d1..0000000000
--- a/test/files/pos/t3486/test.scala
+++ /dev/null
@@ -1,6 +0,0 @@
-trait Test[A] {
- def m( a: A ): A
- def specified(a:A):A = a
-}
-
-abstract class T2[A] extends Test[A] \ No newline at end of file
diff --git a/test/files/pos/t3494.scala b/test/files/pos/t3494.scala
deleted file mode 100644
index 35a4bcde5d..0000000000
--- a/test/files/pos/t3494.scala
+++ /dev/null
@@ -1,7 +0,0 @@
-object Test {
- def f[T](xs: T*) = ()
-
- val x = "abc"
-
- f[x.type](x)
-} \ No newline at end of file
diff --git a/test/files/pos/t3568.scala b/test/files/pos/t3568.scala
deleted file mode 100755
index c8e3fcc4be..0000000000
--- a/test/files/pos/t3568.scala
+++ /dev/null
@@ -1,46 +0,0 @@
-import scala.annotation._
-import scala.annotation.unchecked._
-import scala.collection._
-
-
-package object buffer {
- val broken = new ArrayVec2() // commenting out this line causes the file to compile.
-
- val works = Class.forName("buffer.ArrayVec2").newInstance().asInstanceOf[ArrayVec2]
-}
-
-package buffer {
- object Main {
- // ArrayVec2 can be compiled, instantiated and used.
- def main(args: Array[String]) { println(works) }
- }
-
- trait ElemType { type Element; type Component <: ElemType }
- trait Float1 extends ElemType { type Element = Float; type Component = Float1}
- class Vec2 extends ElemType { type Element = Vec2; type Component = Float1 }
-
- abstract class BaseSeq[T <: ElemType, E]
- extends IndexedSeq[E] with IndexedSeqOptimized[E, IndexedSeq[E]] {
- def length = 1
- def apply(i: Int) :E
- }
-
- abstract class GenericSeq[T <: ElemType] extends BaseSeq[T, T#Element]
- trait DataArray[T <: ElemType] extends BaseSeq[T, T#Element]
- trait DataView[T <: ElemType] extends BaseSeq[T, T#Element]
- abstract class BaseFloat1 extends BaseSeq[Float1, Float]
-
- class ArrayFloat1 extends BaseFloat1 with DataArray[Float1] {
- def apply(i: Int) :Float = 0f
- }
-
- class ViewFloat1 extends BaseFloat1 with DataView[Float1] {
- def apply(i: Int) :Float = 0f
- }
-
- class ArrayVec2(val backingSeq: ArrayFloat1)
- extends GenericSeq[Vec2] with DataArray[Vec2] {
- def this() = this(new ArrayFloat1)
- def apply(i: Int) :Vec2 = null
- }
-}
diff --git a/test/files/pos/t3622/test/AsyncTask.java b/test/files/pos/t3622/test/AsyncTask.java
deleted file mode 100644
index cfcea3fe1a..0000000000
--- a/test/files/pos/t3622/test/AsyncTask.java
+++ /dev/null
@@ -1,5 +0,0 @@
-package test;
-
-public abstract class AsyncTask<Params, Progress, Result> {
- protected abstract Result doInBackground(Params... args);
-} \ No newline at end of file
diff --git a/test/files/pos/t3622/test/MyAsyncTask.java b/test/files/pos/t3622/test/MyAsyncTask.java
deleted file mode 100644
index 9ef4947052..0000000000
--- a/test/files/pos/t3622/test/MyAsyncTask.java
+++ /dev/null
@@ -1,9 +0,0 @@
-package test;
-
-public abstract class MyAsyncTask extends AsyncTask<String, String, String> {
- protected abstract String doInBackground1(String[] args);
- @Override
- protected String doInBackground(String... args) {
- return doInBackground1(new String[]{"dummy"});
- }
-} \ No newline at end of file
diff --git a/test/files/pos/t3622/test/Test.scala b/test/files/pos/t3622/test/Test.scala
deleted file mode 100644
index fb82c581f9..0000000000
--- a/test/files/pos/t3622/test/Test.scala
+++ /dev/null
@@ -1,5 +0,0 @@
-package test
-
-class Test extends MyAsyncTask {
- protected[test] def doInBackground1(args: Array[String]): String = ""
-} \ No newline at end of file
diff --git a/test/files/run/bitsets-msil.check b/test/files/run/bitsets-msil.check
index b187571bff..9fefa3125e 100644
--- a/test/files/run/bitsets-msil.check
+++ b/test/files/run/bitsets-msil.check
@@ -1,23 +1,23 @@
-ms0 = BitSet(2)
-ms1 = BitSet(2)
-ms2 = BitSet(2)
+ms0 = Set(2)
+ms1 = Set(2)
+ms2 = Set(2)
mb0 = False
mb1 = True
mb2 = False
xs0 = List(2)
xs1 = List(2)
xs2 = List(2)
-ma0 = List(2)
-ma1 = List(2)
-ma2 = List(2)
-mi0 = BitSet(2)
-mi1 = BitSet(2)
-mi2 = BitSet(2)
+ma0 = List(4)
+ma1 = List(4)
+ma2 = List(4)
+mi0 = Set(2)
+mi1 = Set(2)
+mi2 = Set(2)
-is0 = BitSet()
-is1 = BitSet()
-is2 = BitSet(2)
-is3 = BitSet()
+is0 = Set()
+is1 = Set()
+is2 = Set(2)
+is3 = Set()
ib0 = False
ib1 = False
ib2 = True
@@ -26,8 +26,8 @@ ys0 = List()
ys1 = List()
ys2 = List(2)
ys3 = List()
-ia0 = List()
-ia1 = List()
-ia2 = List(2)
+ia0 = List(0)
+ia1 = List(0)
+ia2 = List(4)
ia3 = List()
diff --git a/test/files/run/bug1766.scala b/test/files/run/bug1766.scala
deleted file mode 100644
index 901f9ae664..0000000000
--- a/test/files/run/bug1766.scala
+++ /dev/null
@@ -1,16 +0,0 @@
-object Test extends Application {
-
- class C(s: String) {
-
- def this(i: Int) = this("bar")
-
- def f = {
- val v: { def n: Int } = new { val n = 3 }
- v.n
- }
-
- }
-
- new C("foo").f
-
-}
diff --git a/test/files/run/bug2106.flags b/test/files/run/bug2106.flags
deleted file mode 100644
index eb4d19bcb9..0000000000
--- a/test/files/run/bug2106.flags
+++ /dev/null
@@ -1 +0,0 @@
--optimise \ No newline at end of file
diff --git a/test/files/run/bug2106.scala b/test/files/run/bug2106.scala
deleted file mode 100644
index e8124dabab..0000000000
--- a/test/files/run/bug2106.scala
+++ /dev/null
@@ -1,8 +0,0 @@
-class A extends Cloneable {
- @inline final def foo = clone()
-}
-
-object Test {
- val x = new A
- def main(args: Array[String]) = x.foo
-}
diff --git a/test/files/run/bug3518.scala b/test/files/run/bug3518.scala
deleted file mode 100644
index 033cc19548..0000000000
--- a/test/files/run/bug3518.scala
+++ /dev/null
@@ -1,16 +0,0 @@
-object Test {
- val r1 = 1.0 to 10.0 by 0.5
- val r2 = 1.0 to 1.0 by 1.0
- val r3 = 10.0 to 1.0 by -0.5
- val r4 = 1.0 until 1.0 by 1.0
- val r5 = 1 to 100 by 2
-
- def main(args: Array[String]): Unit = {
- assert(r3 forall (r1 contains _))
- assert(r1 forall (r3 contains _))
- assert(r2.size == 1)
- assert(r4.isEmpty)
- assert(List(1,3,5,97,99) forall (r5 contains _))
- assert(List(2,4,6,98,100) forall (x => !r5.contains(x)))
- }
-}
diff --git a/test/files/run/bug3540.scala b/test/files/run/bug3540.scala
deleted file mode 100644
index 5ffacb5dff..0000000000
--- a/test/files/run/bug3540.scala
+++ /dev/null
@@ -1,7 +0,0 @@
-object Test {
- def main(args: Array[String]): Unit = {
- assert(List.iterate(List(1,2,3), 4)(_.tail).last.isEmpty)
- assert(Stream.iterate(Stream(1,2,3), 4)(_.tail).last.isEmpty)
- assert(Array.iterate(Array(1,2,3), 4)(_.tail).last.isEmpty)
- }
-}
diff --git a/test/files/run/bug3616.check b/test/files/run/bug3616.check
deleted file mode 100644
index f31e21baff..0000000000
--- a/test/files/run/bug3616.check
+++ /dev/null
@@ -1 +0,0 @@
-Fruit.ValueSet(A, B, C)
diff --git a/test/files/run/bug3616.scala b/test/files/run/bug3616.scala
deleted file mode 100644
index 777b97f9ab..0000000000
--- a/test/files/run/bug3616.scala
+++ /dev/null
@@ -1,12 +0,0 @@
-object X extends Enumeration {
- val Y = Value
-}
-object Fruit extends Enumeration {
- val x = X.Y
- val A,B,C = Value
-}
-object Test {
- def main(args: Array[String]): Unit = {
- println(Fruit.values)
- }
-}
diff --git a/test/files/run/exceptions-nest.check b/test/files/run/exceptions-nest.check
deleted file mode 100644
index ae66da0a99..0000000000
--- a/test/files/run/exceptions-nest.check
+++ /dev/null
@@ -1,12 +0,0 @@
-2
-23
-2
-5
-2
-4
-OK
-4
-OK
-10
-1
-()
diff --git a/test/files/run/exceptions-nest.scala b/test/files/run/exceptions-nest.scala
deleted file mode 100644
index 40b00988e4..0000000000
--- a/test/files/run/exceptions-nest.scala
+++ /dev/null
@@ -1,139 +0,0 @@
-object Test extends Application {
-
- println(test1)
- println(test2)
- println(test3)
- println(test4)
- println(test5)
- try { println(test6) } catch { case _ => println("OK") }
- println(test7)
- try { println(test8) } catch { case _ => println("OK") }
- println(test9)
- println(test10)
- println(test11)
-
- def test1 = {
- var x = 1
- try {
- x = 2
- } catch {
- case _: NullPointerException => x = 3
- case _ => x = 4
- }
- x
- }
-
- def test2 = {
- var x = 1
- try {
- x = 2
- try {
- x = 21
- } catch {
- case _ => x = 22
- }
- x = 23
- } catch {
- case _: NullPointerException => x = 3
- case _ => x = 4
- }
- x
- }
-
- def test3 = {
- var x = 1
- try {
- try{x = 2} catch { case _ => x = 4 }
- } catch {
- case _: NullPointerException => x = 3
- case _ => x = 4
- }
- x
- }
-
- def test4 = {
- var x = 1
- try {
- x = 2
- } catch {
- case _: NullPointerException => x = 3
- case _ => x = 4
- }
- try {
- x = 5
- } catch {
- case _: NullPointerException => x = 6
- }
- x
- }
-
- def test5 = {
- var x = 1
- try {
- x = 2
- } catch {
- case _: NullPointerException => try { x = 3 } catch { case f => throw f }
- case _ => x = 4; try { x = 41 } catch { case _: Exception => x = 42 }; x = 43
- }
- x
- }
-
- def test6: Int = {
- var x = 1
- try {
- x = 2
- (null: String).toString
- } catch {
- case e: NullPointerException =>
- throw e
- case _ =>
- x = 3
- return 1000
- } finally {
- x = 4
- println(x)
- }
- x
- }
-
- def test7 = {
- var x = 1
- try {
- x = 2
- } finally {
- try {
- x = 4
- } catch {
- case _ => x = 5
- }
- }
- x
- }
-
- def test8 = {
- var x = 1
- try {
- throw new NullPointerException
- } catch {
- case e => throw e
- }
- x
- }
-
- def test9 = {
- try { "" match {
- case s: String => 10
- }} catch { case _ => 20 }
- }
-
- var x10 = 1
- def test10: Int = {
- try { 1 }
- catch { case e if (x10 == 1) => 1 }
- }
-
- def test11 {
- try { () }
- catch { case e => () }
- }
-}
diff --git a/test/files/run/names-defaults.scala b/test/files/run/names-defaults.scala
index 8ddfcd950d..f197d2ff11 100644
--- a/test/files/run/names-defaults.scala
+++ b/test/files/run/names-defaults.scala
@@ -345,11 +345,6 @@ object Test extends Application {
(new t3338.Test).a
- // subclassing and defaults in both class constructors
- class CBLAH(val x: Int = 1)
- class DBLAH(val y: String = "2") extends CBLAH()
- (new DBLAH())
-
// DEFINITIONS
def test1(a: Int, b: String) = println(a +": "+ b)
diff --git a/test/files/run/slice-strings.scala b/test/files/run/slice-strings.scala
deleted file mode 100644
index 129314387a..0000000000
--- a/test/files/run/slice-strings.scala
+++ /dev/null
@@ -1,19 +0,0 @@
-object Test {
- def cmp(x1: String) = {
- val x2 = x1.toList
-
- -10 to 10 foreach { i =>
- assert(x1.take(i) == x2.take(i).mkString)
- assert(x1.drop(i) == x2.drop(i).mkString)
- assert(x1.takeRight(i) == x2.takeRight(i).mkString)
- assert(x1.dropRight(i) == x2.dropRight(i).mkString)
- }
- for (idx1 <- -3 to 3 ; idx2 <- -3 to 3) {
- assert(x1.slice(idx1, idx2) == x2.slice(idx1, idx2).mkString)
- }
- }
-
- def main(args: Array[String]): Unit = {
- cmp("abcde")
- }
-}
diff --git a/test/files/run/t0432.scala b/test/files/run/t0432.scala
deleted file mode 100644
index 8ba9015d81..0000000000
--- a/test/files/run/t0432.scala
+++ /dev/null
@@ -1,15 +0,0 @@
-object Test {
- type valueType = { def value: this.type }
-
- class StringValue(x: String) {
- def value: this.type = this
- }
-
- def m(x: valueType) = x.value
-
- val s = new StringValue("hei")
-
- def main(args: Array[String]) {
- m(s)
- }
-}
diff --git a/test/files/run/t3493.scala b/test/files/run/t3493.scala
deleted file mode 100644
index aafe7a3a4a..0000000000
--- a/test/files/run/t3493.scala
+++ /dev/null
@@ -1,15 +0,0 @@
-
-
-
-
-object Test {
-
- def main(args: Array[String]) {
- import scala.collection.immutable._
- val x = TreeSet("a", "b", "c", "d")
- val x2 = x + "e"
- assert(x2.toString == "TreeSet(a, b, c, d, e)")
- assert(x2.toString == runtime.ScalaRunTime.stringOf(x2).trim)
- }
-
-}
diff --git a/test/files/run/t3496.scala b/test/files/run/t3496.scala
deleted file mode 100644
index e1aa032ab1..0000000000
--- a/test/files/run/t3496.scala
+++ /dev/null
@@ -1,15 +0,0 @@
-
-
-
-
-// ticket #3496
-object Test {
-
- def main(args: Array[String]) {
- val s = Stream.from(1)
- s.take(5)
- s.drop(5)
- s.splitAt(5)
- }
-
-}
diff --git a/test/files/run/t3502.scala b/test/files/run/t3502.scala
deleted file mode 100644
index cc78e54c86..0000000000
--- a/test/files/run/t3502.scala
+++ /dev/null
@@ -1,24 +0,0 @@
-
-
-
-
-
-// ticket #3502
-object Test {
-
- object GeneratePrimeFactorsLazy extends (Int => List[Int]) {
- override def apply(n:Int) = {
- val s = Stream.range(2, n / 2).filter(n % _ == 0)
- //val s = for (i <- Stream.range(2, n / 2); if n % i == 0) yield i
- s.headOption.map(x => x :: apply(n / x)).getOrElse(List(n))
- }
- }
-
- def main(args:Array[String]) {
- // a prime number
- //val num = 623456789
- val num = 2796203
- assert(GeneratePrimeFactorsLazy(num) == List(num))
- }
-
-}
diff --git a/test/files/run/t3508.scala b/test/files/run/t3508.scala
deleted file mode 100644
index 01d976ba0d..0000000000
--- a/test/files/run/t3508.scala
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-import collection.immutable._
-
-
-// ticket #3508
-object Test {
- def main(args: Array[String]) {
- assert(Stream.tabulate(123)(_ + 1).toList == List.tabulate(123)(_ + 1))
- }
-}
diff --git a/test/files/run/t3511.scala b/test/files/run/t3511.scala
deleted file mode 100644
index 30757b1a66..0000000000
--- a/test/files/run/t3511.scala
+++ /dev/null
@@ -1,36 +0,0 @@
-
-
-
-import scala.collection.immutable._
-
-
-// ticket #3511
-object Test {
-
- def main(args: Array[String]) {
- assert(Stream.from(0).view.force.take(5) == List(0, 1, 2, 3, 4))
-
- val s = Stream.from(0)
- val smap = s.view.map(_ * 2).force.take(5)
- assert(smap == List(0, 2, 4, 6, 8))
-
- val sfilter = s.view.filter(_ % 2 == 0).force.take(5)
- assert(sfilter == List(0, 2, 4, 6, 8))
-
- val sflatmap = s.view.flatMap(n => List(n, n * 2)).force.take(6)
- assert(sflatmap == List(0, 0, 1, 2, 2, 4))
-
- val stakewhile = s.view.takeWhile(_ < 10).force
- assert(stakewhile == List.range(0, 10))
-
- val szip = s.view.zip(s.map(_ / 2)).force.take(5)
- assert(szip == List((0, 0), (1, 0), (2, 1), (3, 1), (4, 2)))
-
- val szipall = s.view.zipAll(List(0, 1, 2), 0, 0).force.take(5)
- assert(szipall == List((0, 0), (1, 1), (2, 2), (3, 0), (4, 0)))
-
- val spatch = s.view.patch(1, List(5, 5, 5), 5).force.take(5)
- assert(spatch == List(0, 5, 5, 5, 6))
- }
-
-}
diff --git a/test/files/run/t3580.scala b/test/files/run/t3580.scala
deleted file mode 100644
index 50ff6c4551..0000000000
--- a/test/files/run/t3580.scala
+++ /dev/null
@@ -1,17 +0,0 @@
-
-
-
-
-
-object Test {
-
- class Empty extends Traversable[Nothing] {
- def foreach[U](f: Nothing => U) {}
- }
-
- def main(args: Array[String]) {
- val t = new Empty
- t.toStream
- }
-
-}
diff --git a/test/files/run/t3603.scala b/test/files/run/t3603.scala
deleted file mode 100644
index a89cb7080a..0000000000
--- a/test/files/run/t3603.scala
+++ /dev/null
@@ -1,18 +0,0 @@
-
-
-
-object Test {
-
- def main(args: Array[String]) {
- import collection.immutable._
-
- val intmap = IntMap(1 -> 1, 2 -> 2)
- val intres = intmap.map { case (a, b) => (a, b.toString) }
- assert(intres.isInstanceOf[IntMap[_]])
-
- val longmap = LongMap(1L -> 1, 2L -> 2)
- val longres = longmap.map { case (a, b) => (a, b.toString) }
- assert(longres.isInstanceOf[LongMap[_]])
- }
-
-}
diff --git a/test/files/run/t3645.scala b/test/files/run/t3645.scala
deleted file mode 100644
index af2543377b..0000000000
--- a/test/files/run/t3645.scala
+++ /dev/null
@@ -1,6 +0,0 @@
-object Test {
- def main(args: Array[String]) {
- val s = Stream.tabulate(5)(x => x+2)
- assert( s.toList == List(2,3,4,5,6) )
- }
-}
diff --git a/test/files/run/weakconform.scala b/test/files/run/weakconform.scala
deleted file mode 100755
index 1ea81c9f64..0000000000
--- a/test/files/run/weakconform.scala
+++ /dev/null
@@ -1,4 +0,0 @@
-object Test extends Application {
- val x: Float = 10/3
- assert(x == 3.0)
-}
diff --git a/test/files/run/xml-loop-bug.scala b/test/files/run/xml-loop-bug.scala
deleted file mode 100644
index 3894779f7c..0000000000
--- a/test/files/run/xml-loop-bug.scala
+++ /dev/null
@@ -1,6 +0,0 @@
-object Test {
- def main(args: Array[String]): Unit = {
- scala.tools.nsc.io.NullPrintStream.setOutAndErr()
- scala.xml.parsing.ConstructingParser.fromSource(scala.io.Source.fromString("<!DOCTYPE xmeml SYSTEM> <xmeml> <sequence> </sequence> </xmeml> "), true).document.docElem
- }
-}
diff --git a/test/pending/continuations-neg/t3628.check b/test/pending/continuations-neg/t3628.check
deleted file mode 100644
index 4df94cdfcc..0000000000
--- a/test/pending/continuations-neg/t3628.check
+++ /dev/null
@@ -1,3 +0,0 @@
-ending/continuations-neg/t3628.scala:4: error: not found: type Actor
- val impl: Actor = actor {
- ^
diff --git a/test/pending/continuations-neg/t3628.scala b/test/pending/continuations-neg/t3628.scala
deleted file mode 100644
index c56e7752c4..0000000000
--- a/test/pending/continuations-neg/t3628.scala
+++ /dev/null
@@ -1,11 +0,0 @@
-import scala.actors.Actor._
-
-object Test {
- val impl: Actor = actor {
- loop {
- react {
- case 1 => impl ! 2
- }
- }
- }
-}
diff --git a/test/pending/continuations-pos/t3620.scala b/test/pending/continuations-pos/t3620.scala
deleted file mode 100644
index 8496ae2858..0000000000
--- a/test/pending/continuations-pos/t3620.scala
+++ /dev/null
@@ -1,73 +0,0 @@
-import scala.collection.mutable.HashMap
-import scala.util.continuations._
-
-object Test extends Application {
-
- class Store[K,V] {
-
- trait Waiting {
- def key: K
- def inform(value: V): Unit
- }
-
- private val map = new HashMap[K, V]
- private var waiting: List[Waiting] = Nil
-
- def waitFor(k: K, f: (V => Unit)) {
- map.get(k) match {
- case Some(v) => f(v)
- case None => {
- val w = new Waiting {
- def key = k
- def inform(v: V) = f(v)
- }
- waiting = w :: waiting
- }
- }
- }
-
-
- def add(key: K, value: V) {
- map(key) = value
- val p = waiting.partition(_.key == key)
- waiting = p._2
- p._1.foreach(_.inform(value))
- }
-
- def required(key: K) = {
- shift {
- c: (V => Unit) => {
- waitFor(key, c)
- }
- }
- }
-
- def option(key: Option[K]) = {
- shift {
- c: (Option[V] => Unit) => {
- key match {
- case Some(key) => waitFor(key, (v: V) => c(Some(v)))
- case None => c(None)
- }
-
- }
- }
- }
-
- }
-
- val store = new Store[String, Int]
-
- def test(p: Option[String]): Unit = {
- reset {
- // uncommenting the following two lines makes the compiler happy!
-// val o = store.option(p)
-// println(o)
- val i = store.option(p).getOrElse(1)
- println(i)
- }
- }
-
- test(Some("a"))
-
-}
diff --git a/test/pending/jvm/serialization.check b/test/pending/jvm/serialization.check
new file mode 100644
index 0000000000..397578bcba
--- /dev/null
+++ b/test/pending/jvm/serialization.check
@@ -0,0 +1,198 @@
+x0 = List(1, 2, 3)
+y0 = List(1, 2, 3)
+x0 eq y0: false, y0 eq x0: false
+x0 equals y0: true, y0 equals x0: true
+
+x1 = List()
+y1 = List()
+x1 eq y1: true, y1 eq x1: true
+
+x2 = None
+y2 = None
+x2 eq y2: true, y2 eq x2: true
+
+x3 = Array[1,2,3]
+y3 = Array[1,2,3]
+arrayEquals(x3, y3): true
+
+x4 = <na>
+y4 = <na>
+x4(2): 4 - y4(2): 4
+
+x5 = 'hello
+y5 = 'hello
+x5 eq y5: true, y5 eq x5: true
+x5 equals y5: true, y5 equals x5: true
+
+x6 = (BannerLimit,12345)
+y6 = (BannerLimit,12345)
+x6 eq y6: false, y6 eq x6: false
+x6 equals y6: true, y6 equals x6: true
+
+x7 = RoundingMode
+y7 = RoundingMode
+x7 eq y7: true, y7 eq x7: true
+x7 equals y7: true, y7 equals x7: true
+
+x8 = WeekDay
+y8 = WeekDay
+x8 eq y8: true, y8 eq x8: true
+x8 equals y8: true, y8 equals x8: true
+
+x9 = UP
+y9 = UP
+x9 eq y9: true, y9 eq x9: true
+x9 equals y9: true, y9 equals x9: true
+
+x10 = WeekDay(0)
+y10 = WeekDay(0)
+x10 eq y10: true, y10 eq x10: true
+x10 equals y10: true, y10 equals x10: true
+
+x9 eq x10: false, x10 eq x9: false
+x9 equals x10: false, x10 equals x9: false
+x9 eq y10: false, y10 eq x9: false
+x9 equals y10: false, y10 equals x9: false
+
+x = List((buffers,20), (layers,2), (title,3))
+y = List((buffers,20), (layers,2), (title,3))
+x equals y: true, y equals x: true
+
+x = Map(2 -> C, 1 -> B, 0 -> A)
+y = Map(2 -> C, 1 -> B, 0 -> A)
+x equals y: true, y equals x: true
+
+x = Map(buffers -> 20, layers -> 2, title -> 3)
+y = Map(buffers -> 20, layers -> 2, title -> 3)
+x equals y: true, y equals x: true
+
+x = Set(2, 3)
+y = Set(2, 3)
+x equals y: true, y equals x: true
+
+x = Set(5, 3)
+y = Set(5, 3)
+x equals y: true, y equals x: true
+
+x = Queue(a, b, c)
+y = Queue(a, b, c)
+x equals y: true, y equals x: true
+
+x = Stack(a, b, c)
+y = Stack(a, b, c)
+x equals y: true, y equals x: true
+
+x = Map(42 -> FortyTwo)
+y = Map(42 -> FortyTwo)
+x equals y: true, y equals x: true
+
+x = TreeSet(0, 2)
+y = TreeSet(0, 2)
+x equals y: true, y equals x: true
+
+x = Vector(1, 2, 3)
+y = Vector(1, 2, 3)
+x equals y: true, y equals x: true
+
+x = ArrayBuffer(one, two)
+y = ArrayBuffer(one, two)
+x equals y: true, y equals x: true
+
+x = Map(title -> 3, buffers -> 20, layers -> 2)
+y = Map(title -> 3, buffers -> 20, layers -> 2)
+x equals y: true, y equals x: true
+
+x = Set(0, 8, 9)
+y = Set(0, 8, 9)
+x equals y: true, y equals x: true
+
+x = Set(layers, buffers, title)
+y = Set(layers, buffers, title)
+x equals y: true, y equals x: true
+
+x = LinkedList(2, 3)
+y = LinkedList(2, 3)
+x equals y: true, y equals x: true
+
+x = Queue(20, 2, 3)
+y = Queue(20, 2, 3)
+x equals y: true, y equals x: true
+
+x = Stack(3, 2, 20)
+y = Stack(3, 2, 20)
+x equals y: true, y equals x: true
+
+x = ListBuffer(white, black)
+y = ListBuffer(white, black)
+x equals y: true, y equals x: true
+
+x = History((Feed,hello))
+y = History((Feed,hello))
+x equals y: true, y equals x: true
+
+x = <html><title>title</title><body></body></html>
+y = <html><title>title</title><body></body></html>
+x equals y: true, y equals x: true
+
+x = <html>
+ <body>
+ <table cellpadding="2" cellspacing="0">
+ <tr>
+ <th>Last Name</th>
+ <th>First Name</th>
+ </tr>
+ <tr>
+ <td> Tom </td>
+ <td> 20 </td>
+ </tr><tr>
+ <td> Bob </td>
+ <td> 22 </td>
+ </tr><tr>
+ <td> James </td>
+ <td> 19 </td>
+ </tr>
+ </table>
+ </body>
+ </html>
+y = <html>
+ <body>
+ <table cellpadding="2" cellspacing="0">
+ <tr>
+ <th>Last Name</th>
+ <th>First Name</th>
+ </tr>
+ <tr>
+ <td> Tom </td>
+ <td> 20 </td>
+ </tr><tr>
+ <td> Bob </td>
+ <td> 22 </td>
+ </tr><tr>
+ <td> James </td>
+ <td> 19 </td>
+ </tr>
+ </table>
+ </body>
+ </html>
+x equals y: true, y equals x: true
+
+x = Tim
+y = Tim
+x equals y: true, y equals x: true
+
+x = Bob
+y = Bob
+x equals y: true, y equals x: true
+
+x = John
+y = John
+x equals y: true, y equals x: true
+
+x = Bill
+y = Bill
+x equals y: true, y equals x: true
+
+x = Paul
+y = Paul
+x equals y: true, y equals x: true
+
diff --git a/test/pending/jvm/serialization.scala b/test/pending/jvm/serialization.scala
new file mode 100644
index 0000000000..ff9413ae4b
--- /dev/null
+++ b/test/pending/jvm/serialization.scala
@@ -0,0 +1,411 @@
+//############################################################################
+// Serialization
+//############################################################################
+
+import java.lang.System
+
+object Serialize {
+ @throws(classOf[java.io.IOException])
+ def write[A](o: A): Array[Byte] = {
+ val ba = new java.io.ByteArrayOutputStream(512)
+ val out = new java.io.ObjectOutputStream(ba)
+ out.writeObject(o)
+ out.close()
+ ba.toByteArray()
+ }
+ @throws(classOf[java.io.IOException])
+ @throws(classOf[ClassNotFoundException])
+ def read[A](buffer: Array[Byte]): A = {
+ val in =
+ new java.io.ObjectInputStream(new java.io.ByteArrayInputStream(buffer))
+ in.readObject().asInstanceOf[A]
+ }
+ def check[A, B](x: A, y: B) {
+ println("x = " + x)
+ println("y = " + y)
+ println("x equals y: " + (x equals y) + ", y equals x: " + (y equals x))
+ println()
+ }
+}
+import Serialize._
+
+//############################################################################
+// Test classes in package "scala"
+
+object Test1_scala {
+
+ private def arrayToString[A](arr: Array[A]): String =
+ arr.mkString("Array[",",","]")
+
+ private def arrayEquals[A, B](a1: Array[A], a2: Array[B]): Boolean =
+ (a1.length == a2.length) &&
+ (Iterator.range(0, a1.length) forall { i => a1(i) == a2(i) })
+
+ @serializable
+ object WeekDay extends Enumeration {
+ type WeekDay = Value
+ val Monday, Tuesday, Wednesday, Thusday, Friday, Saturday, Sunday = Value
+ }
+ import WeekDay._, BigDecimal._, RoundingMode._
+
+ val x0 = List(1, 2, 3)
+ val x1 = Nil
+ val x2 = None
+ val x3 = Array(1, 2, 3)
+ val x4 = { x: Int => 2 * x }
+ val x5 = 'hello
+ val x6 = ("BannerLimit", 12345)
+ val x7 = BigDecimal.RoundingMode
+ val x8 = WeekDay
+ val x9 = UP // named element
+ val x10 = Monday // unamed element
+
+ try {
+ val y0: List[Int] = read(write(x0))
+ val y1: List[Nothing] = read(write(x1))
+ val y2: Option[Nothing] = read(write(x2))
+ val y3: Array[Int] = read(write(x3))
+ val y4: Function[Int, Int] = read(write(x4))
+ val y5: Symbol = read(write(x5))
+ val y6: (String, Int) = read(write(x6))
+ val y7: RoundingMode.type = read(write(x7))
+ val y8: WeekDay.type = read(write(x8))
+ val y9: RoundingMode = read(write(x9))
+ val y10: WeekDay = read(write(x10))
+
+ println("x0 = " + x0)
+ println("y0 = " + y0)
+ println("x0 eq y0: " + (x0 eq y0) + ", y0 eq x0: " + (y0 eq x0))
+ println("x0 equals y0: " + (x0 equals y0) + ", y0 equals x0: " + (y0 equals x0))
+ println()
+ println("x1 = " + x1)
+ println("y1 = " + y1)
+ println("x1 eq y1: " + (x1 eq y1) + ", y1 eq x1: " + (y1 eq x1))
+ println()
+ println("x2 = " + x2)
+ println("y2 = " + y2)
+ println("x2 eq y2: " + (x2 eq y2) + ", y2 eq x2: " + (y2 eq x2))
+ println()
+ println("x3 = " + arrayToString(x3))
+ println("y3 = " + arrayToString(y3))
+ println("arrayEquals(x3, y3): " + arrayEquals(x3, y3))
+ println()
+ println("x4 = <na>")
+ println("y4 = <na>")
+ println("x4(2): " + x4(2) + " - y4(2): " + y4(2))
+ println()
+ println("x5 = " + x5)
+ println("y5 = " + y5)
+ println("x5 eq y5: " + (x5 eq y5) + ", y5 eq x5: " + (y5 eq x5))
+ println("x5 equals y5: " + (x5 equals y5) + ", y5 equals x5: " + (y5 equals x5))
+ println()
+ println("x6 = " + x6)
+ println("y6 = " + y6)
+ println("x6 eq y6: " + (x6 eq y6) + ", y6 eq x6: " + (y6 eq x6))
+ println("x6 equals y6: " + (x6 equals y6) + ", y6 equals x6: " + (y6 equals x6))
+ println()
+ println("x7 = " + x7)
+ println("y7 = " + y7)
+ println("x7 eq y7: " + (x7 eq y7) + ", y7 eq x7: " + (y7 eq x7))
+ println("x7 equals y7: " + (x7 equals y7) + ", y7 equals x7: " + (y7 equals x7))
+ println()
+ println("x8 = " + x8)
+ println("y8 = " + y8)
+ println("x8 eq y8: " + (x8 eq y8) + ", y8 eq x8: " + (y8 eq x8))
+ println("x8 equals y8: " + (x8 equals y8) + ", y8 equals x8: " + (y8 equals x8))
+ println()
+ println("x9 = " + x9)
+ println("y9 = " + y9)
+ println("x9 eq y9: " + (x9 eq y9) + ", y9 eq x9: " + (y9 eq x9))
+ println("x9 equals y9: " + (x9 equals y9) + ", y9 equals x9: " + (y9 equals x9))
+ println()
+ println("x10 = " + x10)
+ println("y10 = " + y10)
+ println("x10 eq y10: " + (x10 eq y10) + ", y10 eq x10: " + (y10 eq x10))
+ println("x10 equals y10: " + (x10 equals y10) + ", y10 equals x10: " + (y10 equals x10))
+ println()
+ println("x9 eq x10: " + (x9 eq x10) + ", x10 eq x9: " + (x10 eq x9))
+ println("x9 equals x10: " + (x9 equals x10) + ", x10 equals x9: " + (x10 equals x9))
+ println("x9 eq y10: " + (x9 eq y10) + ", y10 eq x9: " + (y10 eq x9))
+ println("x9 equals y10: " + (x9 equals y10) + ", y10 equals x9: " + (y10 equals x9))
+ println()
+ }
+ catch {
+ case e: Exception =>
+ e.printStackTrace()
+ println("Error in Test1_scala: " + e)
+ }
+}
+
+//############################################################################
+// Test classes in package "scala.collection.immutable"
+
+@serializable
+object Test2_immutable {
+ import scala.collection.immutable.{
+ BitSet, HashMap, ListMap, ListSet, Queue, Stack, TreeSet, TreeMap, Vector}
+
+ val x1 = List(
+ ("buffers", 20),
+ ("layers", 2),
+ ("title", 3)
+ )
+
+ val m1 = new HashMap[Int, String] + (0 -> "A", 1 -> "B", 2 -> "C")
+
+ val x2 = new ListMap[String, Int] + ("buffers" -> 20, "layers" -> 2, "title" -> 3)
+
+ val x3 = {
+ val bs = new collection.mutable.BitSet()
+ bs += 2; bs += 3
+ bs.toImmutable
+ }
+
+ val x4 = new ListSet[Int]() + 3 + 5
+
+ val x5 = Queue("a", "b", "c")
+
+ val x6 = Stack("a", "b", "c")
+
+ val x7 = new TreeMap[Int, String] + (42 -> "FortyTwo")
+
+ val x8 = new TreeSet[Int]() + 2 + 0
+
+ val x9 = Vector(1, 2, 3)
+
+ try {
+ val y1: List[Pair[String, Int]] = read(write(x1))
+ val n1: HashMap[Int, String] = read(write(m1))
+ val y2: ListMap[String, Int] = read(write(x2))
+ val y3: BitSet = read(write(x3))
+ val y4: ListSet[Int] = read(write(x4))
+ val y5: Queue[String] = read(write(x5))
+ val y6: Stack[String] = read(write(x6))
+ val y7: TreeMap[Int, String] = read(write(x7))
+ val y8: TreeSet[Int] = read(write(x8))
+ val y9: Vector[Int] = read(write(x9))
+
+ check(x1, y1)
+ check(m1, n1)
+ check(x2, y2)
+ check(x3, y3)
+ check(x4, y4)
+ check(x5, y5)
+ check(x6, y6)
+ check(x7, y7)
+ check(x8, y8)
+ check(x9, y9)
+ }
+ catch {
+ case e: Exception =>
+ println("Error in Test2_immutable: " + e)
+ throw e
+ }
+}
+
+//############################################################################
+// Test classes in package "scala.collection.mutable"
+
+object Test3_mutable {
+ import scala.collection.mutable.{
+ ArrayBuffer, BitSet, HashMap, HashSet, History, LinkedList, ListBuffer,
+ Publisher, Queue, RevertableHistory, Stack}
+
+ val x0 = new ArrayBuffer[String]
+ x0 ++= List("one", "two")
+
+ val x2 = new BitSet()
+ x2 += 0
+ x2 += 8
+ x2 += 9
+
+ val x1 = new HashMap[String, Int]
+ x1 ++= Test2_immutable.x1
+
+ val x3 = new HashSet[String]
+ x3 ++= Test2_immutable.x1.map(p => p._1)
+
+ val x4 = new LinkedList[Int](2, null)
+ x4.append(new LinkedList(3, null))
+
+ val x5 = new Queue[Int]
+ x5 ++= Test2_immutable.x1.map(p => p._2)
+
+ val x6 = new Stack[Int]
+ x6 pushAll x5
+
+ val x7 = new ListBuffer[String]
+ x7 ++= List("white", "black")
+
+ @serializable
+ class Feed extends Publisher[String, Feed] {
+ override def toString() = "Feed"
+ }
+ val feed = new Feed
+
+ val x8 = new History[String, Feed]
+ x8.notify(feed, "hello")
+
+ try {
+ val y0: ArrayBuffer[String] = read(write(x0))
+ val y1: HashMap[String, Int] = read(write(x1))
+ val y2: BitSet = read(write(x2))
+ val y3: HashSet[String] = read(write(x3))
+ val y4: LinkedList[Int] = read(write(x4))
+ val y5: Queue[Int] = read(write(x5))
+ val y6: Stack[Int] = read(write(x6))
+ val y7: ListBuffer[String] = read(write(x7))
+ val y8: History[String, Feed] = read(write(x8))
+
+ check(x0, y0)
+ check(x1, y1)
+ check(x2, y2)
+ check(x3, y3)
+ check(x4, y4)
+ check(x5, y5)
+ check(x6, y6)
+ check(x7, y7)
+ check(x8, y8)
+ }
+ catch {
+ case e: Exception =>
+ println("Error in Test3_mutable: " + e)
+ }
+}
+
+//############################################################################
+// Test classes in package "scala.xml"
+
+object Test4_xml {
+ import scala.xml.Elem
+
+ val x1 = <html><title>title</title><body></body></html>;
+
+ case class Person(name: String, age: Int)
+
+ class AddressBook(a: Person*) {
+ private val people: List[Person] = a.toList
+ def toXHTML =
+ <table cellpadding="2" cellspacing="0">
+ <tr>
+ <th>Last Name</th>
+ <th>First Name</th>
+ </tr>
+ { for (p <- people) yield
+ <tr>
+ <td> { p.name } </td>
+ <td> { p.age.toString() } </td>
+ </tr> }
+ </table>;
+ }
+
+ val people = new AddressBook(
+ Person("Tom", 20),
+ Person("Bob", 22),
+ Person("James", 19))
+
+ val x2 =
+ <html>
+ <body>
+ { people.toXHTML }
+ </body>
+ </html>;
+
+ try {
+ val y1: scala.xml.Elem = read(write(x1))
+ val y2: scala.xml.Elem = read(write(x2))
+
+ check(x1, y1)
+ check(x2, y2)
+ }
+ catch {
+ case e: Exception =>
+ println("Error in Test4_xml: " + e)
+ }
+}
+
+//############################################################################
+// Test user-defined classes WITHOUT nesting
+
+@serializable
+class Person(_name: String) {
+ private var name = _name
+ override def toString() = name
+ override def equals(that: Any): Boolean =
+ that.isInstanceOf[Person] &&
+ (name == that.asInstanceOf[Person].name)
+}
+
+@serializable
+class Employee(_name: String) {
+ private var name = _name
+ override def toString() = name
+}
+@serializable
+object bob extends Employee("Bob")
+
+object Test5 {
+ val x1 = new Person("Tim")
+ val x2 = bob
+
+ try {
+ val y1: Person = read(write(x1))
+ val y2: Employee = read(write(x2))
+
+ check(x1, y1)
+ check(x2, y2)
+ }
+ catch {
+ case e: Exception =>
+ println("Error in Test5: " + e)
+ }
+}
+
+//############################################################################
+// Test user-defined classes WITH nesting
+
+@serializable
+object Test6 {
+ @serializable
+ object bill extends Employee("Bill") {
+ val x = paul
+ }
+ @serializable
+ object paul extends Person("Paul") {
+ val x = 4 // bill; => StackOverflowException !!!
+ }
+ val x1 = new Person("John")
+ val x2 = bill
+ val x3 = paul
+
+ try {
+ val y1: Person = read(write(x1))
+ val y2: Employee = read(write(x2))
+ val y3: Person = read(write(x3))
+
+ check(x1, y1)
+ check(x2, y2)
+ check(x3, y3)
+ }
+ catch {
+ case e: Exception =>
+ println("Error in Test6: " + e)
+ }
+}
+
+//############################################################################
+// Test code
+
+object Test {
+ def main(args: Array[String]) {
+ Test1_scala
+ Test2_immutable
+ Test3_mutable
+ Test4_xml
+ Test5
+ Test6
+ }
+}
+
+//############################################################################
+
diff --git a/test/pending/pos/t3636.scala b/test/pending/pos/t3636.scala
deleted file mode 100644
index 24d18c653d..0000000000
--- a/test/pending/pos/t3636.scala
+++ /dev/null
@@ -1,49 +0,0 @@
-class CTxnLocal[ T ] {
- def set( x: T )( implicit t: Txn ) {}
- def get( implicit t: Txn ) : T = null.asInstanceOf[ T ]
- def initialValue( t: Txn ) : T = null.asInstanceOf[ T ]
-}
-
-trait Txn
-
-trait ProcTxn {
- def ccstm: Txn
-}
-
-trait TxnLocal[ @specialized T ] {
- def apply()( implicit tx: ProcTxn ) : T
- def set( v: T )( implicit tx: ProcTxn ) : Unit
- def swap( v: T )( implicit tx: ProcTxn ) : T
- def transform( f: T => T )( implicit tx: ProcTxn ) : Unit
-}
-
-object TxnLocal {
- def apply[ @specialized T ] : TxnLocal[ T ] = new Impl( new CTxnLocal[ T ])
- def apply[ @specialized T ]( initValue: => T ) : TxnLocal[ T ] = new Impl( new CTxnLocal[ T ] {
- override def initialValue( tx: Txn ): T = initValue
- })
-
- private class Impl[ T ]( c: CTxnLocal[ T ]) extends TxnLocal[ T ] {
- def apply()( implicit tx: ProcTxn ) : T = c.get( tx.ccstm )
- def set( v: T )( implicit tx: ProcTxn ) : Unit = c.set( v )( tx.ccstm )
- def swap( v: T )( implicit tx: ProcTxn ) : T = {
- // currently not implemented in CTxnLocal
- val oldV = apply
- set( v )
- oldV
- }
- def transform( f: T => T )( implicit tx: ProcTxn ) {
- set( f( apply ))
- }
- }
-}
-
-
-object Transition {
- private val currentRef = TxnLocal[ Transition ]( Instant )
- def current( implicit tx: ProcTxn ) : Transition = currentRef()
-}
-
-sealed abstract class Transition
-case object Instant extends Transition
-